rclone-api 1.4.32__py2.py3-none-any.whl → 1.5.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,572 +1,574 @@
1
- Metadata-Version: 2.2
2
- Name: rclone_api
3
- Version: 1.4.32
4
- Summary: rclone api in python
5
- Home-page: https://github.com/zackees/rclone-api
6
- License: BSD 3-Clause License
7
- Keywords: rclone,api,python,fast,sftp,s3,backblaze
8
- Classifier: Programming Language :: Python :: 3
9
- Requires-Python: >=3.10
10
- Description-Content-Type: text/markdown
11
- License-File: LICENSE
12
- Requires-Dist: pyright>=1.1.393
13
- Requires-Dist: python-dotenv>=1.0.0
14
- Requires-Dist: certifi>=2025.1.31
15
- Requires-Dist: psutil
16
- Requires-Dist: boto3<=1.35.99,>=1.20.1
17
- Requires-Dist: sqlmodel>=0.0.23
18
- Requires-Dist: psycopg2-binary>=2.9.10
19
- Requires-Dist: httpx>=0.28.1
20
- Dynamic: home-page
21
-
22
- # rclone-api
23
-
24
-
25
- ![perpetualmaniac_faster_400fd528-df15-4a04-8ad3-3cca786d7bca (2)](https://github.com/user-attachments/assets/65138e38-b115-447c-849a-4adbd27e4b67)
26
-
27
-
28
- <!--
29
- [![Linting](https://github.com/zackees/rclone-api/actions/workflows/lint.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/lint.yml)
30
- [![MacOS_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml)
31
- [![Ubuntu_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml)
32
- [![Win_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml)
33
- -->
34
-
35
-
36
- Got a lot of data to transfer quickly? This package is for you.
37
-
38
- This library was built out of necessity to transfer large amounts of AI training data. Aggressive defaults mean this API will transfer faster than rclone does with its stock settings.
39
-
40
- You must have [rclone](https://rclone.org/) in your path to use this library. You'd want this anyway, because rclone is still under heavy development.
41
-
42
- # Install
43
-
44
- `pip install rclone-api`
45
-
46
- pypi link: https://pypi.org/project/rclone-api/
47
-
48
- # Quick
49
-
50
- In addition to providing easy Python access to rclone, this package offers several extra features:
51
-
52
- * Aggressive default settings for copying / syncing operations for extreme performance.
53
- * Database Support: Dump repo information to a sqlite/postgres/mysql database.
54
- * One repo path -> table.
55
- * Scoped objects for:
56
- * Mounts.
57
- * File servers.
58
- * Enforces correct cleanup
59
- * Mounts are easier - platform-specific setup and teardown.
60
- * Resumable multi-part uploads when s3 is the destination.
61
- * Fast diffing src/dst repos as a stream of `list[str]`.
62
- * Find which files are missing and need to be copied.
63
- * Efficiently build pipelines to select copy strategy based on file size.
64
- * Walk a directory.
65
- * Breadth first.
66
- * Depth first.
67
- * Use the HttpServer to slice out byte ranges from extremely large files.
68
-
69
-
70
- ## Example
71
-
72
- ```python
73
-
74
- from rclone_api import Rclone, DirListing, Config, File
75
-
76
- RCLONE_CONFIG = Config("""
77
- [dst]
78
- type = s3
79
- account = *********
80
- key = ************
81
- """)
82
-
83
-
84
- def test_ls_glob_png(self) -> None:
85
- rclone = Rclone(RCLONE_CONFIG)
86
- path = f"dst:{BUCKET_NAME}/my_data"
87
- listing: DirListing = rclone.ls(path, glob="*.png")
88
- self.assertGreater(len(listing.files), 0)
89
- for file in listing.files:
90
- self.assertIsInstance(file, File)
91
- # test that it ends with .png
92
- self.assertTrue(file.name.endswith(".png"))
93
- # there should be no directories with this glob
94
- self.assertEqual(len(listing.dirs), 0)
95
- ```
96
-
97
- ## API
98
-
99
- ```python
100
-
101
- # from rclone_api import Rclone
102
- # Rclone is the main api entry point.
103
- class Rclone:
104
- def __init__(
105
- self, rclone_conf: Path | Config, rclone_exe: Path | None = None
106
- ) -> None:
107
- from rclone_api.rclone_impl import RcloneImpl
108
-
109
- self.impl: RcloneImpl = RcloneImpl(rclone_conf, rclone_exe)
110
-
111
- def webgui(self, other_args: list[str] | None = None) -> Process:
112
- """Launch the Rclone web GUI."""
113
- return self.impl.webgui(other_args=other_args)
114
-
115
- def launch_server(
116
- self,
117
- addr: str,
118
- user: str | None = None,
119
- password: str | None = None,
120
- other_args: list[str] | None = None,
121
- ) -> Process:
122
- """Launch the Rclone server so it can receive commands"""
123
- return self.impl.launch_server(
124
- addr=addr, user=user, password=password, other_args=other_args
125
- )
126
-
127
- def remote_control(
128
- self,
129
- addr: str,
130
- user: str | None = None,
131
- password: str | None = None,
132
- capture: bool | None = None,
133
- other_args: list[str] | None = None,
134
- ) -> CompletedProcess:
135
- return self.impl.remote_control(
136
- addr=addr,
137
- user=user,
138
- password=password,
139
- capture=capture,
140
- other_args=other_args,
141
- )
142
-
143
- def obscure(self, password: str) -> str:
144
- """Obscure a password for use in rclone config files."""
145
- return self.impl.obscure(password=password)
146
-
147
- def ls_stream(
148
- self,
149
- path: str,
150
- max_depth: int = -1,
151
- fast_list: bool = False,
152
- ) -> FilesStream:
153
- """
154
- List files in the given path
155
-
156
- Args:
157
- path: Remote path to list
158
- max_depth: Maximum recursion depth (-1 for unlimited)
159
- fast_list: Use fast list (only use when getting THE entire data repository from the root/bucket, or it's small)
160
- """
161
- return self.impl.ls_stream(path=path, max_depth=max_depth, fast_list=fast_list)
162
-
163
- def save_to_db(
164
- self,
165
- src: str,
166
- db_url: str,
167
- max_depth: int = -1,
168
- fast_list: bool = False,
169
- ) -> None:
170
- """
171
- Save files to a database (sqlite, mysql, postgres)
172
-
173
- Args:
174
- src: Remote path to list, this will be used to populate an entire table, so always use the root-most path.
175
- db_url: Database URL, like sqlite:///data.db or mysql://user:pass@localhost/db or postgres://user:pass@localhost/db
176
- max_depth: Maximum depth to traverse (-1 for unlimited)
177
- fast_list: Use fast list (only use when getting THE entire data repository from the root/bucket)
178
-
179
- """
180
- return self.impl.save_to_db(
181
- src=src, db_url=db_url, max_depth=max_depth, fast_list=fast_list
182
- )
183
-
184
- def ls(
185
- self,
186
- path: Dir | Remote | str | None = None,
187
- max_depth: int | None = None,
188
- glob: str | None = None,
189
- order: Order = Order.NORMAL,
190
- listing_option: ListingOption = ListingOption.ALL,
191
- ) -> DirListing:
192
- return self.impl.ls(
193
- path=path,
194
- max_depth=max_depth,
195
- glob=glob,
196
- order=order,
197
- listing_option=listing_option,
198
- )
199
-
200
- def listremotes(self) -> list[Remote]:
201
- return self.impl.listremotes()
202
-
203
- def diff(
204
- self,
205
- src: str,
206
- dst: str,
207
- min_size: (
208
- str | None
209
- ) = None, # e. g. "1MB" - see rclone documentation: https://rclone.org/commands/rclone_check/
210
- max_size: (
211
- str | None
212
- ) = None, # e. g. "1GB" - see rclone documentation: https://rclone.org/commands/rclone_check/
213
- diff_option: DiffOption = DiffOption.COMBINED,
214
- fast_list: bool = True,
215
- size_only: bool | None = None,
216
- checkers: int | None = None,
217
- other_args: list[str] | None = None,
218
- ) -> Generator[DiffItem, None, None]:
219
- """Be extra careful with the src and dst values. If you are off by one
220
- parent directory, you will get a huge amount of false diffs."""
221
- return self.impl.diff(
222
- src=src,
223
- dst=dst,
224
- min_size=min_size,
225
- max_size=max_size,
226
- diff_option=diff_option,
227
- fast_list=fast_list,
228
- size_only=size_only,
229
- checkers=checkers,
230
- other_args=other_args,
231
- )
232
-
233
- def walk(
234
- self,
235
- path: Dir | Remote | str,
236
- max_depth: int = -1,
237
- breadth_first: bool = True,
238
- order: Order = Order.NORMAL,
239
- ) -> Generator[DirListing, None, None]:
240
- """Walk through the given path recursively.
241
-
242
- Args:
243
- path: Remote path or Remote object to walk through
244
- max_depth: Maximum depth to traverse (-1 for unlimited)
245
-
246
- Yields:
247
- DirListing: Directory listing for each directory encountered
248
- """
249
- return self.impl.walk(
250
- path=path, max_depth=max_depth, breadth_first=breadth_first, order=order
251
- )
252
-
253
- def scan_missing_folders(
254
- self,
255
- src: Dir | Remote | str,
256
- dst: Dir | Remote | str,
257
- max_depth: int = -1,
258
- order: Order = Order.NORMAL,
259
- ) -> Generator[Dir, None, None]:
260
- """Walk through the given path recursively.
261
-
262
- WORK IN PROGRESS!!
263
-
264
- Args:
265
- src: Source directory or Remote to walk through
266
- dst: Destination directory or Remote to walk through
267
- max_depth: Maximum depth to traverse (-1 for unlimited)
268
-
269
- Yields:
270
- DirListing: Directory listing for each directory encountered
271
- """
272
- return self.impl.scan_missing_folders(
273
- src=src, dst=dst, max_depth=max_depth, order=order
274
- )
275
-
276
- def cleanup(
277
- self, path: str, other_args: list[str] | None = None
278
- ) -> CompletedProcess:
279
- """Cleanup any resources used by the Rclone instance."""
280
- return self.impl.cleanup(path=path, other_args=other_args)
281
-
282
- def copy_to(
283
- self,
284
- src: File | str,
285
- dst: File | str,
286
- check: bool | None = None,
287
- verbose: bool | None = None,
288
- other_args: list[str] | None = None,
289
- ) -> CompletedProcess:
290
- """Copy one file from source to destination.
291
-
292
- Warning - slow.
293
-
294
- """
295
- return self.impl.copy_to(
296
- src=src, dst=dst, check=check, verbose=verbose, other_args=other_args
297
- )
298
-
299
- def copy_files(
300
- self,
301
- src: str,
302
- dst: str,
303
- files: list[str] | Path,
304
- check: bool | None = None,
305
- max_backlog: int | None = None,
306
- verbose: bool | None = None,
307
- checkers: int | None = None,
308
- transfers: int | None = None,
309
- low_level_retries: int | None = None,
310
- retries: int | None = None,
311
- retries_sleep: str | None = None,
312
- metadata: bool | None = None,
313
- timeout: str | None = None,
314
- max_partition_workers: int | None = None,
315
- multi_thread_streams: int | None = None,
316
- other_args: list[str] | None = None,
317
- ) -> list[CompletedProcess]:
318
- """Copy multiple files from source to destination.
319
-
320
- Args:
321
- files: The files to copy, given as a list of paths or a Path
322
- """
323
- return self.impl.copy_files(
324
- src=src,
325
- dst=dst,
326
- files=files,
327
- check=check,
328
- max_backlog=max_backlog,
329
- verbose=verbose,
330
- checkers=checkers,
331
- transfers=transfers,
332
- low_level_retries=low_level_retries,
333
- retries=retries,
334
- retries_sleep=retries_sleep,
335
- metadata=metadata,
336
- timeout=timeout,
337
- max_partition_workers=max_partition_workers,
338
- multi_thread_streams=multi_thread_streams,
339
- other_args=other_args,
340
- )
341
-
342
- def copy(
343
- self,
344
- src: Dir | str,
345
- dst: Dir | str,
346
- check: bool | None = None,
347
- transfers: int | None = None,
348
- checkers: int | None = None,
349
- multi_thread_streams: int | None = None,
350
- low_level_retries: int | None = None,
351
- retries: int | None = None,
352
- other_args: list[str] | None = None,
353
- ) -> CompletedProcess:
354
- """Copy files from source to destination.
355
-
356
- Args:
357
- src: Source directory
358
- dst: Destination directory
359
- """
360
- return self.impl.copy(
361
- src=src,
362
- dst=dst,
363
- check=check,
364
- transfers=transfers,
365
- checkers=checkers,
366
- multi_thread_streams=multi_thread_streams,
367
- low_level_retries=low_level_retries,
368
- retries=retries,
369
- other_args=other_args,
370
- )
371
-
372
- def purge(self, path: Dir | str) -> CompletedProcess:
373
- """Purge a directory"""
374
- return self.impl.purge(path=path)
375
-
376
- def delete_files(
377
- self,
378
- files: str | File | list[str] | list[File],
379
- check: bool | None = None,
380
- rmdirs=False,
381
- verbose: bool | None = None,
382
- max_partition_workers: int | None = None,
383
- other_args: list[str] | None = None,
384
- ) -> CompletedProcess:
385
- """Delete a directory"""
386
- return self.impl.delete_files(
387
- files=files,
388
- check=check,
389
- rmdirs=rmdirs,
390
- verbose=verbose,
391
- max_partition_workers=max_partition_workers,
392
- other_args=other_args,
393
- )
394
-
395
- def exists(self, path: Dir | Remote | str | File) -> bool:
396
- """Check if a file or directory exists."""
397
- return self.impl.exists(path=path)
398
-
399
- def is_synced(self, src: str | Dir, dst: str | Dir) -> bool:
400
- """Check if two directories are in sync."""
401
- return self.impl.is_synced(src=src, dst=dst)
402
-
403
- def modtime(self, src: str) -> str | Exception:
404
- """Get the modification time of a file or directory."""
405
- return self.impl.modtime(src=src)
406
-
407
- def modtime_dt(self, src: str) -> datetime | Exception:
408
- """Get the modification time of a file or directory."""
409
- return self.impl.modtime_dt(src=src)
410
-
411
- def write_text(
412
- self,
413
- text: str,
414
- dst: str,
415
- ) -> Exception | None:
416
- """Write text to a file."""
417
- return self.impl.write_text(text=text, dst=dst)
418
-
419
- def write_bytes(
420
- self,
421
- data: bytes,
422
- dst: str,
423
- ) -> Exception | None:
424
- """Write bytes to a file."""
425
- return self.impl.write_bytes(data=data, dst=dst)
426
-
427
- def read_bytes(self, src: str) -> bytes | Exception:
428
- """Read bytes from a file."""
429
- return self.impl.read_bytes(src=src)
430
-
431
- def read_text(self, src: str) -> str | Exception:
432
- """Read text from a file."""
433
- return self.impl.read_text(src=src)
434
-
435
- def copy_bytes(
436
- self,
437
- src: str,
438
- offset: int | SizeSuffix,
439
- length: int | SizeSuffix,
440
- outfile: Path,
441
- other_args: list[str] | None = None,
442
- ) -> Exception | None:
443
- """Copy a slice of bytes from the src file to dst."""
444
- return self.impl.copy_bytes(
445
- src=src,
446
- offset=offset,
447
- length=length,
448
- outfile=outfile,
449
- other_args=other_args,
450
- )
451
-
452
- def copy_dir(
453
- self, src: str | Dir, dst: str | Dir, args: list[str] | None = None
454
- ) -> CompletedProcess:
455
- """Copy a directory from source to destination."""
456
- # convert src to str, also dst
457
- return self.impl.copy_dir(src=src, dst=dst, args=args)
458
-
459
- def copy_remote(
460
- self, src: Remote, dst: Remote, args: list[str] | None = None
461
- ) -> CompletedProcess:
462
- """Copy a remote to another remote."""
463
- return self.impl.copy_remote(src=src, dst=dst, args=args)
464
-
465
- def copy_file_s3_resumable(
466
- self,
467
- src: str, # src:/Bucket/path/myfile.large.zst
468
- dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/part.{part_number:05d}.start-end
469
- part_infos: list[PartInfo] | None = None,
470
- upload_threads: int = 8, # Number of reader and writer threads to use
471
- merge_threads: int = 4, # Number of threads to use for merging the parts
472
- ) -> Exception | None:
473
- """Copy a file in parts."""
474
- return self.impl.copy_file_s3_resumable(
475
- src=src,
476
- dst_dir=dst_dir,
477
- part_infos=part_infos,
478
- upload_threads=upload_threads,
479
- merge_threads=merge_threads,
480
- )
481
-
482
- def mount(
483
- self,
484
- src: Remote | Dir | str,
485
- outdir: Path,
486
- allow_writes: bool | None = False,
487
- use_links: bool | None = None,
488
- vfs_cache_mode: str | None = None,
489
- verbose: bool | None = None,
490
- cache_dir: Path | None = None,
491
- cache_dir_delete_on_exit: bool | None = None,
492
- log: Path | None = None,
493
- other_args: list[str] | None = None,
494
- ) -> Mount:
495
- """Mount a remote or directory to a local path.
496
-
497
- Args:
498
- src: Remote or directory to mount
499
- outdir: Local path to mount to
500
-
501
- Returns:
502
- Mount object that manages the mounted path
503
-
504
- Raises:
505
- subprocess.CalledProcessError: If the mount operation fails
506
- """
507
- return self.impl.mount(
508
- src=src,
509
- outdir=outdir,
510
- allow_writes=allow_writes,
511
- use_links=use_links,
512
- vfs_cache_mode=vfs_cache_mode,
513
- verbose=verbose,
514
- cache_dir=cache_dir,
515
- cache_dir_delete_on_exit=cache_dir_delete_on_exit,
516
- log=log,
517
- other_args=other_args,
518
- )
519
-
520
- def serve_http(
521
- self,
522
- src: str,
523
- addr: str = "localhost:8080",
524
- other_args: list[str] | None = None,
525
- ) -> HttpServer:
526
- """Serve a remote or directory via HTTP. The returned HttpServer has a client which can be used to
527
- fetch files or parts.
528
-
529
- Args:
530
- src: Remote or directory to serve
531
- addr: Network address and port to serve on (default: localhost:8080)
532
- """
533
- return self.impl.serve_http(src=src, addr=addr, other_args=other_args)
534
-
535
- def size_files(
536
- self,
537
- src: str,
538
- files: list[str],
539
- fast_list: bool = False, # Recommend that this is False
540
- other_args: list[str] | None = None,
541
- check: bool | None = False,
542
- verbose: bool | None = None,
543
- ) -> SizeResult | Exception:
544
- """Get the size of a list of files. Example of files items: "remote:bucket/to/file"."""
545
- return self.impl.size_files(
546
- src=src,
547
- files=files,
548
- fast_list=fast_list,
549
- other_args=other_args,
550
- check=check,
551
- verbose=verbose,
552
- )
553
-
554
- def size_file(self, src: str) -> SizeSuffix | Exception:
555
- """Get the size of a file."""
556
- return self.impl.size_file(src=src)
557
- ```
558
-
559
-
560
- # Contributing
561
-
562
- ```bash
563
- git clone https://github.com/zackees/rclone-api
564
- cd rclone-api
565
- ./install
566
- ./lint
567
- ./test
568
- ```
569
-
570
- # Windows
571
-
572
- This environment requires you to use `git-bash`.
1
+ Metadata-Version: 2.2
2
+ Name: rclone_api
3
+ Version: 1.5.0
4
+ Summary: rclone api in python
5
+ Home-page: https://github.com/zackees/rclone-api
6
+ License: BSD 3-Clause License
7
+ Keywords: rclone,api,python,fast,sftp,s3,backblaze
8
+ Classifier: Programming Language :: Python :: 3
9
+ Requires-Python: >=3.10
10
+ Description-Content-Type: text/markdown
11
+ License-File: LICENSE
12
+ Requires-Dist: pyright>=1.1.393
13
+ Requires-Dist: python-dotenv>=1.0.0
14
+ Requires-Dist: certifi>=2025.1.31
15
+ Requires-Dist: psutil
16
+ Requires-Dist: boto3<=1.35.99,>=1.20.1
17
+ Requires-Dist: sqlmodel>=0.0.23
18
+ Requires-Dist: psycopg2-binary>=2.9.10
19
+ Requires-Dist: httpx>=0.28.1
20
+ Requires-Dist: download>=0.3.5
21
+ Requires-Dist: appdirs>=1.4.4
22
+ Dynamic: home-page
23
+
24
+ # rclone-api
25
+
26
+
27
+ ![perpetualmaniac_faster_400fd528-df15-4a04-8ad3-3cca786d7bca (2)](https://github.com/user-attachments/assets/65138e38-b115-447c-849a-4adbd27e4b67)
28
+
29
+
30
+ <!--
31
+ [![Linting](https://github.com/zackees/rclone-api/actions/workflows/lint.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/lint.yml)
32
+ [![MacOS_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml)
33
+ [![Ubuntu_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml)
34
+ [![Win_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml)
35
+ -->
36
+
37
+
38
+ Got a lot of data to transfer quickly? This package is for you.
39
+
40
+ This library was built out of necessity to transfer large amounts of AI training data. Aggressive defaults mean this API will transfer faster than rclone does with its stock settings.
41
+
42
+ You can have [rclone](https://rclone.org/) in your path; otherwise the API will download it for you.
43
+
44
+ # Install
45
+
46
+ `pip install rclone-api`
47
+
48
+ pypi link: https://pypi.org/project/rclone-api/
49
+
50
+ # Quick
51
+
52
+ In addition to providing easy Python access to rclone, this package offers several extra features:
53
+
54
+ * Aggressive default settings for copying / syncing operations for extreme performance.
55
+ * Database Support: Dump repo information to a sqlite/postgres/mysql database.
56
+ * One repo path -> table.
57
+ * Scoped objects for:
58
+ * Mounts.
59
+ * File servers.
60
+ * Enforces correct cleanup
61
+ * Mounts are easier - platform-specific setup and teardown.
62
+ * Resumable multi-part uploads when s3 is the destination.
63
+ * Fast diffing src/dst repos as a stream of `list[str]`.
64
+ * Find which files are missing and need to be copied.
65
+ * Efficiently build pipelines to select copy strategy based on file size.
66
+ * Walk a directory.
67
+ * Breadth first.
68
+ * Depth first.
69
+ * Use the HttpServer to slice out byte ranges from extremely large files.
70
+
71
+
72
+ ## Example
73
+
74
+ ```python
75
+
76
+ from rclone_api import Rclone, DirListing, Config, File
77
+
78
+ RCLONE_CONFIG = Config("""
79
+ [dst]
80
+ type = s3
81
+ account = *********
82
+ key = ************
83
+ """)
84
+
85
+
86
+ def test_ls_glob_png(self) -> None:
87
+ rclone = Rclone(RCLONE_CONFIG)
88
+ path = f"dst:{BUCKET_NAME}/my_data"
89
+ listing: DirListing = rclone.ls(path, glob="*.png")
90
+ self.assertGreater(len(listing.files), 0)
91
+ for file in listing.files:
92
+ self.assertIsInstance(file, File)
93
+ # test that it ends with .png
94
+ self.assertTrue(file.name.endswith(".png"))
95
+ # there should be no directories with this glob
96
+ self.assertEqual(len(listing.dirs), 0)
97
+ ```
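+
+ The feature list above also mentions database dumps and streaming diffs. Below is a minimal sketch of both, reusing `RCLONE_CONFIG` from the example; the bucket name and sqlite path are hypothetical placeholders:
+
+ ```python
+ from rclone_api import Rclone
+
+ rclone = Rclone(RCLONE_CONFIG)
+
+ # Dump one repo path into a table of a local sqlite database (one repo path -> one table).
+ rclone.save_to_db(src="dst:my-bucket", db_url="sqlite:///data.db", fast_list=True)
+
+ # Stream the differences between two prefixes as rclone reports them.
+ for item in rclone.diff(src="dst:my-bucket/old", dst="dst:my-bucket/new"):
+     print(item)
+ ```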
98
+
99
+ ## API
100
+
101
+ ```python
102
+
103
+ # from rclone_api import Rclone
104
+ # Rclone is the main api entry point.
105
+ class Rclone:
106
+ def __init__(
107
+ self, rclone_conf: Path | Config, rclone_exe: Path | None = None
108
+ ) -> None:
109
+ from rclone_api.rclone_impl import RcloneImpl
110
+
111
+ self.impl: RcloneImpl = RcloneImpl(rclone_conf, rclone_exe)
112
+
113
+ def webgui(self, other_args: list[str] | None = None) -> Process:
114
+ """Launch the Rclone web GUI."""
115
+ return self.impl.webgui(other_args=other_args)
116
+
117
+ def launch_server(
118
+ self,
119
+ addr: str,
120
+ user: str | None = None,
121
+ password: str | None = None,
122
+ other_args: list[str] | None = None,
123
+ ) -> Process:
124
+ """Launch the Rclone server so it can receive commands"""
125
+ return self.impl.launch_server(
126
+ addr=addr, user=user, password=password, other_args=other_args
127
+ )
128
+
129
+ def remote_control(
130
+ self,
131
+ addr: str,
132
+ user: str | None = None,
133
+ password: str | None = None,
134
+ capture: bool | None = None,
135
+ other_args: list[str] | None = None,
136
+ ) -> CompletedProcess:
137
+ return self.impl.remote_control(
138
+ addr=addr,
139
+ user=user,
140
+ password=password,
141
+ capture=capture,
142
+ other_args=other_args,
143
+ )
144
+
145
+ def obscure(self, password: str) -> str:
146
+ """Obscure a password for use in rclone config files."""
147
+ return self.impl.obscure(password=password)
148
+
149
+ def ls_stream(
150
+ self,
151
+ path: str,
152
+ max_depth: int = -1,
153
+ fast_list: bool = False,
154
+ ) -> FilesStream:
155
+ """
156
+ List files in the given path
157
+
158
+ Args:
159
+ path: Remote path to list
160
+ max_depth: Maximum recursion depth (-1 for unlimited)
161
+ fast_list: Use fast list (only use when getting THE entire data repository from the root/bucket, or it's small)
162
+ """
163
+ return self.impl.ls_stream(path=path, max_depth=max_depth, fast_list=fast_list)
164
+
165
+ def save_to_db(
166
+ self,
167
+ src: str,
168
+ db_url: str,
169
+ max_depth: int = -1,
170
+ fast_list: bool = False,
171
+ ) -> None:
172
+ """
173
+ Save files to a database (sqlite, mysql, postgres)
174
+
175
+ Args:
176
+ src: Remote path to list, this will be used to populate an entire table, so always use the root-most path.
177
+ db_url: Database URL, like sqlite:///data.db or mysql://user:pass@localhost/db or postgres://user:pass@localhost/db
178
+ max_depth: Maximum depth to traverse (-1 for unlimited)
179
+ fast_list: Use fast list (only use when getting THE entire data repository from the root/bucket)
180
+
181
+ """
182
+ return self.impl.save_to_db(
183
+ src=src, db_url=db_url, max_depth=max_depth, fast_list=fast_list
184
+ )
185
+
186
+ def ls(
187
+ self,
188
+ path: Dir | Remote | str | None = None,
189
+ max_depth: int | None = None,
190
+ glob: str | None = None,
191
+ order: Order = Order.NORMAL,
192
+ listing_option: ListingOption = ListingOption.ALL,
193
+ ) -> DirListing:
194
+ return self.impl.ls(
195
+ path=path,
196
+ max_depth=max_depth,
197
+ glob=glob,
198
+ order=order,
199
+ listing_option=listing_option,
200
+ )
201
+
202
+ def listremotes(self) -> list[Remote]:
203
+ return self.impl.listremotes()
204
+
205
+ def diff(
206
+ self,
207
+ src: str,
208
+ dst: str,
209
+ min_size: (
210
+ str | None
211
+ ) = None, # e. g. "1MB" - see rclone documentation: https://rclone.org/commands/rclone_check/
212
+ max_size: (
213
+ str | None
214
+ ) = None, # e. g. "1GB" - see rclone documentation: https://rclone.org/commands/rclone_check/
215
+ diff_option: DiffOption = DiffOption.COMBINED,
216
+ fast_list: bool = True,
217
+ size_only: bool | None = None,
218
+ checkers: int | None = None,
219
+ other_args: list[str] | None = None,
220
+ ) -> Generator[DiffItem, None, None]:
221
+ """Be extra careful with the src and dst values. If you are off by one
222
+ parent directory, you will get a huge amount of false diffs."""
223
+ return self.impl.diff(
224
+ src=src,
225
+ dst=dst,
226
+ min_size=min_size,
227
+ max_size=max_size,
228
+ diff_option=diff_option,
229
+ fast_list=fast_list,
230
+ size_only=size_only,
231
+ checkers=checkers,
232
+ other_args=other_args,
233
+ )
234
+
235
+ def walk(
236
+ self,
237
+ path: Dir | Remote | str,
238
+ max_depth: int = -1,
239
+ breadth_first: bool = True,
240
+ order: Order = Order.NORMAL,
241
+ ) -> Generator[DirListing, None, None]:
242
+ """Walk through the given path recursively.
243
+
244
+ Args:
245
+ path: Remote path or Remote object to walk through
246
+ max_depth: Maximum depth to traverse (-1 for unlimited)
247
+
248
+ Yields:
249
+ DirListing: Directory listing for each directory encountered
250
+ """
251
+ return self.impl.walk(
252
+ path=path, max_depth=max_depth, breadth_first=breadth_first, order=order
253
+ )
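+ # Usage sketch (the remote name is a hypothetical placeholder): breadth-first walk, e.g.
+ #   for listing in rclone.walk("dst:my-bucket", max_depth=2):
+ #       print(len(listing.files), len(listing.dirs))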
254
+
255
+ def scan_missing_folders(
256
+ self,
257
+ src: Dir | Remote | str,
258
+ dst: Dir | Remote | str,
259
+ max_depth: int = -1,
260
+ order: Order = Order.NORMAL,
261
+ ) -> Generator[Dir, None, None]:
262
+ """Walk through the given path recursively.
263
+
264
+ WORK IN PROGRESS!!
265
+
266
+ Args:
267
+ src: Source directory or Remote to walk through
268
+ dst: Destination directory or Remote to walk through
269
+ max_depth: Maximum depth to traverse (-1 for unlimited)
270
+
271
+ Yields:
272
+ DirListing: Directory listing for each directory encountered
273
+ """
274
+ return self.impl.scan_missing_folders(
275
+ src=src, dst=dst, max_depth=max_depth, order=order
276
+ )
277
+
278
+ def cleanup(
279
+ self, path: str, other_args: list[str] | None = None
280
+ ) -> CompletedProcess:
281
+ """Cleanup any resources used by the Rclone instance."""
282
+ return self.impl.cleanup(path=path, other_args=other_args)
283
+
284
+ def copy_to(
285
+ self,
286
+ src: File | str,
287
+ dst: File | str,
288
+ check: bool | None = None,
289
+ verbose: bool | None = None,
290
+ other_args: list[str] | None = None,
291
+ ) -> CompletedProcess:
292
+ """Copy one file from source to destination.
293
+
294
+ Warning - slow.
295
+
296
+ """
297
+ return self.impl.copy_to(
298
+ src=src, dst=dst, check=check, verbose=verbose, other_args=other_args
299
+ )
300
+
301
+ def copy_files(
302
+ self,
303
+ src: str,
304
+ dst: str,
305
+ files: list[str] | Path,
306
+ check: bool | None = None,
307
+ max_backlog: int | None = None,
308
+ verbose: bool | None = None,
309
+ checkers: int | None = None,
310
+ transfers: int | None = None,
311
+ low_level_retries: int | None = None,
312
+ retries: int | None = None,
313
+ retries_sleep: str | None = None,
314
+ metadata: bool | None = None,
315
+ timeout: str | None = None,
316
+ max_partition_workers: int | None = None,
317
+ multi_thread_streams: int | None = None,
318
+ other_args: list[str] | None = None,
319
+ ) -> list[CompletedProcess]:
320
+ """Copy multiple files from source to destination.
321
+
322
+ Args:
323
+ files: The files to copy, given as a list of paths or a Path
324
+ """
325
+ return self.impl.copy_files(
326
+ src=src,
327
+ dst=dst,
328
+ files=files,
329
+ check=check,
330
+ max_backlog=max_backlog,
331
+ verbose=verbose,
332
+ checkers=checkers,
333
+ transfers=transfers,
334
+ low_level_retries=low_level_retries,
335
+ retries=retries,
336
+ retries_sleep=retries_sleep,
337
+ metadata=metadata,
338
+ timeout=timeout,
339
+ max_partition_workers=max_partition_workers,
340
+ multi_thread_streams=multi_thread_streams,
341
+ other_args=other_args,
342
+ )
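+ # Usage sketch; remote names are placeholders and it is assumed here that `files` holds paths relative to src, e.g.
+ #   rclone.copy_files("src:bucket/data", "dst:bucket/data", files=["a.bin", "sub/b.bin"])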
343
+
344
+ def copy(
345
+ self,
346
+ src: Dir | str,
347
+ dst: Dir | str,
348
+ check: bool | None = None,
349
+ transfers: int | None = None,
350
+ checkers: int | None = None,
351
+ multi_thread_streams: int | None = None,
352
+ low_level_retries: int | None = None,
353
+ retries: int | None = None,
354
+ other_args: list[str] | None = None,
355
+ ) -> CompletedProcess:
356
+ """Copy files from source to destination.
357
+
358
+ Args:
359
+ src: Source directory
360
+ dst: Destination directory
361
+ """
362
+ return self.impl.copy(
363
+ src=src,
364
+ dst=dst,
365
+ check=check,
366
+ transfers=transfers,
367
+ checkers=checkers,
368
+ multi_thread_streams=multi_thread_streams,
369
+ low_level_retries=low_level_retries,
370
+ retries=retries,
371
+ other_args=other_args,
372
+ )
373
+
374
+ def purge(self, path: Dir | str) -> CompletedProcess:
375
+ """Purge a directory"""
376
+ return self.impl.purge(path=path)
377
+
378
+ def delete_files(
379
+ self,
380
+ files: str | File | list[str] | list[File],
381
+ check: bool | None = None,
382
+ rmdirs=False,
383
+ verbose: bool | None = None,
384
+ max_partition_workers: int | None = None,
385
+ other_args: list[str] | None = None,
386
+ ) -> CompletedProcess:
387
+ """Delete a directory"""
388
+ return self.impl.delete_files(
389
+ files=files,
390
+ check=check,
391
+ rmdirs=rmdirs,
392
+ verbose=verbose,
393
+ max_partition_workers=max_partition_workers,
394
+ other_args=other_args,
395
+ )
396
+
397
+ def exists(self, path: Dir | Remote | str | File) -> bool:
398
+ """Check if a file or directory exists."""
399
+ return self.impl.exists(path=path)
400
+
401
+ def is_synced(self, src: str | Dir, dst: str | Dir) -> bool:
402
+ """Check if two directories are in sync."""
403
+ return self.impl.is_synced(src=src, dst=dst)
404
+
405
+ def modtime(self, src: str) -> str | Exception:
406
+ """Get the modification time of a file or directory."""
407
+ return self.impl.modtime(src=src)
408
+
409
+ def modtime_dt(self, src: str) -> datetime | Exception:
410
+ """Get the modification time of a file or directory."""
411
+ return self.impl.modtime_dt(src=src)
412
+
413
+ def write_text(
414
+ self,
415
+ text: str,
416
+ dst: str,
417
+ ) -> Exception | None:
418
+ """Write text to a file."""
419
+ return self.impl.write_text(text=text, dst=dst)
420
+
421
+ def write_bytes(
422
+ self,
423
+ data: bytes,
424
+ dst: str,
425
+ ) -> Exception | None:
426
+ """Write bytes to a file."""
427
+ return self.impl.write_bytes(data=data, dst=dst)
428
+
429
+ def read_bytes(self, src: str) -> bytes | Exception:
430
+ """Read bytes from a file."""
431
+ return self.impl.read_bytes(src=src)
432
+
433
+ def read_text(self, src: str) -> str | Exception:
434
+ """Read text from a file."""
435
+ return self.impl.read_text(src=src)
436
+
437
+ def copy_bytes(
438
+ self,
439
+ src: str,
440
+ offset: int | SizeSuffix,
441
+ length: int | SizeSuffix,
442
+ outfile: Path,
443
+ other_args: list[str] | None = None,
444
+ ) -> Exception | None:
445
+ """Copy a slice of bytes from the src file to dst."""
446
+ return self.impl.copy_bytes(
447
+ src=src,
448
+ offset=offset,
449
+ length=length,
450
+ outfile=outfile,
451
+ other_args=other_args,
452
+ )
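+ # Usage sketch (hypothetical remote path): copy the first 1 MiB of a large object to a local file, e.g.
+ #   rclone.copy_bytes("dst:bucket/big.bin", offset=0, length=1024 * 1024, outfile=Path("slice.bin"))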
453
+
454
+ def copy_dir(
455
+ self, src: str | Dir, dst: str | Dir, args: list[str] | None = None
456
+ ) -> CompletedProcess:
457
+ """Copy a directory from source to destination."""
458
+ # convert src to str, also dst
459
+ return self.impl.copy_dir(src=src, dst=dst, args=args)
460
+
461
+ def copy_remote(
462
+ self, src: Remote, dst: Remote, args: list[str] | None = None
463
+ ) -> CompletedProcess:
464
+ """Copy a remote to another remote."""
465
+ return self.impl.copy_remote(src=src, dst=dst, args=args)
466
+
467
+ def copy_file_s3_resumable(
468
+ self,
469
+ src: str, # src:/Bucket/path/myfile.large.zst
470
+ dst: str, # dst:/Bucket/path/myfile.large.zst
471
+ part_infos: list[PartInfo] | None = None,
472
+ upload_threads: int = 8, # Number of reader and writer threads to use
473
+ merge_threads: int = 4, # Number of threads to use for merging the parts
474
+ ) -> Exception | None:
475
+ """Copy a file in parts."""
476
+ return self.impl.copy_file_s3_resumable(
477
+ src=src,
478
+ dst=dst,
479
+ part_infos=part_infos,
480
+ upload_threads=upload_threads,
481
+ merge_threads=merge_threads,
482
+ )
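+ # Usage sketch (hypothetical paths): resumable multi-part copy when s3 is the destination, e.g.
+ #   rclone.copy_file_s3_resumable("src:bucket/big.zst", "dst:bucket/big.zst")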
483
+
484
+ def mount(
485
+ self,
486
+ src: Remote | Dir | str,
487
+ outdir: Path,
488
+ allow_writes: bool | None = False,
489
+ use_links: bool | None = None,
490
+ vfs_cache_mode: str | None = None,
491
+ verbose: bool | None = None,
492
+ cache_dir: Path | None = None,
493
+ cache_dir_delete_on_exit: bool | None = None,
494
+ log: Path | None = None,
495
+ other_args: list[str] | None = None,
496
+ ) -> Mount:
497
+ """Mount a remote or directory to a local path.
498
+
499
+ Args:
500
+ src: Remote or directory to mount
501
+ outdir: Local path to mount to
502
+
503
+ Returns:
504
+ Mount object that manages the mounted path
505
+
506
+ Raises:
507
+ subprocess.CalledProcessError: If the mount operation fails
508
+ """
509
+ return self.impl.mount(
510
+ src=src,
511
+ outdir=outdir,
512
+ allow_writes=allow_writes,
513
+ use_links=use_links,
514
+ vfs_cache_mode=vfs_cache_mode,
515
+ verbose=verbose,
516
+ cache_dir=cache_dir,
517
+ cache_dir_delete_on_exit=cache_dir_delete_on_exit,
518
+ log=log,
519
+ other_args=other_args,
520
+ )
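+ # Usage sketch (hypothetical paths): the returned Mount is a scoped object (see "Scoped objects" above),
+ # so release it via its own API when done, e.g.
+ #   mount = rclone.mount("dst:my-bucket", Path("./mnt"))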
521
+
522
+ def serve_http(
523
+ self,
524
+ src: str,
525
+ addr: str = "localhost:8080",
526
+ other_args: list[str] | None = None,
527
+ ) -> HttpServer:
528
+ """Serve a remote or directory via HTTP. The returned HttpServer has a client which can be used to
529
+ fetch files or parts.
530
+
531
+ Args:
532
+ src: Remote or directory to serve
533
+ addr: Network address and port to serve on (default: localhost:8080)
534
+ """
535
+ return self.impl.serve_http(src=src, addr=addr, other_args=other_args)
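+ # Usage sketch (hypothetical remote); only the call itself is illustrated here, e.g.
+ #   server = rclone.serve_http("dst:my-bucket", addr="localhost:8080")
+ #   # the returned HttpServer exposes a client for fetching whole files or byte ranges.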
536
+
537
+ def size_files(
538
+ self,
539
+ src: str,
540
+ files: list[str],
541
+ fast_list: bool = False, # Recommend that this is False
542
+ other_args: list[str] | None = None,
543
+ check: bool | None = False,
544
+ verbose: bool | None = None,
545
+ ) -> SizeResult | Exception:
546
+ """Get the size of a list of files. Example of files items: "remote:bucket/to/file"."""
547
+ return self.impl.size_files(
548
+ src=src,
549
+ files=files,
550
+ fast_list=fast_list,
551
+ other_args=other_args,
552
+ check=check,
553
+ verbose=verbose,
554
+ )
555
+
556
+ def size_file(self, src: str) -> SizeSuffix | Exception:
557
+ """Get the size of a file."""
558
+ return self.impl.size_file(src=src)
559
+ ```
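+
+ A short end-to-end sketch of the small-file helpers above, reusing `RCLONE_CONFIG` from the example; the remote paths are placeholders, and note that errors are returned as `Exception` values rather than raised:
+
+ ```python
+ from rclone_api import Rclone
+
+ rclone = Rclone(RCLONE_CONFIG)
+
+ # Write a small text file, confirm it exists, then read it back.
+ err = rclone.write_text("hello", "dst:my-bucket/notes/hello.txt")
+ assert err is None, err
+
+ if rclone.exists("dst:my-bucket/notes/hello.txt"):
+     text = rclone.read_text("dst:my-bucket/notes/hello.txt")
+     if isinstance(text, Exception):
+         raise text
+     print(text)
+
+ # Size of a single remote file (SizeSuffix or Exception).
+ print(rclone.size_file("dst:my-bucket/notes/hello.txt"))
+ ```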
560
+
561
+
562
+ # Contributing
563
+
564
+ ```bash
565
+ git clone https://github.com/zackees/rclone-api
566
+ cd rclone-api
567
+ ./install
568
+ ./lint
569
+ ./test
570
+ ```
571
+
572
+ # Windows
573
+
574
+ This environment requires you to use `git-bash`.