rclone-api 1.4.24__py2.py3-none-any.whl → 1.4.27__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
rclone_api/__init__.py CHANGED
@@ -366,33 +366,6 @@ class Rclone:
366
366
  """Read text from a file."""
367
367
  return self.impl.read_text(src=src)
368
368
 
369
- def copy_file_resumable_s3(
370
- self,
371
- src: str,
372
- dst: str,
373
- save_state_json: Path,
374
- chunk_size: SizeSuffix | None = None,
375
- read_threads: int = 8,
376
- write_threads: int = 8,
377
- retries: int = 3,
378
- verbose: bool | None = None,
379
- max_chunks_before_suspension: int | None = None,
380
- backend_log: Path | None = None,
381
- ) -> MultiUploadResult:
382
- """For massive files that rclone can't handle in one go, this function will copy the file in chunks to an S3 store"""
383
- return self.impl.copy_file_resumable_s3(
384
- src=src,
385
- dst=dst,
386
- save_state_json=save_state_json,
387
- chunk_size=chunk_size,
388
- read_threads=read_threads,
389
- write_threads=write_threads,
390
- retries=retries,
391
- verbose=verbose,
392
- max_chunks_before_suspension=max_chunks_before_suspension,
393
- backend_log=backend_log,
394
- )
395
-
396
369
  def copy_bytes(
397
370
  self,
398
371
  src: str,
@@ -423,7 +396,7 @@ class Rclone:
423
396
  """Copy a remote to another remote."""
424
397
  return self.impl.copy_remote(src=src, dst=dst, args=args)
425
398
 
426
- def copy_file_parts(
399
+ def copy_file_s3_resumable(
427
400
  self,
428
401
  src: str, # src:/Bucket/path/myfile.large.zst
429
402
  dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/part.{part_number:05d}.start-end
@@ -432,7 +405,7 @@ class Rclone:
432
405
  merge_threads: int = 4, # Number of threads to use for merging the parts
433
406
  ) -> Exception | None:
434
407
  """Copy a file in parts."""
435
- return self.impl.copy_file_parts(
408
+ return self.impl.copy_file_s3_resumable(
436
409
  src=src,
437
410
  dst_dir=dst_dir,
438
411
  part_infos=part_infos,
@@ -87,7 +87,7 @@ def main() -> int:
87
87
  # save_state_json=args.save_state_json,
88
88
  # verbose=args.verbose,
89
89
  # )
90
- err: Exception | None = rclone.copy_file_parts(
90
+ err: Exception | None = rclone.copy_file_s3_resumable(
91
91
  src=args.src,
92
92
  dst_dir=args.dst,
93
93
  )
@@ -31,7 +31,7 @@ def list_files(rclone: Rclone, path: str):
31
31
  def _parse_args() -> Args:
32
32
  parser = argparse.ArgumentParser(description="List files in a remote path.")
33
33
  parser.add_argument("src", help="Directory that holds the info.json file")
34
- parser.add_argument("-v", "--verbose", help="Verbose output", action="store_true")
34
+ parser.add_argument("--no-verbose", help="Verbose output", action="store_true")
35
35
  parser.add_argument(
36
36
  "--config", help="Path to rclone config file", type=Path, required=False
37
37
  )
@@ -45,7 +45,7 @@ def _parse_args() -> Args:
45
45
  out = Args(
46
46
  config_path=config,
47
47
  src=args.src,
48
- verbose=args.verbose,
48
+ verbose=not args.no_verbose,
49
49
  )
50
50
  return out
51
51
 
@@ -65,7 +65,7 @@ def main() -> int:
65
65
  rclone = Rclone(rclone_conf=args.config_path)
66
66
  info_path = _get_info_path(src=args.src)
67
67
  s3_server_side_multi_part_merge(
68
- rclone=rclone.impl, info_path=info_path, max_workers=5
68
+ rclone=rclone.impl, info_path=info_path, max_workers=5, verbose=args.verbose
69
69
  )
70
70
  return 0
71
71
 
rclone_api/rclone_impl.py CHANGED
@@ -6,7 +6,6 @@ import os
6
6
  import random
7
7
  import subprocess
8
8
  import time
9
- import traceback
10
9
  import warnings
11
10
  from concurrent.futures import Future, ThreadPoolExecutor
12
11
  from datetime import datetime
@@ -34,10 +33,7 @@ from rclone_api.remote import Remote
34
33
  from rclone_api.rpath import RPath
35
34
  from rclone_api.s3.create import S3Credentials
36
35
  from rclone_api.s3.types import (
37
- MultiUploadResult,
38
- S3MutliPartUploadConfig,
39
36
  S3Provider,
40
- S3UploadTarget,
41
37
  )
42
38
  from rclone_api.types import (
43
39
  ListingOption,
@@ -787,7 +783,7 @@ class RcloneImpl:
787
783
  except subprocess.CalledProcessError:
788
784
  return False
789
785
 
790
- def copy_file_parts(
786
+ def copy_file_s3_resumable(
791
787
  self,
792
788
  src: str, # src:/Bucket/path/myfile.large.zst
793
789
  dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/
@@ -938,105 +934,6 @@ class RcloneImpl:
938
934
  )
939
935
  return s3_creds
940
936
 
941
- def copy_file_resumable_s3(
942
- self,
943
- src: str,
944
- dst: str,
945
- save_state_json: Path,
946
- chunk_size: SizeSuffix | None = None,
947
- read_threads: int = 8,
948
- write_threads: int = 8,
949
- retries: int = 3,
950
- verbose: bool | None = None,
951
- max_chunks_before_suspension: int | None = None,
952
- backend_log: Path | None = None,
953
- ) -> MultiUploadResult:
954
- """For massive files that rclone can't handle in one go, this function will copy the file in chunks to an S3 store"""
955
- from rclone_api.http_server import HttpFetcher, HttpServer
956
- from rclone_api.s3.api import S3Client
957
- from rclone_api.util import S3PathInfo, split_s3_path
958
-
959
- src_path = Path(src)
960
- name = src_path.name
961
- src_parent_path = Path(src).parent.as_posix()
962
-
963
- size_result: SizeResult | Exception = self.size_files(src_parent_path, [name])
964
- if isinstance(size_result, Exception):
965
- raise size_result
966
- target_size = SizeSuffix(size_result.total_size)
967
-
968
- chunk_size = chunk_size or SizeSuffix("64M")
969
- MAX_CHUNKS = 10000
970
- min_chunk_size = SizeSuffix(size_result.total_size // (MAX_CHUNKS - 1))
971
- if min_chunk_size > chunk_size:
972
- warnings.warn(
973
- f"Chunk size {chunk_size} is too small for file size {size_result.total_size}, setting to {min_chunk_size}"
974
- )
975
- chunk_size = SizeSuffix(min_chunk_size)
976
-
977
- if target_size < SizeSuffix("5M"):
978
- # fallback to normal copy
979
- completed_proc = self.copy_to(src, dst, check=True)
980
- if completed_proc.ok:
981
- return MultiUploadResult.UPLOADED_FRESH
982
-
983
- if size_result.total_size <= 0:
984
- raise ValueError(
985
- f"File {src} has size {size_result.total_size}, is this a directory?"
986
- )
987
-
988
- path_info: S3PathInfo = split_s3_path(dst)
989
- # remote = path_info.remote
990
- bucket_name = path_info.bucket
991
- s3_key = path_info.key
992
- s3_creds: S3Credentials = self.get_s3_credentials(dst, verbose=verbose)
993
-
994
- port = random.randint(10000, 20000)
995
- http_server: HttpServer = self.serve_http(
996
- src=src_path.parent.as_posix(),
997
- addr=f"localhost:{port}",
998
- serve_http_log=backend_log,
999
- )
1000
- chunk_fetcher: HttpFetcher = http_server.get_fetcher(
1001
- path=src_path.name,
1002
- n_threads=read_threads,
1003
- )
1004
-
1005
- client = S3Client(s3_creds)
1006
- upload_config: S3MutliPartUploadConfig = S3MutliPartUploadConfig(
1007
- chunk_size=chunk_size.as_int(),
1008
- chunk_fetcher=chunk_fetcher.bytes_fetcher,
1009
- max_write_threads=write_threads,
1010
- retries=retries,
1011
- resume_path_json=save_state_json,
1012
- max_chunks_before_suspension=max_chunks_before_suspension,
1013
- )
1014
-
1015
- print(f"Uploading {name} to {s3_key} in bucket {bucket_name}")
1016
- print(f"Source: {src_path}")
1017
- print(f"bucket_name: {bucket_name}")
1018
- print(f"upload_config: {upload_config}")
1019
-
1020
- upload_target = S3UploadTarget(
1021
- src_file=src_path,
1022
- src_file_size=size_result.total_size,
1023
- bucket_name=bucket_name,
1024
- s3_key=s3_key,
1025
- )
1026
-
1027
- try:
1028
- out: MultiUploadResult = client.upload_file_multipart(
1029
- upload_target=upload_target,
1030
- upload_config=upload_config,
1031
- )
1032
- return out
1033
- except Exception as e:
1034
- print(f"Error uploading file: {e}")
1035
- traceback.print_exc()
1036
- raise
1037
- finally:
1038
- chunk_fetcher.shutdown()
1039
-
1040
937
  def copy_bytes(
1041
938
  self,
1042
939
  src: str,
@@ -0,0 +1,555 @@
1
+ Metadata-Version: 2.2
2
+ Name: rclone_api
3
+ Version: 1.4.27
4
+ Summary: rclone api in python
5
+ Home-page: https://github.com/zackees/rclone-api
6
+ License: BSD 3-Clause License
7
+ Keywords: template-python-cmd
8
+ Classifier: Programming Language :: Python :: 3
9
+ Requires-Python: >=3.10
10
+ Description-Content-Type: text/markdown
11
+ License-File: LICENSE
12
+ Requires-Dist: pyright>=1.1.393
13
+ Requires-Dist: python-dotenv>=1.0.0
14
+ Requires-Dist: certifi>=2025.1.31
15
+ Requires-Dist: psutil
16
+ Requires-Dist: boto3<=1.35.99,>=1.20.1
17
+ Requires-Dist: sqlmodel>=0.0.23
18
+ Requires-Dist: psycopg2-binary>=2.9.10
19
+ Requires-Dist: httpx>=0.28.1
20
+ Dynamic: home-page
21
+
22
+ # rclone-api
23
+
24
+
25
+ ![perpetualmaniac_faster_400fd528-df15-4a04-8ad3-3cca786d7bca (2)](https://github.com/user-attachments/assets/65138e38-b115-447c-849a-4adbd27e4b67)
26
+
27
+
28
+ <!--
29
+ [![Linting](https://github.com/zackees/rclone-api/actions/workflows/lint.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/lint.yml)
30
+ [![MacOS_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml)
31
+ [![Ubuntu_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml)
32
+ [![Win_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml)
33
+ -->
34
+
35
+
36
+ Got a lot of data to transfer quickly? This package is for you.
37
+
38
+ This library was built out of necessity to transfer large amounts of AI training data. Aggressive defaults mean this API will transfer faster than rclone does with stock settings.
39
+
40
+ You must have rclone in your path to use this library. You'd want this anyway because rclone is still under heavy development.
41
+
42
+ # Install
43
+
44
+ `pip install rclone-api`
45
+
46
+ # Quick
47
+
48
+ In addition to providing easy python use for rclone, this package provides additional features:
49
+
50
+ * Resumable multi-part uploads when s3 is the destination.
51
+ * Diffing src/dst repos as a stream of `list[str]`.
52
+ * Efficient copying of byte ranges of a file.
53
+ * Aggressive default settings for copying / syncing operations for extreme performance.
54
+ * Some platform specific fixes.
55
+
56
+
57
+ ## Example
58
+
59
+ ```python
60
+
61
+ from rclone_api import Rclone, DirListing, Config
62
+
63
+ RCLONE_CONFIG = Config("""
64
+ [dst]
65
+ type = s3
66
+ account = *********
67
+ key = ************
68
+ """)
69
+
70
+
71
+ def test_ls_glob_png(self) -> None:
72
+ rclone = Rclone(RCLONE_CONFIG)
73
+ path = f"dst:{BUCKET_NAME}/my_data"
74
+ listing: DirListing = rclone.ls(path, glob="*.png")
75
+ self.assertGreater(len(listing.files), 0)
76
+ for file in listing.files:
77
+ self.assertIsInstance(file, File)
78
+ # test that it ends with .png
79
+ self.assertTrue(file.name.endswith(".png"))
80
+ # there should be no directories with this glob
81
+ self.assertEqual(len(listing.dirs), 0)
82
+ ```
83
+
84
+ ## API
85
+
86
+ ```python
87
+
88
+ # from rclone_api import Rclone
89
+ # Rclone is the main api entry point.
90
+ class Rclone:
91
+ def __init__(
92
+ self, rclone_conf: Path | Config, rclone_exe: Path | None = None
93
+ ) -> None:
94
+ from rclone_api.rclone_impl import RcloneImpl
95
+
96
+ self.impl: RcloneImpl = RcloneImpl(rclone_conf, rclone_exe)
97
+
98
+ def webgui(self, other_args: list[str] | None = None) -> Process:
99
+ """Launch the Rclone web GUI."""
100
+ return self.impl.webgui(other_args=other_args)
101
+
102
+ def launch_server(
103
+ self,
104
+ addr: str,
105
+ user: str | None = None,
106
+ password: str | None = None,
107
+ other_args: list[str] | None = None,
108
+ ) -> Process:
109
+ """Launch the Rclone server so it can receive commands"""
110
+ return self.impl.launch_server(
111
+ addr=addr, user=user, password=password, other_args=other_args
112
+ )
113
+
114
+ def remote_control(
115
+ self,
116
+ addr: str,
117
+ user: str | None = None,
118
+ password: str | None = None,
119
+ capture: bool | None = None,
120
+ other_args: list[str] | None = None,
121
+ ) -> CompletedProcess:
122
+ return self.impl.remote_control(
123
+ addr=addr,
124
+ user=user,
125
+ password=password,
126
+ capture=capture,
127
+ other_args=other_args,
128
+ )
129
+
130
+ def obscure(self, password: str) -> str:
131
+ """Obscure a password for use in rclone config files."""
132
+ return self.impl.obscure(password=password)
133
+
134
+ def ls_stream(
135
+ self,
136
+ path: str,
137
+ max_depth: int = -1,
138
+ fast_list: bool = False,
139
+ ) -> FilesStream:
140
+ """
141
+ List files in the given path
142
+
143
+ Args:
144
+ src: Remote path to list
145
+ max_depth: Maximum recursion depth (-1 for unlimited)
146
+ fast_list: Use fast list (only use when getting THE entire data repository from the root/bucket, or it's small)
147
+ """
148
+ return self.impl.ls_stream(path=path, max_depth=max_depth, fast_list=fast_list)
149
+
150
+ def save_to_db(
151
+ self,
152
+ src: str,
153
+ db_url: str,
154
+ max_depth: int = -1,
155
+ fast_list: bool = False,
156
+ ) -> None:
157
+ """
158
+ Save files to a database (sqlite, mysql, postgres)
159
+
160
+ Args:
161
+ src: Remote path to list, this will be used to populate an entire table, so always use the root-most path.
162
+ db_url: Database URL, like sqlite:///data.db or mysql://user:pass@localhost/db or postgres://user:pass@localhost/db
163
+ max_depth: Maximum depth to traverse (-1 for unlimited)
164
+ fast_list: Use fast list (only use when getting THE entire data repository from the root/bucket)
165
+
166
+ """
167
+ return self.impl.save_to_db(
168
+ src=src, db_url=db_url, max_depth=max_depth, fast_list=fast_list
169
+ )
170
+
171
+ def ls(
172
+ self,
173
+ path: Dir | Remote | str | None = None,
174
+ max_depth: int | None = None,
175
+ glob: str | None = None,
176
+ order: Order = Order.NORMAL,
177
+ listing_option: ListingOption = ListingOption.ALL,
178
+ ) -> DirListing:
179
+ return self.impl.ls(
180
+ path=path,
181
+ max_depth=max_depth,
182
+ glob=glob,
183
+ order=order,
184
+ listing_option=listing_option,
185
+ )
186
+
187
+ def listremotes(self) -> list[Remote]:
188
+ return self.impl.listremotes()
189
+
190
+ def diff(
191
+ self,
192
+ src: str,
193
+ dst: str,
194
+ min_size: (
195
+ str | None
196
+ ) = None, # e. g. "1MB" - see rclone documentation: https://rclone.org/commands/rclone_check/
197
+ max_size: (
198
+ str | None
199
+ ) = None, # e. g. "1GB" - see rclone documentation: https://rclone.org/commands/rclone_check/
200
+ diff_option: DiffOption = DiffOption.COMBINED,
201
+ fast_list: bool = True,
202
+ size_only: bool | None = None,
203
+ checkers: int | None = None,
204
+ other_args: list[str] | None = None,
205
+ ) -> Generator[DiffItem, None, None]:
206
+ """Be extra careful with the src and dst values. If you are off by one
207
+ parent directory, you will get a huge amount of false diffs."""
208
+ return self.impl.diff(
209
+ src=src,
210
+ dst=dst,
211
+ min_size=min_size,
212
+ max_size=max_size,
213
+ diff_option=diff_option,
214
+ fast_list=fast_list,
215
+ size_only=size_only,
216
+ checkers=checkers,
217
+ other_args=other_args,
218
+ )
219
+
220
+ def walk(
221
+ self,
222
+ path: Dir | Remote | str,
223
+ max_depth: int = -1,
224
+ breadth_first: bool = True,
225
+ order: Order = Order.NORMAL,
226
+ ) -> Generator[DirListing, None, None]:
227
+ """Walk through the given path recursively.
228
+
229
+ Args:
230
+ path: Remote path or Remote object to walk through
231
+ max_depth: Maximum depth to traverse (-1 for unlimited)
232
+
233
+ Yields:
234
+ DirListing: Directory listing for each directory encountered
235
+ """
236
+ return self.impl.walk(
237
+ path=path, max_depth=max_depth, breadth_first=breadth_first, order=order
238
+ )
239
+
240
+ def scan_missing_folders(
241
+ self,
242
+ src: Dir | Remote | str,
243
+ dst: Dir | Remote | str,
244
+ max_depth: int = -1,
245
+ order: Order = Order.NORMAL,
246
+ ) -> Generator[Dir, None, None]:
247
+ """Walk through the given path recursively.
248
+
249
+ WORK IN PROGRESS!!
250
+
251
+ Args:
252
+ src: Source directory or Remote to walk through
253
+ dst: Destination directory or Remote to walk through
254
+ max_depth: Maximum depth to traverse (-1 for unlimited)
255
+
256
+ Yields:
257
+ DirListing: Directory listing for each directory encountered
258
+ """
259
+ return self.impl.scan_missing_folders(
260
+ src=src, dst=dst, max_depth=max_depth, order=order
261
+ )
262
+
263
+ def cleanup(
264
+ self, path: str, other_args: list[str] | None = None
265
+ ) -> CompletedProcess:
266
+ """Cleanup any resources used by the Rclone instance."""
267
+ return self.impl.cleanup(path=path, other_args=other_args)
268
+
269
+ def copy_to(
270
+ self,
271
+ src: File | str,
272
+ dst: File | str,
273
+ check: bool | None = None,
274
+ verbose: bool | None = None,
275
+ other_args: list[str] | None = None,
276
+ ) -> CompletedProcess:
277
+ """Copy one file from source to destination.
278
+
279
+ Warning - slow.
280
+
281
+ """
282
+ return self.impl.copy_to(
283
+ src=src, dst=dst, check=check, verbose=verbose, other_args=other_args
284
+ )
285
+
286
+ def copy_files(
287
+ self,
288
+ src: str,
289
+ dst: str,
290
+ files: list[str] | Path,
291
+ check: bool | None = None,
292
+ max_backlog: int | None = None,
293
+ verbose: bool | None = None,
294
+ checkers: int | None = None,
295
+ transfers: int | None = None,
296
+ low_level_retries: int | None = None,
297
+ retries: int | None = None,
298
+ retries_sleep: str | None = None,
299
+ metadata: bool | None = None,
300
+ timeout: str | None = None,
301
+ max_partition_workers: int | None = None,
302
+ multi_thread_streams: int | None = None,
303
+ other_args: list[str] | None = None,
304
+ ) -> list[CompletedProcess]:
305
+ """Copy multiple files from source to destination.
306
+
307
+ Args:
308
+ payload: Dictionary of source and destination file paths
309
+ """
310
+ return self.impl.copy_files(
311
+ src=src,
312
+ dst=dst,
313
+ files=files,
314
+ check=check,
315
+ max_backlog=max_backlog,
316
+ verbose=verbose,
317
+ checkers=checkers,
318
+ transfers=transfers,
319
+ low_level_retries=low_level_retries,
320
+ retries=retries,
321
+ retries_sleep=retries_sleep,
322
+ metadata=metadata,
323
+ timeout=timeout,
324
+ max_partition_workers=max_partition_workers,
325
+ multi_thread_streams=multi_thread_streams,
326
+ other_args=other_args,
327
+ )
328
+
329
+ def copy(
330
+ self,
331
+ src: Dir | str,
332
+ dst: Dir | str,
333
+ check: bool | None = None,
334
+ transfers: int | None = None,
335
+ checkers: int | None = None,
336
+ multi_thread_streams: int | None = None,
337
+ low_level_retries: int | None = None,
338
+ retries: int | None = None,
339
+ other_args: list[str] | None = None,
340
+ ) -> CompletedProcess:
341
+ """Copy files from source to destination.
342
+
343
+ Args:
344
+ src: Source directory
345
+ dst: Destination directory
346
+ """
347
+ return self.impl.copy(
348
+ src=src,
349
+ dst=dst,
350
+ check=check,
351
+ transfers=transfers,
352
+ checkers=checkers,
353
+ multi_thread_streams=multi_thread_streams,
354
+ low_level_retries=low_level_retries,
355
+ retries=retries,
356
+ other_args=other_args,
357
+ )
358
+
359
+ def purge(self, path: Dir | str) -> CompletedProcess:
360
+ """Purge a directory"""
361
+ return self.impl.purge(path=path)
362
+
363
+ def delete_files(
364
+ self,
365
+ files: str | File | list[str] | list[File],
366
+ check: bool | None = None,
367
+ rmdirs=False,
368
+ verbose: bool | None = None,
369
+ max_partition_workers: int | None = None,
370
+ other_args: list[str] | None = None,
371
+ ) -> CompletedProcess:
372
+ """Delete a directory"""
373
+ return self.impl.delete_files(
374
+ files=files,
375
+ check=check,
376
+ rmdirs=rmdirs,
377
+ verbose=verbose,
378
+ max_partition_workers=max_partition_workers,
379
+ other_args=other_args,
380
+ )
381
+
382
+ def exists(self, path: Dir | Remote | str | File) -> bool:
383
+ """Check if a file or directory exists."""
384
+ return self.impl.exists(path=path)
385
+
386
+ def is_synced(self, src: str | Dir, dst: str | Dir) -> bool:
387
+ """Check if two directories are in sync."""
388
+ return self.impl.is_synced(src=src, dst=dst)
389
+
390
+ def modtime(self, src: str) -> str | Exception:
391
+ """Get the modification time of a file or directory."""
392
+ return self.impl.modtime(src=src)
393
+
394
+ def modtime_dt(self, src: str) -> datetime | Exception:
395
+ """Get the modification time of a file or directory."""
396
+ return self.impl.modtime_dt(src=src)
397
+
398
+ def write_text(
399
+ self,
400
+ text: str,
401
+ dst: str,
402
+ ) -> Exception | None:
403
+ """Write text to a file."""
404
+ return self.impl.write_text(text=text, dst=dst)
405
+
406
+ def write_bytes(
407
+ self,
408
+ data: bytes,
409
+ dst: str,
410
+ ) -> Exception | None:
411
+ """Write bytes to a file."""
412
+ return self.impl.write_bytes(data=data, dst=dst)
413
+
414
+ def read_bytes(self, src: str) -> bytes | Exception:
415
+ """Read bytes from a file."""
416
+ return self.impl.read_bytes(src=src)
417
+
418
+ def read_text(self, src: str) -> str | Exception:
419
+ """Read text from a file."""
420
+ return self.impl.read_text(src=src)
421
+
422
+ def copy_bytes(
423
+ self,
424
+ src: str,
425
+ offset: int | SizeSuffix,
426
+ length: int | SizeSuffix,
427
+ outfile: Path,
428
+ other_args: list[str] | None = None,
429
+ ) -> Exception | None:
430
+ """Copy a slice of bytes from the src file to dst."""
431
+ return self.impl.copy_bytes(
432
+ src=src,
433
+ offset=offset,
434
+ length=length,
435
+ outfile=outfile,
436
+ other_args=other_args,
437
+ )
438
+
439
+ def copy_dir(
440
+ self, src: str | Dir, dst: str | Dir, args: list[str] | None = None
441
+ ) -> CompletedProcess:
442
+ """Copy a directory from source to destination."""
443
+ # convert src to str, also dst
444
+ return self.impl.copy_dir(src=src, dst=dst, args=args)
445
+
446
+ def copy_remote(
447
+ self, src: Remote, dst: Remote, args: list[str] | None = None
448
+ ) -> CompletedProcess:
449
+ """Copy a remote to another remote."""
450
+ return self.impl.copy_remote(src=src, dst=dst, args=args)
451
+
452
+ def copy_file_s3_resumable(
453
+ self,
454
+ src: str, # src:/Bucket/path/myfile.large.zst
455
+ dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/part.{part_number:05d}.start-end
456
+ part_infos: list[PartInfo] | None = None,
457
+ upload_threads: int = 8, # Number of reader and writer threads to use
458
+ merge_threads: int = 4, # Number of threads to use for merging the parts
459
+ ) -> Exception | None:
460
+ """Copy a file in parts."""
461
+ return self.impl.copy_file_s3_resumable(
462
+ src=src,
463
+ dst_dir=dst_dir,
464
+ part_infos=part_infos,
465
+ upload_threads=upload_threads,
466
+ merge_threads=merge_threads,
467
+ )
468
+
469
+ def mount(
470
+ self,
471
+ src: Remote | Dir | str,
472
+ outdir: Path,
473
+ allow_writes: bool | None = False,
474
+ use_links: bool | None = None,
475
+ vfs_cache_mode: str | None = None,
476
+ verbose: bool | None = None,
477
+ cache_dir: Path | None = None,
478
+ cache_dir_delete_on_exit: bool | None = None,
479
+ log: Path | None = None,
480
+ other_args: list[str] | None = None,
481
+ ) -> Mount:
482
+ """Mount a remote or directory to a local path.
483
+
484
+ Args:
485
+ src: Remote or directory to mount
486
+ outdir: Local path to mount to
487
+
488
+ Returns:
489
+ CompletedProcess from the mount command execution
490
+
491
+ Raises:
492
+ subprocess.CalledProcessError: If the mount operation fails
493
+ """
494
+ return self.impl.mount(
495
+ src=src,
496
+ outdir=outdir,
497
+ allow_writes=allow_writes,
498
+ use_links=use_links,
499
+ vfs_cache_mode=vfs_cache_mode,
500
+ verbose=verbose,
501
+ cache_dir=cache_dir,
502
+ cache_dir_delete_on_exit=cache_dir_delete_on_exit,
503
+ log=log,
504
+ other_args=other_args,
505
+ )
506
+
507
+ def serve_http(
508
+ self,
509
+ src: str,
510
+ addr: str = "localhost:8080",
511
+ other_args: list[str] | None = None,
512
+ ) -> HttpServer:
513
+ """Serve a remote or directory via HTTP. The returned HttpServer has a client which can be used to
514
+ fetch files or parts.
515
+
516
+ Args:
517
+ src: Remote or directory to serve
518
+ addr: Network address and port to serve on (default: localhost:8080)
519
+ """
520
+ return self.impl.serve_http(src=src, addr=addr, other_args=other_args)
521
+
522
+ def size_files(
523
+ self,
524
+ src: str,
525
+ files: list[str],
526
+ fast_list: bool = False, # Recommend that this is False
527
+ other_args: list[str] | None = None,
528
+ check: bool | None = False,
529
+ verbose: bool | None = None,
530
+ ) -> SizeResult | Exception:
531
+ """Get the size of a list of files. Example of files items: "remote:bucket/to/file"."""
532
+ return self.impl.size_files(
533
+ src=src,
534
+ files=files,
535
+ fast_list=fast_list,
536
+ other_args=other_args,
537
+ check=check,
538
+ verbose=verbose,
539
+ )
540
+
541
+ def size_file(self, src: str) -> SizeSuffix | Exception:
542
+ """Get the size of a file."""
543
+ return self.impl.size_file(src=src)
544
+ ```
545
+
546
+
547
+ To develop software, run `. ./activate`
548
+
549
+ # Windows
550
+
551
+ This environment requires you to use `git-bash`.
552
+
553
+ # Linting
554
+
555
+ Run `./lint`
@@ -1,4 +1,4 @@
1
- rclone_api/__init__.py,sha256=wvLRTEYvwLfkBx_LuWI62cZQA-pEz0xo0oVh_9DCOuM,17876
1
+ rclone_api/__init__.py,sha256=MyVBox2r4zRoMt3mWDTw7My24n9SAxGoCxkvgAo6Nps,16898
2
2
  rclone_api/cli.py,sha256=dibfAZIh0kXWsBbfp3onKLjyZXo54mTzDjUdzJlDlWo,231
3
3
  rclone_api/completed_process.py,sha256=_IZ8IWK7DM1_tsbDEkH6wPZ-bbcrgf7A7smls854pmg,1775
4
4
  rclone_api/config.py,sha256=f6jEAxVorGFr31oHfcsu5AJTtOJj2wR5tTSsbGGZuIw,2558
@@ -18,7 +18,7 @@ rclone_api/http_server.py,sha256=LhovQu2AI-Z7zQIWflWelCiCDLnWzisL32Rs5350kxE,885
18
18
  rclone_api/log.py,sha256=VZHM7pNSXip2ZLBKMP7M1u-rp_F7zoafFDuR8CPUoKI,1271
19
19
  rclone_api/mount.py,sha256=TE_VIBMW7J1UkF_6HRCt8oi_jGdMov4S51bm2OgxFAM,10045
20
20
  rclone_api/process.py,sha256=tGooS5NLdPuqHh7hCH8SfK44A6LGftPQCPQUNgSo0a0,5714
21
- rclone_api/rclone_impl.py,sha256=WBLkQpQq4lGPla1uJBzpp1yf4kS3ub7fxpbU6SdJyZY,49873
21
+ rclone_api/rclone_impl.py,sha256=mFFpU4ngWuZfvtwoIaOl2iTVYrWNRte-TBtbZzVhWaU,46108
22
22
  rclone_api/remote.py,sha256=mTgMTQTwxUmbLjTpr-AGTId2ycXKI9mLX5L7PPpDIoc,520
23
23
  rclone_api/rpath.py,sha256=Y1JjQWcie39EgQrq-UtbfDz5yDLCwwfu27W7AQXllSE,2860
24
24
  rclone_api/scan_missing_folders.py,sha256=-8NCwpCaHeHrX-IepCoAEsX1rl8S-GOCxcIhTr_w3gA,4747
@@ -26,8 +26,8 @@ rclone_api/types.py,sha256=2ngxwpdNy88y0teeYJ5Vz5NiLK1rfaFx8Xf99i0J-Js,12155
26
26
  rclone_api/util.py,sha256=yY72YKpmpT_ZM7AleVtPpl0YZZYQPTwTdqKn9qPwm8Y,9290
27
27
  rclone_api/assets/example.txt,sha256=lTBovRjiz0_TgtAtbA1C5hNi2ffbqnNPqkKg6UiKCT8,54
28
28
  rclone_api/cmd/analyze.py,sha256=RHbvk1G5ZUc3qLqlm1AZEyQzd_W_ZjcbCNDvW4YpTKQ,1252
29
- rclone_api/cmd/copy_large_s3.py,sha256=yhPwbtGz9MmlronB-biiYUfNVclOsxfX9GIhe3ai3g4,3463
30
- rclone_api/cmd/copy_large_s3_finish.py,sha256=DMGEKKslsmBp2pYZwMA24oGIApYheqT04nZ-iNP2vLI,2283
29
+ rclone_api/cmd/copy_large_s3.py,sha256=O1cAfrgL-M1F-j93qH2w6F9zNoJeXG_4v_pUwL0sXxY,3470
30
+ rclone_api/cmd/copy_large_s3_finish.py,sha256=SNCqkvu8YtxPKmBp37WVMP876YhxV0kJDoYuOSNPaPY,2309
31
31
  rclone_api/cmd/list_files.py,sha256=x8FHODEilwKqwdiU1jdkeJbLwOqUkUQuDWPo2u_zpf0,741
32
32
  rclone_api/cmd/save_to_db.py,sha256=ylvnhg_yzexM-m6Zr7XDiswvoDVSl56ELuFAdb9gqBY,1957
33
33
  rclone_api/db/__init__.py,sha256=OSRUdnSWUlDTOHmjdjVmxYTUNpTbtaJ5Ll9sl-PfZg0,40
@@ -51,9 +51,9 @@ rclone_api/s3/multipart/upload_parts_inline.py,sha256=V7syKjFyVIe4U9Ahl5XgqVTzt9
51
51
  rclone_api/s3/multipart/upload_parts_resumable.py,sha256=diJoUpVYow6No_dNgOZIYVsv43k4evb6zixqpzWJaUk,9771
52
52
  rclone_api/s3/multipart/upload_parts_server_side_merge.py,sha256=Fp2pdrs5dONQI9LkfNolgAGj1-Z2V1SsRd0r0sreuXI,18040
53
53
  rclone_api/s3/multipart/upload_state.py,sha256=f-Aq2NqtAaMUMhYitlICSNIxCKurWAl2gDEUVizLIqw,6019
54
- rclone_api-1.4.24.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
55
- rclone_api-1.4.24.dist-info/METADATA,sha256=2vOkBf_iR7d3Au2fujA3PZaonjMIXHTzPfHb3ut1Ml0,4628
56
- rclone_api-1.4.24.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
57
- rclone_api-1.4.24.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
58
- rclone_api-1.4.24.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
59
- rclone_api-1.4.24.dist-info/RECORD,,
54
+ rclone_api-1.4.27.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
55
+ rclone_api-1.4.27.dist-info/METADATA,sha256=EfHFkopInrpyHiwq_Uai3U5VJIL0lDiR92qPctLEUHk,18480
56
+ rclone_api-1.4.27.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
57
+ rclone_api-1.4.27.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
58
+ rclone_api-1.4.27.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
59
+ rclone_api-1.4.27.dist-info/RECORD,,
@@ -1,154 +0,0 @@
1
- Metadata-Version: 2.2
2
- Name: rclone_api
3
- Version: 1.4.24
4
- Summary: rclone api in python
5
- Home-page: https://github.com/zackees/rclone-api
6
- License: BSD 3-Clause License
7
- Keywords: template-python-cmd
8
- Classifier: Programming Language :: Python :: 3
9
- Requires-Python: >=3.10
10
- Description-Content-Type: text/markdown
11
- License-File: LICENSE
12
- Requires-Dist: pyright>=1.1.393
13
- Requires-Dist: python-dotenv>=1.0.0
14
- Requires-Dist: certifi>=2025.1.31
15
- Requires-Dist: psutil
16
- Requires-Dist: boto3<=1.35.99,>=1.20.1
17
- Requires-Dist: sqlmodel>=0.0.23
18
- Requires-Dist: psycopg2-binary>=2.9.10
19
- Requires-Dist: httpx>=0.28.1
20
- Dynamic: home-page
21
-
22
- # rclone-api
23
-
24
- [![Linting](https://github.com/zackees/rclone-api/actions/workflows/lint.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/lint.yml)
25
- [![MacOS_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_macos.yml)
26
- [![Ubuntu_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_ubuntu.yml)
27
- [![Win_Tests](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml/badge.svg)](https://github.com/zackees/rclone-api/actions/workflows/push_win.yml)
28
-
29
- Started off as just python bindings to rclone, but this project is now adding features to fill in the gaps of the rclone app. The big features are streaming file diffs and streaming file listings.
30
-
31
- You will need to have rclone installed and on your path.
32
-
33
- One of the benefits of this API is that it does not use 'shell=True', so Ctrl-C will work well in gracefully shutting down.
34
-
35
- # Install
36
-
37
- `pip install rclone-api`
38
-
39
-
40
- # Examples
41
-
42
- You can use env variables or use a `.env` file to store your secrets.
43
-
44
-
45
- # Rclone API Usage Examples
46
-
47
- This script demonstrates how to interact with DigitalOcean Spaces using `rclone_api`.
48
-
49
- ## Setup & Usage
50
-
51
- Ensure you have set the required environment variables:
52
-
53
- - `BUCKET_NAME`
54
- - `BUCKET_KEY_PUBLIC`
55
- - `BUCKET_KEY_SECRET`
56
- - `BUCKET_URL`
57
-
58
- Then, run the following Python script:
59
-
60
- ```python
61
- import os
62
- from rclone_api import Config, DirListing, File, Rclone, Remote
63
-
64
- # Load environment variables
65
- BUCKET_NAME = os.getenv("BUCKET_NAME")
66
- BUCKET_KEY_PUBLIC = os.getenv("BUCKET_KEY_PUBLIC")
67
- BUCKET_KEY_SECRET = os.getenv("BUCKET_KEY_SECRET")
68
- BUCKET_URL = "sfo3.digitaloceanspaces.com"
69
-
70
- # Generate Rclone Configuration
71
- def generate_rclone_config() -> Config:
72
- config_text = f"""
73
- [dst]
74
- type = s3
75
- provider = DigitalOcean
76
- access_key_id = {BUCKET_KEY_PUBLIC}
77
- secret_access_key = {BUCKET_KEY_SECRET}
78
- endpoint = {BUCKET_URL}
79
- """
80
- return Config(config_text)
81
-
82
- rclone = Rclone(generate_rclone_config())
83
-
84
- # List Available Remotes
85
- print("\n=== Available Remotes ===")
86
- remotes = rclone.listremotes()
87
- for remote in remotes:
88
- print(remote)
89
-
90
- # List Contents of the Root Bucket
91
- print("\n=== Listing Root Bucket ===")
92
- listing = rclone.ls(f"dst:{BUCKET_NAME}", max_depth=-1)
93
-
94
- print("\nDirectories:")
95
- for dir in listing.dirs:
96
- print(dir)
97
-
98
- print("\nFiles:")
99
- for file in listing.files:
100
- print(file)
101
-
102
- # List a Specific Subdirectory
103
- print("\n=== Listing 'zachs_video' Subdirectory ===")
104
- path = f"dst:{BUCKET_NAME}/zachs_video"
105
- listing = rclone.ls(path)
106
- print(listing)
107
-
108
- # List PNG Files in a Subdirectory
109
- print("\n=== Listing PNG Files ===")
110
- listing = rclone.ls(path, glob="*.png")
111
-
112
- if listing.files:
113
- for file in listing.files:
114
- print(file)
115
-
116
- # Copy a File
117
- print("\n=== Copying a File ===")
118
- if listing.files:
119
- file = listing.files[0]
120
- new_path = f"dst:{BUCKET_NAME}/zachs_video/{file.name}_copy"
121
- rclone.copyfile(file, new_path)
122
- print(f"Copied {file.name} to {new_path}")
123
-
124
- # Copy Multiple Files
125
- print("\n=== Copying Multiple Files ===")
126
- if listing.files:
127
- file_mapping = {file.name: file.name + "_copy" for file in listing.files[:2]}
128
- rclone.copyfiles(file_mapping)
129
- print(f"Copied files: {file_mapping}")
130
-
131
- # Delete a File
132
- print("\n=== Deleting a File ===")
133
- file_to_delete = f"dst:{BUCKET_NAME}/zachs_video/sample.png_copy"
134
- rclone.delete_files([file_to_delete])
135
- print(f"Deleted {file_to_delete}")
136
-
137
- # Walk Through a Directory
138
- print("\n=== Walking Through a Directory ===")
139
- for dirlisting in rclone.walk(f"dst:{BUCKET_NAME}", max_depth=1):
140
- print(dirlisting)
141
-
142
- print("Done.")
143
- ```
144
-
145
-
146
- To develop software, run `. ./activate`
147
-
148
- # Windows
149
-
150
- This environment requires you to use `git-bash`.
151
-
152
- # Linting
153
-
154
- Run `./lint`