rclone-api 1.4.7__py2.py3-none-any.whl → 1.4.9__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
rclone_api/s3/s3_multipart_uploader_by_copy.py ADDED
@@ -0,0 +1,419 @@
+ """
+ https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3/client/upload_part_copy.html
+ * client.upload_part_copy
+
+ This module provides functionality for S3 multipart uploads, including copying parts
+ from existing S3 objects using upload_part_copy.
+ """
+
+ from concurrent.futures import Future, ThreadPoolExecutor
+ from dataclasses import dataclass
+ from pathlib import Path
+ from threading import Semaphore
+ from typing import Optional
+
+ from botocore.client import BaseClient
+
+ from rclone_api.s3.multipart.finished_piece import FinishedPiece
+ from rclone_api.util import locked_print
+
+
+ @dataclass
+ class MultipartUploadInfo:
+     """Simplified upload information for multipart uploads."""
+
+     s3_client: BaseClient
+     bucket_name: str
+     object_name: str
+     upload_id: str
+     chunk_size: int
+     retries: int
+     file_size: Optional[int] = None
+     src_file_path: Optional[Path] = None
+
+
+ # response = client.upload_part_copy(
+ #     Bucket='string',
+ #     CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
+ #     CopySourceIfMatch='string',
+ #     CopySourceIfModifiedSince=datetime(2015, 1, 1),
+ #     CopySourceIfNoneMatch='string',
+ #     CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
+ #     CopySourceRange='string',
+ #     Key='string',
+ #     PartNumber=123,
+ #     UploadId='string',
+ #     SSECustomerAlgorithm='string',
+ #     SSECustomerKey='string',
+ #     CopySourceSSECustomerAlgorithm='string',
+ #     CopySourceSSECustomerKey='string',
+ #     RequestPayer='requester',
+ #     ExpectedBucketOwner='string',
+ #     ExpectedSourceBucketOwner='string'
+ # )
+
+ # import _thread
+ # import os
+ # import traceback
+ # import warnings
+ # from concurrent.futures import Future, ThreadPoolExecutor
+ # from pathlib import Path
+ # from queue import Queue
+ # from threading import Event, Thread
+ # from typing import Any, Callable
+
+ # from botocore.client import BaseClient
+
+ # from rclone_api.mount_read_chunker import FilePart
+ # from rclone_api.s3.chunk_task import S3FileInfo, file_chunker
+ # from rclone_api.s3.chunk_types import (
+ #     FinishedPiece,
+ #     UploadInfo,
+ #     UploadState,
+ # )
+ # from rclone_api.s3.types import MultiUploadResult
+ # from rclone_api.types import EndOfStream
+ # from rclone_api.util import locked_print
+
+
+ # This is how you upload large parts through a multipart upload: the parts are
+ # uploaded (or copied) first, then a final call to complete_multipart_upload
+ # assembles the already-uploaded parts into the finished object.
+ # response = (
+ #     client.upload_part_copy(
+ #         Bucket='string',
+ #         CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
+ #         CopySourceIfMatch='string',
+ #         CopySourceIfModifiedSince=datetime(2015, 1, 1),
+ #         CopySourceIfNoneMatch='string',
+ #         CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
+ #         CopySourceRange='string',
+ #         Key='string',
+ #         PartNumber=123,
+ #         UploadId='string',
+ #         SSECustomerAlgorithm='string',
+ #         SSECustomerKey='string',
+ #         CopySourceSSECustomerAlgorithm='string',
+ #         CopySourceSSECustomerKey='string',
+ #         RequestPayer='requester',
+ #         ExpectedBucketOwner='string',
+ #         ExpectedSourceBucketOwner='string'
+ #     )
+ # )
+
+
+ # def upload_task(
+ #     info: UploadInfo,
+ #     chunk: FilePart,
+ #     part_number: int,
+ #     retries: int,
+ # ) -> FinishedPiece:
+ #     file_or_err: Path | Exception = chunk.get_file()
+ #     if isinstance(file_or_err, Exception):
+ #         raise file_or_err
+ #     file: Path = file_or_err
+ #     size = os.path.getsize(file)
+ #     retries = retries + 1  # Add one for the initial attempt
+ #     for retry in range(retries):
+ #         try:
+ #             if retry > 0:
+ #                 locked_print(f"Retrying part {part_number} for {info.src_file_path}")
+ #             locked_print(
+ #                 f"Uploading part {part_number} for {info.src_file_path} of size {size}"
+ #             )
+
+ #             with open(file, "rb") as f:
+ #                 part = info.s3_client.upload_part(
+ #                     Bucket=info.bucket_name,
+ #                     Key=info.object_name,
+ #                     PartNumber=part_number,
+ #                     UploadId=info.upload_id,
+ #                     Body=f,
+ #                 )
+ #             out: FinishedPiece = FinishedPiece(
+ #                 etag=part["ETag"], part_number=part_number
+ #             )
+ #             chunk.dispose()
+ #             return out
+ #         except Exception as e:
+ #             if retry == retries - 1:
+ #                 locked_print(f"Error uploading part {part_number}: {e}")
+ #                 chunk.dispose()
+ #                 raise e
+ #             else:
+ #                 locked_print(f"Error uploading part {part_number}: {e}, retrying")
+ #                 continue
+ #     raise Exception("Should not reach here")
+
+
+ # def prepare_upload_file_multipart(
+ #     s3_client: BaseClient,
+ #     bucket_name: str,
+ #     file_path: Path,
+ #     file_size: int | None,
+ #     object_name: str,
+ #     chunk_size: int,
+ #     retries: int,
+ # ) -> UploadInfo:
+ #     """Upload a file to the bucket using multipart upload with customizable chunk size."""
+
+ #     # Initiate multipart upload
+ #     locked_print(
+ #         f"Creating multipart upload for {file_path} to {bucket_name}/{object_name}"
+ #     )
+ #     mpu = s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_name)
+ #     upload_id = mpu["UploadId"]
+
+ #     file_size = file_size if file_size is not None else os.path.getsize(file_path)
+
+ #     upload_info: UploadInfo = UploadInfo(
+ #         s3_client=s3_client,
+ #         bucket_name=bucket_name,
+ #         object_name=object_name,
+ #         src_file_path=file_path,
+ #         upload_id=upload_id,
+ #         retries=retries,
+ #         chunk_size=chunk_size,
+ #         file_size=file_size,
+ #     )
+ #     return upload_info
+
+
+ def upload_part_copy_task(
+     info: MultipartUploadInfo,
+     source_bucket: str,
+     source_key: str,
+     part_number: int,
+     retries: int = 3,
+ ) -> FinishedPiece | Exception:
+     """
+     Upload a part by copying from an existing S3 object.
+
+     Args:
+         info: Upload information
+         source_bucket: Source bucket name
+         source_key: Source object key
+         part_number: Part number (1-10000)
+         retries: Number of retry attempts
+
+     Returns:
+         FinishedPiece with ETag and part number on success, or the final
+         Exception if the copy could not be completed
+     """
+     copy_source = {"Bucket": source_bucket, "Key": source_key}
+
+     # from botocore.exceptions import NoSuchKey
+
+     retries = retries + 1  # Add one for the initial attempt
+     for retry in range(retries):
+         params: dict = {}
+         try:
+             if retry > 0:
+                 locked_print(f"Retrying part copy {part_number} for {info.object_name}")
+
+             locked_print(
+                 f"Copying part {part_number} for {info.object_name} from {source_bucket}/{source_key}"
+             )
+
+             # Prepare the upload_part_copy parameters
+             params = {
+                 "Bucket": info.bucket_name,
+                 "CopySource": copy_source,
+                 "Key": info.object_name,
+                 "PartNumber": part_number,
+                 "UploadId": info.upload_id,
+             }
+
+             # Execute the copy operation
+             part = info.s3_client.upload_part_copy(**params)
+
+             # Extract ETag from the response
+             etag = part["CopyPartResult"]["ETag"]
+
+             return FinishedPiece(etag=etag, part_number=part_number)
+         # except NoSuchKey as e:
+         #     locked_print(f"Error copying part {part_number}: {e}")
+         #     return e
+
+         except Exception as e:
+             msg = f"Error copying {copy_source} -> {info.object_name}: {e}, params={params}"
+             if "NoSuchKey" in str(e):
+                 locked_print(msg)
+                 return e
+             if retry == retries - 1:
+                 locked_print(msg)
+                 return e
+             else:
+                 locked_print(f"{msg}, retrying")
+                 continue
+
+     return Exception("Should not reach here")
+
+
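The commented reference at the top of this file lists a `CopySourceRange` parameter that `upload_part_copy_task` never sets, so every part is a whole source object. A ranged variant is a small extension; the sketch below is illustrative only (the helper name and range endpoints are our assumptions, not part of the package), and follows the `bytes=start-end` form that S3 expects, where both offsets are inclusive:

    # Hypothetical sketch: copy only a byte range of a source object as a part.
    # Reuses MultipartUploadInfo as defined above; not part of rclone_api itself.
    def build_ranged_copy_params(
        info: MultipartUploadInfo,
        source_bucket: str,
        source_key: str,
        part_number: int,
        start: int,
        end_inclusive: int,
    ) -> dict:
        return {
            "Bucket": info.bucket_name,
            "CopySource": {"Bucket": source_bucket, "Key": source_key},
            "Key": info.object_name,
            "PartNumber": part_number,
            "UploadId": info.upload_id,
            # S3 ranges are inclusive on both ends; the first 5 MiB of the
            # source would be "bytes=0-5242879".
            "CopySourceRange": f"bytes={start}-{end_inclusive}",
        }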
+ def complete_multipart_upload_from_parts(
+     info: MultipartUploadInfo, parts: list[FinishedPiece]
+ ) -> str:
+     """
+     Complete a multipart upload using the provided parts.
+
+     Args:
+         info: Upload information
+         parts: List of finished pieces with ETags
+
+     Returns:
+         The URL of the completed object
+     """
+     # Sort parts by part number to ensure correct order
+     parts.sort(key=lambda x: x.part_number)
+
+     # Prepare the parts list for the complete_multipart_upload call
+     multipart_parts = [
+         {"ETag": part.etag, "PartNumber": part.part_number} for part in parts
+     ]
+
+     # Complete the multipart upload
+     response = info.s3_client.complete_multipart_upload(
+         Bucket=info.bucket_name,
+         Key=info.object_name,
+         UploadId=info.upload_id,
+         MultipartUpload={"Parts": multipart_parts},
+     )
+
+     # Return the URL of the completed object
+     return response.get("Location", f"s3://{info.bucket_name}/{info.object_name}")
+
+
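Because `complete_multipart_upload` rejects malformed part lists outright, a pre-flight check can fail faster with a clearer message. A minimal sketch, assuming standard S3 limits (part numbers 1-10000, the same bound behind `_MAX_PART_NUMBER` in rclone_api/types.py below; every part except the last must also be at least 5 MiB). The helper name is ours, not the package's:

    # Illustrative pre-flight validation before completing a multipart upload.
    def validate_part_numbers(part_numbers: list[int], max_parts: int = 10000) -> None:
        # S3 requires unique part numbers within 1..10000.
        if len(set(part_numbers)) != len(part_numbers):
            raise ValueError("duplicate part numbers")
        if any(n < 1 or n > max_parts for n in part_numbers):
            raise ValueError(f"part numbers must be within 1..{max_parts}")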
+ def finish_multipart_upload_from_keys(
+     s3_client: BaseClient,
+     source_bucket: str,
+     parts: list[tuple[int, str]],
+     final_size: int,
+     destination_bucket: str,
+     destination_key: str,
+     chunk_size: int,
+     max_workers: int = 100,
+     retries: int = 3,
+ ) -> str:
+     """
+     Finish a multipart upload by copying parts from existing S3 objects.
+
+     Args:
+         s3_client: Boto3 S3 client
+         source_bucket: Source bucket name
+         parts: List of (part_number, source_key) tuples to copy from
+         final_size: Size of the assembled object in bytes
+         destination_bucket: Destination bucket name
+         destination_key: Destination object key
+         chunk_size: Size of each part in bytes
+         max_workers: Maximum number of concurrent part copies
+         retries: Number of retry attempts per part
+
+     Returns:
+         The URL of the completed object
+     """
+
+     # Initiate multipart upload
+     locked_print(
+         f"Creating multipart upload for {destination_bucket}/{destination_key} from {len(parts)} source objects"
+     )
+
+     create_params: dict[str, str] = {
+         "Bucket": destination_bucket,
+         "Key": destination_key,
+     }
+     print(f"Creating multipart upload with {create_params}")
+     mpu = s3_client.create_multipart_upload(**create_params)
+     print(f"Created multipart upload: {mpu}")
+     upload_id = mpu["UploadId"]
+
+     # Create upload info
+     upload_info = MultipartUploadInfo(
+         s3_client=s3_client,
+         bucket_name=destination_bucket,
+         object_name=destination_key,
+         upload_id=upload_id,
+         retries=retries,
+         chunk_size=chunk_size,
+         file_size=final_size,
+     )
+
+     futures: list[Future[FinishedPiece | Exception]] = []
+
+     with ThreadPoolExecutor(max_workers=max_workers) as executor:
+         # Throttle submissions with a semaphore so that at most max_workers
+         # copy tasks are outstanding at any time.
+         semaphore = Semaphore(max_workers)
+         for part_number, source_key in parts:
+
+             # Bind the loop variables as default arguments so each task
+             # captures its own part, not the last iteration's values.
+             def task(
+                 info=upload_info,
+                 source_bucket=source_bucket,
+                 source_key=source_key,
+                 part_number=part_number,
+                 retries=retries,
+             ):
+                 return upload_part_copy_task(
+                     info=info,
+                     source_bucket=source_bucket,
+                     source_key=source_key,
+                     part_number=part_number,
+                     retries=retries,
+                 )
+
+             fut = executor.submit(task)
+             fut.add_done_callback(lambda x: semaphore.release())
+             futures.append(fut)
+             semaphore.acquire()
+
+         # Collect the copied parts, aborting on the first failure
+         finished_parts: list[FinishedPiece] = []
+
+         for fut in futures:
+             finished_part = fut.result()
+             if isinstance(finished_part, Exception):
+                 executor.shutdown(wait=True, cancel_futures=True)
+                 raise finished_part
+             finished_parts.append(finished_part)
+
+     # Complete the multipart upload
+     return complete_multipart_upload_from_parts(upload_info, finished_parts)
+
+
+ class S3MultiPartUploader:
+     def __init__(self, s3_client: BaseClient, verbose: bool) -> None:
+         self.s3_client = s3_client
+         self.verbose = verbose
+
+     def finish_from_keys(
+         self,
+         source_bucket: str,
+         parts: list[tuple[int, str]],
+         destination_bucket: str,
+         destination_key: str,
+         chunk_size: int,
+         final_size: int,
+         retries: int = 3,
+     ) -> str:
+         """
+         Finish a multipart upload by copying parts from existing S3 objects.
+
+         Args:
+             source_bucket: Source bucket name
+             parts: List of (part_number, source_key) tuples to copy from
+             destination_bucket: Destination bucket name
+             destination_key: Destination object key
+             chunk_size: Size of each part in bytes
+             final_size: Size of the assembled object in bytes
+             retries: Number of retry attempts per part
+
+         Returns:
+             The URL of the completed object
+         """
+         return finish_multipart_upload_from_keys(
+             s3_client=self.s3_client,
+             source_bucket=source_bucket,
+             parts=parts,
+             destination_bucket=destination_bucket,
+             destination_key=destination_key,
+             chunk_size=chunk_size,
+             final_size=final_size,
+             retries=retries,
+         )
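End to end, the new module assembles already-uploaded part objects into one final object without re-transferring any data. A minimal driver sketch; the bucket, keys, and sizes below are placeholders, and only the rclone_api import path (from the RECORD at the bottom of this diff) and the boto3 call are taken from the package itself:

    import boto3

    from rclone_api.s3.s3_multipart_uploader_by_copy import S3MultiPartUploader

    client = boto3.client("s3")  # credentials resolved from the environment
    uploader = S3MultiPartUploader(s3_client=client, verbose=True)

    # Hypothetical 5 MiB part objects previously uploaded as my-bucket/parts/part.N
    part_size = 5 * 1024 * 1024
    parts = [(n, f"parts/part.{n}") for n in range(1, 4)]

    url = uploader.finish_from_keys(
        source_bucket="my-bucket",
        parts=parts,
        destination_bucket="my-bucket",
        destination_key="assembled/final.bin",
        chunk_size=part_size,
        final_size=len(parts) * part_size,
        retries=3,
    )
    print(url)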
rclone_api/s3/types.py CHANGED
@@ -26,6 +26,7 @@ class S3Provider(Enum):
  class S3Credentials:
      """Credentials for accessing S3."""
 
+     bucket_name: str
      provider: S3Provider
      access_key_id: str
      secret_access_key: str
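The new `bucket_name` field makes the credential object self-describing about its target bucket. A construction sketch using only the fields visible in this hunk; the class may define more, and the `S3Provider` member name here is a guess:

    from rclone_api.s3.types import S3Credentials, S3Provider

    # All values are placeholders; S3Provider.S3 is a hypothetical enum member.
    creds = S3Credentials(
        bucket_name="my-bucket",
        provider=S3Provider.S3,
        access_key_id="AKIA...",
        secret_access_key="<secret>",
    )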
rclone_api/types.py CHANGED
@@ -309,6 +309,13 @@ class Range:
          val = f"bytes={self.start.as_int()}-{last.as_int()}"
          return {"Range": val}
 
+     def __repr__(self) -> str:
+         length = self.end - self.start
+         return f"Range(start={self.start}, length={length})"
+
+     def __str__(self) -> str:
+         return self.__repr__()
+
 
  _MAX_PART_NUMBER = 10000
 
@@ -387,3 +394,9 @@ class PartInfo:
          end = SizeSuffix(self.range.end._size).as_int()
          dst_name = f"part.{partnumber}_{offset}-{end}"
          return dst_name
+
+     def __repr__(self) -> str:
+         return f"PartInfo(part_number={self.part_number}, range={self.range})"
+
+     def __str__(self) -> str:
+         return self.__repr__()
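Both new `__repr__` methods report a part's length rather than its end offset, which is the quantity one usually checks when debugging uniformly sized parts. A rough illustration of the intent, with plain ints standing in for the package's SizeSuffix values:

    # Simplified stand-in: rclone_api's Range holds SizeSuffix values, not ints.
    class Range:
        def __init__(self, start: int, end: int) -> None:
            self.start = start
            self.end = end

        def __repr__(self) -> str:
            length = self.end - self.start
            return f"Range(start={self.start}, length={length})"

    print(Range(0, 5 * 1024 * 1024))  # Range(start=0, length=5242880)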
rclone_api/util.py CHANGED
@@ -1,10 +1,11 @@
+ import atexit
  import os
  import random
  import shutil
+ import signal
  import subprocess
  import warnings
  from pathlib import Path
- from tempfile import TemporaryDirectory
  from threading import Lock
  from typing import Any
 
@@ -18,6 +19,53 @@ from rclone_api.types import S3PathInfo
 
  _PRINT_LOCK = Lock()
 
+ _TMP_CONFIG_DIR = Path(".") / ".rclone" / "tmp_config"
+ _RCLONE_CONFIGS_LIST: list[Path] = []
+ _DO_CLEANUP = os.getenv("RCLONE_API_CLEANUP", "1") == "1"
+
+
+ def _clean_configs(signum=None, frame=None) -> None:
+     if not _DO_CLEANUP:
+         return
+     for config in _RCLONE_CONFIGS_LIST:
+         try:
+             config.unlink()
+         except Exception as e:
+             print(f"Error deleting config file: {config}, {e}")
+     _RCLONE_CONFIGS_LIST.clear()
+     if signum is not None:
+         signal.signal(signum, signal.SIG_DFL)
+         os.kill(os.getpid(), signum)
+
+
+ def _init_cleanup() -> None:
+     atexit.register(_clean_configs)
+
+     for sig in (signal.SIGINT, signal.SIGTERM):
+         signal.signal(sig, _clean_configs)
+
+
+ _init_cleanup()
+
+
+ def make_temp_config_file() -> Path:
+     from rclone_api.util import random_str
+
+     tmpdir = _TMP_CONFIG_DIR / random_str(32)
+     tmpdir.mkdir(parents=True, exist_ok=True)
+     tmpfile = tmpdir / "rclone.conf"
+     _RCLONE_CONFIGS_LIST.append(tmpfile)
+     return tmpfile
+
+
+ def clear_temp_config_file(path: Path | None) -> None:
+     if (path is None) or (not path.exists()) or (not _DO_CLEANUP):
+         return
+     try:
+         path.unlink()
+     except Exception as e:
+         print(f"Error deleting config file: {path}, {e}")
+
 
  def locked_print(*args, **kwargs):
      with _PRINT_LOCK:
@@ -116,7 +164,7 @@ def rclone_execute(
      capture: bool | Path | None = None,
      verbose: bool | None = None,
  ) -> subprocess.CompletedProcess:
-     tempdir: TemporaryDirectory | None = None
+     tmpfile: Path | None = None
      verbose = get_verbose(verbose)
 
      # Handle the Path case for capture
@@ -131,8 +179,7 @@
 
      try:
          if isinstance(rclone_conf, Config):
-             tempdir = TemporaryDirectory()
-             tmpfile = Path(tempdir.name) / "rclone.conf"
+             tmpfile = make_temp_config_file()
              tmpfile.write_text(rclone_conf.text, encoding="utf-8")
              rclone_conf = tmpfile
          cmd = (
@@ -168,11 +215,7 @@
          )
          return cp
      finally:
-         if tempdir:
-             try:
-                 tempdir.cleanup()
-             except Exception as e:
-                 print(f"Error cleaning up tempdir: {e}")
+         clear_temp_config_file(tmpfile)
 
 
  def split_s3_path(path: str) -> S3PathInfo:
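The cleanup handler above uses the standard re-raise idiom: after deleting the temp configs it restores the default disposition and re-sends the signal, so the process still exits with the conventional status (128+N) instead of silently swallowing Ctrl-C. The idiom in isolation, independent of rclone_api:

    import os
    import signal

    def handler(signum, frame):
        # ... perform cleanup here ...
        # Restore the default disposition, then re-raise the same signal so the
        # process terminates as if the signal had never been caught.
        signal.signal(signum, signal.SIG_DFL)
        os.kill(os.getpid(), signum)

    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)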
rclone_api-1.4.7.dist-info/METADATA → rclone_api-1.4.9.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: rclone_api
- Version: 1.4.7
+ Version: 1.4.9
  Summary: rclone api in python
  Home-page: https://github.com/zackees/rclone-api
  License: BSD 3-Clause License
rclone_api-1.4.7.dist-info/RECORD → rclone_api-1.4.9.dist-info/RECORD CHANGED
@@ -17,39 +17,40 @@ rclone_api/group_files.py,sha256=H92xPW9lQnbNw5KbtZCl00bD6iRh9yRbCuxku4j_3dg,803
  rclone_api/http_server.py,sha256=3fPBV6l50erTe32DyeJBNmsDrn5KuujsbmEAbx13T-c,8720
  rclone_api/log.py,sha256=VZHM7pNSXip2ZLBKMP7M1u-rp_F7zoafFDuR8CPUoKI,1271
  rclone_api/mount.py,sha256=TE_VIBMW7J1UkF_6HRCt8oi_jGdMov4S51bm2OgxFAM,10045
- rclone_api/process.py,sha256=BGXJTZVT__jeaDyjN8_kRycliOhkBErMPdHO1hKRvJE,5271
- rclone_api/rclone_impl.py,sha256=HTvWbD0cWhqrIfUPzqCDp3Av1uCEBOoVO0BQrBYmjQM,48205
+ rclone_api/process.py,sha256=I7B4arAIbFcTBsek27cZ0t-l5YRWVHJJPji7G6ZLGjQ,4989
+ rclone_api/rclone_impl.py,sha256=xTTriz6-zn_aSrkY8B7wzT-zRXax7Og7ns6xu6-7O6g,48769
  rclone_api/remote.py,sha256=mTgMTQTwxUmbLjTpr-AGTId2ycXKI9mLX5L7PPpDIoc,520
  rclone_api/rpath.py,sha256=Y1JjQWcie39EgQrq-UtbfDz5yDLCwwfu27W7AQXllSE,2860
  rclone_api/scan_missing_folders.py,sha256=-8NCwpCaHeHrX-IepCoAEsX1rl8S-GOCxcIhTr_w3gA,4747
- rclone_api/types.py,sha256=gpEYVHNkxB7X3p_B4nEc1ls1kF_ykjNuUyZXCsvU-Cs,11788
- rclone_api/util.py,sha256=j252WPB-UMiz6zQcLvsZe9XvMq-Z2FIeZrjb-y01eL4,5947
+ rclone_api/types.py,sha256=HkpEZgZWhr5Gb04iHq5NxMRXxieWoN-PKmOfJFrg5Qg,12155
+ rclone_api/util.py,sha256=9w_m6W62l_X42Jw5q8p_p30h-QoxAqufvnCLI4PTMOE,7056
  rclone_api/assets/example.txt,sha256=lTBovRjiz0_TgtAtbA1C5hNi2ffbqnNPqkKg6UiKCT8,54
  rclone_api/cmd/analyze.py,sha256=RHbvk1G5ZUc3qLqlm1AZEyQzd_W_ZjcbCNDvW4YpTKQ,1252
- rclone_api/cmd/copy_large_s3.py,sha256=w1aCaq9EQ84aRoMj9mHtdr0Svjflkx6KTKcoldzRSo8,3788
+ rclone_api/cmd/copy_large_s3.py,sha256=B17GliDQyAauNglJCpsey0d3eArT2DAcT9g684TMQk8,3514
+ rclone_api/cmd/copy_large_s3_finish.py,sha256=PwtAL1qifnyADf-GM0H4htNa_1GhYvgIci7dGFUmjRg,6975
  rclone_api/cmd/list_files.py,sha256=x8FHODEilwKqwdiU1jdkeJbLwOqUkUQuDWPo2u_zpf0,741
  rclone_api/cmd/save_to_db.py,sha256=ylvnhg_yzexM-m6Zr7XDiswvoDVSl56ELuFAdb9gqBY,1957
  rclone_api/db/__init__.py,sha256=OSRUdnSWUlDTOHmjdjVmxYTUNpTbtaJ5Ll9sl-PfZg0,40
  rclone_api/db/db.py,sha256=YRnYrCaXHwytQt07uEZ_mMpvPHo9-0IWcOb95fVOOfs,10086
  rclone_api/db/models.py,sha256=v7qaXUehvsDvU51uk69JI23fSIs9JFGcOa-Tv1c_wVs,1600
- rclone_api/detail/copy_file_parts.py,sha256=CmkMqGTYe2F7MQ4scHy-ZJoaKKILVIaqAUmidHV3adE,13412
+ rclone_api/detail/copy_file_parts.py,sha256=dpqZ0d7l195dZg6Vob2Ty43Uah1v0ozQu5kMtblGqYc,16175
  rclone_api/detail/walk.py,sha256=-54NVE8EJcCstwDoaC_UtHm73R2HrZwVwQmsnv55xNU,3369
  rclone_api/experimental/flags.py,sha256=qCVD--fSTmzlk9hloRLr0q9elzAOFzPsvVpKM3aB1Mk,2739
  rclone_api/experimental/flags_base.py,sha256=ajU_czkTcAxXYU-SlmiCfHY7aCQGHvpCLqJ-Z8uZLk0,2102
  rclone_api/s3/api.py,sha256=PafsIEyWDpLWAXsZAjFm9CY14vJpsDr9lOsn0kGRLZ0,4009
  rclone_api/s3/basic_ops.py,sha256=hK3366xhVEzEcjz9Gk_8lFx6MRceAk72cax6mUrr6ko,2104
  rclone_api/s3/chunk_task.py,sha256=waEYe-iYQ1_BR3NCS4BrzVrK9UANvH1EcbXx2I6Z_NM,6839
- rclone_api/s3/create.py,sha256=wgfkapv_j904CfKuWyiBIWJVxfAx_ftemFSUV14aT68,3149
- rclone_api/s3/s3_multipart_uploader.py,sha256=KY9k585Us0VW8uqgS5jdSVrYynxlnO8Wzb52pcvR06M,4570
- rclone_api/s3/types.py,sha256=VqnvH0qhvb3_4wngqk0vSSStH-TgVQxNNxo9slz9-p8,1595
+ rclone_api/s3/create.py,sha256=quQmdKuz9RvpaFA7Ja8POwQBcF5x727cTW8v0k6CZiA,3151
+ rclone_api/s3/s3_multipart_uploader_by_copy.py,sha256=q5UTGsoodxEKHnDLu3aE9asxhjogJCh06Y-kr7hBOCI,13889
+ rclone_api/s3/types.py,sha256=cYI5MbXRNdT-ps5kGIRQaYrseHyx_ozT4AcwBABTKwk,1616
  rclone_api/s3/upload_file_multipart.py,sha256=V7syKjFyVIe4U9Ahl5XgqVTzt9akiew3MFjGmufLo2w,12503
  rclone_api/s3/multipart/file_info.py,sha256=8v_07_eADo0K-Nsv7F0Ac1wcv3lkIsrR3MaRCmkYLTQ,105
- rclone_api/s3/multipart/finished_piece.py,sha256=TcwA58-qgKBiskfHrePoCWaSSep6Za9psZEpzrLUUhE,1199
+ rclone_api/s3/multipart/finished_piece.py,sha256=9nMWnVZ8S99wi2VFQsm1h1ZHqmebkhMGgd2s56wNj9w,1331
  rclone_api/s3/multipart/upload_info.py,sha256=d6_OfzFR_vtDzCEegFfzCfWi2kUBUV4aXZzqAEVp1c4,1874
  rclone_api/s3/multipart/upload_state.py,sha256=f-Aq2NqtAaMUMhYitlICSNIxCKurWAl2gDEUVizLIqw,6019
- rclone_api-1.4.7.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
- rclone_api-1.4.7.dist-info/METADATA,sha256=kFj6G3KlaQXxERlakrTl-RW0KUJHwSWckEKwJ-1iVb8,4627
- rclone_api-1.4.7.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
- rclone_api-1.4.7.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
- rclone_api-1.4.7.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
- rclone_api-1.4.7.dist-info/RECORD,,
+ rclone_api-1.4.9.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
+ rclone_api-1.4.9.dist-info/METADATA,sha256=eBHYAbmsYK9JvGLBV4xzc4mgZcbE8_xTHhdAafeSfBk,4627
+ rclone_api-1.4.9.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
+ rclone_api-1.4.9.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
+ rclone_api-1.4.9.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
+ rclone_api-1.4.9.dist-info/RECORD,,