rclone-api 1.4.19__py2.py3-none-any.whl → 1.4.22__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
rclone_api/__init__.py CHANGED
@@ -428,11 +428,16 @@ class Rclone:
  src: str, # src:/Bucket/path/myfile.large.zst
  dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/part.{part_number:05d}.start-end
  part_infos: list[PartInfo] | None = None,
- threads: int = 1, # Number of reader and writer threads to use
+ upload_threads: int = 8, # Number of reader and writer threads to use
+ merge_threads: int = 4, # Number of threads to use for merging the parts
  ) -> Exception | None:
  """Copy a file in parts."""
  return self.impl.copy_file_parts(
- src=src, dst_dir=dst_dir, part_infos=part_infos, threads=threads
+ src=src,
+ dst_dir=dst_dir,
+ part_infos=part_infos,
+ upload_threads=upload_threads,
+ merge_threads=merge_threads,
  )

  def mount(
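
For orientation, a minimal sketch of calling the updated API; the config path, remote names, and object paths below are hypothetical, not from the package:

```python
from rclone_api import Rclone

rclone = Rclone(rclone_conf="rclone.conf")  # hypothetical config path
err: Exception | None = rclone.copy_file_parts(
    src="src:Bucket/path/myfile.large.zst",
    dst_dir="dst:Bucket/path/myfile.large.zst-parts",
    upload_threads=8,  # parallel part uploads
    merge_threads=4,   # parallel server-side merges
)
if err is not None:
    raise err
```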
@@ -496,7 +501,7 @@ class Rclone:
  other_args: list[str] | None = None,
  check: bool | None = False,
  verbose: bool | None = None,
- ) -> SizeResult:
+ ) -> SizeResult | Exception:
  """Get the size of a list of files. Example of files items: "remote:bucket/to/file"."""
  return self.impl.size_files(
  src=src,
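
Since size_files can now return an Exception instead of a SizeResult, callers should branch on the result; a hedged sketch (paths hypothetical):

```python
from rclone_api import Rclone

rclone = Rclone(rclone_conf="rclone.conf")  # hypothetical config path
result = rclone.size_files("remote:bucket/path", ["a.bin", "b.bin"])
if isinstance(result, Exception):
    raise result  # e.g. FileNotFoundError from the new single-file path
print(result.total_size, result.file_sizes)
```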
rclone_api/cmd/copy_large_s3.py CHANGED
@@ -90,8 +90,6 @@ def main() -> int:
  err: Exception | None = rclone.copy_file_parts(
  src=args.src,
  dst_dir=args.dst,
- threads=8,
- # verbose=args.verbose,
  )
  if err is not None:
  print(f"Error: {err}")
rclone_api/cmd/copy_large_s3_finish.py CHANGED
@@ -3,7 +3,7 @@ from dataclasses import dataclass
  from pathlib import Path

  from rclone_api import Rclone
- from rclone_api.s3.s3_multipart_uploader_by_copy import (
+ from rclone_api.s3.multipart.upload_parts_server_side_merge import (
  s3_server_side_multi_part_merge,
  )

@@ -57,7 +57,7 @@ def main() -> int:
  rclone = Rclone(rclone_conf=args.config_path)
  info_path = _get_info_path(src=args.src)
  s3_server_side_multi_part_merge(
- rclone=rclone.impl, info_path=info_path, max_workers=1
+ rclone=rclone.impl, info_path=info_path, max_workers=5
  )
  return 0

rclone_api/detail/copy_file_parts_resumable.py ADDED
@@ -0,0 +1,42 @@
+ from rclone_api.rclone_impl import RcloneImpl
+ from rclone_api.types import (
+ PartInfo,
+ )
+
+
+ def copy_file_parts_resumable(
+ self: RcloneImpl,
+ src: str, # src:/Bucket/path/myfile.large.zst
+ dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/
+ part_infos: list[PartInfo] | None = None,
+ upload_threads: int = 10,
+ merge_threads: int = 5,
+ verbose: bool | None = None,
+ ) -> Exception | None:
+ # _upload_parts
+ from rclone_api.s3.multipart.upload_parts_resumable import upload_parts_resumable
+ from rclone_api.s3.multipart.upload_parts_server_side_merge import (
+ s3_server_side_multi_part_merge,
+ )
+
+ if verbose is None:
+ verbose = self.get_verbose()
+
+ err: Exception | None = upload_parts_resumable(
+ self=self,
+ src=src,
+ dst_dir=dst_dir,
+ part_infos=part_infos,
+ threads=upload_threads,
+ )
+ if isinstance(err, Exception):
+ return err
+ if dst_dir.endswith("/"):
+ dst_dir = dst_dir[:-1]
+ dst_info = f"{dst_dir}/info.json"
+ err = s3_server_side_multi_part_merge(
+ rclone=self, info_path=dst_info, max_workers=merge_threads, verbose=verbose
+ )
+ if isinstance(err, Exception):
+ return err
+ return None
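
The new helper simply chains the two phases: upload the `part.*` objects plus an `info.json` manifest, then merge them server-side. A hedged sketch of running the phases individually (assumes `impl` is an `RcloneImpl`, e.g. `Rclone(...).impl`; paths hypothetical):

```python
from rclone_api.s3.multipart.upload_parts_resumable import upload_parts_resumable
from rclone_api.s3.multipart.upload_parts_server_side_merge import (
    s3_server_side_multi_part_merge,
)

# Phase 1: upload any parts not already present (resumable across runs).
err = upload_parts_resumable(
    self=impl,
    src="src:Bucket/big/archive.zst",
    dst_dir="dst:Bucket/big/archive.zst-parts",
    threads=10,
)
# Phase 2: server-side merge driven by the manifest written in phase 1.
if err is None:
    err = s3_server_side_multi_part_merge(
        rclone=impl,
        info_path="dst:Bucket/big/archive.zst-parts/info.json",
        max_workers=5,
    )
```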
rclone_api/http_server.py CHANGED
@@ -86,7 +86,10 @@ class HttpServer:
  assert response.is_closed
  # print(f"Downloaded bytes {start}-{end} to {dst}")
  if range:
- print(f"Downloaded bytes {range.start}-{range.end} to {dst}")
+ length = range.end - range.start
+ print(
+ f"Downloaded bytes starting at {range.start} with size {length} to {dst}"
+ )
  else:
  size = dst.stat().st_size
  print(f"Downloaded {size} bytes to {dst}")
rclone_api/rclone_impl.py CHANGED
@@ -455,6 +455,9 @@ class RcloneImpl:
  out = self._run(cmd)
  return CompletedProcess.from_subprocess(out)

+ def get_verbose(self) -> bool:
+ return get_verbose(None)
+
  def copy_to(
  self,
  src: File | str,
@@ -789,17 +792,21 @@ class RcloneImpl:
  src: str, # src:/Bucket/path/myfile.large.zst
  dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/
  part_infos: list[PartInfo] | None = None,
- threads: int = 1,
+ upload_threads: int = 8,
+ merge_threads: int = 4,
  ) -> Exception | None:
  """Copy parts of a file from source to destination."""
- from rclone_api.detail.copy_file_parts import copy_file_parts
+ from rclone_api.detail.copy_file_parts_resumable import (
+ copy_file_parts_resumable,
+ )

- out = copy_file_parts(
+ out = copy_file_parts_resumable(
  self=self,
  src=src,
  dst_dir=dst_dir,
  part_infos=part_infos,
- threads=threads,
+ upload_threads=upload_threads,
+ merge_threads=merge_threads,
  )
  return out

@@ -853,15 +860,25 @@ class RcloneImpl:

  def size_file(self, src: str) -> SizeSuffix | Exception:
  """Get the size of a file or directory."""
- src_parent = os.path.dirname(src)
- src_name = os.path.basename(src)
- out: SizeResult = self.size_files(src_parent, [src_name])
- one_file = len(out.file_sizes) == 1
- if not one_file:
- return Exception(
- f"More than one result returned, is this is a directory? {out}"
- )
- return SizeSuffix(out.total_size)
+ # src_parent = os.path.dirname(src)
+ # src_name = os.path.basename(src)
+ # can't use this because it's only one file.
+ # out: SizeResult = self.size_files(src_parent, [src_name])
+ # one_file = len(out.file_sizes) == 1
+ # if not one_file:
+ # return Exception(
+ # f"More than one result returned, is this is a directory? {out}"
+ # )
+ # return SizeSuffix(out.total_size)
+ dirlist: DirListing = self.ls(
+ src, listing_option=ListingOption.FILES_ONLY, max_depth=0
+ )
+ if len(dirlist.files) == 0:
+ return FileNotFoundError(f"File not found: {src}")
+ if len(dirlist.files) > 1:
+ return Exception(f"More than one file found: {src}")
+ file: File = dirlist.files[0]
+ return SizeSuffix(file.size)

  def get_s3_credentials(
  self, remote: str, verbose: bool | None = None
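
size_file now returns errors as values rather than raising, so callers branch on the result; a hedged sketch (config and object paths hypothetical):

```python
from rclone_api import Rclone

rclone = Rclone(rclone_conf="rclone.conf")  # hypothetical config path
size = rclone.impl.size_file("remote:bucket/path/file.bin")
if isinstance(size, Exception):
    raise size  # FileNotFoundError if missing, Exception if ambiguous
print(size.as_int())  # SizeSuffix exposes the raw byte count via as_int()
```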
@@ -943,7 +960,9 @@ class RcloneImpl:
  name = src_path.name
  src_parent_path = Path(src).parent.as_posix()

- size_result: SizeResult = self.size_files(src_parent_path, [name])
+ size_result: SizeResult | Exception = self.size_files(src_parent_path, [name])
+ if isinstance(size_result, Exception):
+ raise size_result
  target_size = SizeSuffix(size_result.total_size)

  chunk_size = chunk_size or SizeSuffix("64M")
@@ -1286,10 +1305,18 @@ class RcloneImpl:
  other_args: list[str] | None = None,
  check: bool | None = False,
  verbose: bool | None = None,
- ) -> SizeResult:
+ ) -> SizeResult | Exception:
  """Get the size of a list of files. Example of files items: "remote:bucket/to/file"."""
  verbose = get_verbose(verbose)
  check = get_check(check)
+ if len(files) < 2:
+ tmp = self.size_file(files[0])
+ if isinstance(tmp, Exception):
+ return tmp
+ assert isinstance(tmp, SizeSuffix)
+ return SizeResult(
+ prefix=src, total_size=tmp.as_int(), file_sizes={files[0]: tmp.as_int()}
+ )
  if fast_list or (other_args and "--fast-list" in other_args):
  warnings.warn(
  "It's not recommended to use --fast-list with size_files as this will perform poorly on large repositories since the entire repository has to be scanned."
rclone_api/s3/api.py CHANGED
@@ -10,11 +10,11 @@ from rclone_api.s3.basic_ops import (
  upload_file,
  )
  from rclone_api.s3.create import S3Config, create_s3_client
- from rclone_api.s3.types import S3Credentials, S3MutliPartUploadConfig, S3UploadTarget
- from rclone_api.s3.upload_file_multipart import (
+ from rclone_api.s3.multipart.upload_parts_inline import (
  MultiUploadResult,
  upload_file_multipart,
  )
+ from rclone_api.s3.types import S3Credentials, S3MutliPartUploadConfig, S3UploadTarget

  _MIN_THRESHOLD_FOR_CHUNKING = 5 * 1024 * 1024

rclone_api/s3/multipart/finished_piece.py CHANGED
@@ -10,7 +10,17 @@ class FinishedPiece:
  etag: str

  def to_json(self) -> dict:
- return {"part_number": self.part_number, "etag": self.etag}
+ # return {"part_number": self.part_number, "etag": self.etag}
+ # amazon s3 style dict
+ tag = self.etag
+ if not tag.startswith('"'):
+ tag = f'"{tag}"'
+ out = {"PartNumber": self.part_number, "ETag": self.etag}
+ return out
+
+ def __post_init__(self):
+ assert isinstance(self.part_number, int)
+ assert isinstance(self.etag, str)

  @staticmethod
  def to_json_array(
@@ -30,13 +40,21 @@ class FinishedPiece:
  # assert count_eos <= 1, "Only one EndOfStream should be present"
  if count_eos > 1:
  warnings.warn(f"Only one EndOfStream should be present, found {count_eos}")
- return [p.to_json() for p in non_none]
+ out = [p.to_json() for p in non_none]
+ return out

  @staticmethod
  def from_json(json: dict | None) -> "FinishedPiece | EndOfStream":
  if json is None:
  return EndOfStream()
- return FinishedPiece(**json)
+ part_number = json.get("PartNumber")
+ etag = json.get("ETag")
+ assert isinstance(etag, str)
+ # handle the double quotes around the etag
+ etag = etag.replace('"', "")
+ assert isinstance(part_number, int)
+ assert isinstance(etag, str)
+ return FinishedPiece(part_number=part_number, etag=etag)

  @staticmethod
  def from_json_array(json: dict) -> list["FinishedPiece"]:
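
Round-tripping now goes through the S3-style keys, with any ETag quotes stripped on parse; a small sketch under those assumptions:

```python
from rclone_api.s3.multipart.finished_piece import FinishedPiece

piece = FinishedPiece(part_number=1, etag="abc123")
d = piece.to_json()  # {"PartNumber": 1, "ETag": "abc123"}
back = FinishedPiece.from_json(d)  # also accepts a quoted '"abc123"' ETag
assert back.part_number == 1 and back.etag == "abc123"
```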
rclone_api/s3/multipart/info_json.py ADDED
@@ -0,0 +1,239 @@
+ import hashlib
+ import json
+ import os
+ import warnings
+ from datetime import datetime
+
+ from rclone_api.dir_listing import DirListing
+ from rclone_api.rclone_impl import RcloneImpl
+ from rclone_api.types import (
+ PartInfo,
+ SizeSuffix,
+ )
+
+
+ def _fetch_all_names(
+ self: RcloneImpl,
+ src: str,
+ ) -> list[str]:
+ dl: DirListing = self.ls(src)
+ files = dl.files
+ filenames: list[str] = [f.name for f in files]
+ filtered: list[str] = [f for f in filenames if f.startswith("part.")]
+ return filtered
+
+
+ def _get_info_json(self: RcloneImpl, src: str | None, src_info: str) -> dict:
+ from rclone_api.file import File
+
+ data: dict
+ text: str
+ if src is None:
+ # just try to load the file
+ text_or_err = self.read_text(src_info)
+ if isinstance(text_or_err, Exception):
+ raise FileNotFoundError(f"Could not load {src_info}: {text_or_err}")
+ assert isinstance(text_or_err, str)
+ text = text_or_err
+ data = json.loads(text)
+ return data
+
+ src_stat: File | Exception = self.stat(src)
+ if isinstance(src_stat, Exception):
+ # just try to load the file
+ raise FileNotFoundError(f"Failed to stat {src}: {src_stat}")
+
+ now: datetime = datetime.now()
+ new_data = {
+ "new": True,
+ "created": now.isoformat(),
+ "src": src,
+ "src_modtime": src_stat.mod_time(),
+ "size": src_stat.size,
+ "chunksize": None,
+ "chunksize_int": None,
+ "first_part": None,
+ "last_part": None,
+ "hash": None,
+ }
+
+ text_or_err = self.read_text(src_info)
+ err: Exception | None = text_or_err if isinstance(text_or_err, Exception) else None
+ if isinstance(text_or_err, Exception):
+ warnings.warn(f"Failed to read {src_info}: {text_or_err}")
+ return new_data
+ assert isinstance(text_or_err, str)
+ text = text_or_err
+
+ if err is not None:
+ return new_data
+
+ try:
+ data = json.loads(text)
+ return data
+ except Exception as e:
+ warnings.warn(f"Failed to parse JSON: {e} at {src_info}")
+ return new_data
+
+
+ def _save_info_json(self: RcloneImpl, src: str, data: dict) -> None:
+ data = data.copy()
+ data["new"] = False
+ # hash
+
+ h = hashlib.md5()
+ tmp = [
+ data.get("src"),
+ data.get("src_modtime"),
+ data.get("size"),
+ data.get("chunksize_int"),
+ ]
+ data_vals: list[str] = [str(v) for v in tmp]
+ str_data = "".join(data_vals)
+ h.update(str_data.encode("utf-8"))
+ data["hash"] = h.hexdigest()
+ json_str = json.dumps(data, indent=0)
+ self.write_text(dst=src, text=json_str)
+
+
+ class InfoJson:
+ def __init__(self, rclone: RcloneImpl, src: str | None, src_info: str) -> None:
+ self.rclone = rclone
+ self.src = src
+ self.src_info = src_info
+ self.data: dict = {}
+
+ def load(self) -> bool:
+ """Returns true if the file exist and is now loaded."""
+ self.data = _get_info_json(self.rclone, self.src, self.src_info)
+ return not self.data.get("new", False)
+
+ def save(self) -> None:
+ _save_info_json(self.rclone, self.src_info, self.data)
+
+ def print(self) -> None:
+ self.rclone.print(self.src_info)
+
+ def fetch_all_finished(self) -> list[str]:
+ parent_path = os.path.dirname(self.src_info)
+ out = _fetch_all_names(self.rclone, parent_path)
+ return out
+
+ def fetch_all_finished_part_numbers(self) -> list[int]:
+ names = self.fetch_all_finished()
+ part_numbers = [int(name.split("_")[0].split(".")[1]) for name in names]
+ return part_numbers
+
+ @property
+ def parts_dir(self) -> str:
+ parts_dir = os.path.dirname(self.src_info)
+ if parts_dir.endswith("/"):
+ parts_dir = parts_dir[:-1]
+ return parts_dir
+
+ @property
+ def dst(self) -> str:
+ parts_dir = self.parts_dir
+ assert parts_dir.endswith("-parts")
+ out = parts_dir[:-6]
+ return out
+
+ @property
+ def dst_name(self) -> str:
+ return os.path.basename(self.dst)
+
+ def compute_all_parts(self) -> list[PartInfo] | Exception:
+ # full_part_infos: list[PartInfo] | Exception = PartInfo.split_parts(
+ # src_size, SizeSuffix("96MB")
+ try:
+
+ src_size = self.size
+ chunk_size = self.chunksize
+ assert isinstance(src_size, SizeSuffix)
+ assert isinstance(chunk_size, SizeSuffix)
+ first_part = self.data["first_part"]
+ last_part = self.data["last_part"]
+ full_part_infos: list[PartInfo] = PartInfo.split_parts(src_size, chunk_size)
+ return full_part_infos[first_part : last_part + 1]
+ except Exception as e:
+ return e
+
+ def compute_all_part_numbers(self) -> list[int] | Exception:
+ all_parts: list[PartInfo] | Exception = self.compute_all_parts()
+ if isinstance(all_parts, Exception):
+ raise all_parts
+
+ all_part_nums: list[int] = [p.part_number for p in all_parts]
+ return all_part_nums
+
+ def fetch_remaining_part_numbers(self) -> list[int] | Exception:
+ all_part_nums: list[int] | Exception = self.compute_all_part_numbers()
+ if isinstance(all_part_nums, Exception):
+ return all_part_nums
+ finished_part_nums: list[int] = self.fetch_all_finished_part_numbers()
+ remaining_part_nums: list[int] = list(
+ set(all_part_nums) - set(finished_part_nums)
+ )
+ return sorted(remaining_part_nums)
+
+ def fetch_is_done(self) -> bool:
+ remaining_part_nums: list[int] | Exception = self.fetch_remaining_part_numbers()
+ if isinstance(remaining_part_nums, Exception):
+ return False
+ return len(remaining_part_nums) == 0
+
+ @property
+ def new(self) -> bool:
+ return self.data.get("new", False)
+
+ @property
+ def chunksize(self) -> SizeSuffix | None:
+ chunksize_int: int | None = self.data.get("chunksize_int")
+ if chunksize_int is None:
+ return None
+ return SizeSuffix(chunksize_int)
+
+ @chunksize.setter
+ def chunksize(self, value: SizeSuffix) -> None:
+ self.data["chunksize"] = str(value)
+ self.data["chunksize_int"] = value.as_int()
+
+ @property
+ def src_modtime(self) -> datetime:
+ return datetime.fromisoformat(self.data["src_modtime"])
+
+ @src_modtime.setter
+ def src_modtime(self, value: datetime) -> None:
+ self.data["src_modtime"] = value.isoformat()
+
+ @property
+ def size(self) -> SizeSuffix:
+ return SizeSuffix(self.data["size"])
+
+ def _get_first_part(self) -> int | None:
+ return self.data.get("first_part")
+
+ def _set_first_part(self, value: int) -> None:
+ self.data["first_part"] = value
+
+ def _get_last_part(self) -> int | None:
+ return self.data.get("last_part")
+
+ def _set_last_part(self, value: int) -> None:
+ self.data["last_part"] = value
+
+ first_part: int | None = property(_get_first_part, _set_first_part) # type: ignore
+ last_part: int | None = property(_get_last_part, _set_last_part) # type: ignore
+
+ @property
+ def hash(self) -> str | None:
+ return self.data.get("hash")
+
+ def to_json_str(self) -> str:
+ return json.dumps(self.data)
+
+ def __repr__(self):
+ return f"InfoJson({self.src}, {self.src_info}, {self.data})"
+
+ def __str__(self):
+ return self.to_json_str()
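
A hedged sketch of inspecting resume state through the new class (assumes `impl` is an `RcloneImpl`; the manifest path is hypothetical). `load()` returns False when no manifest exists yet:

```python
from rclone_api.s3.multipart.info_json import InfoJson

info = InfoJson(impl, src=None, src_info="dst:Bucket/file.zst-parts/info.json")
if info.load():
    print("chunksize:", info.chunksize)
    print("remaining parts:", info.fetch_remaining_part_numbers())
    print("done:", info.fetch_is_done())
```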
rclone_api/detail/copy_file_parts.py → rclone_api/s3/multipart/upload_parts_resumable.py RENAMED
@@ -1,26 +1,22 @@
  import _thread
- import hashlib
- import json
+ import atexit
  import os
+ import shutil
  import threading
  import warnings
  from concurrent.futures import Future, ThreadPoolExecutor
  from dataclasses import dataclass
- from datetime import datetime
  from pathlib import Path

- from rclone_api import rclone_verbose
- from rclone_api.dir_listing import DirListing
  from rclone_api.http_server import HttpServer
  from rclone_api.rclone_impl import RcloneImpl
+ from rclone_api.s3.multipart.info_json import InfoJson
  from rclone_api.types import (
  PartInfo,
  Range,
  SizeSuffix,
  )

- rclone_verbose(True)
-

  @dataclass
  class UploadPart:
@@ -95,233 +91,6 @@ def read_task(
  return UploadPart(chunk=outchunk, dst_part=part_dst, exception=e)


- def _fetch_all_names(
- self: RcloneImpl,
- src: str,
- ) -> list[str]:
- dl: DirListing = self.ls(src)
- files = dl.files
- filenames: list[str] = [f.name for f in files]
- filtered: list[str] = [f for f in filenames if f.startswith("part.")]
- return filtered
-
-
- def _get_info_json(self: RcloneImpl, src: str | None, src_info: str) -> dict:
- from rclone_api.file import File
-
- data: dict
- text: str
- if src is None:
- # just try to load the file
- text_or_err = self.read_text(src_info)
- if isinstance(text_or_err, Exception):
- raise FileNotFoundError(f"Could not load {src_info}: {text_or_err}")
- assert isinstance(text_or_err, str)
- text = text_or_err
- data = json.loads(text)
- return data
-
- src_stat: File | Exception = self.stat(src)
- if isinstance(src_stat, Exception):
- # just try to load the file
- raise FileNotFoundError(f"Failed to stat {src}: {src_stat}")
-
- now: datetime = datetime.now()
- new_data = {
- "new": True,
- "created": now.isoformat(),
- "src": src,
- "src_modtime": src_stat.mod_time(),
- "size": src_stat.size,
- "chunksize": None,
- "chunksize_int": None,
- "first_part": None,
- "last_part": None,
- "hash": None,
- }
-
- text_or_err = self.read_text(src_info)
- err: Exception | None = text_or_err if isinstance(text_or_err, Exception) else None
- if isinstance(text_or_err, Exception):
- warnings.warn(f"Failed to read {src_info}: {text_or_err}")
- return new_data
- assert isinstance(text_or_err, str)
- text = text_or_err
-
- if err is not None:
- return new_data
-
- try:
- data = json.loads(text)
- return data
- except Exception as e:
- warnings.warn(f"Failed to parse JSON: {e} at {src_info}")
- return new_data
-
-
- def _save_info_json(self: RcloneImpl, src: str, data: dict) -> None:
- data = data.copy()
- data["new"] = False
- # hash
-
- h = hashlib.md5()
- tmp = [
- data.get("src"),
- data.get("src_modtime"),
- data.get("size"),
- data.get("chunksize_int"),
- ]
- data_vals: list[str] = [str(v) for v in tmp]
- str_data = "".join(data_vals)
- h.update(str_data.encode("utf-8"))
- data["hash"] = h.hexdigest()
- json_str = json.dumps(data, indent=0)
- self.write_text(dst=src, text=json_str)
-
-
- class InfoJson:
- def __init__(self, rclone: RcloneImpl, src: str | None, src_info: str) -> None:
- self.rclone = rclone
- self.src = src
- self.src_info = src_info
- self.data: dict = {}
-
- def load(self) -> bool:
- """Returns true if the file exist and is now loaded."""
- self.data = _get_info_json(self.rclone, self.src, self.src_info)
- return not self.data.get("new", False)
-
- def save(self) -> None:
- _save_info_json(self.rclone, self.src_info, self.data)
-
- def print(self) -> None:
- self.rclone.print(self.src_info)
-
- def fetch_all_finished(self) -> list[str]:
- parent_path = os.path.dirname(self.src_info)
- out = _fetch_all_names(self.rclone, parent_path)
- return out
-
- def fetch_all_finished_part_numbers(self) -> list[int]:
- names = self.fetch_all_finished()
- part_numbers = [int(name.split("_")[0].split(".")[1]) for name in names]
- return part_numbers
-
- @property
- def parts_dir(self) -> str:
- parts_dir = os.path.dirname(self.src_info)
- if parts_dir.endswith("/"):
- parts_dir = parts_dir[:-1]
- return parts_dir
-
- @property
- def dst(self) -> str:
- parts_dir = self.parts_dir
- assert parts_dir.endswith("-parts")
- out = parts_dir[:-6]
- return out
-
- @property
- def dst_name(self) -> str:
- return os.path.basename(self.dst)
-
- def compute_all_parts(self) -> list[PartInfo] | Exception:
- # full_part_infos: list[PartInfo] | Exception = PartInfo.split_parts(
- # src_size, SizeSuffix("96MB")
- try:
-
- src_size = self.size
- chunk_size = self.chunksize
- assert isinstance(src_size, SizeSuffix)
- assert isinstance(chunk_size, SizeSuffix)
- first_part = self.data["first_part"]
- last_part = self.data["last_part"]
- full_part_infos: list[PartInfo] = PartInfo.split_parts(src_size, chunk_size)
- return full_part_infos[first_part : last_part + 1]
- except Exception as e:
- return e
-
- def compute_all_part_numbers(self) -> list[int] | Exception:
- all_parts: list[PartInfo] | Exception = self.compute_all_parts()
- if isinstance(all_parts, Exception):
- raise all_parts
-
- all_part_nums: list[int] = [p.part_number for p in all_parts]
- return all_part_nums
-
- def fetch_remaining_part_numbers(self) -> list[int] | Exception:
- all_part_nums: list[int] | Exception = self.compute_all_part_numbers()
- if isinstance(all_part_nums, Exception):
- return all_part_nums
- finished_part_nums: list[int] = self.fetch_all_finished_part_numbers()
- remaining_part_nums: list[int] = list(
- set(all_part_nums) - set(finished_part_nums)
- )
- return sorted(remaining_part_nums)
-
- def fetch_is_done(self) -> bool:
- remaining_part_nums: list[int] | Exception = self.fetch_remaining_part_numbers()
- if isinstance(remaining_part_nums, Exception):
- return False
- return len(remaining_part_nums) == 0
-
- @property
- def new(self) -> bool:
- return self.data.get("new", False)
-
- @property
- def chunksize(self) -> SizeSuffix | None:
- chunksize_int: int | None = self.data.get("chunksize_int")
- if chunksize_int is None:
- return None
- return SizeSuffix(chunksize_int)
-
- @chunksize.setter
- def chunksize(self, value: SizeSuffix) -> None:
- self.data["chunksize"] = str(value)
- self.data["chunksize_int"] = value.as_int()
-
- @property
- def src_modtime(self) -> datetime:
- return datetime.fromisoformat(self.data["src_modtime"])
-
- @src_modtime.setter
- def src_modtime(self, value: datetime) -> None:
- self.data["src_modtime"] = value.isoformat()
-
- @property
- def size(self) -> SizeSuffix:
- return SizeSuffix(self.data["size"])
-
- def _get_first_part(self) -> int | None:
- return self.data.get("first_part")
-
- def _set_first_part(self, value: int) -> None:
- self.data["first_part"] = value
-
- def _get_last_part(self) -> int | None:
- return self.data.get("last_part")
-
- def _set_last_part(self, value: int) -> None:
- self.data["last_part"] = value
-
- first_part: int | None = property(_get_first_part, _set_first_part) # type: ignore
- last_part: int | None = property(_get_last_part, _set_last_part) # type: ignore
-
- @property
- def hash(self) -> str | None:
- return self.data.get("hash")
-
- def to_json_str(self) -> str:
- return json.dumps(self.data)
-
- def __repr__(self):
- return f"InfoJson({self.src}, {self.src_info}, {self.data})"
-
- def __str__(self):
- return self.to_json_str()
-
-
  def collapse_runs(numbers: list[int]) -> list[str]:
  if not numbers:
  return []
@@ -352,19 +121,40 @@ def collapse_runs(numbers: list[int]) -> list[str]:
  return runs


- def copy_file_parts(
+ _MIN_PART_UPLOAD_SIZE = SizeSuffix("5MB")
+
+
+ def _check_part_size(parts: list[PartInfo]) -> Exception | None:
+ if len(parts) == 0:
+ return Exception("No parts to upload")
+ part = parts[0]
+ chunk = part.range.end - part.range.start
+ if chunk < _MIN_PART_UPLOAD_SIZE:
+ return Exception(
+ f"Part size {chunk} is too small to upload. Minimum size for server side merge is {_MIN_PART_UPLOAD_SIZE}"
+ )
+ return None
+
+
+ def upload_parts_resumable(
  self: RcloneImpl,
  src: str, # src:/Bucket/path/myfile.large.zst
  dst_dir: str, # dst:/Bucket/path/myfile.large.zst-parts/
  part_infos: list[PartInfo] | None = None,
  threads: int = 1,
+ verbose: bool | None = None,
  ) -> Exception | None:
  """Copy parts of a file from source to destination."""
  from rclone_api.util import random_str

+ def verbose_print(*args, **kwargs):
+ if verbose:
+ print(*args, **kwargs)
+
  if dst_dir.endswith("/"):
  dst_dir = dst_dir[:-1]
  src_size = self.size_file(src)
+
  if isinstance(src_size, Exception):
  return src_size

@@ -386,12 +176,16 @@ def copy_file_parts(
  return src_size
  part_infos = full_part_infos.copy()

+ err = _check_part_size(part_infos)
+ if err:
+ return err
+
  all_part_numbers: list[int] = [p.part_number for p in part_infos]
  src_info_json = f"{dst_dir}/info.json"
  info_json = InfoJson(self, src, src_info_json)

  if not info_json.load():
- print(f"New: {src_info_json}")
+ verbose_print(f"New: {src_info_json}")
  # info_json.save()

  all_numbers_already_done: set[int] = set(
@@ -401,7 +195,7 @@ def copy_file_parts(
  first_part_number = part_infos[0].part_number
  last_part_number = part_infos[-1].part_number

- print(
+ verbose_print(
  f"all_numbers_already_done: {collapse_runs(sorted(list(all_numbers_already_done)))}"
  )

@@ -410,12 +204,15 @@ def copy_file_parts(
  if part_info.part_number not in all_numbers_already_done:
  filtered_part_infos.append(part_info)
  part_infos = filtered_part_infos
-
  remaining_part_numbers: list[int] = [p.part_number for p in part_infos]
- print(f"remaining_part_numbers: {collapse_runs(remaining_part_numbers)}")
+ verbose_print(f"remaining_part_numbers: {collapse_runs(remaining_part_numbers)}")
+ num_remaining_to_upload = len(part_infos)
+ verbose_print(
+ f"num_remaining_to_upload: {num_remaining_to_upload} / {len(full_part_infos)}"
+ )

- if len(part_infos) == 0:
- return Exception(f"No parts to copy for {src}")
+ if num_remaining_to_upload == 0:
+ return None
  chunk_size = SizeSuffix(part_infos[0].range.end - part_infos[0].range.start)

  info_json.chunksize = chunk_size
@@ -432,8 +229,6 @@ def copy_file_parts(

  finished_tasks: list[UploadPart] = []
  tmp_dir = str(Path("chunks") / random_str(12))
- import atexit
- import shutil

  atexit.register(lambda: shutil.rmtree(tmp_dir, ignore_errors=True))

rclone_api/s3/s3_multipart_uploader_by_copy.py → rclone_api/s3/multipart/upload_parts_server_side_merge.py RENAMED
@@ -13,17 +13,17 @@ import warnings
  from concurrent.futures import Future, ThreadPoolExecutor
  from queue import Queue
  from threading import Semaphore, Thread
- from typing import Callable
+ from typing import Any, Callable

- from rclone_api.detail.copy_file_parts import InfoJson
  from rclone_api.rclone_impl import RcloneImpl
  from rclone_api.s3.create import (
  BaseClient,
  S3Config,
  create_s3_client,
  )
- from rclone_api.s3.merge_state import MergeState, Part
  from rclone_api.s3.multipart.finished_piece import FinishedPiece
+ from rclone_api.s3.multipart.info_json import InfoJson
+ from rclone_api.s3.multipart.merge_state import MergeState, Part
  from rclone_api.types import EndOfStream
  from rclone_api.util import locked_print

@@ -110,7 +110,7 @@ def _upload_part_copy_task(

  def _complete_multipart_upload_from_parts(
  s3_client: BaseClient, state: MergeState, finished_parts: list[FinishedPiece]
- ) -> str:
+ ) -> Exception | None:
  """
  Complete a multipart upload using the provided parts.

@@ -124,17 +124,28 @@ def _complete_multipart_upload_from_parts(
  # Sort parts by part number to ensure correct order
  finished_parts.sort(key=lambda x: x.part_number)
  multipart_parts = FinishedPiece.to_json_array(finished_parts)
+ multipart_upload: dict = {
+ "Parts": multipart_parts,
+ }
+ response: Any = None
+ try:
+ # Complete the multipart upload
+ response = s3_client.complete_multipart_upload(
+ Bucket=state.bucket,
+ Key=state.dst_key,
+ UploadId=state.upload_id,
+ MultipartUpload=multipart_upload,
+ )
+ except Exception as e:
+ import traceback

- # Complete the multipart upload
- response = s3_client.complete_multipart_upload(
- Bucket=state.bucket,
- Key=state.dst_key,
- UploadId=state.upload_id,
- MultipartUpload={"Parts": multipart_parts},
- )
+ stacktrace = traceback.format_exc()
+ warnings.warn(
+ f"Error completing multipart upload: {e}\n\n{response}\n\n{stacktrace}"
+ )
+ return e

- # Return the URL of the completed object
- return response.get("Location", f"s3://{state.bucket}/{state.dst_key}")
+ return None


  def _do_upload_task(
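
For reference, the wrapped call sends the standard boto3 CompleteMultipartUpload payload; the values below are illustrative only:

```python
# Standard boto3 complete_multipart_upload call shape (illustrative values).
multipart_upload = {
    "Parts": [
        {"PartNumber": 1, "ETag": "etag-1"},
        {"PartNumber": 2, "ETag": "etag-2"},
    ]
}
s3_client.complete_multipart_upload(
    Bucket="my-bucket",
    Key="path/to/key",
    UploadId="example-upload-id",
    MultipartUpload=multipart_upload,
)
```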
@@ -178,17 +189,22 @@ def _do_upload_task(
  while not semaphore.acquire(blocking=False):
  time.sleep(0.1)

- # Upload parts by copying from source objects
- finished_parts: list[FinishedPiece] = []
+ final_fut = executor.submit(lambda: on_finished(EndOfStream()))

  for fut in futures:
  finished_part = fut.result()
  if isinstance(finished_part, Exception):
  executor.shutdown(wait=True, cancel_futures=True)
  return finished_part
- finished_parts.append(finished_part)
+ final_fut.result()

- on_finished(EndOfStream())
+ finished_parts = merge_state.finished
+ try:
+ assert len(finished_parts) == len(merge_state.all_parts)
+ except Exception:
+ return ValueError(
+ f"Finished parts mismatch: {len(finished_parts)} != {len(parts)}"
+ )

  try:
  # Complete the multipart upload
@@ -243,9 +259,10 @@ def _begin_upload(


  class WriteMergeStateThread(Thread):
- def __init__(self, rclone_impl: RcloneImpl, merge_state: MergeState):
+ def __init__(self, rclone_impl: RcloneImpl, merge_state: MergeState, verbose: bool):
  super().__init__(daemon=True)
  assert isinstance(merge_state, MergeState)
+ self.verbose = verbose
  self.merge_state = merge_state
  self.merge_path = merge_state.merge_path
  self.rclone_impl = rclone_impl
@@ -265,11 +282,15 @@ class WriteMergeStateThread(Thread):
  return item
  return item

+ def verbose_print(self, msg: str) -> None:
+ if self.verbose:
+ locked_print(msg)
+
  def run(self):
  while True:
  item = self._get_next()
  if isinstance(item, EndOfStream):
- warnings.warn("End of stream")
+ self.verbose_print("WriteMergeStateThread: End of stream")
  break

  assert isinstance(item, FinishedPiece)
@@ -315,13 +336,14 @@ def _get_merge_path(info_path: str) -> str:
  def _begin_or_resume_merge(
  rclone: RcloneImpl,
  info: InfoJson,
+ verbose: bool = False,
  max_workers: int = DEFAULT_MAX_WORKERS,
  ) -> "S3MultiPartMerger | Exception":
  try:
  merger: S3MultiPartMerger = S3MultiPartMerger(
  rclone_impl=rclone,
  info=info,
- verbose=True,
+ verbose=verbose,
  max_workers=max_workers,
  )

@@ -412,9 +434,11 @@ class S3MultiPartMerger:

  @staticmethod
  def create(
- rclone: RcloneImpl, info: InfoJson, max_workers: int
+ rclone: RcloneImpl, info: InfoJson, max_workers: int, verbose: bool
  ) -> "S3MultiPartMerger | Exception":
- return _begin_or_resume_merge(rclone=rclone, info=info, max_workers=max_workers)
+ return _begin_or_resume_merge(
+ rclone=rclone, info=info, max_workers=max_workers, verbose=verbose
+ )

  @property
  def bucket(self) -> str:
@@ -426,6 +450,7 @@ class S3MultiPartMerger:
  self.write_thread = WriteMergeStateThread(
  rclone_impl=self.rclone_impl,
  merge_state=self.state,
+ verbose=self.verbose,
  )

  def _begin_new_merge(
@@ -494,7 +519,10 @@ class S3MultiPartMerger:


  def s3_server_side_multi_part_merge(
- rclone: RcloneImpl, info_path: str, max_workers: int = DEFAULT_MAX_WORKERS
+ rclone: RcloneImpl,
+ info_path: str,
+ max_workers: int = DEFAULT_MAX_WORKERS,
+ verbose: bool = False,
  ) -> Exception | None:
  info = InfoJson(rclone, src=None, src_info=info_path)
  loaded = info.load()
@@ -503,7 +531,7 @@ def s3_server_side_multi_part_merge(
  f"Info file not found, has the upload finished? {info_path}"
  )
  merger: S3MultiPartMerger | Exception = S3MultiPartMerger.create(
- rclone=rclone, info=info, max_workers=max_workers
+ rclone=rclone, info=info, max_workers=max_workers, verbose=verbose
  )
  if isinstance(merger, Exception):
  return merger
@@ -514,5 +542,5 @@ def s3_server_side_multi_part_merge(

  err = merger.cleanup()
  if isinstance(err, Exception):
- err
+ return err
  return None
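
With verbose now threaded through, the finish step can be invoked as in the updated copy_large_s3_finish command; a sketch (config and manifest paths hypothetical):

```python
from rclone_api import Rclone
from rclone_api.s3.multipart.upload_parts_server_side_merge import (
    s3_server_side_multi_part_merge,
)

rclone = Rclone(rclone_conf="rclone.conf")
err = s3_server_side_multi_part_merge(
    rclone=rclone.impl,
    info_path="dst:Bucket/file.zst-parts/info.json",
    max_workers=5,
    verbose=True,
)
if err is not None:
    raise err
```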
rclone_api/types.py CHANGED
@@ -215,7 +215,7 @@ class SizeSuffix:
  return self._size == SizeSuffix(other)._size

  def __ne__(self, other: object) -> bool:
- return not self.__ne__(other)
+ return not self.__eq__(other)

  def __lt__(self, other: "int | SizeSuffix") -> bool:
  # if not isinstance(other, SizeSuffix):
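
The old body called __ne__ on itself, recursing until RecursionError on any inequality check; delegating to __eq__ fixes it. A quick check, using the string construction seen elsewhere in the package:

```python
from rclone_api.types import SizeSuffix

# Before the fix, either line would recurse forever; now both return bools.
assert SizeSuffix("5MB") != SizeSuffix("4MB")
assert not (SizeSuffix("5MB") != SizeSuffix("5MB"))
```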
rclone_api-1.4.19.dist-info/METADATA → rclone_api-1.4.22.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: rclone_api
- Version: 1.4.19
+ Version: 1.4.22
  Summary: rclone api in python
  Home-page: https://github.com/zackees/rclone-api
  License: BSD 3-Clause License
rclone_api-1.4.19.dist-info/RECORD → rclone_api-1.4.22.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
- rclone_api/__init__.py,sha256=gsYL3jaqMgHbJ6mnLxNttuaKltUBXNuvpz7C6fF8_8w,17678
+ rclone_api/__init__.py,sha256=wvLRTEYvwLfkBx_LuWI62cZQA-pEz0xo0oVh_9DCOuM,17876
  rclone_api/cli.py,sha256=dibfAZIh0kXWsBbfp3onKLjyZXo54mTzDjUdzJlDlWo,231
  rclone_api/completed_process.py,sha256=_IZ8IWK7DM1_tsbDEkH6wPZ-bbcrgf7A7smls854pmg,1775
  rclone_api/config.py,sha256=f6jEAxVorGFr31oHfcsu5AJTtOJj2wR5tTSsbGGZuIw,2558
@@ -14,44 +14,46 @@ rclone_api/file_part.py,sha256=i6ByS5_sae8Eba-4imBVTxd-xKC8ExWy7NR8QGr0ors,6155
  rclone_api/file_stream.py,sha256=_W3qnwCuigqA0hzXl2q5pAxSZDRaUSwet4BkT0lpnzs,1431
  rclone_api/filelist.py,sha256=xbiusvNgaB_b_kQOZoHMJJxn6TWGtPrWd2J042BI28o,767
  rclone_api/group_files.py,sha256=H92xPW9lQnbNw5KbtZCl00bD6iRh9yRbCuxku4j_3dg,8036
- rclone_api/http_server.py,sha256=3fPBV6l50erTe32DyeJBNmsDrn5KuujsbmEAbx13T-c,8720
+ rclone_api/http_server.py,sha256=LhovQu2AI-Z7zQIWflWelCiCDLnWzisL32Rs5350kxE,8850
  rclone_api/log.py,sha256=VZHM7pNSXip2ZLBKMP7M1u-rp_F7zoafFDuR8CPUoKI,1271
  rclone_api/mount.py,sha256=TE_VIBMW7J1UkF_6HRCt8oi_jGdMov4S51bm2OgxFAM,10045
  rclone_api/process.py,sha256=tGooS5NLdPuqHh7hCH8SfK44A6LGftPQCPQUNgSo0a0,5714
- rclone_api/rclone_impl.py,sha256=xTTriz6-zn_aSrkY8B7wzT-zRXax7Og7ns6xu6-7O6g,48769
+ rclone_api/rclone_impl.py,sha256=WBLkQpQq4lGPla1uJBzpp1yf4kS3ub7fxpbU6SdJyZY,49873
  rclone_api/remote.py,sha256=mTgMTQTwxUmbLjTpr-AGTId2ycXKI9mLX5L7PPpDIoc,520
  rclone_api/rpath.py,sha256=Y1JjQWcie39EgQrq-UtbfDz5yDLCwwfu27W7AQXllSE,2860
  rclone_api/scan_missing_folders.py,sha256=-8NCwpCaHeHrX-IepCoAEsX1rl8S-GOCxcIhTr_w3gA,4747
- rclone_api/types.py,sha256=HkpEZgZWhr5Gb04iHq5NxMRXxieWoN-PKmOfJFrg5Qg,12155
+ rclone_api/types.py,sha256=2ngxwpdNy88y0teeYJ5Vz5NiLK1rfaFx8Xf99i0J-Js,12155
  rclone_api/util.py,sha256=yY72YKpmpT_ZM7AleVtPpl0YZZYQPTwTdqKn9qPwm8Y,9290
  rclone_api/assets/example.txt,sha256=lTBovRjiz0_TgtAtbA1C5hNi2ffbqnNPqkKg6UiKCT8,54
  rclone_api/cmd/analyze.py,sha256=RHbvk1G5ZUc3qLqlm1AZEyQzd_W_ZjcbCNDvW4YpTKQ,1252
- rclone_api/cmd/copy_large_s3.py,sha256=B17GliDQyAauNglJCpsey0d3eArT2DAcT9g684TMQk8,3514
- rclone_api/cmd/copy_large_s3_finish.py,sha256=Q_8uvC2J3V_jGQj8ualMlejWEPpkFn267bUKLC3Sp2M,2050
+ rclone_api/cmd/copy_large_s3.py,sha256=yhPwbtGz9MmlronB-biiYUfNVclOsxfX9GIhe3ai3g4,3463
+ rclone_api/cmd/copy_large_s3_finish.py,sha256=k9-LC5kr20JMdHtELqFBrxClNLOEUsfys4Sx6v_n23c,2061
  rclone_api/cmd/list_files.py,sha256=x8FHODEilwKqwdiU1jdkeJbLwOqUkUQuDWPo2u_zpf0,741
  rclone_api/cmd/save_to_db.py,sha256=ylvnhg_yzexM-m6Zr7XDiswvoDVSl56ELuFAdb9gqBY,1957
  rclone_api/db/__init__.py,sha256=OSRUdnSWUlDTOHmjdjVmxYTUNpTbtaJ5Ll9sl-PfZg0,40
  rclone_api/db/db.py,sha256=YRnYrCaXHwytQt07uEZ_mMpvPHo9-0IWcOb95fVOOfs,10086
  rclone_api/db/models.py,sha256=v7qaXUehvsDvU51uk69JI23fSIs9JFGcOa-Tv1c_wVs,1600
- rclone_api/detail/copy_file_parts.py,sha256=1h-5JJmZdB0_TuVcuYMIClHqAgCXUI4eLyZHbdRiCHg,16280
+ rclone_api/detail/copy_file_parts_resumable.py,sha256=RoUWV2eBWEvuuTfsvrz5BhtvX3BmX-DVmQKdARhRF80,1248
  rclone_api/detail/walk.py,sha256=-54NVE8EJcCstwDoaC_UtHm73R2HrZwVwQmsnv55xNU,3369
  rclone_api/experimental/flags.py,sha256=qCVD--fSTmzlk9hloRLr0q9elzAOFzPsvVpKM3aB1Mk,2739
  rclone_api/experimental/flags_base.py,sha256=ajU_czkTcAxXYU-SlmiCfHY7aCQGHvpCLqJ-Z8uZLk0,2102
- rclone_api/s3/api.py,sha256=owoQ1H-R0hXcUozxC6sl53D7NmMOewHk2pUxK-ye8ms,4061
+ rclone_api/s3/api.py,sha256=6E4xEOxtpP6niiAFEpgB1-ckWJclNyRsJ3D11Qm4RwU,4069
  rclone_api/s3/basic_ops.py,sha256=hK3366xhVEzEcjz9Gk_8lFx6MRceAk72cax6mUrr6ko,2104
  rclone_api/s3/chunk_task.py,sha256=waEYe-iYQ1_BR3NCS4BrzVrK9UANvH1EcbXx2I6Z_NM,6839
  rclone_api/s3/create.py,sha256=_Q-faQ4Zl8XKTB28gireRxVXWP-YNxoAK4bligxDtiI,3998
- rclone_api/s3/merge_state.py,sha256=ziTB9CYV-OWaky5C1fOT9hifSY2zgUrk5HmX1Xeu2UA,4978
- rclone_api/s3/s3_multipart_uploader_by_copy.py,sha256=iuxnRaRDHvIMCu-9YDZzirRMc1R4gZgjigkM5u_MFJg,17188
  rclone_api/s3/types.py,sha256=cYI5MbXRNdT-ps5kGIRQaYrseHyx_ozT4AcwBABTKwk,1616
- rclone_api/s3/upload_file_multipart.py,sha256=V7syKjFyVIe4U9Ahl5XgqVTzt9akiew3MFjGmufLo2w,12503
  rclone_api/s3/multipart/file_info.py,sha256=8v_07_eADo0K-Nsv7F0Ac1wcv3lkIsrR3MaRCmkYLTQ,105
- rclone_api/s3/multipart/finished_piece.py,sha256=LtlX_mm6_hsADR8FxgfC2_pcO5Wou_20-jE34IcRXew,1633
+ rclone_api/s3/multipart/finished_piece.py,sha256=XiFf-TXczCnMwg9OEy8xVY9uR-ytX6QvuFwywiewdQQ,2290
+ rclone_api/s3/multipart/info_json.py,sha256=-e8UCwrqjAP64U8PmH-o2ciJ6TN48DwHktJfpFV_3wU,7450
+ rclone_api/s3/multipart/merge_state.py,sha256=ziTB9CYV-OWaky5C1fOT9hifSY2zgUrk5HmX1Xeu2UA,4978
  rclone_api/s3/multipart/upload_info.py,sha256=d6_OfzFR_vtDzCEegFfzCfWi2kUBUV4aXZzqAEVp1c4,1874
+ rclone_api/s3/multipart/upload_parts_inline.py,sha256=V7syKjFyVIe4U9Ahl5XgqVTzt9akiew3MFjGmufLo2w,12503
+ rclone_api/s3/multipart/upload_parts_resumable.py,sha256=diJoUpVYow6No_dNgOZIYVsv43k4evb6zixqpzWJaUk,9771
+ rclone_api/s3/multipart/upload_parts_server_side_merge.py,sha256=Fp2pdrs5dONQI9LkfNolgAGj1-Z2V1SsRd0r0sreuXI,18040
  rclone_api/s3/multipart/upload_state.py,sha256=f-Aq2NqtAaMUMhYitlICSNIxCKurWAl2gDEUVizLIqw,6019
- rclone_api-1.4.19.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
- rclone_api-1.4.19.dist-info/METADATA,sha256=RpqVhvMVSL_isjprjoRJJWMStau5WxyiiyO1OkHBydo,4628
- rclone_api-1.4.19.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
- rclone_api-1.4.19.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
- rclone_api-1.4.19.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
- rclone_api-1.4.19.dist-info/RECORD,,
+ rclone_api-1.4.22.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
+ rclone_api-1.4.22.dist-info/METADATA,sha256=eqenqWL8LT3t-zpC0Qy-OgRQSYyC1Shg8dC8xwDsnuo,4628
+ rclone_api-1.4.22.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
+ rclone_api-1.4.22.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
+ rclone_api-1.4.22.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
+ rclone_api-1.4.22.dist-info/RECORD,,