rclone-api 1.4.11__py2.py3-none-any.whl → 1.4.13__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
rclone_api/__init__.py CHANGED
@@ -507,6 +507,10 @@ class Rclone:
             verbose=verbose,
         )
 
+    def size_file(self, src: str) -> SizeSuffix | Exception:
+        """Get the size of a file."""
+        return self.impl.size_file(src=src)
+
 
 __all__ = [
     "Rclone",
rclone_api/cmd/copy_large_s3_finish.py CHANGED
@@ -1,4 +1,5 @@
 import argparse
+import os
 from dataclasses import dataclass
 from pathlib import Path
 
@@ -9,31 +10,6 @@ from rclone_api.s3.s3_multipart_uploader_by_copy import (
 )
 from rclone_api.types import SizeSuffix
 
-DATA_SOURCE = (
-    "dst:TorrentBooks/aa_misc_data/aa_misc_data/world_lending_library_2024_11.tar.zst"
-)
-
-
-# response = client.upload_part_copy(
-#     Bucket='string',
-#     CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
-#     CopySourceIfMatch='string',
-#     CopySourceIfModifiedSince=datetime(2015, 1, 1),
-#     CopySourceIfNoneMatch='string',
-#     CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
-#     CopySourceRange='string',
-#     Key='string',
-#     PartNumber=123,
-#     UploadId='string',
-#     SSECustomerAlgorithm='string',
-#     SSECustomerKey='string',
-#     CopySourceSSECustomerAlgorithm='string',
-#     CopySourceSSECustomerKey='string',
-#     RequestPayer='requester',
-#     ExpectedBucketOwner='string',
-#     ExpectedSourceBucketOwner='string'
-# )
-
 
 @dataclass
 class Args:
@@ -58,13 +34,6 @@ def _parse_args() -> Args:
     parser.add_argument(
         "--config", help="Path to rclone config file", type=Path, required=False
     )
-    parser.add_argument(
-        "--chunk-size",
-        help="Chunk size that will be read and uploaded in SizeSuffix form, too low or too high will cause issues",
-        type=str,
-        default="128MB",  # if this is too low or too high an s3 service
-    )
-
     args = parser.parse_args()
     config: Path | None = args.config
     if config is None:
@@ -81,33 +50,18 @@ def _parse_args() -> Args:
     return out
 
 
-# from dataclasses import dataclass
-
-# def parse_info_json(text: str) -> UploadInfo:
-#     import json
-#     data = json.loads(text)
-#     chunk_size = data["chunksize_int"]
-#     first_part = data["first_part"]
-#     last_part = data["last_part"]
-#     assert isinstance(chunk_size, int)
-#     assert isinstance(first_part, int)
-#     assert isinstance(last_part, int)
-#     assert first_part <= last_part
-#     parts: list[str] = []
-#     fmt = "part.{:05d}_{}-{}"
-#     for i in range(first_part, last_part + 1):
-#         offset: int = i * chunk_size
-#         end: int = (i + 1) * chunk_size
-#         part = fmt.format(i, offset, end)
-#         parts.append(part)
-#     return UploadInfo(chunk_size=chunk_size, parts=parts)
-
-
 def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
-    from rclone_api.s3.create import BaseClient, S3Credentials, create_s3_client
+    from rclone_api.s3.create import (
+        BaseClient,
+        S3Config,
+        S3Credentials,
+        create_s3_client,
+    )
 
     s3_creds: S3Credentials = rclone.impl.get_s3_credentials(remote=dst)
-    s3_client: BaseClient = create_s3_client(s3_creds)
+    s3_client: BaseClient = create_s3_client(
+        s3_creds, S3Config(verbose=False, timeout_read=5 * 60)
+    )
     s3_bucket = s3_creds.bucket_name
     is_done = info.fetch_is_done()
     assert is_done, f"Upload is not done: {info}"
@@ -138,7 +92,6 @@ def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
         out = f"{parts_path}"
         return out
 
-    # s3_keys: list[str] = [_to_s3_key(name=p) for p in source_keys]
     parts: list[tuple[int, str]] = []
     part_num = 1
     for part_key in source_keys:
@@ -146,14 +99,9 @@ def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
         parts.append((part_num, s3_key))
         part_num += 1
 
-    # for key in parts:
-    #     print(key)
-
     chunksize = info.chunksize
     assert chunksize is not None
 
-    import os
-
     dst_name = info.dst_name
     dst_dir = os.path.dirname(parts_path)
     # dst_key =
@@ -167,10 +115,24 @@ def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
         destination_key=dst_key,
         chunk_size=chunksize.as_int(),
         final_size=size.as_int(),
-        max_workers=100,
+        max_workers=50,
         retries=3,
     )
 
+    # now check if the dst now exists, if so, delete the parts folder.
+    # if rclone.exists(dst):
+    #     rclone.purge(parts_dir)
+
+    if not rclone.exists(dst):
+        raise FileNotFoundError(f"Destination file not found: {dst}")
+
+    write_size = rclone.size_file(dst)
+    if write_size != size:
+        raise ValueError(f"Size mismatch: {write_size} != {size}")
+
+    print(f"Upload complete: {dst}")
+    rclone.purge(parts_dir)
+
 
 def main() -> int:
     """Main entry point."""
@@ -179,7 +141,10 @@ def main() -> int:
     info_json = f"{args.src}/info.json".replace("//", "/")
     info = InfoJson(rclone.impl, src=None, src_info=info_json)
     loaded = info.load()
-    assert loaded
+    if not loaded:
+        raise FileNotFoundError(
+            f"Info file not found, has the upload finished? {info_json}"
+        )
     print(info)
     do_finish_part(rclone=rclone, info=info, dst=args.dst)
     return 0
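
Taken together, these changes replace bare asserts with explicit exceptions and add a verify-then-purge step after the multipart merge. A condensed sketch of the new flow, assuming `rclone`, `dst`, `size` (the expected SizeSuffix), and `parts_dir` are in scope as they are in do_finish_part:

    # verify the merged object before destroying the source parts
    if not rclone.exists(dst):
        raise FileNotFoundError(f"Destination file not found: {dst}")
    if rclone.size_file(dst) != size:
        raise ValueError("Size mismatch between merged object and expected size")
    rclone.purge(parts_dir)  # parts are deleted only after verification succeeds
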
rclone_api/s3/api.py CHANGED
@@ -9,7 +9,7 @@ from rclone_api.s3.basic_ops import (
     list_bucket_contents,
     upload_file,
 )
-from rclone_api.s3.create import create_s3_client
+from rclone_api.s3.create import S3Config, create_s3_client
 from rclone_api.s3.types import S3Credentials, S3MutliPartUploadConfig, S3UploadTarget
 from rclone_api.s3.upload_file_multipart import (
     MultiUploadResult,
@@ -23,7 +23,9 @@ class S3Client:
     def __init__(self, credentials: S3Credentials, verbose: bool = False) -> None:
         self.verbose = verbose
        self.credentials: S3Credentials = credentials
-        self.client: BaseClient = create_s3_client(credentials, verbose=verbose)
+        self.client: BaseClient = create_s3_client(
+            credentials, config=S3Config(verbose=verbose)
+        )
 
     def list_bucket_contents(self, bucket_name: str) -> None:
         list_bucket_contents(self.client, bucket_name)
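
S3Client keeps its verbose keyword but now forwards it through an S3Config rather than passing it directly. A sketch, assuming `creds` is a valid S3Credentials obtained elsewhere:

    # construction is unchanged for callers; the config wrapping happens internally
    s3 = S3Client(creds, verbose=True)
    s3.list_bucket_contents(creds.bucket_name)
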
rclone_api/s3/create.py CHANGED
@@ -1,4 +1,5 @@
 import warnings
+from dataclasses import dataclass
 
 import boto3
 from botocore.client import BaseClient
@@ -7,19 +8,41 @@ from botocore.config import Config
 from rclone_api.s3.types import S3Credentials, S3Provider
 
 _DEFAULT_BACKBLAZE_ENDPOINT = "https://s3.us-west-002.backblazeb2.com"
-_MAX_CONNECTIONS = 500
+_MAX_CONNECTIONS = 50
+_TIMEOUT_READ = 120
+_TIMEOUT_CONNECT = 60
+
+
+@dataclass
+class S3Config:
+    max_pool_connections: int | None = None
+    max_connections: int | None = None
+    timeout_connection: int | None = None
+    timeout_read: int | None = None
+    verbose: bool | None = None
+
+    def resolve_defaults(self) -> None:
+        if self.max_pool_connections is None:
+            self.max_pool_connections = _MAX_CONNECTIONS
+        if self.timeout_connection is None:
+            self.timeout_connection = _TIMEOUT_CONNECT
+        if self.timeout_read is None:
+            self.timeout_read = _TIMEOUT_READ
+        if self.verbose is None:
+            self.verbose = False
 
 
 # Create a Boto3 session and S3 client, this is back blaze specific.
 # Add a function if you want to use a different S3 provider.
 # If AWS support is added in a fork then please merge it back here.
-def _create_backblaze_s3_client(creds: S3Credentials, verbose: bool) -> BaseClient:
+def _create_backblaze_s3_client(creds: S3Credentials, config: S3Config) -> BaseClient:
     """Create and return an S3 client."""
     region_name = creds.region_name
     access_key = creds.access_key_id
     secret_key = creds.secret_access_key
     endpoint_url = creds.endpoint_url
     endpoint_url = endpoint_url or _DEFAULT_BACKBLAZE_ENDPOINT
+    config.resolve_defaults()
     session = boto3.session.Session()  # type: ignore
     return session.client(
         service_name="s3",
@@ -30,7 +53,9 @@ def _create_backblaze_s3_client(creds: S3Credentials, verbose: bool) -> BaseClie
         config=Config(
             signature_version="s3v4",
             region_name=region_name,
-            max_pool_connections=_MAX_CONNECTIONS,
+            max_pool_connections=config.max_connections,
+            read_timeout=config.timeout_read,
+            connect_timeout=config.timeout_connection,
             # Note that BackBlase has a boko3 bug where it doesn't support the new
             # checksum header, the following line was an attempt of fix it on the newest
             # version of boto3, but it didn't work.
@@ -39,18 +64,18 @@ def _create_backblaze_s3_client(creds: S3Credentials, verbose: bool) -> BaseClie
     )
 
 
-def _create_unknown_s3_client(creds: S3Credentials, verbose: bool) -> BaseClient:
+def _create_unknown_s3_client(creds: S3Credentials, config: S3Config) -> BaseClient:
     """Create and return an S3 client."""
     access_key = creds.access_key_id
     secret_key = creds.secret_access_key
     endpoint_url = creds.endpoint_url
     if (endpoint_url is not None) and not (endpoint_url.startswith("http")):
-        if verbose:
+        if config.verbose:
             warnings.warn(
                 f"Endpoint URL is schema naive: {endpoint_url}, assuming HTTPS"
             )
         endpoint_url = f"https://{endpoint_url}"
-
+    config.resolve_defaults()
     session = boto3.session.Session()  # type: ignore
     return session.client(
         service_name="s3",
@@ -60,19 +85,24 @@ def _create_unknown_s3_client(creds: S3Credentials, verbose: bool) -> BaseClient
         config=Config(
             signature_version="s3v4",
             region_name=creds.region_name,
-            max_pool_connections=_MAX_CONNECTIONS,
+            max_pool_connections=config.max_connections,
+            read_timeout=config.timeout_read,
+            connect_timeout=config.timeout_connection,
         ),
     )
 
 
-def create_s3_client(credentials: S3Credentials, verbose=False) -> BaseClient:
+def create_s3_client(
+    credentials: S3Credentials, config: S3Config | None = None
+) -> BaseClient:
     """Create and return an S3 client."""
+    config = config or S3Config()
     provider = credentials.provider
     if provider == S3Provider.BACKBLAZE:
-        if verbose:
+        if config.verbose:
             print("Creating BackBlaze S3 client")
-        return _create_backblaze_s3_client(creds=credentials, verbose=verbose)
+        return _create_backblaze_s3_client(creds=credentials, config=config)
     else:
-        if verbose:
+        if config.verbose:
             print("Creating generic/unknown S3 client")
-        return _create_unknown_s3_client(creds=credentials, verbose=verbose)
+        return _create_unknown_s3_client(creds=credentials, config=config)
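
create_s3_client now accepts an optional S3Config instead of a verbose keyword; fields left unset are filled in by resolve_defaults() (50 pooled connections, 60 s connect and 120 s read timeouts). A sketch, assuming `creds` is a valid S3Credentials (e.g. from rclone.impl.get_s3_credentials):

    from rclone_api.s3.create import S3Config, create_s3_client

    client = create_s3_client(creds, S3Config(verbose=True, timeout_read=5 * 60))
    client_with_defaults = create_s3_client(creds)  # substitutes a default S3Config()

One wrinkle worth noting in the diffed code: resolve_defaults() fills max_pool_connections, but the boto3 Config reads config.max_connections, which stays None unless the caller sets it explicitly.
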
rclone_api/s3/s3_multipart_uploader_by_copy.py CHANGED
@@ -32,152 +32,6 @@ class MultipartUploadInfo:
     src_file_path: Optional[Path] = None
 
 
-# response = client.upload_part_copy(
-#     Bucket='string',
-#     CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
-#     CopySourceIfMatch='string',
-#     CopySourceIfModifiedSince=datetime(2015, 1, 1),
-#     CopySourceIfNoneMatch='string',
-#     CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
-#     CopySourceRange='string',
-#     Key='string',
-#     PartNumber=123,
-#     UploadId='string',
-#     SSECustomerAlgorithm='string',
-#     SSECustomerKey='string',
-#     CopySourceSSECustomerAlgorithm='string',
-#     CopySourceSSECustomerKey='string',
-#     RequestPayer='requester',
-#     ExpectedBucketOwner='string',
-#     ExpectedSourceBucketOwner='string'
-# )
-
-# import _thread
-# import os
-# import traceback
-# import warnings
-# from concurrent.futures import Future, ThreadPoolExecutor
-# from pathlib import Path
-# from queue import Queue
-# from threading import Event, Thread
-# from typing import Any, Callable
-
-# from botocore.client import BaseClient
-
-# from rclone_api.mount_read_chunker import FilePart
-# from rclone_api.s3.chunk_task import S3FileInfo, file_chunker
-# from rclone_api.s3.chunk_types import (
-#     FinishedPiece,
-#     UploadInfo,
-#     UploadState,
-# )
-# from rclone_api.s3.types import MultiUploadResult
-# from rclone_api.types import EndOfStream
-# from rclone_api.util import locked_print
-
-
-# This is how you upload large parts through multi part upload, then the final call
-# is to assemble the parts that have already been uploaded through a multi part uploader
-# and then call complete_multipart_upload to finish the upload
-# response = (
-#     client.upload_part_copy(
-#         Bucket='string',
-#         CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
-#         CopySourceIfMatch='string',
-#         CopySourceIfModifiedSince=datetime(2015, 1, 1),
-#         CopySourceIfNoneMatch='string',
-#         CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
-#         CopySourceRange='string',
-#         Key='string',
-#         PartNumber=123,
-#         UploadId='string',
-#         SSECustomerAlgorithm='string',
-#         SSECustomerKey='string',
-#         CopySourceSSECustomerAlgorithm='string',
-#         CopySourceSSECustomerKey='string',
-#         RequestPayer='requester',
-#         ExpectedBucketOwner='string',
-#         ExpectedSourceBucketOwner='string'
-#     )
-
-
-# def upload_task(
-#     info: UploadInfo,
-#     chunk: FilePart,
-#     part_number: int,
-#     retries: int,
-# ) -> FinishedPiece:
-#     file_or_err: Path | Exception = chunk.get_file()
-#     if isinstance(file_or_err, Exception):
-#         raise file_or_err
-#     file: Path = file_or_err
-#     size = os.path.getsize(file)
-#     retries = retries + 1  # Add one for the initial attempt
-#     for retry in range(retries):
-#         try:
-#             if retry > 0:
-#                 locked_print(f"Retrying part {part_number} for {info.src_file_path}")
-#             locked_print(
-#                 f"Uploading part {part_number} for {info.src_file_path} of size {size}"
-#             )
-
-#             with open(file, "rb") as f:
-#                 part = info.s3_client.upload_part(
-#                     Bucket=info.bucket_name,
-#                     Key=info.object_name,
-#                     PartNumber=part_number,
-#                     UploadId=info.upload_id,
-#                     Body=f,
-#                 )
-#             out: FinishedPiece = FinishedPiece(
-#                 etag=part["ETag"], part_number=part_number
-#             )
-#             chunk.dispose()
-#             return out
-#         except Exception as e:
-#             if retry == retries - 1:
-#                 locked_print(f"Error uploading part {part_number}: {e}")
-#                 chunk.dispose()
-#                 raise e
-#             else:
-#                 locked_print(f"Error uploading part {part_number}: {e}, retrying")
-#                 continue
-#     raise Exception("Should not reach here")
-
-
-# def prepare_upload_file_multipart(
-#     s3_client: BaseClient,
-#     bucket_name: str,
-#     file_path: Path,
-#     file_size: int | None,
-#     object_name: str,
-#     chunk_size: int,
-#     retries: int,
-# ) -> UploadInfo:
-#     """Upload a file to the bucket using multipart upload with customizable chunk size."""
-
-#     # Initiate multipart upload
-#     locked_print(
-#         f"Creating multipart upload for {file_path} to {bucket_name}/{object_name}"
-#     )
-#     mpu = s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_name)
-#     upload_id = mpu["UploadId"]
-
-#     file_size = file_size if file_size is not None else os.path.getsize(file_path)
-
-#     upload_info: UploadInfo = UploadInfo(
-#         s3_client=s3_client,
-#         bucket_name=bucket_name,
-#         object_name=object_name,
-#         src_file_path=file_path,
-#         upload_id=upload_id,
-#         retries=retries,
-#         chunk_size=chunk_size,
-#         file_size=file_size,
-#     )
-#     return upload_info
-
-
 def upload_part_copy_task(
     info: MultipartUploadInfo,
     source_bucket: str,
@@ -228,11 +82,7 @@ def upload_part_copy_task(
 
         # Extract ETag from the response
         etag = part["CopyPartResult"]["ETag"]
-
        return FinishedPiece(etag=etag, part_number=part_number)
-        # except NoSuchKey as e:
-        #     locked_print(f"Error copying part {part_number}: {e}")
-        #     return e
 
     except Exception as e:
         msg = f"Error copying {copy_source} -> {info.object_name}: {e}, params={params}"
{rclone_api-1.4.11.dist-info → rclone_api-1.4.13.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: rclone_api
-Version: 1.4.11
+Version: 1.4.13
 Summary: rclone api in python
 Home-page: https://github.com/zackees/rclone-api
 License: BSD 3-Clause License
{rclone_api-1.4.11.dist-info → rclone_api-1.4.13.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
-rclone_api/__init__.py,sha256=cJpn62AmlcCXdbRIMIls4yEek6SLWJ-ONlr2mDnkIFk,17530
+rclone_api/__init__.py,sha256=gsYL3jaqMgHbJ6mnLxNttuaKltUBXNuvpz7C6fF8_8w,17678
 rclone_api/cli.py,sha256=dibfAZIh0kXWsBbfp3onKLjyZXo54mTzDjUdzJlDlWo,231
 rclone_api/completed_process.py,sha256=_IZ8IWK7DM1_tsbDEkH6wPZ-bbcrgf7A7smls854pmg,1775
 rclone_api/config.py,sha256=f6jEAxVorGFr31oHfcsu5AJTtOJj2wR5tTSsbGGZuIw,2558
@@ -27,7 +27,7 @@ rclone_api/util.py,sha256=9w_m6W62l_X42Jw5q8p_p30h-QoxAqufvnCLI4PTMOE,7056
 rclone_api/assets/example.txt,sha256=lTBovRjiz0_TgtAtbA1C5hNi2ffbqnNPqkKg6UiKCT8,54
 rclone_api/cmd/analyze.py,sha256=RHbvk1G5ZUc3qLqlm1AZEyQzd_W_ZjcbCNDvW4YpTKQ,1252
 rclone_api/cmd/copy_large_s3.py,sha256=B17GliDQyAauNglJCpsey0d3eArT2DAcT9g684TMQk8,3514
-rclone_api/cmd/copy_large_s3_finish.py,sha256=2L2kw206rJ-p3zQZkqiixt1tz5vXaw9SOfji6dFZFOs,6138
+rclone_api/cmd/copy_large_s3_finish.py,sha256=7bDaUZ0forOqR4JOT1eoMBF_2qtifM9GcLK_he53cw4,4877
 rclone_api/cmd/list_files.py,sha256=x8FHODEilwKqwdiU1jdkeJbLwOqUkUQuDWPo2u_zpf0,741
 rclone_api/cmd/save_to_db.py,sha256=ylvnhg_yzexM-m6Zr7XDiswvoDVSl56ELuFAdb9gqBY,1957
 rclone_api/db/__init__.py,sha256=OSRUdnSWUlDTOHmjdjVmxYTUNpTbtaJ5Ll9sl-PfZg0,40
@@ -37,20 +37,20 @@ rclone_api/detail/copy_file_parts.py,sha256=dpqZ0d7l195dZg6Vob2Ty43Uah1v0ozQu5kM
 rclone_api/detail/walk.py,sha256=-54NVE8EJcCstwDoaC_UtHm73R2HrZwVwQmsnv55xNU,3369
 rclone_api/experimental/flags.py,sha256=qCVD--fSTmzlk9hloRLr0q9elzAOFzPsvVpKM3aB1Mk,2739
 rclone_api/experimental/flags_base.py,sha256=ajU_czkTcAxXYU-SlmiCfHY7aCQGHvpCLqJ-Z8uZLk0,2102
-rclone_api/s3/api.py,sha256=PafsIEyWDpLWAXsZAjFm9CY14vJpsDr9lOsn0kGRLZ0,4009
+rclone_api/s3/api.py,sha256=nIjOskTgTlbksbBHgF27ExFEYheCT_OvcdYl6FqRAto,4058
 rclone_api/s3/basic_ops.py,sha256=hK3366xhVEzEcjz9Gk_8lFx6MRceAk72cax6mUrr6ko,2104
 rclone_api/s3/chunk_task.py,sha256=waEYe-iYQ1_BR3NCS4BrzVrK9UANvH1EcbXx2I6Z_NM,6839
-rclone_api/s3/create.py,sha256=eoC-AFN-LNS0L6HoOnVYJtW7a5OaJWSkP9Vx7AvOX4U,2972
-rclone_api/s3/s3_multipart_uploader_by_copy.py,sha256=rivcMn9TgHCZIhq7w2lCtas6O-QM08-uCEX2ry8ZOhs,13893
+rclone_api/s3/create.py,sha256=NsJtyOSTR_4kvwdojrzrg8LIjMVRuOsTGt4KkUrV0OM,4015
+rclone_api/s3/s3_multipart_uploader_by_copy.py,sha256=KNEWG3CrM8lVWd2b0dA6WT5LO3gEEXNk2tA6Uk9Vi7I,8701
 rclone_api/s3/types.py,sha256=cYI5MbXRNdT-ps5kGIRQaYrseHyx_ozT4AcwBABTKwk,1616
 rclone_api/s3/upload_file_multipart.py,sha256=V7syKjFyVIe4U9Ahl5XgqVTzt9akiew3MFjGmufLo2w,12503
 rclone_api/s3/multipart/file_info.py,sha256=8v_07_eADo0K-Nsv7F0Ac1wcv3lkIsrR3MaRCmkYLTQ,105
 rclone_api/s3/multipart/finished_piece.py,sha256=9nMWnVZ8S99wi2VFQsm1h1ZHqmebkhMGgd2s56wNj9w,1331
 rclone_api/s3/multipart/upload_info.py,sha256=d6_OfzFR_vtDzCEegFfzCfWi2kUBUV4aXZzqAEVp1c4,1874
 rclone_api/s3/multipart/upload_state.py,sha256=f-Aq2NqtAaMUMhYitlICSNIxCKurWAl2gDEUVizLIqw,6019
-rclone_api-1.4.11.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
-rclone_api-1.4.11.dist-info/METADATA,sha256=ijUgYWrOnz6yxDexwljro-qm8ZZK3mN2HyreI6a5TxQ,4628
-rclone_api-1.4.11.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
-rclone_api-1.4.11.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
-rclone_api-1.4.11.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
-rclone_api-1.4.11.dist-info/RECORD,,
+rclone_api-1.4.13.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
+rclone_api-1.4.13.dist-info/METADATA,sha256=UFK0tBZDL5_d8X-NpHskBsPk7YxWHo5ZqMI0Quqwsic,4628
+rclone_api-1.4.13.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
+rclone_api-1.4.13.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
+rclone_api-1.4.13.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
+rclone_api-1.4.13.dist-info/RECORD,,