rclone-api 1.4.11__py2.py3-none-any.whl → 1.4.12__py2.py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- rclone_api/cmd/copy_large_s3_finish.py +2 -61
- rclone_api/s3/create.py +1 -1
- rclone_api/s3/s3_multipart_uploader_by_copy.py +0 -150
- {rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/METADATA +1 -1
- {rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/RECORD +9 -9
- {rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/LICENSE +0 -0
- {rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/WHEEL +0 -0
- {rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/entry_points.txt +0 -0
- {rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/top_level.txt +0 -0
rclone_api/cmd/copy_large_s3_finish.py
CHANGED
@@ -1,4 +1,5 @@
 import argparse
+import os
 from dataclasses import dataclass
 from pathlib import Path
 
@@ -9,31 +10,6 @@ from rclone_api.s3.s3_multipart_uploader_by_copy import (
 )
 from rclone_api.types import SizeSuffix
 
-DATA_SOURCE = (
-    "dst:TorrentBooks/aa_misc_data/aa_misc_data/world_lending_library_2024_11.tar.zst"
-)
-
-
-# response = client.upload_part_copy(
-#     Bucket='string',
-#     CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
-#     CopySourceIfMatch='string',
-#     CopySourceIfModifiedSince=datetime(2015, 1, 1),
-#     CopySourceIfNoneMatch='string',
-#     CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
-#     CopySourceRange='string',
-#     Key='string',
-#     PartNumber=123,
-#     UploadId='string',
-#     SSECustomerAlgorithm='string',
-#     SSECustomerKey='string',
-#     CopySourceSSECustomerAlgorithm='string',
-#     CopySourceSSECustomerKey='string',
-#     RequestPayer='requester',
-#     ExpectedBucketOwner='string',
-#     ExpectedSourceBucketOwner='string'
-# )
-
 
 @dataclass
 class Args:
@@ -58,13 +34,6 @@ def _parse_args() -> Args:
     parser.add_argument(
         "--config", help="Path to rclone config file", type=Path, required=False
     )
-    parser.add_argument(
-        "--chunk-size",
-        help="Chunk size that will be read and uploaded in SizeSuffix form, too low or too high will cause issues",
-        type=str,
-        default="128MB",  # if this is too low or too high an s3 service
-    )
-
     args = parser.parse_args()
     config: Path | None = args.config
     if config is None:
@@ -81,28 +50,6 @@ def _parse_args() -> Args:
     return out
 
 
-# from dataclasses import dataclass
-
-# def parse_info_json(text: str) -> UploadInfo:
-#     import json
-#     data = json.loads(text)
-#     chunk_size = data["chunksize_int"]
-#     first_part = data["first_part"]
-#     last_part = data["last_part"]
-#     assert isinstance(chunk_size, int)
-#     assert isinstance(first_part, int)
-#     assert isinstance(last_part, int)
-#     assert first_part <= last_part
-#     parts: list[str] = []
-#     fmt = "part.{:05d}_{}-{}"
-#     for i in range(first_part, last_part + 1):
-#         offset: int = i * chunk_size
-#         end: int = (i + 1) * chunk_size
-#         part = fmt.format(i, offset, end)
-#         parts.append(part)
-#     return UploadInfo(chunk_size=chunk_size, parts=parts)
-
-
 def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
     from rclone_api.s3.create import BaseClient, S3Credentials, create_s3_client
 
@@ -138,7 +85,6 @@ def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
     out = f"{parts_path}"
     return out
 
-    # s3_keys: list[str] = [_to_s3_key(name=p) for p in source_keys]
     parts: list[tuple[int, str]] = []
     part_num = 1
     for part_key in source_keys:
@@ -146,14 +92,9 @@ def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
         parts.append((part_num, s3_key))
         part_num += 1
 
-    # for key in parts:
-    #     print(key)
-
    chunksize = info.chunksize
    assert chunksize is not None
 
-    import os
-
    dst_name = info.dst_name
    dst_dir = os.path.dirname(parts_path)
    # dst_key =
@@ -167,7 +108,7 @@ def do_finish_part(rclone: Rclone, info: InfoJson, dst: str) -> None:
         destination_key=dst_key,
         chunk_size=chunksize.as_int(),
         final_size=size.as_int(),
-        max_workers=
+        max_workers=50,
         retries=3,
     )
 
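The net effect for `copy_large_s3_finish.py` is that the unused `--chunk-size` flag and the commented-out scaffolding are gone, and the finishing call now hardcodes `max_workers=50`, which lines up with the `_MAX_CONNECTIONS = 50` value introduced in `rclone_api/s3/create.py` below. As a rough illustration of how a bounded worker pool can drive the `(part_number, s3_key)` list that `do_finish_part` builds, here is a minimal sketch; the helper `copy_one_part` and its return value are hypothetical and are not code from the package.

```python
# Illustrative sketch only (not rclone_api code): fan a parts list out across a
# bounded pool of workers, the role played by max_workers=50 in the diff above.
from concurrent.futures import ThreadPoolExecutor


def copy_parts(parts: list[tuple[int, str]], max_workers: int = 50) -> list[str]:
    """Copy each (part_number, s3_key) pair concurrently and collect the ETags."""

    def copy_one_part(part_number: int, s3_key: str) -> str:
        # In the real code this would issue an S3 upload_part_copy request;
        # here we just return a placeholder ETag string.
        return f"etag-for-part-{part_number}-{s3_key}"

    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        futures = [pool.submit(copy_one_part, num, key) for num, key in parts]
        return [f.result() for f in futures]
```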
rclone_api/s3/create.py
CHANGED
@@ -7,7 +7,7 @@ from botocore.config import Config
 from rclone_api.s3.types import S3Credentials, S3Provider
 
 _DEFAULT_BACKBLAZE_ENDPOINT = "https://s3.us-west-002.backblazeb2.com"
-_MAX_CONNECTIONS =
+_MAX_CONNECTIONS = 50
 
 
 # Create a Boto3 session and S3 client, this is back blaze specific.
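The only change in `create.py` is giving `_MAX_CONNECTIONS` a concrete value of 50. In boto3, a connection cap like this is normally applied through `botocore.config.Config(max_pool_connections=...)` when the client is constructed. The sketch below is written under that assumption; it is not the actual `create_s3_client` from the package, and the signature, retry settings, and endpoint shown are illustrative only.

```python
# Minimal sketch, assuming the cap is applied via botocore's Config; not the
# actual rclone_api/s3/create.py implementation.
import boto3
from botocore.config import Config

_MAX_CONNECTIONS = 50


def create_s3_client(access_key: str, secret_key: str, endpoint_url: str):
    # max_pool_connections bounds the urllib3 connection pool used by the
    # client, which is what limits concurrent requests such as parallel
    # part copies.
    config = Config(
        max_pool_connections=_MAX_CONNECTIONS,
        retries={"max_attempts": 3, "mode": "standard"},
    )
    return boto3.client(
        "s3",
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        endpoint_url=endpoint_url,  # e.g. an S3-compatible Backblaze B2 endpoint
        config=config,
    )
```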
rclone_api/s3/s3_multipart_uploader_by_copy.py
CHANGED
@@ -32,152 +32,6 @@ class MultipartUploadInfo:
     src_file_path: Optional[Path] = None
 
 
-# response = client.upload_part_copy(
-#     Bucket='string',
-#     CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
-#     CopySourceIfMatch='string',
-#     CopySourceIfModifiedSince=datetime(2015, 1, 1),
-#     CopySourceIfNoneMatch='string',
-#     CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
-#     CopySourceRange='string',
-#     Key='string',
-#     PartNumber=123,
-#     UploadId='string',
-#     SSECustomerAlgorithm='string',
-#     SSECustomerKey='string',
-#     CopySourceSSECustomerAlgorithm='string',
-#     CopySourceSSECustomerKey='string',
-#     RequestPayer='requester',
-#     ExpectedBucketOwner='string',
-#     ExpectedSourceBucketOwner='string'
-# )
-
-# import _thread
-# import os
-# import traceback
-# import warnings
-# from concurrent.futures import Future, ThreadPoolExecutor
-# from pathlib import Path
-# from queue import Queue
-# from threading import Event, Thread
-# from typing import Any, Callable
-
-# from botocore.client import BaseClient
-
-# from rclone_api.mount_read_chunker import FilePart
-# from rclone_api.s3.chunk_task import S3FileInfo, file_chunker
-# from rclone_api.s3.chunk_types import (
-#     FinishedPiece,
-#     UploadInfo,
-#     UploadState,
-# )
-# from rclone_api.s3.types import MultiUploadResult
-# from rclone_api.types import EndOfStream
-# from rclone_api.util import locked_print
-
-
-# This is how you upload large parts through multi part upload, then the final call
-# is to assemble the parts that have already been uploaded through a multi part uploader
-# and then call complete_multipart_upload to finish the upload
-# response = (
-#     client.upload_part_copy(
-#         Bucket='string',
-#         CopySource='string' or {'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'},
-#         CopySourceIfMatch='string',
-#         CopySourceIfModifiedSince=datetime(2015, 1, 1),
-#         CopySourceIfNoneMatch='string',
-#         CopySourceIfUnmodifiedSince=datetime(2015, 1, 1),
-#         CopySourceRange='string',
-#         Key='string',
-#         PartNumber=123,
-#         UploadId='string',
-#         SSECustomerAlgorithm='string',
-#         SSECustomerKey='string',
-#         CopySourceSSECustomerAlgorithm='string',
-#         CopySourceSSECustomerKey='string',
-#         RequestPayer='requester',
-#         ExpectedBucketOwner='string',
-#         ExpectedSourceBucketOwner='string'
-#     )
-
-
-# def upload_task(
-#     info: UploadInfo,
-#     chunk: FilePart,
-#     part_number: int,
-#     retries: int,
-# ) -> FinishedPiece:
-#     file_or_err: Path | Exception = chunk.get_file()
-#     if isinstance(file_or_err, Exception):
-#         raise file_or_err
-#     file: Path = file_or_err
-#     size = os.path.getsize(file)
-#     retries = retries + 1  # Add one for the initial attempt
-#     for retry in range(retries):
-#         try:
-#             if retry > 0:
-#                 locked_print(f"Retrying part {part_number} for {info.src_file_path}")
-#             locked_print(
-#                 f"Uploading part {part_number} for {info.src_file_path} of size {size}"
-#             )
-
-#             with open(file, "rb") as f:
-#                 part = info.s3_client.upload_part(
-#                     Bucket=info.bucket_name,
-#                     Key=info.object_name,
-#                     PartNumber=part_number,
-#                     UploadId=info.upload_id,
-#                     Body=f,
-#                 )
-#             out: FinishedPiece = FinishedPiece(
-#                 etag=part["ETag"], part_number=part_number
-#             )
-#             chunk.dispose()
-#             return out
-#         except Exception as e:
-#             if retry == retries - 1:
-#                 locked_print(f"Error uploading part {part_number}: {e}")
-#                 chunk.dispose()
-#                 raise e
-#             else:
-#                 locked_print(f"Error uploading part {part_number}: {e}, retrying")
-#                 continue
-#     raise Exception("Should not reach here")
-
-
-# def prepare_upload_file_multipart(
-#     s3_client: BaseClient,
-#     bucket_name: str,
-#     file_path: Path,
-#     file_size: int | None,
-#     object_name: str,
-#     chunk_size: int,
-#     retries: int,
-# ) -> UploadInfo:
-#     """Upload a file to the bucket using multipart upload with customizable chunk size."""
-
-#     # Initiate multipart upload
-#     locked_print(
-#         f"Creating multipart upload for {file_path} to {bucket_name}/{object_name}"
-#     )
-#     mpu = s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_name)
-#     upload_id = mpu["UploadId"]
-
-#     file_size = file_size if file_size is not None else os.path.getsize(file_path)
-
-#     upload_info: UploadInfo = UploadInfo(
-#         s3_client=s3_client,
-#         bucket_name=bucket_name,
-#         object_name=object_name,
-#         src_file_path=file_path,
-#         upload_id=upload_id,
-#         retries=retries,
-#         chunk_size=chunk_size,
-#         file_size=file_size,
-#     )
-#     return upload_info
-
-
 def upload_part_copy_task(
     info: MultipartUploadInfo,
     source_bucket: str,
@@ -228,11 +82,7 @@ def upload_part_copy_task(
 
         # Extract ETag from the response
         etag = part["CopyPartResult"]["ETag"]
-
         return FinishedPiece(etag=etag, part_number=part_number)
-        # except NoSuchKey as e:
-        #     locked_print(f"Error copying part {part_number}: {e}")
-        #     return e
 
     except Exception as e:
         msg = f"Error copying {copy_source} -> {info.object_name}: {e}, params={params}"
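After this cleanup, the module keeps only the live `upload_part_copy_task`, which reads the part ETag out of `CopyPartResult` and wraps it in a `FinishedPiece`. The general boto3 pattern the deleted comments were describing, assembling a large object out of parts that already exist as objects via server-side copy, looks roughly like the sketch below; the function name `concat_parts` and its arguments are placeholders, not the package's API.

```python
# Hedged sketch of server-side multipart assembly with boto3; bucket/key names
# are placeholders and this is not the rclone_api implementation itself.
def concat_parts(client, bucket: str, dest_key: str, part_keys: list[str]) -> None:
    mpu = client.create_multipart_upload(Bucket=bucket, Key=dest_key)
    upload_id = mpu["UploadId"]
    finished = []
    for part_number, src_key in enumerate(part_keys, start=1):
        # Each part is copied server-side from an existing object; no data
        # passes through the machine running this code.
        # Note: S3 requires every part except the last to be at least 5 MiB.
        resp = client.upload_part_copy(
            Bucket=bucket,
            Key=dest_key,
            PartNumber=part_number,
            UploadId=upload_id,
            CopySource={"Bucket": bucket, "Key": src_key},
        )
        finished.append(
            {"ETag": resp["CopyPartResult"]["ETag"], "PartNumber": part_number}
        )
    client.complete_multipart_upload(
        Bucket=bucket,
        Key=dest_key,
        UploadId=upload_id,
        MultipartUpload={"Parts": finished},
    )
```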
{rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/RECORD
CHANGED
@@ -27,7 +27,7 @@ rclone_api/util.py,sha256=9w_m6W62l_X42Jw5q8p_p30h-QoxAqufvnCLI4PTMOE,7056
 rclone_api/assets/example.txt,sha256=lTBovRjiz0_TgtAtbA1C5hNi2ffbqnNPqkKg6UiKCT8,54
 rclone_api/cmd/analyze.py,sha256=RHbvk1G5ZUc3qLqlm1AZEyQzd_W_ZjcbCNDvW4YpTKQ,1252
 rclone_api/cmd/copy_large_s3.py,sha256=B17GliDQyAauNglJCpsey0d3eArT2DAcT9g684TMQk8,3514
-rclone_api/cmd/copy_large_s3_finish.py,sha256=
+rclone_api/cmd/copy_large_s3_finish.py,sha256=O7ezviM460ivprF5v9bx_ztqhg0_jnRmvzYK8n6R9IY,4212
 rclone_api/cmd/list_files.py,sha256=x8FHODEilwKqwdiU1jdkeJbLwOqUkUQuDWPo2u_zpf0,741
 rclone_api/cmd/save_to_db.py,sha256=ylvnhg_yzexM-m6Zr7XDiswvoDVSl56ELuFAdb9gqBY,1957
 rclone_api/db/__init__.py,sha256=OSRUdnSWUlDTOHmjdjVmxYTUNpTbtaJ5Ll9sl-PfZg0,40
@@ -40,17 +40,17 @@ rclone_api/experimental/flags_base.py,sha256=ajU_czkTcAxXYU-SlmiCfHY7aCQGHvpCLqJ
 rclone_api/s3/api.py,sha256=PafsIEyWDpLWAXsZAjFm9CY14vJpsDr9lOsn0kGRLZ0,4009
 rclone_api/s3/basic_ops.py,sha256=hK3366xhVEzEcjz9Gk_8lFx6MRceAk72cax6mUrr6ko,2104
 rclone_api/s3/chunk_task.py,sha256=waEYe-iYQ1_BR3NCS4BrzVrK9UANvH1EcbXx2I6Z_NM,6839
-rclone_api/s3/create.py,sha256=
-rclone_api/s3/s3_multipart_uploader_by_copy.py,sha256=
+rclone_api/s3/create.py,sha256=Ao79MENZM5Bvi9SXwo6yng88Vkmn75rZElHKQrGckeo,2971
+rclone_api/s3/s3_multipart_uploader_by_copy.py,sha256=KNEWG3CrM8lVWd2b0dA6WT5LO3gEEXNk2tA6Uk9Vi7I,8701
 rclone_api/s3/types.py,sha256=cYI5MbXRNdT-ps5kGIRQaYrseHyx_ozT4AcwBABTKwk,1616
 rclone_api/s3/upload_file_multipart.py,sha256=V7syKjFyVIe4U9Ahl5XgqVTzt9akiew3MFjGmufLo2w,12503
 rclone_api/s3/multipart/file_info.py,sha256=8v_07_eADo0K-Nsv7F0Ac1wcv3lkIsrR3MaRCmkYLTQ,105
 rclone_api/s3/multipart/finished_piece.py,sha256=9nMWnVZ8S99wi2VFQsm1h1ZHqmebkhMGgd2s56wNj9w,1331
 rclone_api/s3/multipart/upload_info.py,sha256=d6_OfzFR_vtDzCEegFfzCfWi2kUBUV4aXZzqAEVp1c4,1874
 rclone_api/s3/multipart/upload_state.py,sha256=f-Aq2NqtAaMUMhYitlICSNIxCKurWAl2gDEUVizLIqw,6019
-rclone_api-1.4.
-rclone_api-1.4.
-rclone_api-1.4.
-rclone_api-1.4.
-rclone_api-1.4.
-rclone_api-1.4.
+rclone_api-1.4.12.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
+rclone_api-1.4.12.dist-info/METADATA,sha256=i7GFP4S5sHfeiTlh9sKl_CXd7jVyXqldB2jvd_rLWPc,4628
+rclone_api-1.4.12.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
+rclone_api-1.4.12.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
+rclone_api-1.4.12.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
+rclone_api-1.4.12.dist-info/RECORD,,
{rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/LICENSE
File without changes
{rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/WHEEL
File without changes
{rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/entry_points.txt
File without changes
{rclone_api-1.4.11.dist-info → rclone_api-1.4.12.dist-info}/top_level.txt
File without changes