rclone-api 1.3.27__py2.py3-none-any.whl → 1.4.1__py2.py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
- rclone_api/__init__.py +491 -4
- rclone_api/cmd/copy_large_s3.py +17 -10
- rclone_api/db/db.py +3 -3
- rclone_api/detail/copy_file_parts.py +382 -0
- rclone_api/dir.py +1 -1
- rclone_api/dir_listing.py +1 -1
- rclone_api/file.py +8 -0
- rclone_api/file_part.py +198 -0
- rclone_api/file_stream.py +52 -0
- rclone_api/http_server.py +15 -21
- rclone_api/{rclone.py → rclone_impl.py} +153 -321
- rclone_api/remote.py +3 -3
- rclone_api/rpath.py +11 -4
- rclone_api/s3/chunk_task.py +3 -19
- rclone_api/s3/multipart/file_info.py +7 -0
- rclone_api/s3/multipart/finished_piece.py +38 -0
- rclone_api/s3/multipart/upload_info.py +62 -0
- rclone_api/s3/{chunk_types.py → multipart/upload_state.py} +3 -99
- rclone_api/s3/s3_multipart_uploader.py +138 -0
- rclone_api/s3/types.py +1 -1
- rclone_api/s3/upload_file_multipart.py +14 -14
- rclone_api/scan_missing_folders.py +1 -1
- rclone_api/types.py +136 -165
- rclone_api/util.py +22 -2
- {rclone_api-1.3.27.dist-info → rclone_api-1.4.1.dist-info}/METADATA +1 -1
- rclone_api-1.4.1.dist-info/RECORD +55 -0
- rclone_api/mount_read_chunker.py +0 -130
- rclone_api/profile/mount_copy_bytes.py +0 -311
- rclone_api-1.3.27.dist-info/RECORD +0 -50
- /rclone_api/{walk.py → detail/walk.py} +0 -0
- {rclone_api-1.3.27.dist-info → rclone_api-1.4.1.dist-info}/LICENSE +0 -0
- {rclone_api-1.3.27.dist-info → rclone_api-1.4.1.dist-info}/WHEEL +0 -0
- {rclone_api-1.3.27.dist-info → rclone_api-1.4.1.dist-info}/entry_points.txt +0 -0
- {rclone_api-1.3.27.dist-info → rclone_api-1.4.1.dist-info}/top_level.txt +0 -0
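Before the per-file hunks, a quick orientation: the implementation moves from `rclone.py` to `rclone_impl.py` as `RcloneImpl`, `FilesStream` moves to `file_stream.py`, the FUSE-mount chunk fetcher is replaced by an HTTP fetcher, and a family of small I/O helpers lands. A hedged usage sketch of the new surface, assuming the public `Rclone` wrapper in `__init__.py` forwards these methods (config path and remotes are placeholders):

```python
# Sketch of the 1.4.1 convenience surface; assumes the public Rclone wrapper
# in __init__.py forwards to RcloneImpl. Paths and remotes are placeholders.
from pathlib import Path

from rclone_api import Rclone

rclone = Rclone(Path("rclone.conf"))
err = rclone.write_text("dst:Bucket/notes.txt", "hello")  # Exception | None
if err is not None:
    raise err
text = rclone.read_text("dst:Bucket/notes.txt")           # str | Exception
size = rclone.size_file("dst:Bucket/notes.txt")           # SizeSuffix | Exception
```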
rclone_api/{rclone.py → rclone_impl.py} RENAMED
@@ -4,32 +4,31 @@ Unit test file.
 
 import os
 import random
-import shutil
 import subprocess
 import time
 import traceback
 import warnings
 from concurrent.futures import Future, ThreadPoolExecutor
-from
-from dataclasses import dataclass
+from datetime import datetime
 from fnmatch import fnmatch
 from pathlib import Path
 from tempfile import TemporaryDirectory
-from typing import
+from typing import Generator
 
 from rclone_api import Dir
 from rclone_api.completed_process import CompletedProcess
 from rclone_api.config import Config, Parsed, Section
 from rclone_api.convert import convert_to_filestr_list, convert_to_str
 from rclone_api.deprecated import deprecated
+from rclone_api.detail.walk import walk
 from rclone_api.diff import DiffItem, DiffOption, diff_stream_from_running_process
 from rclone_api.dir_listing import DirListing
 from rclone_api.exec import RcloneExec
-from rclone_api.file import File
+from rclone_api.file import File
+from rclone_api.file_stream import FilesStream
 from rclone_api.group_files import group_files
 from rclone_api.http_server import HttpServer
 from rclone_api.mount import Mount, clean_mount, prepare_mount
-from rclone_api.mount_read_chunker import MultiMountFileChunker
 from rclone_api.process import Process
 from rclone_api.remote import Remote
 from rclone_api.rpath import RPath
@@ -40,20 +39,20 @@ from rclone_api.s3.types import (
     S3UploadTarget,
 )
 from rclone_api.types import (
-    FilePart,
     ListingOption,
     ModTimeStrategy,
     Order,
+    PartInfo,
     SizeResult,
     SizeSuffix,
 )
 from rclone_api.util import (
+    find_free_port,
     get_check,
     get_rclone_exe,
     get_verbose,
     to_path,
 )
-from rclone_api.walk import walk
 
 
 def rclone_verbose(verbose: bool | None) -> bool:
@@ -70,51 +69,7 @@ def _to_rclone_conf(config: Config | Path) -> Config:
     return config
 
 
-class FilesStream:
-
-    def __init__(self, path: str, process: Process) -> None:
-        self.path = path
-        self.process = process
-
-    def __enter__(self) -> "FilesStream":
-        self.process.__enter__()
-        return self
-
-    def __exit__(self, *exc_info):
-        self.process.__exit__(*exc_info)
-
-    def files(self) -> Generator[FileItem, None, None]:
-        line: bytes
-        for line in self.process.stdout:
-            linestr: str = line.decode("utf-8").strip()
-            if linestr.startswith("["):
-                continue
-            if linestr.endswith(","):
-                linestr = linestr[:-1]
-            if linestr.endswith("]"):
-                continue
-            fileitem: FileItem | None = FileItem.from_json_str(self.path, linestr)
-            if fileitem is None:
-                continue
-            yield fileitem
-
-    def files_paged(
-        self, page_size: int = 1000
-    ) -> Generator[list[FileItem], None, None]:
-        page: list[FileItem] = []
-        for fileitem in self.files():
-            page.append(fileitem)
-            if len(page) >= page_size:
-                yield page
-                page = []
-        if len(page) > 0:
-            yield page
-
-    def __iter__(self) -> Generator[FileItem, None, None]:
-        return self.files()
-
-
-class Rclone:
+class RcloneImpl:
     def __init__(
         self, rclone_conf: Path | Config, rclone_exe: Path | None = None
     ) -> None:
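FilesStream itself is not deleted; it moves to the new rclone_api/file_stream.py (+52 in the summary above) and is re-imported at the top of this file. A minimal consumer sketch against only the interface visible in the removed code; the producer that hands out a FilesStream is hypothetical here:

```python
# Minimal sketch against the FilesStream interface shown above, now imported
# from rclone_api.file_stream. The `stream` argument is hypothetical; the
# context-manager + files_paged() usage mirrors the removed implementation.
from rclone_api.file_stream import FilesStream

def count_files(stream: FilesStream) -> int:
    total = 0
    with stream:  # __enter__/__exit__ manage the underlying rclone process
        for page in stream.files_paged(page_size=1000):
            total += len(page)
    return total
```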
@@ -309,6 +264,46 @@ class Rclone:
         random.shuffle(paths)
         return DirListing(paths)
 
+    def print(self, path: str) -> Exception | None:
+        """Print the contents of a file."""
+        try:
+            text_or_err = self.read_text(path)
+            if isinstance(text_or_err, Exception):
+                return text_or_err
+            print(text_or_err)
+        except Exception as e:
+            return e
+        return None
+
+    def stat(self, src: str) -> File | Exception:
+        """Get the status of a file or directory."""
+        dirlist: DirListing = self.ls(src)
+        if len(dirlist.files) == 0:
+            # raise FileNotFoundError(f"File not found: {src}")
+            return FileNotFoundError(f"File not found: {src}")
+        try:
+            file: File = dirlist.files[0]
+            return file
+        except Exception as e:
+            return e
+
+    def modtime(self, src: str) -> str | Exception:
+        """Get the modification time of a file or directory."""
+        try:
+            file: File | Exception = self.stat(src)
+            if isinstance(file, Exception):
+                return file
+            return file.mod_time()
+        except Exception as e:
+            return e
+
+    def modtime_dt(self, src: str) -> datetime | Exception:
+        """Get the modification time of a file or directory."""
+        modtime: str | Exception = self.modtime(src)
+        if isinstance(modtime, Exception):
+            return modtime
+        return datetime.fromisoformat(modtime)
+
     def listremotes(self) -> list[Remote]:
         cmd = ["listremotes"]
         cp = self._run(cmd)
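The new accessors follow the library's errors-as-values convention: failures come back as `Exception` objects rather than being raised. A caller-side sketch; the remote path is a placeholder:

```python
# Caller-side sketch for the accessors added above; "remote:bucket/file.bin"
# is a placeholder. Errors are returned as values, not raised.
file = rclone.stat("remote:bucket/file.bin")
if isinstance(file, Exception):
    print(f"stat failed: {file}")
else:
    dt = rclone.modtime_dt("remote:bucket/file.bin")  # datetime | Exception
    if not isinstance(dt, Exception):
        print(f"last modified: {dt.isoformat()}")
```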
@@ -476,7 +471,7 @@ class Rclone:
         verbose = get_verbose(verbose)
         src = src if isinstance(src, str) else str(src.path)
         dst = dst if isinstance(dst, str) else str(dst.path)
-        cmd_list: list[str] = ["copyto", src, dst]
+        cmd_list: list[str] = ["copyto", src, dst, "--s3-no-check-bucket"]
         if other_args is not None:
             cmd_list += other_args
         cp = self._run(cmd_list, check=check)
@@ -511,6 +506,7 @@ class Rclone:
         low_level_retries = low_level_retries or 10
         retries = retries or 3
         other_args = other_args or []
+        other_args.append("--s3-no-check-bucket")
         checkers = checkers or 1000
         transfers = transfers or 32
         verbose = get_verbose(verbose)
@@ -666,6 +662,7 @@ class Rclone:
         cmd_list += ["--checkers", str(checkers)]
         cmd_list += ["--transfers", str(transfers)]
         cmd_list += ["--low-level-retries", str(low_level_retries)]
+        cmd_list.append("--s3-no-check-bucket")
         if multi_thread_streams is not None:
             cmd_list += ["--multi-thread-streams", str(multi_thread_streams)]
         if other_args:
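This hunk, the two above it, and the copy_dir/copy_remote hunks later in the file all hard-code rclone's `--s3-no-check-bucket` flag, which skips the bucket-existence check (and any implicit bucket creation) before each transfer; that saves a transaction per call and also helps when the credentials lack bucket-creation permission. The equivalent CLI invocation, with placeholder remotes:

```python
# Equivalent CLI for the modified copy paths (remotes are placeholders):
#   rclone copyto src:Bucket/a.bin dst:Bucket/a.bin --s3-no-check-bucket
cmd_list = ["copyto", "src:Bucket/a.bin", "dst:Bucket/a.bin", "--s3-no-check-bucket"]
```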
@@ -786,6 +783,85 @@ class Rclone:
         except subprocess.CalledProcessError:
             return False
 
+    def copy_file_parts(
+        self,
+        src: str,  # src:/Bucket/path/myfile.large.zst
+        dst_dir: str,  # dst:/Bucket/path/myfile.large.zst-parts/
+        part_infos: list[PartInfo] | None = None,
+        threads: int = 1,
+    ) -> Exception | None:
+        """Copy parts of a file from source to destination."""
+        from rclone_api.detail.copy_file_parts import copy_file_parts
+
+        out = copy_file_parts(
+            self=self,
+            src=src,
+            dst_dir=dst_dir,
+            part_infos=part_infos,
+            threads=threads,
+        )
+        return out
+
+    def write_text(
+        self,
+        dst: str,
+        text: str,
+    ) -> Exception | None:
+        """Write text to a file."""
+        data = text.encode("utf-8")
+        return self.write_bytes(dst=dst, data=data)
+
+    def write_bytes(
+        self,
+        dst: str,
+        data: bytes,
+    ) -> Exception | None:
+        """Write bytes to a file."""
+        with TemporaryDirectory() as tmpdir:
+            tmpfile = Path(tmpdir) / "file.bin"
+            tmpfile.write_bytes(data)
+            completed_proc = self.copy_to(str(tmpfile), dst, check=True)
+            if completed_proc.returncode != 0:
+                return Exception(f"Failed to write bytes to {dst}", completed_proc)
+        return None
+
+    def read_bytes(self, src: str) -> bytes | Exception:
+        """Read bytes from a file."""
+        with TemporaryDirectory() as tmpdir:
+            tmpfile = Path(tmpdir) / "file.bin"
+            completed_proc = self.copy_to(src, str(tmpfile), check=True)
+            if completed_proc.returncode != 0:
+                return Exception(f"Failed to read bytes from {src}", completed_proc)
+
+            if not tmpfile.exists():
+                return Exception(f"Failed to read bytes from {src}, file not found")
+            try:
+                return tmpfile.read_bytes()
+            except Exception as e:
+                return Exception(f"Failed to read bytes from {src}", e)
+
+    def read_text(self, src: str) -> str | Exception:
+        """Read text from a file."""
+        data = self.read_bytes(src)
+        if isinstance(data, Exception):
+            return data
+        try:
+            return data.decode("utf-8")
+        except UnicodeDecodeError as e:
+            return Exception(f"Failed to decode text from {src}", e)
+
+    def size_file(self, src: str) -> SizeSuffix | Exception:
+        """Get the size of a file or directory."""
+        src_parent = os.path.dirname(src)
+        src_name = os.path.basename(src)
+        out: SizeResult = self.size_files(src_parent, [src_name])
+        one_file = len(out.file_sizes) == 1
+        if not one_file:
+            return Exception(
+                f"More than one result returned, is this is a directory? {out}"
+            )
+        return SizeSuffix(out.total_size)
+
     def copy_file_resumable_s3(
         self,
         src: str,
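The helpers added in the hunk above stage through a temporary file and shell out to `copyto`, so they suit small blobs (configs, manifests) rather than bulk data; `copy_file_parts` is the bulk path. A round-trip sketch with placeholder remotes and an assumed `rclone` instance:

```python
# Round-trip sketch for the helpers added above; remotes are placeholders.
err = rclone.write_text("dst:Bucket/manifest.json", '{"parts": 42}')
if err is not None:
    raise err
text = rclone.read_text("dst:Bucket/manifest.json")
assert not isinstance(text, Exception)

# Bulk path: split one large object into part files under a -parts/ dir.
err = rclone.copy_file_parts(
    src="src:Bucket/path/myfile.large.zst",
    dst_dir="dst:Bucket/path/myfile.large.zst-parts/",
    threads=4,
)
```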
@@ -797,8 +873,7 @@ class Rclone:
         retries: int = 3,
         verbose: bool | None = None,
         max_chunks_before_suspension: int | None = None,
-
-        use_http_fetcher: bool = True,  # else use mount fetcher
+        backend_log: Path | None = None,
     ) -> MultiUploadResult:
         """For massive files that rclone can't handle in one go, this function will copy the file in chunks to an S3 store"""
         from rclone_api.http_server import HttpFetcher, HttpServer
@@ -822,37 +897,6 @@ class Rclone:
         )
         chunk_size = SizeSuffix(min_chunk_size)
 
-        other_args: list[str] = ["--no-modtime", "--vfs-read-wait", "1s"]
-
-        # BEGIN MOUNT SPECIFIC CONFIG
-        unit_chunk_size = chunk_size / read_threads
-        tmp_mount_dir = self._get_tmp_mount_dir()
-        vfs_read_chunk_size = unit_chunk_size
-        vfs_read_chunk_size_limit = chunk_size
-        vfs_read_chunk_streams = read_threads
-        vfs_disk_space_total_size = chunk_size
-        # assert (
-        #     chunk_size.as_int() % vfs_read_chunk_size.as_int() == 0
-        # ), f"chunk_size {chunk_size} must be a multiple of vfs_read_chunk_size {vfs_read_chunk_size}"
-        other_args += ["--vfs-read-chunk-size", vfs_read_chunk_size.as_str()]
-        other_args += [
-            "--vfs-read-chunk-size-limit",
-            vfs_read_chunk_size_limit.as_str(),
-        ]
-        other_args += ["--vfs-read-chunk-streams", str(vfs_read_chunk_streams)]
-        other_args += [
-            "--vfs-disk-space-total-size",
-            vfs_disk_space_total_size.as_str(),
-        ]
-        other_args += ["--read-only"]
-        other_args += ["--direct-io"]
-        # --vfs-cache-max-size
-        other_args += ["--vfs-cache-max-size", vfs_disk_space_total_size.as_str()]
-        mount_path = tmp_mount_dir / "RCLONE_API_DYNAMIC_MOUNT"
-        ## END MOUNT SPECIFIC CONFIG
-
-        # size_result: SizeResult = self.size_files(os.path.dirname(src), [name])
-
         if target_size < SizeSuffix("5M"):
             # fallback to normal copy
             completed_proc = self.copy_to(src, dst, check=True)
@@ -911,63 +955,34 @@ class Rclone:
             endpoint_url=section.endpoint(),
         )
 
-
-
-
-
-
-
-
-
-
-
-            http_server: HttpServer = self.serve_http(
-                src=src_path.parent.as_posix(), addr=f"localhost:{port}"
-            )
-            chunk_fetcher: HttpFetcher = http_server.get_fetcher(
-                path=src_path.name,
-                n_threads=read_threads,
-            )
-            # return chunk_fetcher.fetch
-            return Fetcher(fetch=chunk_fetcher.fetch, shutdown=http_server.shutdown)
-        else:
-            # Use the mount fetcher, which relies on FUSE which has problems in Docker/Windows/MacOS
-            mount_fetcher: MultiMountFileChunker = (
-                self.get_multi_mount_file_chunker(
-                    src=src_path.as_posix(),
-                    chunk_size=chunk_size,
-                    threads=read_threads,
-                    mount_log=mount_log,
-                    direct_io=True,
-                )
-            )
-            # return chunk_fetcher.fetch
-            return Fetcher(
-                fetch=mount_fetcher.fetch, shutdown=mount_fetcher.shutdown
-            )
+        port = random.randint(10000, 20000)
+        http_server: HttpServer = self.serve_http(
+            src=src_path.parent.as_posix(),
+            addr=f"localhost:{port}",
+            serve_http_log=backend_log,
+        )
+        chunk_fetcher: HttpFetcher = http_server.get_fetcher(
+            path=src_path.name,
+            n_threads=read_threads,
+        )
 
-        fetcher = get_fetcher()
         client = S3Client(s3_creds)
         upload_config: S3MutliPartUploadConfig = S3MutliPartUploadConfig(
             chunk_size=chunk_size.as_int(),
-            chunk_fetcher=
+            chunk_fetcher=chunk_fetcher.bytes_fetcher,
             max_write_threads=write_threads,
             retries=retries,
             resume_path_json=save_state_json,
             max_chunks_before_suspension=max_chunks_before_suspension,
         )
 
-        src_file = mount_path / name
-
         print(f"Uploading {name} to {s3_key} in bucket {bucket_name}")
         print(f"Source: {src_path}")
         print(f"bucket_name: {bucket_name}")
         print(f"upload_config: {upload_config}")
 
-        # get the file size
-
         upload_target = S3UploadTarget(
-            src_file=
+            src_file=src_path,
             src_file_size=size_result.total_size,
             bucket_name=bucket_name,
             s3_key=s3_key,
@@ -984,106 +999,7 @@ class Rclone:
             traceback.print_exc()
             raise
         finally:
-
-            fetcher.shutdown()
-
-    def get_multi_mount_file_chunker(
-        self,
-        src: str,
-        chunk_size: SizeSuffix,
-        threads: int,
-        mount_log: Path | None,
-        direct_io: bool,
-    ) -> MultiMountFileChunker:
-        from rclone_api.util import random_str
-
-        mounts: list[Mount] = []
-        vfs_read_chunk_size = chunk_size
-        vfs_read_chunk_size_limit = chunk_size
-        vfs_read_chunk_streams = 0
-        vfs_disk_space_total_size = chunk_size
-        other_args: list[str] = []
-        other_args += ["--no-modtime"]
-        other_args += ["--vfs-read-chunk-size", vfs_read_chunk_size.as_str()]
-        other_args += [
-            "--vfs-read-chunk-size-limit",
-            vfs_read_chunk_size_limit.as_str(),
-        ]
-        other_args += ["--vfs-read-chunk-streams", str(vfs_read_chunk_streams)]
-        other_args += [
-            "--vfs-disk-space-total-size",
-            vfs_disk_space_total_size.as_str(),
-        ]
-        other_args += ["--read-only"]
-        if direct_io:
-            other_args += ["--direct-io"]
-
-        base_mount_dir = self._get_tmp_mount_dir()
-        base_cache_dir = self._get_cache_dir()
-
-        filename = Path(src).name
-        with ThreadPoolExecutor(max_workers=threads) as executor:
-            futures: list[Future] = []
-            try:
-                for i in range(threads):
-                    tmp_mnts = base_mount_dir / random_str(12)
-                    verbose = mount_log is not None
-
-                    src_parent_path = Path(src).parent.as_posix()
-                    cache_dir = base_cache_dir / random_str(12)
-
-                    def task(
-                        src_parent_path=src_parent_path,
-                        tmp_mnts=tmp_mnts,
-                        cache_dir=cache_dir,
-                    ):
-                        clean_mount(tmp_mnts, verbose=verbose)
-                        prepare_mount(tmp_mnts, verbose=verbose)
-                        return self.mount(
-                            src=src_parent_path,
-                            outdir=tmp_mnts,
-                            allow_writes=False,
-                            use_links=True,
-                            vfs_cache_mode="minimal",
-                            verbose=False,
-                            cache_dir=cache_dir,
-                            cache_dir_delete_on_exit=True,
-                            log=mount_log,
-                            other_args=other_args,
-                        )
-
-                    futures.append(executor.submit(task))
-                mount_errors: list[Exception] = []
-                for fut in futures:
-                    try:
-                        mount = fut.result()
-                        mounts.append(mount)
-                    except Exception as er:
-                        warnings.warn(f"Error mounting: {er}")
-                        mount_errors.append(er)
-                if mount_errors:
-                    warnings.warn(f"Error mounting: {mount_errors}")
-                    raise Exception(mount_errors)
-            except Exception:
-                for mount in mounts:
-                    mount.close()
-                raise
-
-        src_path: Path = Path(src)
-        src_parent_path = src_path.parent.as_posix()
-        name = src_path.name
-        size_result: SizeResult = self.size_files(src_parent_path, [name])
-        filesize = size_result.total_size
-
-        executor = ThreadPoolExecutor(max_workers=threads)
-        filechunker: MultiMountFileChunker = MultiMountFileChunker(
-            filename=filename,
-            filesize=filesize,
-            mounts=mounts,
-            executor=executor,
-            verbose=mount_log is not None,
-        )
-        return filechunker
+            chunk_fetcher.shutdown()
 
     def copy_bytes(
         self,
@@ -1114,56 +1030,6 @@ class Rclone:
         except subprocess.CalledProcessError as e:
             return e
 
-    def copy_bytes_mount(
-        self,
-        src: str,
-        offset: int | SizeSuffix,
-        length: int | SizeSuffix,
-        chunk_size: SizeSuffix,
-        max_threads: int = 1,
-        # If outfile is supplied then bytes are written to this file and success returns bytes(0)
-        outfile: Path | None = None,
-        mount_log: Path | None = None,
-        direct_io: bool = True,
-    ) -> bytes | Exception:
-        """Copy a slice of bytes from the src file to dst. Parallelism is achieved through multiple mounted files."""
-        from rclone_api.types import FilePart
-
-        offset = SizeSuffix(offset).as_int()
-        length = SizeSuffix(length).as_int()
-        # determine number of threads from chunk size
-        threads = max(1, min(max_threads, length // chunk_size.as_int()))
-        # todo - implement max threads.
-        filechunker = self.get_multi_mount_file_chunker(
-            src=src,
-            chunk_size=chunk_size,
-            threads=threads,
-            mount_log=mount_log,
-            direct_io=direct_io,
-        )
-        try:
-            fut = filechunker.fetch(offset, length, extra=None)
-            fp: FilePart = fut.result()
-            payload = fp.payload
-            if isinstance(payload, Exception):
-                return payload
-            try:
-                if outfile is None:
-                    return payload.read_bytes()
-                shutil.move(payload, outfile)
-                return bytes(0)
-            finally:
-                fp.dispose()
-
-        except Exception as e:
-            warnings.warn(f"Error copying bytes: {e}")
-            return e
-        finally:
-            try:
-                filechunker.shutdown()
-            except Exception as e:
-                warnings.warn(f"Error closing filechunker: {e}")
-
     def copy_dir(
         self, src: str | Dir, dst: str | Dir, args: list[str] | None = None
     ) -> CompletedProcess:
@@ -1171,7 +1037,7 @@ class Rclone:
         # convert src to str, also dst
         src = convert_to_str(src)
         dst = convert_to_str(dst)
-        cmd_list: list[str] = ["copy", src, dst]
+        cmd_list: list[str] = ["copy", src, dst, "--s3-no-check-bucket"]
         if args is not None:
             cmd_list += args
         cp = self._run(cmd_list)
@@ -1181,7 +1047,7 @@ class Rclone:
         self, src: Remote, dst: Remote, args: list[str] | None = None
     ) -> CompletedProcess:
         """Copy a remote to another remote."""
-        cmd_list: list[str] = ["copy", str(src), str(dst)]
+        cmd_list: list[str] = ["copy", str(src), str(dst), "--s3-no-check-bucket"]
         if args is not None:
             cmd_list += args
         # return self._run(cmd_list)
@@ -1251,45 +1117,6 @@ class Rclone:
         )
         return mount
 
-    @contextmanager
-    def scoped_mount(
-        self,
-        src: Remote | Dir | str,
-        outdir: Path,
-        allow_writes: bool | None = None,
-        use_links: bool | None = None,
-        vfs_cache_mode: str | None = None,
-        verbose: bool | None = None,
-        log: Path | None = None,
-        cache_dir: Path | None = None,
-        cache_dir_delete_on_exit: bool | None = None,
-        other_args: list[str] | None = None,
-    ) -> Generator[Mount, None, None]:
-        """Like mount, but can be used in a context manager."""
-        error_happened = False
-        mount: Mount = self.mount(
-            src,
-            outdir,
-            allow_writes=allow_writes,
-            use_links=use_links,
-            vfs_cache_mode=vfs_cache_mode,
-            verbose=verbose,
-            cache_dir=cache_dir,
-            cache_dir_delete_on_exit=cache_dir_delete_on_exit,
-            log=log,
-            other_args=other_args,
-        )
-        try:
-            yield mount
-        except Exception as e:
-            error_happened = True
-            stack_trace = traceback.format_exc()
-            warnings.warn(f"Error in scoped_mount: {e}\n\nStack Trace:\n{stack_trace}")
-            raise
-        finally:
-            if not error_happened or (not allow_writes):
-                mount.close()
-
     # Settings optimized for s3.
     def mount_s3(
         self,
@@ -1395,7 +1222,8 @@ class Rclone:
     def serve_http(
         self,
         src: str,
-        addr: str = "localhost:8080",
+        addr: str | None = None,
+        serve_http_log: Path | None = None,
         other_args: list[str] | None = None,
     ) -> HttpServer:
         """Serve a remote or directory via HTTP.
@@ -1404,11 +1232,15 @@ class Rclone:
             src: Remote or directory to serve
             addr: Network address and port to serve on (default: localhost:8080)
         """
+        addr = addr or f"localhost:{find_free_port()}"
         _, subpath = src.split(":", 1)  # might not work on local paths.
         cmd_list: list[str] = ["serve", "http", "--addr", addr, src]
+        if serve_http_log:
+            cmd_list += ["--log-file", str(serve_http_log)]
+            cmd_list += ["-vvvv"]
         if other_args:
             cmd_list += other_args
-        proc = self._launch_process(cmd_list)
+        proc = self._launch_process(cmd_list, log=serve_http_log)
         time.sleep(2)
         if proc.poll() is not None:
             raise ValueError("HTTP serve process failed to start")
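serve_http now picks a free local port by default and can tee rclone's own verbose log to a file; copy_file_resumable_s3 uses it to replace the removed FUSE-mount fetcher. A sketch of the standalone flow, with placeholder remote and file names:

```python
# Sketch of the HTTP-fetcher flow that replaces the mount fetcher above;
# names are placeholders, and an `rclone` instance is assumed.
from pathlib import Path

server = rclone.serve_http(
    src="src:Bucket/path",
    serve_http_log=Path("serve_http.log"),  # optional; adds --log-file -vvvv
)
fetcher = server.get_fetcher(path="myfile.large.zst", n_threads=8)
try:
    pass  # hand fetcher.bytes_fetcher to S3MutliPartUploadConfig(chunk_fetcher=...)
finally:
    fetcher.shutdown()
```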
rclone_api/remote.py CHANGED
@@ -5,14 +5,14 @@ class Remote:
     """Remote (root) directory."""
 
     def __init__(self, name: str, rclone: Any) -> None:
-        from rclone_api.rclone import Rclone
+        from rclone_api.rclone_impl import RcloneImpl
 
         if ":" in name:
             raise ValueError("Remote name cannot contain ':'")
 
-        assert isinstance(rclone, Rclone)
+        assert isinstance(rclone, RcloneImpl)
         self.name = name
-        self.rclone: Rclone = rclone
+        self.rclone: RcloneImpl = rclone
 
     def __str__(self) -> str:
         return f"{self.name}:"
rclone_api/rpath.py CHANGED
@@ -1,4 +1,5 @@
 import json
+from datetime import datetime
 from typing import Any
 
 from rclone_api.remote import Remote
@@ -17,8 +18,10 @@ class RPath:
         mod_time: str,
         is_dir: bool,
     ) -> None:
-        from rclone_api.rclone import Rclone
+        from rclone_api.rclone_impl import RcloneImpl
 
+        if path.endswith("/"):
+            path = path[:-1]
         self.remote = remote
         self.path = path
         self.name = name
@@ -26,13 +29,17 @@
         self.mime_type = mime_type
         self.mod_time = mod_time
         self.is_dir = is_dir
-        self.rclone: Rclone | None = None
+        self.rclone: RcloneImpl | None = None
+
+    def mod_time_dt(self) -> datetime:
+        """Return the modification time as a datetime object."""
+        return datetime.fromisoformat(self.mod_time)
 
     def set_rclone(self, rclone: Any) -> None:
         """Set the rclone object."""
-        from rclone_api.rclone import Rclone
+        from rclone_api.rclone_impl import RcloneImpl
 
-        assert isinstance(rclone, Rclone)
+        assert isinstance(rclone, RcloneImpl)
         self.rclone = rclone
 
     @staticmethod
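RPath now strips a trailing slash from incoming paths and exposes the modification time as a datetime; the mod_time string is assumed to be ISO 8601, as implied by the use of datetime.fromisoformat. Given an RPath from a directory listing:

```python
# Sketch: `rp` is an RPath obtained from a directory listing.
print(rp.path)           # any trailing "/" is now stripped in __init__
dt = rp.mod_time_dt()    # datetime parsed from the ISO-8601 mod_time string
print(dt.isoformat())
```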