rclone-api 1.5.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rclone_api/__init__.py +951 -0
- rclone_api/assets/example.txt +1 -0
- rclone_api/cli.py +15 -0
- rclone_api/cmd/analyze.py +51 -0
- rclone_api/cmd/copy_large_s3.py +111 -0
- rclone_api/cmd/copy_large_s3_finish.py +81 -0
- rclone_api/cmd/list_files.py +27 -0
- rclone_api/cmd/save_to_db.py +77 -0
- rclone_api/completed_process.py +60 -0
- rclone_api/config.py +87 -0
- rclone_api/convert.py +31 -0
- rclone_api/db/__init__.py +3 -0
- rclone_api/db/db.py +277 -0
- rclone_api/db/models.py +57 -0
- rclone_api/deprecated.py +24 -0
- rclone_api/detail/copy_file_parts_resumable.py +42 -0
- rclone_api/detail/walk.py +116 -0
- rclone_api/diff.py +164 -0
- rclone_api/dir.py +113 -0
- rclone_api/dir_listing.py +66 -0
- rclone_api/exec.py +40 -0
- rclone_api/experimental/flags.py +89 -0
- rclone_api/experimental/flags_base.py +58 -0
- rclone_api/file.py +205 -0
- rclone_api/file_item.py +68 -0
- rclone_api/file_part.py +198 -0
- rclone_api/file_stream.py +52 -0
- rclone_api/filelist.py +30 -0
- rclone_api/group_files.py +256 -0
- rclone_api/http_server.py +244 -0
- rclone_api/install.py +95 -0
- rclone_api/log.py +44 -0
- rclone_api/mount.py +55 -0
- rclone_api/mount_util.py +247 -0
- rclone_api/process.py +187 -0
- rclone_api/rclone_impl.py +1285 -0
- rclone_api/remote.py +21 -0
- rclone_api/rpath.py +102 -0
- rclone_api/s3/api.py +109 -0
- rclone_api/s3/basic_ops.py +61 -0
- rclone_api/s3/chunk_task.py +187 -0
- rclone_api/s3/create.py +107 -0
- rclone_api/s3/multipart/file_info.py +7 -0
- rclone_api/s3/multipart/finished_piece.py +69 -0
- rclone_api/s3/multipart/info_json.py +239 -0
- rclone_api/s3/multipart/merge_state.py +147 -0
- rclone_api/s3/multipart/upload_info.py +62 -0
- rclone_api/s3/multipart/upload_parts_inline.py +356 -0
- rclone_api/s3/multipart/upload_parts_resumable.py +304 -0
- rclone_api/s3/multipart/upload_parts_server_side_merge.py +546 -0
- rclone_api/s3/multipart/upload_state.py +165 -0
- rclone_api/s3/types.py +67 -0
- rclone_api/scan_missing_folders.py +153 -0
- rclone_api/types.py +402 -0
- rclone_api/util.py +324 -0
- rclone_api-1.5.8.dist-info/LICENSE +21 -0
- rclone_api-1.5.8.dist-info/METADATA +969 -0
- rclone_api-1.5.8.dist-info/RECORD +61 -0
- rclone_api-1.5.8.dist-info/WHEEL +5 -0
- rclone_api-1.5.8.dist-info/entry_points.txt +5 -0
- rclone_api-1.5.8.dist-info/top_level.txt +1 -0
rclone_api/dir.py
ADDED
@@ -0,0 +1,113 @@
import json
from pathlib import Path
from typing import Generator

from rclone_api.dir_listing import DirListing
from rclone_api.remote import Remote
from rclone_api.rpath import RPath
from rclone_api.types import ListingOption, Order


class Dir:
    """Remote file dataclass."""

    @property
    def remote(self) -> Remote:
        return self.path.remote

    @property
    def name(self) -> str:
        return self.path.name

    def __init__(self, path: RPath | Remote) -> None:
        """Initialize Dir with either an RPath or Remote.

        Args:
            path: Either an RPath object or a Remote object
        """
        if isinstance(path, Remote):
            # Need to create an RPath for the Remote's root
            self.path = RPath(
                remote=path,
                path=str(path),
                name=str(path),
                size=0,
                mime_type="inode/directory",
                mod_time="",
                is_dir=True,
            )
            # Ensure the RPath has the same rclone instance as the Remote
            self.path.set_rclone(path.rclone)
        else:
            self.path = path
            # self.path.set_rclone(self.path.remote.rclone)
        assert self.path.rclone is not None

    def ls(
        self,
        max_depth: int | None = None,
        glob: str | None = None,
        order: Order = Order.NORMAL,
        listing_option: ListingOption = ListingOption.ALL,
    ) -> DirListing:
        """List files and directories in the given path."""
        assert self.path.rclone is not None
        dir = Dir(self.path)
        return self.path.rclone.ls(
            dir,
            max_depth=max_depth,
            glob=glob,
            order=order,
            listing_option=listing_option,
        )

    def relative_to(self, other: "Dir") -> str:
        """Return the relative path to the other directory."""
        self_path = Path(self.path.path)
        other_path = Path(other.path.path)
        rel_path = self_path.relative_to(other_path)
        return str(rel_path.as_posix())

    def walk(
        self, breadth_first: bool, max_depth: int = -1
    ) -> Generator[DirListing, None, None]:
        """List files and directories in the given path."""
        from rclone_api.detail.walk import walk

        assert self.path.rclone is not None
        return walk(self, breadth_first=breadth_first, max_depth=max_depth)

    def to_json(self) -> dict:
        """Convert the Dir to a JSON serializable dictionary."""
        return self.path.to_json()

    def __str__(self) -> str:
        return str(self.path)

    def __repr__(self) -> str:
        data = self.path.to_json()
        data_str = json.dumps(data)
        return data_str

    def to_string(self, include_remote: bool = True) -> str:
        """Convert the Dir to a string."""
        out = str(self.path)
        if not include_remote:
            _, out = out.split(":", 1)
        return out

    # / operator
    def __truediv__(self, other: str) -> "Dir":
        """Join the current path with another path."""
        path = Path(self.path.path) / other
        rpath = RPath(
            self.path.remote,
            str(path.as_posix()),
            name=other,
            size=0,
            mime_type="inode/directory",
            mod_time="",
            is_dir=True,
        )
        rpath.set_rclone(self.path.rclone)
        return Dir(rpath)
rclone_api/dir_listing.py
ADDED
@@ -0,0 +1,66 @@
import json
import warnings

from rclone_api.rpath import RPath


def _dedupe(items: list[RPath]) -> list[RPath]:
    """Remove duplicate items from a list of RPath objects."""
    seen = set()
    unique_items = []
    for item in items:
        if item not in seen:
            seen.add(item)
            unique_items.append(item)
        else:
            warnings.warn(f"Duplicate item found: {item}, filtered out.")
    return unique_items


class DirListing:
    """Remote file dataclass."""

    def __init__(self, dirs_and_files: list[RPath]) -> None:
        from rclone_api.dir import Dir
        from rclone_api.file import File

        dirs_and_files = _dedupe(dirs_and_files)

        self.dirs: list[Dir] = [Dir(d) for d in dirs_and_files if d.is_dir]
        self.files: list[File] = [File(f) for f in dirs_and_files if not f.is_dir]

    def files_relative(self, prefix: str) -> list[str]:
        """Return a list of file paths relative to the root directory."""
        from rclone_api.file import File

        out: list[str] = []
        f: File
        for f in self.files:
            out.append(f.relative_to(prefix))
        return out

    def __str__(self) -> str:
        n_files = len(self.files)
        n_dirs = len(self.dirs)
        msg = f"\nFiles: {n_files}\n"
        if n_files > 0:
            for f in self.files:
                msg += f" {f}\n"
        msg += f"Dirs: {n_dirs}\n"
        if n_dirs > 0:
            for d in self.dirs:
                msg += f" {d}\n"
        return msg

    def __repr__(self) -> str:
        dirs: list = []
        files: list = []
        for d in self.dirs:
            dirs.append(d.path.to_json())
        for f in self.files:
            files.append(f.path.to_json())
        json_obj = {
            "dirs": dirs,
            "files": files,
        }
        return json.dumps(json_obj, indent=2)
rclone_api/exec.py
ADDED
@@ -0,0 +1,40 @@
import subprocess
from dataclasses import dataclass
from pathlib import Path

from rclone_api.config import Config
from rclone_api.process import Process, ProcessArgs


@dataclass
class RcloneExec:
    """Rclone execution dataclass."""

    rclone_config: Path | Config
    rclone_exe: Path

    def execute(
        self, cmd: list[str], check: bool, capture: bool | Path | None = None
    ) -> subprocess.CompletedProcess:
        """Execute rclone command."""
        from rclone_api.util import rclone_execute

        return rclone_execute(
            cmd, self.rclone_config, self.rclone_exe, check=check, capture=capture
        )

    def launch_process(
        self, cmd: list[str], capture: bool | None, log: Path | None
    ) -> Process:
        """Launch rclone process."""

        args: ProcessArgs = ProcessArgs(
            cmd=cmd,
            rclone_conf=self.rclone_config,
            rclone_exe=self.rclone_exe,
            cmd_list=cmd,
            capture_stdout=capture,
            log=log,
        )
        process = Process(args)
        return process
rclone_api/experimental/flags.py
ADDED
@@ -0,0 +1,89 @@
from dataclasses import dataclass

from rclone_api.experimental.flags_base import BaseFlags, merge_flags
from rclone_api.types import SizeSuffix


@dataclass
class CopyFlags(BaseFlags):
    check_first: bool | None = None
    checksum: bool | None = False
    compare_dest: list[str] | None = None
    copy_dest: list[str] | None = None
    cutoff_mode: str | None = None
    ignore_case_sync: bool | None = None
    ignore_checksum: bool | None = None
    ignore_existing: bool | None = None
    ignore_size: bool | None = None
    ignore_times: bool | None = None
    immutable: bool | None = None
    inplace: bool | None = None
    links: bool | None = None
    max_backlog: int | None = None
    max_duration: str | None = None
    max_transfer: SizeSuffix | None = None
    metadata: bool | None = None
    modify_window: str | None = None
    multi_thread_chunk_size: str | None = None
    multi_thread_cutoff: str | None = None
    multi_thread_streams: int | None = None
    multi_thread_write_buffer_size: str | None = None
    no_check_dest: bool | None = None
    no_traverse: bool | None = None
    no_update_dir_modtime: bool | None = None
    no_update_modtime: bool | None = None
    order_by: str | None = None
    partial_suffix: str | None = None
    refresh_times: bool | None = None
    server_side_across_configs: bool | None = None
    size_only: bool | None = None
    streaming_upload_cutoff: str | None = None
    update: bool | None = None

    def to_args(self) -> list[str]:
        return super().to_args()

    def merge(self, other: "CopyFlags") -> "CopyFlags":
        return merge_flags(CopyFlags, self, other)

    def __repr__(self):
        return super().__repr__()


@dataclass
class Flags(BaseFlags):
    copy: CopyFlags | None = None

    def to_args(self) -> list[str]:
        return super().to_args()

    def merge(self, other: "Flags") -> "Flags":
        return merge_flags(Flags, self, other)

    def __repr__(self):
        return super().__repr__()


def unit_test() -> None:
    copy_flags_a = CopyFlags(compare_dest=["a", "b"])
    copy_flags_b = CopyFlags(checksum=False)
    flags_a = copy_flags_a.merge(copy_flags_b)
    print("A:", flags_a)

    copy_flags_c = CopyFlags(checksum=True)
    copy_flags_d = CopyFlags(checksum=False)

    merged_c_d = copy_flags_c.merge(copy_flags_d)
    print("B:", merged_c_d)
    merged_d_c = copy_flags_d.merge(copy_flags_c)
    print("C:", merged_d_c)

    # now do the one with the SizeSuffix type
    copy_flags_e = CopyFlags(max_transfer=SizeSuffix("128M"))
    copy_flags_f = CopyFlags(max_transfer=SizeSuffix("256M"))
    merged_e_f = copy_flags_e.merge(copy_flags_f)
    print("D:", merged_e_f)


if __name__ == "__main__":
    unit_test()
rclone_api/experimental/flags_base.py
ADDED
@@ -0,0 +1,58 @@
from dataclasses import dataclass, fields, is_dataclass
from typing import Type, TypeVar

T = TypeVar("T")


def merge_flags(cls: Type[T], dataclass_a: T, dataclass_b: T) -> T:
    if not is_dataclass(dataclass_a) or not is_dataclass(dataclass_b):
        raise ValueError("Both inputs must be dataclass instances")
    if type(dataclass_a) is not type(dataclass_b):
        raise ValueError("Dataclass instances must be of the same type")

    merged_kwargs = {}
    for field in fields(dataclass_a):
        a_value = getattr(dataclass_a, field.name)
        b_value = getattr(dataclass_b, field.name)

        if is_dataclass(a_value) and is_dataclass(b_value):
            merged_kwargs[field.name] = merge_flags(type(a_value), a_value, b_value)
        else:
            merged_kwargs[field.name] = b_value if b_value is not None else a_value

    return cls(**merged_kwargs)


def _field_name_to_flag(field_name: str) -> str:
    return f"--{field_name.replace('_', '-')}"


@dataclass
class BaseFlags:
    """provides to_args(), merge() and __repr__ methods for flags dataclasses"""

    def to_args(self) -> list[str]:
        args = []
        for field in fields(self):
            value = getattr(self, field.name)
            if value is None:
                continue
            # If the field value is a nested dataclass that supports to_args, use it.
            if is_dataclass(value) and hasattr(value, "to_args"):
                to_args = getattr(value, "to_args")
                args.extend(to_args())
            elif isinstance(value, bool):
                # Only include the flag if the boolean is True.
                if value:
                    args.append(_field_name_to_flag(field.name))
            else:
                args.append(_field_name_to_flag(field.name))
                if isinstance(value, list):
                    # Join list values with a comma.
                    args.append(",".join(map(str, value)))
                else:
                    args.append(str(value))
        return args

    def __repr__(self):
        return str(self.to_args())
rclone_api/file.py
ADDED
@@ -0,0 +1,205 @@
import json
import warnings
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path

from rclone_api.rpath import RPath

_STRING_INTERNER: dict[str, str] = {}


def _intern(s: str) -> str:
    return _STRING_INTERNER.setdefault(s, s)


_SUFFIX_LARGEST_SIZE = len("torrents") + 2


def _suffix_clean_bad_parts(suffix: list[str]) -> list[str]:
    """Remove any bad parts from the suffix list."""
    out = []
    for part in suffix:
        if part in ["", ""]:
            continue
        if " " in part:
            # split on spaces
            continue
        if "--" in part:
            # split on --
            parts = part.split("--")
            parts = [x.strip() for x in parts if x.strip()]
            out.extend(parts)
        out.append(part)

    out, tmp = [], out
    for part in tmp:
        if len(part) > _SUFFIX_LARGEST_SIZE:
            continue
        out.append(part)
    return out


def _get_suffix(name: str, chop_compressed_suffixes: bool = True) -> str:
    # name.sql.gz -> sql.gz
    try:
        parts = name.split(".")
        if len(parts) == 1:
            return ""
        parts = _suffix_clean_bad_parts(parts)
        last_part = parts[-1]
        if chop_compressed_suffixes:
            if last_part == "gz" and len(parts) > 2:
                parts = parts[:-1]
        return ".".join(parts[-1:])
    except IndexError:
        warnings.warn(f"Invalid name: {name} for normal suffix extraction")
        suffix = Path(name).suffix
        if suffix.startswith("."):
            return suffix[1:]
        return suffix


# File is too complex, this is a simple dataclass that can be streamed out.
@dataclass
class FileItem:
    """Remote file dataclass."""

    remote: str
    parent: str
    name: str
    size: int
    mime_type: str
    mod_time: str
    hash: str | None = None
    id: int | None = None

    @property
    def path(self) -> str:
        if self.parent == ".":
            return f"{self.remote}/{self.name}"
        else:
            return f"{self.remote}/{self.parent}/{self.name}"

    @property
    def path_no_remote(self) -> str:
        if self.parent == ".":
            return f"{self.name}"
        else:
            return f"{self.parent}/{self.name}"

    @property
    def real_suffix(self) -> str:
        return self._suffix

    def __post_init__(self):
        self.parent = _intern(self.parent)
        self.mime_type = _intern(self.mime_type)
        self.remote = _intern(self.remote)
        self._suffix = _intern(_get_suffix(self.name))

    @staticmethod
    def from_json(remote: str, data: dict) -> "FileItem | None":
        try:
            path_str: str = data["Path"]
            parent_path = Path(path_str).parent.as_posix()
            name = data["Name"]
            size = data["Size"]
            mime_type = data["MimeType"]
            mod_time = data["ModTime"]

            return FileItem(
                remote=remote,
                parent=parent_path,
                name=name,
                size=size,
                mime_type=mime_type,
                mod_time=mod_time,
            )
        except KeyError:
            warnings.warn(f"Invalid data: {data}")
            return None

    @staticmethod
    def from_json_str(remote: str, data: str) -> "FileItem | None":
        try:
            data_dict = json.loads(data)
            return FileItem.from_json(remote, data_dict)
        except json.JSONDecodeError:
            warnings.warn(f"Invalid JSON data: {data}")
            return None

    # hasher for set membership
    def __hash__(self) -> int:
        return hash(self.path_no_remote)


class File:
    """Remote file dataclass."""

    def __init__(
        self,
        path: RPath,
    ) -> None:
        self.path = path

    @property
    def name(self) -> str:
        return self.path.name

    def mod_time(self) -> str:
        return self.path.mod_time

    def mod_time_dt(self) -> datetime:
        """Return the modification time as a datetime object."""
        return self.path.mod_time_dt()

    def read_text(self) -> str:
        """Read the file contents as text.

        Returns:
            str: The file contents

        Raises:
            RuntimeError: If no rclone instance is associated with this file
            RuntimeError: If the path represents a directory
        """
        if self.path.rclone is None:
            raise RuntimeError("No rclone instance associated with this file")
        if self.path.is_dir:
            raise RuntimeError("Cannot read a directory as bytes")

        result = self.path.rclone._run(["cat", self.path.path], check=True)
        return result.stdout

    def to_json(self) -> dict:
        """Convert the File to a JSON serializable dictionary."""
        return self.path.to_json()

    def to_string(self, include_remote: bool = True) -> str:
        """Convert the File to a string."""
        # out = str(self.path)
        remote = self.path.remote
        rest = self.path.path
        if include_remote:
            return f"{remote.name}:{rest}"
        return rest

    def relative_to(self, prefix: str) -> str:
        """Return the relative path to the other directory."""
        self_path = Path(str(self))
        rel_path = self_path.relative_to(prefix)
        return str(rel_path.as_posix())

    @property
    def size(self) -> int:
        """Get the size of the file."""
        return self.path.size

    def __str__(self) -> str:
        return str(self.path)

    def __repr__(self) -> str:
        data = self.path.to_json()
        data_str = json.dumps(data)
        return data_str
rclone_api/file_item.py
ADDED
@@ -0,0 +1,68 @@
import json
import warnings
from dataclasses import dataclass
from pathlib import Path

_STRING_INTERNER: dict[str, str] = {}


def _intern(s: str) -> str:
    return _STRING_INTERNER.setdefault(s, s)


# File is too complex, this is a simple dataclass that can be streamed out.
@dataclass
class FileItem:
    """Remote file dataclass."""

    remote: str
    parent: str
    name: str
    size: int
    mime_type: str
    mod_time: str

    @property
    def path(self) -> str:
        return f"{self.remote}/{self.parent}/{self.name}"

    @property
    def suffix(self) -> str:
        return self._suffix

    def __post_init__(self):
        self.parent = _intern(self.parent)
        self.mime_type = _intern(self.mime_type)
        suffix = Path(self.name).suffix
        self._suffix = _intern(suffix)

    @staticmethod
    def from_json(data: dict) -> "FileItem | None":
        try:
            path_str: str = data["Path"]
            parent_path = Path(path_str).parent.as_posix()
            name = data["Name"]
            size = data["Size"]
            mime_type = data["MimeType"]
            mod_time = data["ModTime"]

            return FileItem(
                remote="DUMMY",
                parent=parent_path,
                name=name,
                size=size,
                mime_type=mime_type,
                mod_time=mod_time,
            )
        except KeyError:
            warnings.warn(f"Invalid data: {data}")
            return None

    @staticmethod
    def from_json_str(data: str) -> "FileItem | None":
        try:
            data_dict = json.loads(data)
            return FileItem.from_json(data_dict)
        except json.JSONDecodeError:
            warnings.warn(f"Invalid JSON data: {data}")
            return None