rclone-api 1.3.19__py2.py3-none-any.whl → 1.3.20__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
rclone_api/__init__.py CHANGED
@@ -9,6 +9,7 @@ from .dir import Dir
9
9
  from .dir_listing import DirListing
10
10
  from .file import File, FileItem
11
11
  from .filelist import FileList
12
+ from .http_server import HttpFetcher, HttpServer, Range
12
13
 
13
14
  # Import the configure_logging function to make it available at package level
14
15
  from .log import configure_logging, setup_default_logging
@@ -45,6 +46,9 @@ __all__ = [
45
46
  "SizeSuffix",
46
47
  "configure_logging",
47
48
  "log",
49
+ "HttpServer",
50
+ "Range",
51
+ "HttpFetcher",
48
52
  ]
49
53
 
50
54
  setup_default_logging()
@@ -0,0 +1,233 @@
1
+ """
2
+ Unit test file for testing rclone mount functionality.
3
+ """
4
+
5
+ import tempfile
6
+ import warnings
7
+ from concurrent.futures import Future, ThreadPoolExecutor
8
+ from dataclasses import dataclass
9
+ from pathlib import Path
10
+ from typing import Any
11
+
12
+ import httpx
13
+
14
+ from rclone_api.process import Process
15
+ from rclone_api.types import FilePart, SizeSuffix, get_chunk_tmpdir
16
+
17
+ _TIMEOUT = 10 * 60 # 10 minutes
18
+
19
+
20
@dataclass
class Range:
    """Half-open byte range [start, end) used for HTTP range requests."""

    start: int
    end: int

    def to_header(self) -> dict[str, str]:
        """Render as an HTTP ``Range`` header (end byte is inclusive per RFC 7233)."""
        last_byte = self.end - 1
        return {"Range": f"bytes={self.start}-{last_byte}"}


# Keep a handle on the builtin before methods below shadow it with a
# parameter named ``range``.
_range = range
33
+
34
+
35
class HttpServer:
    """Wrapper around a running rclone ``serve http`` process.

    Provides whole-file and byte-range downloads over HTTP (optionally
    multi-threaded) and owns the subprocess so it can be shut down, either
    explicitly via shutdown() or by use as a context manager.
    """

    def __init__(self, url: str, subpath: str, process: Process) -> None:
        self.url = url
        self.subpath = subpath
        # Kept so shutdown() can terminate the rclone subprocess.
        self.process: Process | None = process

    def _get_file_url(self, path: str | Path) -> str:
        """Build the absolute URL for ``path`` on this server."""
        # NOTE(review): self.subpath is currently unused when building URLs;
        # confirm whether served subpaths ever need to be prefixed here.
        path = Path(path).as_posix()
        return f"{self.url}/{path}"

    def get_fetcher(self, path: str, n_threads: int = 16) -> "HttpFetcher":
        """Return a threaded range fetcher for ``path`` on this server."""
        return HttpFetcher(self, path, n_threads=n_threads)

    def get(self, path: str) -> bytes | Exception:
        """Download ``path`` fully and return its bytes (or the error).

        Bug fix: the previous implementation handed ``TemporaryFile().name``
        to download() — on POSIX that is an integer fd, so ``Path(file.name)``
        raised — and then read from the still-empty anonymous handle. Use a
        real named path in a temp directory and propagate download failures.
        """
        with tempfile.TemporaryDirectory() as tmpdir:
            dst = Path(tmpdir) / "download.bin"
            out = self.download(path, dst, None)
            if isinstance(out, Exception):
                return out
            return dst.read_bytes()

    def size(self, path: str) -> int | Exception:
        """Get the size of the file from the server via a HEAD request."""
        try:
            assert self.process is not None
            url = self._get_file_url(path)
            response = httpx.head(url)
            response.raise_for_status()
            size = int(response.headers["Content-Length"])
            return size
        except Exception as e:
            warnings.warn(f"Failed to get size of {self.url}/{path}: {e}")
            return e

    def download(
        self, path: str, dst: Path, range: Range | None = None
    ) -> Path | Exception:
        """Stream ``path`` (optionally only ``range``) into ``dst``.

        Returns ``dst`` on success, or the Exception on failure.
        """
        if not dst.parent.exists():
            dst.parent.mkdir(parents=True, exist_ok=True)
        headers: dict[str, str] = {}
        if range:
            headers.update(range.to_header())
        url = self._get_file_url(path)
        try:
            with httpx.stream(
                "GET", url, headers=headers, timeout=_TIMEOUT
            ) as response:
                response.raise_for_status()
                with open(dst, "wb") as file:
                    for chunk in response.iter_bytes(chunk_size=8192):
                        if chunk:
                            file.write(chunk)
            if range:
                print(f"Downloaded bytes {range.start}-{range.end} to {dst}")
            else:
                size = dst.stat().st_size
                print(f"Downloaded {size} bytes to {dst}")
            return dst
        except Exception as e:
            warnings.warn(f"Failed to download {url} to {dst}: {e}")
            return e

    def download_multi_threaded(
        self,
        src_path: str,
        dst_path: Path,
        chunk_size: int = 32 * 1024 * 1024,
        n_threads: int = 16,
        range: Range | None = None,
    ) -> Path | Exception:
        """Download ``src_path`` to ``dst_path`` in parallel byte-range chunks.

        Chunks are written to ``dst_path.with_suffix(".<offset>")`` part
        files, then concatenated in offset order and deleted. On any chunk
        failure the part files are cleaned up and an Exception is returned.
        """

        finished: list[Path] = []
        errors: list[Exception] = []

        if range is None:
            sz = self.size(src_path)
            if isinstance(sz, Exception):
                return sz
            range = Range(0, sz)

        with ThreadPoolExecutor(max_workers=n_threads) as executor:
            try:
                futures: list[Future[Path | Exception]] = []
                # ``_range`` is the builtin; the parameter shadows it.
                for start in _range(range.start, range.end, chunk_size):
                    end = min(start + chunk_size, range.end)
                    r = Range(start=start, end=end)

                    # Default-arg binding avoids the late-binding-closure trap.
                    def task(r: Range = r) -> Path | Exception:
                        dst = dst_path.with_suffix(f".{r.start}")
                        out = self.download(src_path, dst, r)
                        if isinstance(out, Exception):
                            warnings.warn(f"Failed to download chunked: {out}")
                        return out

                    futures.append(executor.submit(task))
                # Results are collected in submission order, which keeps
                # ``finished`` sorted by offset for the concatenation below.
                for fut in futures:
                    result = fut.result()
                    if isinstance(result, Exception):
                        errors.append(result)
                    else:
                        finished.append(result)
                if errors:
                    for finished_file in finished:
                        try:
                            finished_file.unlink()
                        except Exception as e:
                            warnings.warn(f"Failed to delete file {finished_file}: {e}")
                    return Exception(f"Failed to download chunked: {errors}")

                if not dst_path.parent.exists():
                    dst_path.parent.mkdir(parents=True, exist_ok=True)

                count = 0
                with open(dst_path, "wb") as file:
                    for f in finished:
                        print(f"Appending {f} to {dst_path}")
                        with open(f, "rb") as part:
                            # The walrus condition already ends the loop on
                            # EOF; the old ``if not chunk: break`` was dead.
                            while chunk := part.read(8192 * 4):
                                count += len(chunk)
                                file.write(chunk)
                        print(f"Removing {f}")
                        f.unlink()
                return dst_path
            except Exception as e:
                warnings.warn(f"Failed to copy chunked: {e}")
                for f in finished:
                    try:
                        if f.exists():
                            f.unlink()
                    except Exception as ee:
                        warnings.warn(f"Failed to delete file {f}: {ee}")
                return e

    def __enter__(self) -> "HttpServer":
        return self

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        self.shutdown()

    def shutdown(self) -> None:
        """Terminate the rclone serve process and close its pipes."""
        if self.process:
            self.process.terminate()
            if self.process.stdout:
                self.process.stdout.close()
            if self.process.stderr:
                self.process.stderr.close()
196
+
197
+
198
class HttpFetcher:
    """Fetches byte ranges of one file from an HttpServer on a thread pool."""

    def __init__(self, server: "HttpServer", path: str, n_threads: int) -> None:
        from threading import Semaphore

        self.server = server
        self.path = path
        self.executor = ThreadPoolExecutor(max_workers=n_threads)
        # Bounds the number of in-flight chunk downloads.
        self.semaphore = Semaphore(n_threads)

    def fetch(
        self, offset: int | SizeSuffix, size: int | SizeSuffix, extra: Any
    ) -> Future[FilePart]:
        """Schedule a download of ``size`` bytes at ``offset``; returns a future."""
        start = offset.as_int() if isinstance(offset, SizeSuffix) else offset
        length = size.as_int() if isinstance(size, SizeSuffix) else size

        def task() -> FilePart:
            from rclone_api.util import random_str

            try:
                byte_range = Range(start, start + length)
                dst = get_chunk_tmpdir() / f"{random_str(12)}.chunk"
                out = self.server.download(self.path, dst, byte_range)
                if isinstance(out, Exception):
                    raise out
                return FilePart(payload=dst, extra=extra)
            finally:
                # Always free the slot, even when the download fails.
                self.semaphore.release()

        self.semaphore.acquire()
        return self.executor.submit(task)

    def shutdown(self) -> None:
        """Block until queued fetches finish, then stop the pool."""
        self.executor.shutdown(wait=True)
@@ -142,7 +142,7 @@ def _run_profile(
142
142
  filepart_or_err = future.result()
143
143
  if isinstance(filepart_or_err, Exception):
144
144
  assert False, f"Error: {filepart_or_err}"
145
- filepart_or_err.close()
145
+ filepart_or_err.dispose()
146
146
  futures.clear()
147
147
 
148
148
  start = time.time()
rclone_api/rclone.py CHANGED
@@ -11,10 +11,11 @@ import traceback
11
11
  import warnings
12
12
  from concurrent.futures import Future, ThreadPoolExecutor
13
13
  from contextlib import contextmanager
14
+ from dataclasses import dataclass
14
15
  from fnmatch import fnmatch
15
16
  from pathlib import Path
16
17
  from tempfile import TemporaryDirectory
17
- from typing import Generator
18
+ from typing import Any, Callable, Generator
18
19
 
19
20
  from rclone_api import Dir
20
21
  from rclone_api.completed_process import CompletedProcess
@@ -26,6 +27,7 @@ from rclone_api.dir_listing import DirListing
26
27
  from rclone_api.exec import RcloneExec
27
28
  from rclone_api.file import File, FileItem
28
29
  from rclone_api.group_files import group_files
30
+ from rclone_api.http_server import HttpServer
29
31
  from rclone_api.mount import Mount, clean_mount, prepare_mount
30
32
  from rclone_api.mount_read_chunker import MultiMountFileChunker
31
33
  from rclone_api.process import Process
@@ -38,6 +40,7 @@ from rclone_api.s3.types import (
38
40
  S3UploadTarget,
39
41
  )
40
42
  from rclone_api.types import (
43
+ FilePart,
41
44
  ListingOption,
42
45
  ModTimeStrategy,
43
46
  Order,
@@ -242,7 +245,7 @@ class Rclone:
242
245
 
243
246
  def ls(
244
247
  self,
245
- path: Dir | Remote | str,
248
+ path: Dir | Remote | str | None = None,
246
249
  max_depth: int | None = None,
247
250
  glob: str | None = None,
248
251
  order: Order = Order.NORMAL,
@@ -258,6 +261,15 @@ class Rclone:
258
261
  List of File objects found at the path
259
262
  """
260
263
 
264
+ if path is None:
265
+ # list remotes instead
266
+ list_remotes: list[Remote] = self.listremotes()
267
+ dirs: list[Dir] = [Dir(remote) for remote in list_remotes]
268
+ for d in dirs:
269
+ d.path.path = ""
270
+ rpaths = [d.path for d in dirs]
271
+ return DirListing(rpaths)
272
+
261
273
  if isinstance(path, str):
262
274
  path = Dir(
263
275
  to_path(path, self)
@@ -786,14 +798,33 @@ class Rclone:
786
798
  verbose: bool | None = None,
787
799
  max_chunks_before_suspension: int | None = None,
788
800
  mount_log: Path | None = None,
801
+ use_http_fetcher: bool = True, # else use mount fetcher
789
802
  ) -> MultiUploadResult:
790
803
  """For massive files that rclone can't handle in one go, this function will copy the file in chunks to an S3 store"""
804
+ from rclone_api.http_server import HttpFetcher, HttpServer
791
805
  from rclone_api.s3.api import S3Client
792
806
  from rclone_api.s3.create import S3Credentials
793
807
  from rclone_api.util import S3PathInfo, split_s3_path
794
808
 
795
- other_args: list[str] = ["--no-modtime", "--vfs-read-wait", "1s"]
809
+ src_path = Path(src)
810
+ name = src_path.name
811
+ src_parent_path = Path(src).parent.as_posix()
812
+
813
+ size_result: SizeResult = self.size_files(src_parent_path, [name])
814
+ target_size = SizeSuffix(size_result.total_size)
815
+
796
816
  chunk_size = chunk_size or SizeSuffix("64M")
817
+ MAX_CHUNKS = 10000
818
+ min_chunk_size = size_result.total_size // (MAX_CHUNKS - 1)
819
+ if min_chunk_size > chunk_size:
820
+ warnings.warn(
821
+ f"Chunk size {chunk_size} is too small for file size {size_result.total_size}, setting to {min_chunk_size}"
822
+ )
823
+ chunk_size = SizeSuffix(min_chunk_size)
824
+
825
+ other_args: list[str] = ["--no-modtime", "--vfs-read-wait", "1s"]
826
+
827
+ # BEGIN MOUNT SPECIFIC CONFIG
797
828
  unit_chunk_size = chunk_size / read_threads
798
829
  tmp_mount_dir = self._get_tmp_mount_dir()
799
830
  vfs_read_chunk_size = unit_chunk_size
@@ -818,18 +849,16 @@ class Rclone:
818
849
  # --vfs-cache-max-size
819
850
  other_args += ["--vfs-cache-max-size", vfs_disk_space_total_size.as_str()]
820
851
  mount_path = tmp_mount_dir / "RCLONE_API_DYNAMIC_MOUNT"
821
- src_path = Path(src)
822
- name = src_path.name
852
+ ## END MOUNT SPECIFIC CONFIG
823
853
 
824
- src_parent_path = Path(src).parent.as_posix()
825
- size_result: SizeResult = self.size_files(src_parent_path, [name])
854
+ # size_result: SizeResult = self.size_files(os.path.dirname(src), [name])
826
855
 
827
- target_size = SizeSuffix(size_result.total_size)
828
856
  if target_size < SizeSuffix("5M"):
829
857
  # fallback to normal copy
830
858
  completed_proc = self.copy_to(src, dst, check=True)
831
859
  if completed_proc.ok:
832
860
  return MultiUploadResult.UPLOADED_FRESH
861
+
833
862
  if size_result.total_size <= 0:
834
863
  raise ValueError(
835
864
  f"File {src} has size {size_result.total_size}, is this a directory?"
@@ -882,18 +911,46 @@ class Rclone:
882
911
  endpoint_url=section.endpoint(),
883
912
  )
884
913
 
885
- chunk_fetcher: MultiMountFileChunker = self.get_multi_mount_file_chunker(
886
- src=src_path.as_posix(),
887
- chunk_size=chunk_size,
888
- threads=read_threads,
889
- mount_log=mount_log,
890
- direct_io=True,
891
- )
914
+ @dataclass
915
+ class Fetcher:
916
+ fetch: Callable[[int, int, Any], Future[FilePart]]
917
+ shutdown: Callable[[], None]
918
+
919
+ def get_fetcher() -> Fetcher:
920
+ if use_http_fetcher:
921
+ import random
892
922
 
923
+ port = random.randint(10000, 20000)
924
+ http_server: HttpServer = self.serve_http(
925
+ src=src_path.parent.as_posix(), addr=f"localhost:{port}"
926
+ )
927
+ chunk_fetcher: HttpFetcher = http_server.get_fetcher(
928
+ path=src_path.name,
929
+ n_threads=read_threads,
930
+ )
931
+ # return chunk_fetcher.fetch
932
+ return Fetcher(fetch=chunk_fetcher.fetch, shutdown=http_server.shutdown)
933
+ else:
934
+ # Use the mount fetcher, which relies on FUSE which has problems in Docker/Windows/MacOS
935
+ mount_fetcher: MultiMountFileChunker = (
936
+ self.get_multi_mount_file_chunker(
937
+ src=src_path.as_posix(),
938
+ chunk_size=chunk_size,
939
+ threads=read_threads,
940
+ mount_log=mount_log,
941
+ direct_io=True,
942
+ )
943
+ )
944
+ # return chunk_fetcher.fetch
945
+ return Fetcher(
946
+ fetch=mount_fetcher.fetch, shutdown=mount_fetcher.shutdown
947
+ )
948
+
949
+ fetcher = get_fetcher()
893
950
  client = S3Client(s3_creds)
894
951
  upload_config: S3MutliPartUploadConfig = S3MutliPartUploadConfig(
895
952
  chunk_size=chunk_size.as_int(),
896
- chunk_fetcher=chunk_fetcher.fetch,
953
+ chunk_fetcher=fetcher.fetch,
897
954
  max_write_threads=write_threads,
898
955
  retries=retries,
899
956
  resume_path_json=save_state_json,
@@ -927,7 +984,8 @@ class Rclone:
927
984
  traceback.print_exc()
928
985
  raise
929
986
  finally:
930
- chunk_fetcher.shutdown()
987
+ fetcher.shutdown()
988
+ fetcher.shutdown()
931
989
 
932
990
  def get_multi_mount_file_chunker(
933
991
  self,
@@ -1095,7 +1153,7 @@ class Rclone:
1095
1153
  shutil.move(payload, outfile)
1096
1154
  return bytes(0)
1097
1155
  finally:
1098
- fp.close()
1156
+ fp.dispose()
1099
1157
 
1100
1158
  except Exception as e:
1101
1159
  warnings.warn(f"Error copying bytes: {e}")
@@ -1334,6 +1392,31 @@ class Rclone:
1334
1392
  raise ValueError("NFS serve process failed to start")
1335
1393
  return proc
1336
1394
 
1395
+ def serve_http(
1396
+ self,
1397
+ src: str,
1398
+ addr: str = "localhost:8080",
1399
+ other_args: list[str] | None = None,
1400
+ ) -> HttpServer:
1401
+ """Serve a remote or directory via HTTP.
1402
+
1403
+ Args:
1404
+ src: Remote or directory to serve
1405
+ addr: Network address and port to serve on (default: localhost:8080)
1406
+ """
1407
+ _, subpath = src.split(":", 1) # might not work on local paths.
1408
+ cmd_list: list[str] = ["serve", "http", "--addr", addr, src]
1409
+ if other_args:
1410
+ cmd_list += other_args
1411
+ proc = self._launch_process(cmd_list)
1412
+ time.sleep(2)
1413
+ if proc.poll() is not None:
1414
+ raise ValueError("HTTP serve process failed to start")
1415
+ out: HttpServer = HttpServer(
1416
+ url=f"http://{addr}", subpath=subpath, process=proc
1417
+ )
1418
+ return out
1419
+
1337
1420
  def size_files(
1338
1421
  self,
1339
1422
  src: str,
rclone_api/remote.py CHANGED
@@ -16,3 +16,6 @@ class Remote:
16
16
 
17
17
  def __str__(self) -> str:
18
18
  return f"{self.name}:"
19
+
20
+ def __repr__(self) -> str:
21
+ return f"Remote({self.name!r})"
@@ -25,31 +25,39 @@ _MIN_UPLOAD_CHUNK_SIZE = 5 * 1024 * 1024 # 5MB
25
25
 
26
26
 
27
27
  def upload_task(
28
- info: UploadInfo, chunk: bytes, part_number: int, retries: int
28
+ info: UploadInfo, chunk: FilePart, part_number: int, retries: int
29
29
  ) -> FinishedPiece:
30
- assert len(chunk) > 0
30
+ file_or_err: Path | Exception = chunk.get_file()
31
+ if isinstance(file_or_err, Exception):
32
+ raise file_or_err
33
+ file: Path = file_or_err
34
+ size = os.path.getsize(file)
31
35
  retries = retries + 1 # Add one for the initial attempt
32
36
  for retry in range(retries):
33
37
  try:
34
38
  if retry > 0:
35
39
  locked_print(f"Retrying part {part_number} for {info.src_file_path}")
36
40
  locked_print(
37
- f"Uploading part {part_number} for {info.src_file_path} of size {len(chunk)}"
38
- )
39
- part = info.s3_client.upload_part(
40
- Bucket=info.bucket_name,
41
- Key=info.object_name,
42
- PartNumber=part_number,
43
- UploadId=info.upload_id,
44
- Body=chunk,
45
- )
46
- out: FinishedPiece = FinishedPiece(
47
- etag=part["ETag"], part_number=part_number
41
+ f"Uploading part {part_number} for {info.src_file_path} of size {size}"
48
42
  )
43
+
44
+ with open(file, "rb") as f:
45
+ part = info.s3_client.upload_part(
46
+ Bucket=info.bucket_name,
47
+ Key=info.object_name,
48
+ PartNumber=part_number,
49
+ UploadId=info.upload_id,
50
+ Body=f,
51
+ )
52
+ out: FinishedPiece = FinishedPiece(
53
+ etag=part["ETag"], part_number=part_number
54
+ )
55
+ chunk.dispose()
49
56
  return out
50
57
  except Exception as e:
51
58
  if retry == retries - 1:
52
59
  locked_print(f"Error uploading part {part_number}: {e}")
60
+ chunk.dispose()
53
61
  raise e
54
62
  else:
55
63
  locked_print(f"Error uploading part {part_number}: {e}, retrying")
@@ -72,7 +80,7 @@ def handle_upload(
72
80
 
73
81
  part: FinishedPiece = upload_task(
74
82
  info=upload_info,
75
- chunk=fp.load(),
83
+ chunk=fp,
76
84
  part_number=part_number,
77
85
  retries=upload_info.retries,
78
86
  )
@@ -83,7 +91,7 @@ def handle_upload(
83
91
  warnings.warn(msg)
84
92
  return e
85
93
  finally:
86
- fp.close()
94
+ fp.dispose()
87
95
 
88
96
 
89
97
  def prepare_upload_file_multipart(
rclone_api/types.py CHANGED
@@ -289,7 +289,7 @@ atexit.register(_on_exit_cleanup)
289
289
 
290
290
 
291
291
  class FilePart:
292
- def __init__(self, payload: bytes | Exception, extra: Any) -> None:
292
+ def __init__(self, payload: Path | bytes | Exception, extra: Any) -> None:
293
293
  from rclone_api.util import random_str
294
294
 
295
295
  self.extra = extra
@@ -298,12 +298,18 @@ class FilePart:
298
298
  if isinstance(payload, Exception):
299
299
  self.payload = payload
300
300
  return
301
- self.payload = get_chunk_tmpdir() / f"{random_str(12)}.chunk"
302
- with _TMP_DIR_ACCESS_LOCK:
303
- if not self.payload.parent.exists():
304
- self.payload.parent.mkdir(parents=True, exist_ok=True)
305
- self.payload.write_bytes(payload)
306
- _add_for_cleanup(self.payload)
301
+ if isinstance(payload, bytes):
302
+ self.payload = get_chunk_tmpdir() / f"{random_str(12)}.chunk"
303
+ with _TMP_DIR_ACCESS_LOCK:
304
+ if not self.payload.parent.exists():
305
+ self.payload.parent.mkdir(parents=True, exist_ok=True)
306
+ self.payload.write_bytes(payload)
307
+ _add_for_cleanup(self.payload)
308
+ if isinstance(payload, Path):
309
+ self.payload = payload
310
+
311
+ def get_file(self) -> Path | Exception:
312
+ return self.payload
307
313
 
308
314
  @property
309
315
  def size(self) -> int:
@@ -337,7 +343,7 @@ class FilePart:
337
343
  def is_error(self) -> bool:
338
344
  return isinstance(self.payload, Exception)
339
345
 
340
- def close(self) -> None:
346
+ def dispose(self) -> None:
341
347
  with self._lock:
342
348
  if isinstance(self.payload, Exception):
343
349
  warnings.warn(
@@ -352,4 +358,4 @@ class FilePart:
352
358
  warnings.warn(f"Cannot close file part because of error: {e}")
353
359
 
354
360
  def __del__(self):
355
- self.close()
361
+ self.dispose()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: rclone_api
3
- Version: 1.3.19
3
+ Version: 1.3.20
4
4
  Summary: rclone api in python
5
5
  Home-page: https://github.com/zackees/rclone-api
6
6
  License: BSD 3-Clause License
@@ -1,4 +1,4 @@
1
- rclone_api/__init__.py,sha256=bJ6x-7ySj1kC7xjQJqEEA-0cr46RUh_tvIZsebGcyu4,1224
1
+ rclone_api/__init__.py,sha256=gOQJgOs0_oaV_pOwlY00LXRYAHk1_MDwN59od1VpoC0,1334
2
2
  rclone_api/cli.py,sha256=dibfAZIh0kXWsBbfp3onKLjyZXo54mTzDjUdzJlDlWo,231
3
3
  rclone_api/completed_process.py,sha256=_IZ8IWK7DM1_tsbDEkH6wPZ-bbcrgf7A7smls854pmg,1775
4
4
  rclone_api/config.py,sha256=f6jEAxVorGFr31oHfcsu5AJTtOJj2wR5tTSsbGGZuIw,2558
@@ -12,15 +12,16 @@ rclone_api/file.py,sha256=cz-7_nJArkVdJ9z2QaC_XZYpihXe3IPBC90Z5_3g2aw,5419
12
12
  rclone_api/file_item.py,sha256=cH-AQYsxedhNPp4c8NHY1ad4Z7St4yf_VGbmiGD59no,1770
13
13
  rclone_api/filelist.py,sha256=xbiusvNgaB_b_kQOZoHMJJxn6TWGtPrWd2J042BI28o,767
14
14
  rclone_api/group_files.py,sha256=H92xPW9lQnbNw5KbtZCl00bD6iRh9yRbCuxku4j_3dg,8036
15
+ rclone_api/http_server.py,sha256=SmeUDDKaMpJGDqRNkHoImHTRNkvHEtGFzu_8jYfoeZU,8113
15
16
  rclone_api/log.py,sha256=VZHM7pNSXip2ZLBKMP7M1u-rp_F7zoafFDuR8CPUoKI,1271
16
17
  rclone_api/mount.py,sha256=TE_VIBMW7J1UkF_6HRCt8oi_jGdMov4S51bm2OgxFAM,10045
17
18
  rclone_api/mount_read_chunker.py,sha256=7jaF1Rsjr-kXIZW--Ol1QuG7WArBgdIcpQ0AJMYn7bI,4764
18
19
  rclone_api/process.py,sha256=BGXJTZVT__jeaDyjN8_kRycliOhkBErMPdHO1hKRvJE,5271
19
- rclone_api/rclone.py,sha256=ogWjSt--Ph2dpeq31mWsBRBvKhpf1EF0jJD2HxgQ8T0,50903
20
- rclone_api/remote.py,sha256=O9WDUFQy9f6oT1HdUbTixK2eg0xtBBm8k4Xl6aa6K00,431
20
+ rclone_api/rclone.py,sha256=lLUHeG11brzhODCTlOhjy6rqcJeC_LhQBGTBcRTkaw8,54099
21
+ rclone_api/remote.py,sha256=jq3dPbAGvYZFW5cTviqxT2w6_jG2LLfS1RIcYSmMsQQ,503
21
22
  rclone_api/rpath.py,sha256=8ZA_1wxWtskwcy0I8V2VbjKDmzPkiWd8Q2JQSvh-sYE,2586
22
23
  rclone_api/scan_missing_folders.py,sha256=Kulca2Q6WZodt00ATFHkmqqInuoPvBkhTcS9703y6po,4740
23
- rclone_api/types.py,sha256=OYO14H6Xf6u2nHbrzfoMbGbU7LRPhALaRA9FEClf8OI,10388
24
+ rclone_api/types.py,sha256=aj3usJrIDKC4MKuObBo9WjlR1isyJColdOQWXo3Repo,10608
24
25
  rclone_api/util.py,sha256=F9Q3zbWRsgPF4NG6OWB63cZ7GVq82lsraP47gmmDohU,5416
25
26
  rclone_api/walk.py,sha256=-54NVE8EJcCstwDoaC_UtHm73R2HrZwVwQmsnv55xNU,3369
26
27
  rclone_api/assets/example.txt,sha256=lTBovRjiz0_TgtAtbA1C5hNi2ffbqnNPqkKg6UiKCT8,54
@@ -33,17 +34,17 @@ rclone_api/db/db.py,sha256=ZpYfeCUe8MKg_fdJucRSe6-fwGY_rWqUn7WkHCNFH_4,10074
33
34
  rclone_api/db/models.py,sha256=v7qaXUehvsDvU51uk69JI23fSIs9JFGcOa-Tv1c_wVs,1600
34
35
  rclone_api/experimental/flags.py,sha256=qCVD--fSTmzlk9hloRLr0q9elzAOFzPsvVpKM3aB1Mk,2739
35
36
  rclone_api/experimental/flags_base.py,sha256=ajU_czkTcAxXYU-SlmiCfHY7aCQGHvpCLqJ-Z8uZLk0,2102
36
- rclone_api/profile/mount_copy_bytes.py,sha256=nZtqMukLhSzHq64Pn1I8pXwjoraqWjCKey3WLAeubx0,9069
37
+ rclone_api/profile/mount_copy_bytes.py,sha256=M1vZn-Mrga14Ik7MHGZHbnwYli41Ep6Tyll7hQc7Wmo,9071
37
38
  rclone_api/s3/api.py,sha256=PafsIEyWDpLWAXsZAjFm9CY14vJpsDr9lOsn0kGRLZ0,4009
38
39
  rclone_api/s3/basic_ops.py,sha256=hK3366xhVEzEcjz9Gk_8lFx6MRceAk72cax6mUrr6ko,2104
39
40
  rclone_api/s3/chunk_task.py,sha256=kA6_5fLNdtT3QdTFrfBY6y8sH9Og8nM2mrjgAz_g1Rc,7196
40
41
  rclone_api/s3/chunk_types.py,sha256=oSWv8No9V3BeM7IcGnowyR2a7YrszdAXzEJlxaeZcp0,8852
41
42
  rclone_api/s3/create.py,sha256=wgfkapv_j904CfKuWyiBIWJVxfAx_ftemFSUV14aT68,3149
42
43
  rclone_api/s3/types.py,sha256=Elmh__gvZJyJyElYwMmvYZIBIunDJiTRAbEg21GmsRU,1604
43
- rclone_api/s3/upload_file_multipart.py,sha256=eVjaRoE0xfoOFmiYRuwz3tVfT2TvJl2lSYSAut12fMg,11765
44
- rclone_api-1.3.19.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
45
- rclone_api-1.3.19.dist-info/METADATA,sha256=fN9Od_VwYlO6TkvAdThtH-GdpyFUlkhXTpOGU0n-Dpk,4598
46
- rclone_api-1.3.19.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
47
- rclone_api-1.3.19.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
48
- rclone_api-1.3.19.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
49
- rclone_api-1.3.19.dist-info/RECORD,,
44
+ rclone_api/s3/upload_file_multipart.py,sha256=UlrUl8fB0oK8_r0w8ZwH79jlOCHQrMOKWZeNCmHrT7M,12052
45
+ rclone_api-1.3.20.dist-info/LICENSE,sha256=b6pOoifSXiUaz_lDS84vWlG3fr4yUKwB8fzkrH9R8bQ,1064
46
+ rclone_api-1.3.20.dist-info/METADATA,sha256=RFYBoioCW6Gs8lnUrpIJKl02vRFXTYdOD3YCIt7zJXw,4598
47
+ rclone_api-1.3.20.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
48
+ rclone_api-1.3.20.dist-info/entry_points.txt,sha256=fJteOlYVwgX3UbNuL9jJ0zUTuX2O79JFAeNgK7Sw7EQ,255
49
+ rclone_api-1.3.20.dist-info/top_level.txt,sha256=EvZ7uuruUpe9RiUyEp25d1Keq7PWYNT0O_-mr8FCG5g,11
50
+ rclone_api-1.3.20.dist-info/RECORD,,