rclone-api 1.0.88__tar.gz → 1.0.89__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. {rclone_api-1.0.88 → rclone_api-1.0.89}/.gitignore +5 -0
  2. {rclone_api-1.0.88 → rclone_api-1.0.89}/PKG-INFO +2 -3
  3. {rclone_api-1.0.88 → rclone_api-1.0.89}/pyproject.toml +7 -1
  4. {rclone_api-1.0.88 → rclone_api-1.0.89}/setup.py +0 -1
  5. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/__init__.py +5 -1
  6. rclone_api-1.0.89/src/rclone_api/config.py +75 -0
  7. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/group_files.py +4 -1
  8. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/rclone.py +225 -49
  9. rclone_api-1.0.89/src/rclone_api/s3/api.py +72 -0
  10. rclone_api-1.0.89/src/rclone_api/s3/basic_ops.py +61 -0
  11. rclone_api-1.0.89/src/rclone_api/s3/chunk_uploader.py +538 -0
  12. rclone_api-1.0.89/src/rclone_api/s3/create.py +71 -0
  13. rclone_api-1.0.89/src/rclone_api/s3/types.py +55 -0
  14. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/types.py +5 -3
  15. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/util.py +30 -4
  16. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api.egg-info/PKG-INFO +2 -3
  17. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api.egg-info/SOURCES.txt +10 -2
  18. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api.egg-info/requires.txt +1 -0
  19. rclone_api-1.0.89/tests/archive/test_paramiko.py.disabled +326 -0
  20. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_diff.py +2 -2
  21. rclone_api-1.0.89/tests/test_mounted_ranged_download.py +151 -0
  22. rclone_api-1.0.89/tests/test_rclone_config.py +70 -0
  23. rclone_api-1.0.89/tests/test_s3.py +113 -0
  24. rclone_api-1.0.88/src/rclone_api/config.py +0 -8
  25. rclone_api-1.0.88/tests/test_serve_webdav.py +0 -108
  26. {rclone_api-1.0.88 → rclone_api-1.0.89}/.aiderignore +0 -0
  27. {rclone_api-1.0.88 → rclone_api-1.0.89}/.github/workflows/lint.yml +0 -0
  28. {rclone_api-1.0.88 → rclone_api-1.0.89}/.github/workflows/push_macos.yml +0 -0
  29. {rclone_api-1.0.88 → rclone_api-1.0.89}/.github/workflows/push_ubuntu.yml +0 -0
  30. {rclone_api-1.0.88 → rclone_api-1.0.89}/.github/workflows/push_win.yml +0 -0
  31. {rclone_api-1.0.88 → rclone_api-1.0.89}/.pylintrc +0 -0
  32. {rclone_api-1.0.88 → rclone_api-1.0.89}/.vscode/launch.json +0 -0
  33. {rclone_api-1.0.88 → rclone_api-1.0.89}/.vscode/settings.json +0 -0
  34. {rclone_api-1.0.88 → rclone_api-1.0.89}/.vscode/tasks.json +0 -0
  35. {rclone_api-1.0.88 → rclone_api-1.0.89}/LICENSE +0 -0
  36. {rclone_api-1.0.88 → rclone_api-1.0.89}/MANIFEST.in +0 -0
  37. {rclone_api-1.0.88 → rclone_api-1.0.89}/README.md +0 -0
  38. {rclone_api-1.0.88 → rclone_api-1.0.89}/clean +0 -0
  39. {rclone_api-1.0.88 → rclone_api-1.0.89}/install +0 -0
  40. {rclone_api-1.0.88 → rclone_api-1.0.89}/lint +0 -0
  41. {rclone_api-1.0.88 → rclone_api-1.0.89}/requirements.testing.txt +0 -0
  42. {rclone_api-1.0.88 → rclone_api-1.0.89}/setup.cfg +0 -0
  43. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/assets/example.txt +0 -0
  44. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/cli.py +0 -0
  45. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/cmd/list_files.py +0 -0
  46. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/completed_process.py +0 -0
  47. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/convert.py +0 -0
  48. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/deprecated.py +0 -0
  49. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/diff.py +0 -0
  50. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/dir.py +0 -0
  51. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/dir_listing.py +0 -0
  52. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/exec.py +0 -0
  53. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/file.py +0 -0
  54. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/filelist.py +0 -0
  55. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/process.py +0 -0
  56. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/remote.py +0 -0
  57. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/rpath.py +0 -0
  58. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/scan_missing_folders.py +0 -0
  59. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api/walk.py +0 -0
  60. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api.egg-info/dependency_links.txt +0 -0
  61. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api.egg-info/entry_points.txt +0 -0
  62. {rclone_api-1.0.88 → rclone_api-1.0.89}/src/rclone_api.egg-info/top_level.txt +0 -0
  63. {rclone_api-1.0.88 → rclone_api-1.0.89}/test +0 -0
  64. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_cmd_list_files.py +0 -0
  65. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_copy.py +0 -0
  66. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_copy_files.py +0 -0
  67. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_group_files.py +0 -0
  68. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_is_synced.py +0 -0
  69. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_ls.py +0 -0
  70. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_mount.py +0 -0
  71. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_mount_s3.py +0 -0
  72. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_mount_webdav.py +0 -0
  73. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_obscure.py +0 -0
  74. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_remote_control.py +0 -0
  75. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_remotes.py +0 -0
  76. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_scan_missing_folders.py +0 -0
  77. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_size_files.py +0 -0
  78. {rclone_api-1.0.88 → rclone_api-1.0.89}/tests/test_walk.py +0 -0
  79. {rclone_api-1.0.88 → rclone_api-1.0.89}/tox.ini +0 -0
  80. {rclone_api-1.0.88 → rclone_api-1.0.89}/upload_package.sh +0 -0
@@ -145,3 +145,8 @@ uv.lock
145
145
  !.aiderignore
146
146
 
147
147
  rclone*.conf
148
+ test_mount2
149
+ t.py
150
+ mount
151
+ t2.py
152
+ chunk_store
@@ -1,9 +1,8 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: rclone_api
3
- Version: 1.0.88
3
+ Version: 1.0.89
4
4
  Summary: rclone api in python
5
5
  Home-page: https://github.com/zackees/rclone-api
6
- Maintainer: Zachary Vorhies
7
6
  License: BSD 3-Clause License
8
7
  Keywords: template-python-cmd
9
8
  Classifier: Programming Language :: Python :: 3
@@ -12,8 +11,8 @@ Description-Content-Type: text/markdown
12
11
  License-File: LICENSE
13
12
  Requires-Dist: pyright>=1.1.393
14
13
  Requires-Dist: python-dotenv>=1.0.0
14
+ Requires-Dist: boto3<=1.35.99,>=1.20.1
15
15
  Dynamic: home-page
16
- Dynamic: maintainer
17
16
 
18
17
  # rclone-api
19
18
 
@@ -13,9 +13,15 @@ classifiers = ["Programming Language :: Python :: 3"]
13
13
  dependencies = [
14
14
  "pyright>=1.1.393",
15
15
  "python-dotenv>=1.0.0",
16
+
17
+ # BOTO3 Library needs to be pinned to a specific version
18
+ # BackBlaze S3 fails with checksum header which it doesn't support after 1.35.99
19
+ # The 1.20.1 was the earliest one I checked that worked and is not the true lower bound.
20
+ "boto3>=1.20.1,<=1.35.99",
16
21
  ]
22
+
17
23
  # Change this with the version number bump.
18
- version = "1.0.88"
24
+ version = "1.0.89"
19
25
 
20
26
  [tool.setuptools]
21
27
  package-dir = {"" = "src"}
@@ -15,7 +15,6 @@ HERE = os.path.dirname(os.path.abspath(__file__))
15
15
 
16
16
  if __name__ == "__main__":
17
17
  setup(
18
- maintainer="Zachary Vorhies",
19
18
  keywords=KEYWORDS,
20
19
  url=URL,
21
20
  package_data={"": ["assets/example.txt"]},
@@ -1,5 +1,5 @@
1
1
  from .completed_process import CompletedProcess
2
- from .config import Config
2
+ from .config import Config, Parsed, Section
3
3
  from .diff import DiffItem, DiffOption, DiffType
4
4
  from .dir import Dir
5
5
  from .dir_listing import DirListing
@@ -9,6 +9,7 @@ from .process import Process
9
9
  from .rclone import Rclone, rclone_verbose
10
10
  from .remote import Remote
11
11
  from .rpath import RPath
12
+ from .s3.types import MultiUploadResult
12
13
  from .types import ListingOption, Order, SizeResult
13
14
 
14
15
  __all__ = [
@@ -30,4 +31,7 @@ __all__ = [
30
31
  "Order",
31
32
  "ListingOption",
32
33
  "SizeResult",
34
+ "Parsed",
35
+ "Section",
36
+ "MultiUploadResult",
33
37
  ]
@@ -0,0 +1,75 @@
1
+ from dataclasses import dataclass, field
2
+ from typing import Dict, List
3
+
4
+
5
+ @dataclass
6
+ class Section:
7
+ name: str
8
+ data: Dict[str, str] = field(default_factory=dict)
9
+
10
+ def add(self, key: str, value: str) -> None:
11
+ self.data[key] = value
12
+
13
+ def provider(self) -> str:
14
+ return self.data["provider"]
15
+
16
+ def access_key_id(self) -> str:
17
+ return self.data["access_key_id"]
18
+
19
+ def secret_access_key(self) -> str:
20
+ return self.data["secret_access_key"]
21
+
22
+ def endpoint(self) -> str | None:
23
+ return self.data.get("endpoint")
24
+
25
+
26
+ @dataclass
27
+ class Parsed:
28
+ # sections: List[ParsedSection]
29
+ sections: dict[str, Section]
30
+
31
+ @staticmethod
32
+ def parse(content: str) -> "Parsed":
33
+ return parse_rclone_config(content)
34
+
35
+
36
+ @dataclass
37
+ class Config:
38
+ """Rclone configuration dataclass."""
39
+
40
+ text: str
41
+
42
+ def parse(self) -> Parsed:
43
+ return Parsed.parse(self.text)
44
+
45
+
46
+ def parse_rclone_config(content: str) -> Parsed:
47
+ """
48
+ Parses an rclone configuration file and returns a list of RcloneConfigSection objects.
49
+
50
+ Each section in the file starts with a line like [section_name]
51
+ followed by key=value pairs.
52
+ """
53
+ sections: List[Section] = []
54
+ current_section: Section | None = None
55
+
56
+ lines = content.splitlines()
57
+ for line in lines:
58
+ line = line.strip()
59
+ # Skip empty lines and comments (assumed to start with '#' or ';')
60
+ if not line or line.startswith(("#", ";")):
61
+ continue
62
+ # New section header detected
63
+ if line.startswith("[") and line.endswith("]"):
64
+ section_name = line[1:-1].strip()
65
+ current_section = Section(name=section_name)
66
+ sections.append(current_section)
67
+ elif "=" in line and current_section is not None:
68
+ # Parse key and value, splitting only on the first '=' found
69
+ key, value = line.split("=", 1)
70
+ current_section.add(key.strip(), value.strip())
71
+
72
+ data: dict[str, Section] = {}
73
+ for section in sections:
74
+ data[section.name] = section
75
+ return Parsed(sections=data)
@@ -68,7 +68,10 @@ class TreeNode:
68
68
  paths_reversed: list[str] = [self.name]
69
69
  node: TreeNode | None = self
70
70
  assert node is not None
71
- while node := node.parent:
71
+ while True:
72
+ node = node.parent
73
+ if node is None:
74
+ break
72
75
  paths_reversed.append(node.name)
73
76
  return "/".join(reversed(paths_reversed))
74
77
 
@@ -6,8 +6,10 @@ import os
6
6
  import random
7
7
  import subprocess
8
8
  import time
9
+ import traceback
9
10
  import warnings
10
11
  from concurrent.futures import Future, ThreadPoolExecutor
12
+ from contextlib import contextmanager
11
13
  from fnmatch import fnmatch
12
14
  from pathlib import Path
13
15
  from tempfile import TemporaryDirectory
@@ -15,7 +17,7 @@ from typing import Generator
15
17
 
16
18
  from rclone_api import Dir
17
19
  from rclone_api.completed_process import CompletedProcess
18
- from rclone_api.config import Config
20
+ from rclone_api.config import Config, Parsed, Section
19
21
  from rclone_api.convert import convert_to_filestr_list, convert_to_str
20
22
  from rclone_api.deprecated import deprecated
21
23
  from rclone_api.diff import DiffItem, DiffOption, diff_stream_from_running_process
@@ -26,12 +28,13 @@ from rclone_api.group_files import group_files
26
28
  from rclone_api.process import Process
27
29
  from rclone_api.remote import Remote
28
30
  from rclone_api.rpath import RPath
29
- from rclone_api.types import (
30
- ListingOption,
31
- ModTimeStrategy,
32
- Order,
33
- SizeResult,
31
+ from rclone_api.s3.types import (
32
+ MultiUploadResult,
33
+ S3MutliPartUploadConfig,
34
+ S3Provider,
35
+ S3UploadTarget,
34
36
  )
37
+ from rclone_api.types import ListingOption, ModTimeStrategy, Order, SizeResult
35
38
  from rclone_api.util import (
36
39
  get_check,
37
40
  get_rclone_exe,
@@ -41,6 +44,8 @@ from rclone_api.util import (
41
44
  )
42
45
  from rclone_api.walk import walk
43
46
 
47
+ _IS_WINDOWS = os.name == "nt"
48
+
44
49
 
45
50
  def rclone_verbose(verbose: bool | None) -> bool:
46
51
  if verbose is not None:
@@ -48,6 +53,14 @@ def rclone_verbose(verbose: bool | None) -> bool:
48
53
  return bool(int(os.getenv("RCLONE_API_VERBOSE", "0")))
49
54
 
50
55
 
56
+ def _to_rclone_conf(config: Config | Path) -> Config:
57
+ if isinstance(config, Path):
58
+ content = config.read_text(encoding="utf-8")
59
+ return Config(content)
60
+ else:
61
+ return config
62
+
63
+
51
64
  class Rclone:
52
65
  def __init__(
53
66
  self, rclone_conf: Path | Config, rclone_exe: Path | None = None
@@ -56,6 +69,7 @@ class Rclone:
56
69
  if not rclone_conf.exists():
57
70
  raise ValueError(f"Rclone config file not found: {rclone_conf}")
58
71
  self._exec = RcloneExec(rclone_conf, get_rclone_exe(rclone_exe))
72
+ self.config: Config = _to_rclone_conf(rclone_conf)
59
73
 
60
74
  def _run(
61
75
  self, cmd: list[str], check: bool = False, capture: bool | None = None
@@ -399,7 +413,7 @@ class Rclone:
399
413
  using_fast_list = "--fast-list" in other_args
400
414
  if using_fast_list:
401
415
  warnings.warn(
402
- "It's not recommended to use --fast-list with copy_files as the entire repository has to be listed"
416
+ "It's not recommended to use --fast-list with copy_files as this will perform poorly on large repositories since the entire repository has to be scanned."
403
417
  )
404
418
 
405
419
  if max_partition_workers > 1:
@@ -654,6 +668,114 @@ class Rclone:
654
668
  except subprocess.CalledProcessError:
655
669
  return False
656
670
 
671
+ def copy_file_resumable_s3(
672
+ self,
673
+ src: str,
674
+ dst: str,
675
+ save_state_json: Path,
676
+ chunk_size: int = 16
677
+ * 1024
678
+ * 1024, # This setting will scale the performance of the upload
679
+ concurrent_chunks: int = 4, # This setting will scale the performance of the upload
680
+ retries: int = 3,
681
+ max_chunks_before_suspension: int | None = None,
682
+ ) -> MultiUploadResult:
683
+ """For massive files that rclone can't handle in one go, this function will copy the file in chunks to an S3 store"""
684
+ other_args: list[str] = [
685
+ "--no-modtime",
686
+ "--vfs-read-wait",
687
+ "1s",
688
+ "--vfs-disk-space-total-size",
689
+ str(2 * chunk_size * concurrent_chunks), # purge quickly.
690
+ "--vfs-read-chunk-size",
691
+ str(chunk_size),
692
+ "--vfs-read-chunk-size-limit",
693
+ str(chunk_size * concurrent_chunks),
694
+ "--vfs-read-chunk-streams",
695
+ str(concurrent_chunks),
696
+ "--vfs-fast-fingerprint",
697
+ ]
698
+ mount_path = Path("rclone_api_upload_mount")
699
+ src_path = Path(src)
700
+ name = src_path.name
701
+
702
+ parent_path = str(src_path.parent.as_posix())
703
+ with self.scoped_mount(
704
+ parent_path,
705
+ mount_path,
706
+ use_links=True,
707
+ vfs_cache_mode="minimal",
708
+ other_args=other_args,
709
+ ):
710
+ # raise NotImplementedError("Not implemented yet")
711
+ from rclone_api.s3.create import S3Credentials
712
+ from rclone_api.util import S3PathInfo, split_s3_path
713
+
714
+ path_info: S3PathInfo = split_s3_path(dst)
715
+ remote = path_info.remote
716
+ bucket_name = path_info.bucket
717
+ s3_key = path_info.key
718
+ parsed: Parsed = self.config.parse()
719
+ sections: dict[str, Section] = parsed.sections
720
+ if remote not in sections:
721
+ raise ValueError(
722
+ f"Remote {remote} not found in rclone config, remotes are: {sections.keys()}"
723
+ )
724
+
725
+ section: Section = sections[remote]
726
+ provider: str = section.provider()
727
+ provider_enum = S3Provider.from_str(provider)
728
+
729
+ s3_creds: S3Credentials = S3Credentials(
730
+ provider=provider_enum,
731
+ access_key_id=section.access_key_id(),
732
+ secret_access_key=section.secret_access_key(),
733
+ endpoint_url=section.endpoint(),
734
+ )
735
+ print(s3_creds)
736
+ # create_s3_client
737
+
738
+ print(f"Info: {section}")
739
+ from rclone_api.s3.api import S3Client
740
+
741
+ client = S3Client(s3_creds)
742
+ print(f"Client: {client}")
743
+
744
+ config: S3MutliPartUploadConfig = S3MutliPartUploadConfig(
745
+ chunk_size=chunk_size,
746
+ retries=retries,
747
+ resume_path_json=save_state_json,
748
+ max_chunks_before_suspension=max_chunks_before_suspension,
749
+ )
750
+
751
+ src_file = mount_path / name
752
+
753
+ print(f"Uploading {name} to {s3_key} in bucket {bucket_name}")
754
+ print(f"Source: {src_path}")
755
+ print(f"bucket_name: {bucket_name}")
756
+ print(f"upload_config: {config}")
757
+
758
+ upload_target: S3UploadTarget
759
+ upload_config: S3MutliPartUploadConfig
760
+
761
+ upload_target = S3UploadTarget(
762
+ bucket_name=bucket_name,
763
+ src_file=src_file,
764
+ s3_key=s3_key,
765
+ )
766
+
767
+ upload_config = S3MutliPartUploadConfig(
768
+ chunk_size=chunk_size,
769
+ retries=retries,
770
+ resume_path_json=save_state_json,
771
+ max_chunks_before_suspension=max_chunks_before_suspension,
772
+ )
773
+
774
+ out: MultiUploadResult = client.upload_file_multipart(
775
+ upload_target=upload_target, upload_config=upload_config
776
+ )
777
+ return out
778
+
657
779
  def copy_dir(
658
780
  self, src: str | Dir, dst: str | Dir, args: list[str] | None = None
659
781
  ) -> CompletedProcess:
@@ -682,10 +804,10 @@ class Rclone:
682
804
  self,
683
805
  src: Remote | Dir | str,
684
806
  outdir: Path,
685
- allow_writes=False,
686
- use_links=True,
687
- vfs_cache_mode="full",
688
- other_cmds: list[str] | None = None,
807
+ allow_writes: bool | None = False,
808
+ use_links: bool | None = None,
809
+ vfs_cache_mode: str | None = None,
810
+ other_args: list[str] | None = None,
689
811
  ) -> Process:
690
812
  """Mount a remote or directory to a local path.
691
813
 
@@ -699,6 +821,9 @@ class Rclone:
699
821
  Raises:
700
822
  subprocess.CalledProcessError: If the mount operation fails
701
823
  """
824
+ allow_writes = allow_writes or False
825
+ use_links = use_links or True
826
+ vfs_cache_mode = vfs_cache_mode or "full"
702
827
  if outdir.exists():
703
828
  is_empty = not list(outdir.iterdir())
704
829
  if not is_empty:
@@ -706,12 +831,13 @@ class Rclone:
706
831
  f"Mount directory already exists and is not empty: {outdir}"
707
832
  )
708
833
  outdir.rmdir()
709
- try:
834
+
835
+ if _IS_WINDOWS:
836
+ # Windows -> Must create parent directories only if they don't exist
710
837
  outdir.parent.mkdir(parents=True, exist_ok=True)
711
- except PermissionError:
712
- warnings.warn(
713
- f"Permission error creating parent directory: {outdir.parent}"
714
- )
838
+ else:
839
+ # Linux -> Must create parent directories and the directory itself
840
+ outdir.mkdir(parents=True, exist_ok=True)
715
841
  src_str = convert_to_str(src)
716
842
  cmd_list: list[str] = ["mount", src_str, str(outdir)]
717
843
  if not allow_writes:
@@ -721,19 +847,50 @@ class Rclone:
721
847
  if vfs_cache_mode:
722
848
  cmd_list.append("--vfs-cache-mode")
723
849
  cmd_list.append(vfs_cache_mode)
724
- if other_cmds:
725
- cmd_list += other_cmds
850
+ if other_args:
851
+ cmd_list += other_args
726
852
  proc = self._launch_process(cmd_list)
727
853
  wait_for_mount(outdir, proc)
728
854
  return proc
729
855
 
856
+ @contextmanager
857
+ def scoped_mount(
858
+ self,
859
+ src: Remote | Dir | str,
860
+ outdir: Path,
861
+ allow_writes: bool | None = None,
862
+ use_links: bool | None = None,
863
+ vfs_cache_mode: str | None = None,
864
+ other_args: list[str] | None = None,
865
+ ) -> Generator[Process, None, None]:
866
+ """Like mount, but can be used in a context manager."""
867
+ proc = self.mount(
868
+ src,
869
+ outdir,
870
+ allow_writes=allow_writes,
871
+ use_links=use_links,
872
+ vfs_cache_mode=vfs_cache_mode,
873
+ other_args=other_args,
874
+ )
875
+ try:
876
+ yield proc
877
+ except Exception as e:
878
+ stack_trace = traceback.format_exc()
879
+ warnings.warn(f"Error in scoped_mount: {e}\n\nStack Trace:\n{stack_trace}")
880
+ raise
881
+ finally:
882
+ if proc.poll() is None:
883
+ proc.terminate()
884
+ proc.wait()
885
+
886
+ @deprecated("mount")
730
887
  def mount_webdav(
731
888
  self,
732
889
  url: str,
733
890
  outdir: Path,
734
- vfs_cache_mode="full",
891
+ vfs_cache_mode: str | None = None,
735
892
  vfs_disk_space_total_size: str | None = "10G",
736
- other_cmds: list[str] | None = None,
893
+ other_args: list[str] | None = None,
737
894
  ) -> Process:
738
895
  """Mount a remote or directory to a local path.
739
896
 
@@ -747,6 +904,20 @@ class Rclone:
747
904
  Raises:
748
905
  subprocess.CalledProcessError: If the mount operation fails
749
906
  """
907
+ other_args = other_args or []
908
+ if vfs_cache_mode is None:
909
+ if "--vfs-cache-mode" in other_args:
910
+ pass
911
+ else:
912
+ vfs_cache_mode = "full"
913
+ elif "--vfs-cache-mode" in other_args:
914
+ warnings.warn(
915
+ f"vfs_cache_mode is set to {vfs_cache_mode} but --vfs-cache-mode is already in other_args"
916
+ )
917
+ idx = other_args.index("--vfs-cache-mode")
918
+ other_args.pop(idx)
919
+ other_args.pop(idx) # also the next value which will be the cache mode.
920
+
750
921
  if outdir.exists():
751
922
  is_empty = not list(outdir.iterdir())
752
923
  if not is_empty:
@@ -757,10 +928,11 @@ class Rclone:
757
928
 
758
929
  src_str = url
759
930
  cmd_list: list[str] = ["mount", src_str, str(outdir)]
760
- cmd_list.append("--vfs-cache-mode")
761
- cmd_list.append(vfs_cache_mode)
762
- if other_cmds:
763
- cmd_list += other_cmds
931
+ if vfs_cache_mode:
932
+ cmd_list.append("--vfs-cache-mode")
933
+ cmd_list.append(vfs_cache_mode)
934
+ if other_args:
935
+ cmd_list += other_args
764
936
  if vfs_disk_space_total_size is not None:
765
937
  cmd_list.append("--vfs-cache-max-size")
766
938
  cmd_list.append(vfs_disk_space_total_size)
@@ -768,6 +940,7 @@ class Rclone:
768
940
  wait_for_mount(outdir, proc)
769
941
  return proc
770
942
 
943
+ # Settings optimized for s3.
771
944
  def mount_s3(
772
945
  self,
773
946
  url: str,
@@ -789,7 +962,7 @@ class Rclone:
789
962
  vfs_fast_fingerprint: bool = True,
790
963
  # vfs-refresh
791
964
  vfs_refresh: bool = True,
792
- other_cmds: list[str] | None = None,
965
+ other_args: list[str] | None = None,
793
966
  ) -> Process:
794
967
  """Mount a remote or directory to a local path.
795
968
 
@@ -797,44 +970,44 @@ class Rclone:
797
970
  src: Remote or directory to mount
798
971
  outdir: Local path to mount to
799
972
  """
800
- other_cmds = other_cmds or []
973
+ other_args = other_args or []
801
974
  if modtime_strategy is not None:
802
- other_cmds.append(f"--{modtime_strategy.value}")
975
+ other_args.append(f"--{modtime_strategy.value}")
803
976
  if (vfs_cache_mode == "full" or vfs_cache_mode == "writes") and (
804
- transfers is not None and "--transfers" not in other_cmds
977
+ transfers is not None and "--transfers" not in other_args
805
978
  ):
806
- other_cmds.append("--transfers")
807
- other_cmds.append(str(transfers))
808
- if dir_cache_time is not None and "--dir-cache-time" not in other_cmds:
809
- other_cmds.append("--dir-cache-time")
810
- other_cmds.append(dir_cache_time)
979
+ other_args.append("--transfers")
980
+ other_args.append(str(transfers))
981
+ if dir_cache_time is not None and "--dir-cache-time" not in other_args:
982
+ other_args.append("--dir-cache-time")
983
+ other_args.append(dir_cache_time)
811
984
  if (
812
985
  vfs_disk_space_total_size is not None
813
- and "--vfs-cache-max-size" not in other_cmds
986
+ and "--vfs-cache-max-size" not in other_args
814
987
  ):
815
- other_cmds.append("--vfs-cache-max-size")
816
- other_cmds.append(vfs_disk_space_total_size)
817
- if vfs_refresh and "--vfs-refresh" not in other_cmds:
818
- other_cmds.append("--vfs-refresh")
819
- if attribute_timeout is not None and "--attr-timeout" not in other_cmds:
820
- other_cmds.append("--attr-timeout")
821
- other_cmds.append(attribute_timeout)
988
+ other_args.append("--vfs-cache-max-size")
989
+ other_args.append(vfs_disk_space_total_size)
990
+ if vfs_refresh and "--vfs-refresh" not in other_args:
991
+ other_args.append("--vfs-refresh")
992
+ if attribute_timeout is not None and "--attr-timeout" not in other_args:
993
+ other_args.append("--attr-timeout")
994
+ other_args.append(attribute_timeout)
822
995
  if vfs_read_chunk_streams:
823
- other_cmds.append("--vfs-read-chunk-streams")
824
- other_cmds.append(str(vfs_read_chunk_streams))
996
+ other_args.append("--vfs-read-chunk-streams")
997
+ other_args.append(str(vfs_read_chunk_streams))
825
998
  if vfs_read_chunk_size:
826
- other_cmds.append("--vfs-read-chunk-size")
827
- other_cmds.append(vfs_read_chunk_size)
999
+ other_args.append("--vfs-read-chunk-size")
1000
+ other_args.append(vfs_read_chunk_size)
828
1001
  if vfs_fast_fingerprint:
829
- other_cmds.append("--vfs-fast-fingerprint")
1002
+ other_args.append("--vfs-fast-fingerprint")
830
1003
 
831
- other_cmds = other_cmds if other_cmds else None
1004
+ other_args = other_args if other_args else None
832
1005
  return self.mount(
833
1006
  url,
834
1007
  outdir,
835
1008
  allow_writes=allow_writes,
836
1009
  vfs_cache_mode=vfs_cache_mode,
837
- other_cmds=other_cmds,
1010
+ other_args=other_args,
838
1011
  )
839
1012
 
840
1013
  def serve_webdav(
@@ -844,6 +1017,7 @@ class Rclone:
844
1017
  password: str,
845
1018
  addr: str = "localhost:2049",
846
1019
  allow_other: bool = False,
1020
+ other_args: list[str] | None = None,
847
1021
  ) -> Process:
848
1022
  """Serve a remote or directory via NFS.
849
1023
 
@@ -863,6 +1037,8 @@ class Rclone:
863
1037
  cmd_list.extend(["--user", user, "--pass", password])
864
1038
  if allow_other:
865
1039
  cmd_list.append("--allow-other")
1040
+ if other_args:
1041
+ cmd_list += other_args
866
1042
  proc = self._launch_process(cmd_list)
867
1043
  time.sleep(2) # give it a moment to start
868
1044
  if proc.poll() is not None:
@@ -883,7 +1059,7 @@ class Rclone:
883
1059
  check = get_check(check)
884
1060
  if fast_list or (other_args and "--fast-list" in other_args):
885
1061
  warnings.warn(
886
- "It's not recommended to use --fast-list with size_files as the entire repository has to be listed"
1062
+ "It's not recommended to use --fast-list with size_files as this will perform poorly on large repositories since the entire repository has to be scanned."
887
1063
  )
888
1064
  files = list(files)
889
1065
  all_files: list[File] = []
@@ -0,0 +1,72 @@
1
+ import warnings
2
+
3
+ from botocore.client import BaseClient
4
+
5
+ from rclone_api.s3.basic_ops import (
6
+ download_file,
7
+ head,
8
+ list_bucket_contents,
9
+ upload_file,
10
+ )
11
+ from rclone_api.s3.chunk_uploader import MultiUploadResult, upload_file_multipart
12
+ from rclone_api.s3.create import create_s3_client
13
+ from rclone_api.s3.types import S3Credentials, S3MutliPartUploadConfig, S3UploadTarget
14
+
15
+ _MIN_THRESHOLD_FOR_CHUNKING = 5 * 1024 * 1024
16
+
17
+
18
+ class S3Client:
19
+ def __init__(self, credentials: S3Credentials):
20
+ self.credentials: S3Credentials = credentials
21
+ self.client: BaseClient = create_s3_client(credentials)
22
+
23
+ def list_bucket_contents(self, bucket_name: str) -> None:
24
+ list_bucket_contents(self.client, bucket_name)
25
+
26
+ def upload_file(self, target: S3UploadTarget) -> Exception | None:
27
+ bucket_name = target.bucket_name
28
+ file_path = target.src_file
29
+ object_name = target.s3_key
30
+ return upload_file(
31
+ s3_client=self.client,
32
+ bucket_name=bucket_name,
33
+ file_path=file_path,
34
+ object_name=object_name,
35
+ )
36
+
37
+ def download_file(self, bucket_name: str, object_name: str, file_path: str) -> None:
38
+ download_file(self.client, bucket_name, object_name, file_path)
39
+
40
+ def head(self, bucket_name: str, object_name: str) -> dict | None:
41
+ return head(self.client, bucket_name, object_name)
42
+
43
+ def upload_file_multipart(
44
+ self,
45
+ upload_target: S3UploadTarget,
46
+ upload_config: S3MutliPartUploadConfig,
47
+ ) -> MultiUploadResult:
48
+ filesize = upload_target.src_file.stat().st_size
49
+ if filesize < _MIN_THRESHOLD_FOR_CHUNKING:
50
+ warnings.warn(
51
+ f"File size {filesize} is less than the minimum threshold for chunking ({_MIN_THRESHOLD_FOR_CHUNKING}), switching to single threaded upload."
52
+ )
53
+ err = self.upload_file(upload_target)
54
+ if err:
55
+ raise err
56
+ return MultiUploadResult.UPLOADED_FRESH
57
+ chunk_size = upload_config.chunk_size
58
+ retries = upload_config.retries
59
+ resume_path_json = upload_config.resume_path_json
60
+ max_chunks_before_suspension = upload_config.max_chunks_before_suspension
61
+ bucket_name = upload_target.bucket_name
62
+ out = upload_file_multipart(
63
+ s3_client=self.client,
64
+ bucket_name=bucket_name,
65
+ file_path=upload_target.src_file,
66
+ object_name=upload_target.s3_key,
67
+ resumable_info_path=resume_path_json,
68
+ chunk_size=chunk_size,
69
+ retries=retries,
70
+ max_chunks_before_suspension=max_chunks_before_suspension,
71
+ )
72
+ return out