rclone-api 1.0.89__tar.gz → 1.0.92__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. {rclone_api-1.0.89 → rclone_api-1.0.92}/PKG-INFO +1 -1
  2. {rclone_api-1.0.89 → rclone_api-1.0.92}/pyproject.toml +3 -2
  3. rclone_api-1.0.92/src/rclone_api/cmd/copy_large_s3.py +99 -0
  4. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/config.py +16 -4
  5. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/rclone.py +44 -11
  6. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/s3/api.py +32 -20
  7. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/s3/create.py +1 -3
  8. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/s3/types.py +4 -0
  9. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/util.py +9 -0
  10. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api.egg-info/PKG-INFO +1 -1
  11. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api.egg-info/SOURCES.txt +1 -0
  12. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api.egg-info/entry_points.txt +1 -0
  13. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_mounted_ranged_download.py +1 -1
  14. {rclone_api-1.0.89 → rclone_api-1.0.92}/.aiderignore +0 -0
  15. {rclone_api-1.0.89 → rclone_api-1.0.92}/.github/workflows/lint.yml +0 -0
  16. {rclone_api-1.0.89 → rclone_api-1.0.92}/.github/workflows/push_macos.yml +0 -0
  17. {rclone_api-1.0.89 → rclone_api-1.0.92}/.github/workflows/push_ubuntu.yml +0 -0
  18. {rclone_api-1.0.89 → rclone_api-1.0.92}/.github/workflows/push_win.yml +0 -0
  19. {rclone_api-1.0.89 → rclone_api-1.0.92}/.gitignore +0 -0
  20. {rclone_api-1.0.89 → rclone_api-1.0.92}/.pylintrc +0 -0
  21. {rclone_api-1.0.89 → rclone_api-1.0.92}/.vscode/launch.json +0 -0
  22. {rclone_api-1.0.89 → rclone_api-1.0.92}/.vscode/settings.json +0 -0
  23. {rclone_api-1.0.89 → rclone_api-1.0.92}/.vscode/tasks.json +0 -0
  24. {rclone_api-1.0.89 → rclone_api-1.0.92}/LICENSE +0 -0
  25. {rclone_api-1.0.89 → rclone_api-1.0.92}/MANIFEST.in +0 -0
  26. {rclone_api-1.0.89 → rclone_api-1.0.92}/README.md +0 -0
  27. {rclone_api-1.0.89 → rclone_api-1.0.92}/clean +0 -0
  28. {rclone_api-1.0.89 → rclone_api-1.0.92}/install +0 -0
  29. {rclone_api-1.0.89 → rclone_api-1.0.92}/lint +0 -0
  30. {rclone_api-1.0.89 → rclone_api-1.0.92}/requirements.testing.txt +0 -0
  31. {rclone_api-1.0.89 → rclone_api-1.0.92}/setup.cfg +0 -0
  32. {rclone_api-1.0.89 → rclone_api-1.0.92}/setup.py +0 -0
  33. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/__init__.py +0 -0
  34. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/assets/example.txt +0 -0
  35. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/cli.py +0 -0
  36. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/cmd/list_files.py +0 -0
  37. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/completed_process.py +0 -0
  38. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/convert.py +0 -0
  39. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/deprecated.py +0 -0
  40. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/diff.py +0 -0
  41. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/dir.py +0 -0
  42. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/dir_listing.py +0 -0
  43. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/exec.py +0 -0
  44. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/file.py +0 -0
  45. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/filelist.py +0 -0
  46. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/group_files.py +0 -0
  47. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/process.py +0 -0
  48. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/remote.py +0 -0
  49. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/rpath.py +0 -0
  50. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/s3/basic_ops.py +0 -0
  51. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/s3/chunk_uploader.py +0 -0
  52. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/scan_missing_folders.py +0 -0
  53. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/types.py +0 -0
  54. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api/walk.py +0 -0
  55. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api.egg-info/dependency_links.txt +0 -0
  56. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api.egg-info/requires.txt +0 -0
  57. {rclone_api-1.0.89 → rclone_api-1.0.92}/src/rclone_api.egg-info/top_level.txt +0 -0
  58. {rclone_api-1.0.89 → rclone_api-1.0.92}/test +0 -0
  59. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/archive/test_paramiko.py.disabled +0 -0
  60. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_cmd_list_files.py +0 -0
  61. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_copy.py +0 -0
  62. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_copy_files.py +0 -0
  63. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_diff.py +0 -0
  64. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_group_files.py +0 -0
  65. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_is_synced.py +0 -0
  66. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_ls.py +0 -0
  67. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_mount.py +0 -0
  68. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_mount_s3.py +0 -0
  69. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_mount_webdav.py +0 -0
  70. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_obscure.py +0 -0
  71. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_rclone_config.py +0 -0
  72. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_remote_control.py +0 -0
  73. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_remotes.py +0 -0
  74. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_s3.py +0 -0
  75. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_scan_missing_folders.py +0 -0
  76. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_size_files.py +0 -0
  77. {rclone_api-1.0.89 → rclone_api-1.0.92}/tests/test_walk.py +0 -0
  78. {rclone_api-1.0.89 → rclone_api-1.0.92}/tox.ini +0 -0
  79. {rclone_api-1.0.89 → rclone_api-1.0.92}/upload_package.sh +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: rclone_api
3
- Version: 1.0.89
3
+ Version: 1.0.92
4
4
  Summary: rclone api in python
5
5
  Home-page: https://github.com/zackees/rclone-api
6
6
  License: BSD 3-Clause License
@@ -21,7 +21,7 @@ dependencies = [
21
21
  ]
22
22
 
23
23
  # Change this with the version number bump.
24
- version = "1.0.89"
24
+ version = "1.0.92"
25
25
 
26
26
  [tool.setuptools]
27
27
  package-dir = {"" = "src"}
@@ -51,4 +51,5 @@ ignore_missing_imports = true
51
51
  disable_error_code = ["import-untyped"]
52
52
 
53
53
  [project.scripts]
54
- rclone-api-listfiles = "rclone_api.cmd.list_files:main"
54
+ rclone-api-listfiles = "rclone_api.cmd.list_files:main"
55
+ rclone-api-copylarge-s3 = "rclone_api.cmd.copy_large_s3:main"
@@ -0,0 +1,99 @@
1
+ import argparse
2
+ from dataclasses import dataclass
3
+ from pathlib import Path
4
+
5
+ from rclone_api import MultiUploadResult, Rclone
6
+
7
+ _1MB = 1024 * 1024
8
+
9
+
10
+ @dataclass
11
+ class Args:
12
+ config_path: Path
13
+ src: str
14
+ dst: str
15
+ chunk_size_mb: int
16
+ read_concurrent_chunks: int
17
+ retries: int
18
+ save_state_json: Path
19
+
20
+
21
+ def list_files(rclone: Rclone, path: str):
22
+ """List files in a remote path."""
23
+ for dirlisting in rclone.walk(path):
24
+ for file in dirlisting.files:
25
+ print(file.path)
26
+
27
+
28
+ def _parse_args() -> Args:
29
+ parser = argparse.ArgumentParser(description="List files in a remote path.")
30
+ parser.add_argument("src", help="File to copy")
31
+ parser.add_argument("dst", help="Destination file")
32
+ parser.add_argument(
33
+ "--config", help="Path to rclone config file", type=Path, required=True
34
+ )
35
+ parser.add_argument(
36
+ "--chunk-size-mb", help="Chunk size in MB", type=int, default=256
37
+ )
38
+ parser.add_argument(
39
+ "--read-concurrent-chunks",
40
+ help="Maximum number of chunks to read",
41
+ type=int,
42
+ default=4,
43
+ )
44
+ parser.add_argument("--retries", help="Number of retries", type=int, default=3)
45
+ parser.add_argument(
46
+ "--resumable-json",
47
+ help="Path to resumable JSON file",
48
+ type=Path,
49
+ default="resume.json",
50
+ )
51
+
52
+ args = parser.parse_args()
53
+ out = Args(
54
+ config_path=Path(args.config),
55
+ src=args.src,
56
+ dst=args.dst,
57
+ chunk_size_mb=args.chunk_size_mb,
58
+ read_concurrent_chunks=args.read_concurrent_chunks,
59
+ retries=args.retries,
60
+ save_state_json=args.resumable_json,
61
+ )
62
+ return out
63
+
64
+
65
+ def main() -> int:
66
+ """Main entry point."""
67
+ args = _parse_args()
68
+ rclone = Rclone(rclone_conf=args.config_path)
69
+ rslt: MultiUploadResult = rclone.copy_file_resumable_s3(
70
+ src=args.src,
71
+ dst=args.dst,
72
+ chunk_size=args.chunk_size_mb * _1MB,
73
+ concurrent_chunks=args.read_concurrent_chunks,
74
+ retries=args.retries,
75
+ save_state_json=args.save_state_json,
76
+ )
77
+ print(rslt)
78
+ return 0
79
+
80
+
81
+ if __name__ == "__main__":
82
+ import os
83
+ import sys
84
+
85
+ here = Path(__file__).parent
86
+ project_root = here.parent.parent.parent
87
+ print(f"project_root: {project_root}")
88
+ os.chdir(str(project_root))
89
+ cwd = Path(__file__).parent
90
+ print(f"cwd: {cwd}")
91
+ sys.argv.append("--config")
92
+ sys.argv.append("rclone.conf")
93
+ sys.argv.append(
94
+ "45061:aa_misc_data/aa_misc_data/world_lending_library_2024_11.tar.zst.torrent"
95
+ )
96
+ sys.argv.append(
97
+ "dst:TorrentBooks/aa_misc_data/aa_misc_data/world_lending_library_2024_11.tar.zst.torrent"
98
+ )
99
+ main()
@@ -10,14 +10,26 @@ class Section:
10
10
  def add(self, key: str, value: str) -> None:
11
11
  self.data[key] = value
12
12
 
13
- def provider(self) -> str:
14
- return self.data["provider"]
13
+ def type(self) -> str:
14
+ return self.data["type"]
15
+
16
+ def provider(self) -> str | None:
17
+ return self.data.get("provider")
15
18
 
16
19
  def access_key_id(self) -> str:
17
- return self.data["access_key_id"]
20
+ if "access_key_id" in self.data:
21
+ return self.data["access_key_id"]
22
+ elif "account" in self.data:
23
+ return self.data["account"]
24
+ raise KeyError("No access key found")
18
25
 
19
26
  def secret_access_key(self) -> str:
20
- return self.data["secret_access_key"]
27
+ # return self.data["secret_access_key"]
28
+ if "secret_access_key" in self.data:
29
+ return self.data["secret_access_key"]
30
+ elif "key" in self.data:
31
+ return self.data["key"]
32
+ raise KeyError("No secret access key found")
21
33
 
22
34
  def endpoint(self) -> str | None:
23
35
  return self.data.get("endpoint")
@@ -679,8 +679,13 @@ class Rclone:
679
679
  concurrent_chunks: int = 4, # This setting will scale the performance of the upload
680
680
  retries: int = 3,
681
681
  max_chunks_before_suspension: int | None = None,
682
+ mount_path: Path | None = None,
682
683
  ) -> MultiUploadResult:
683
684
  """For massive files that rclone can't handle in one go, this function will copy the file in chunks to an S3 store"""
685
+ from rclone_api.s3.api import S3Client
686
+ from rclone_api.s3.create import S3Credentials
687
+ from rclone_api.util import S3PathInfo, random_str, split_s3_path
688
+
684
689
  other_args: list[str] = [
685
690
  "--no-modtime",
686
691
  "--vfs-read-wait",
@@ -695,7 +700,7 @@ class Rclone:
695
700
  str(concurrent_chunks),
696
701
  "--vfs-fast-fingerprint",
697
702
  ]
698
- mount_path = Path("rclone_api_upload_mount")
703
+ mount_path = mount_path or Path("tmp_mnts") / random_str(12)
699
704
  src_path = Path(src)
700
705
  name = src_path.name
701
706
 
@@ -708,8 +713,6 @@ class Rclone:
708
713
  other_args=other_args,
709
714
  ):
710
715
  # raise NotImplementedError("Not implemented yet")
711
- from rclone_api.s3.create import S3Credentials
712
- from rclone_api.util import S3PathInfo, split_s3_path
713
716
 
714
717
  path_info: S3PathInfo = split_s3_path(dst)
715
718
  remote = path_info.remote
@@ -723,7 +726,24 @@ class Rclone:
723
726
  )
724
727
 
725
728
  section: Section = sections[remote]
726
- provider: str = section.provider()
729
+ dst_type = section.type()
730
+ if dst_type != "s3" and dst_type != "b2":
731
+ raise ValueError(
732
+ f"Remote {remote} is not an S3 remote, it is of type {dst_type}"
733
+ )
734
+
735
+ def get_provider_str(section=section) -> str | None:
736
+ type: str = section.type()
737
+ provider: str | None = section.provider()
738
+ if provider is not None:
739
+ return provider
740
+ if type == "b2":
741
+ return S3Provider.BACKBLAZE.value
742
+ if type != "s3":
743
+ raise ValueError(f"Remote {remote} is not an S3 remote")
744
+ return S3Provider.S3.value
745
+
746
+ provider: str = get_provider_str() or S3Provider.S3.value
727
747
  provider_enum = S3Provider.from_str(provider)
728
748
 
729
749
  s3_creds: S3Credentials = S3Credentials(
@@ -732,15 +752,8 @@ class Rclone:
732
752
  secret_access_key=section.secret_access_key(),
733
753
  endpoint_url=section.endpoint(),
734
754
  )
735
- print(s3_creds)
736
- # create_s3_client
737
-
738
- print(f"Info: {section}")
739
- from rclone_api.s3.api import S3Client
740
755
 
741
756
  client = S3Client(s3_creds)
742
- print(f"Client: {client}")
743
-
744
757
  config: S3MutliPartUploadConfig = S3MutliPartUploadConfig(
745
758
  chunk_size=chunk_size,
746
759
  retries=retries,
@@ -864,6 +877,7 @@ class Rclone:
864
877
  other_args: list[str] | None = None,
865
878
  ) -> Generator[Process, None, None]:
866
879
  """Like mount, but can be used in a context manager."""
880
+ error_happened = False
867
881
  proc = self.mount(
868
882
  src,
869
883
  outdir,
@@ -875,6 +889,7 @@ class Rclone:
875
889
  try:
876
890
  yield proc
877
891
  except Exception as e:
892
+ error_happened = True
878
893
  stack_trace = traceback.format_exc()
879
894
  warnings.warn(f"Error in scoped_mount: {e}\n\nStack Trace:\n{stack_trace}")
880
895
  raise
@@ -882,6 +897,24 @@ class Rclone:
882
897
  if proc.poll() is None:
883
898
  proc.terminate()
884
899
  proc.wait()
900
+ if not error_happened and outdir.exists():
901
+ time.sleep(2)
902
+ if outdir.exists():
903
+ print(f"{outdir} mount still exists, attempting to remove")
904
+ if not _IS_WINDOWS:
905
+ # attempt
906
+ os.system(f"fusermount -u {outdir}")
907
+ os.system(f"umount {outdir}")
908
+ time.sleep(2)
909
+ if outdir.exists():
910
+ is_empty = not list(outdir.iterdir())
911
+ if not is_empty:
912
+ warnings.warn(f"Failed to unmount {outdir}")
913
+ else:
914
+ try:
915
+ outdir.rmdir()
916
+ except Exception as e:
917
+ warnings.warn(f"Failed to remove {outdir}: {e}")
885
918
 
886
919
  @deprecated("mount")
887
920
  def mount_webdav(
@@ -45,28 +45,40 @@ class S3Client:
45
45
  upload_target: S3UploadTarget,
46
46
  upload_config: S3MutliPartUploadConfig,
47
47
  ) -> MultiUploadResult:
48
- filesize = upload_target.src_file.stat().st_size
49
- if filesize < _MIN_THRESHOLD_FOR_CHUNKING:
50
- warnings.warn(
51
- f"File size {filesize} is less than the minimum threshold for chunking ({_MIN_THRESHOLD_FOR_CHUNKING}), switching to single threaded upload."
52
- )
53
- err = self.upload_file(upload_target)
54
- if err:
55
- raise err
56
- return MultiUploadResult.UPLOADED_FRESH
48
+
57
49
  chunk_size = upload_config.chunk_size
58
50
  retries = upload_config.retries
59
51
  resume_path_json = upload_config.resume_path_json
60
52
  max_chunks_before_suspension = upload_config.max_chunks_before_suspension
61
53
  bucket_name = upload_target.bucket_name
62
- out = upload_file_multipart(
63
- s3_client=self.client,
64
- bucket_name=bucket_name,
65
- file_path=upload_target.src_file,
66
- object_name=upload_target.s3_key,
67
- resumable_info_path=resume_path_json,
68
- chunk_size=chunk_size,
69
- retries=retries,
70
- max_chunks_before_suspension=max_chunks_before_suspension,
71
- )
72
- return out
54
+
55
+ try:
56
+ filesize = upload_target.src_file.stat().st_size
57
+ if filesize < _MIN_THRESHOLD_FOR_CHUNKING:
58
+ warnings.warn(
59
+ f"File size {filesize} is less than the minimum threshold for chunking ({_MIN_THRESHOLD_FOR_CHUNKING}), switching to single threaded upload."
60
+ )
61
+ err = self.upload_file(upload_target)
62
+ if err:
63
+ raise err
64
+ return MultiUploadResult.UPLOADED_FRESH
65
+
66
+ out = upload_file_multipart(
67
+ s3_client=self.client,
68
+ bucket_name=bucket_name,
69
+ file_path=upload_target.src_file,
70
+ object_name=upload_target.s3_key,
71
+ resumable_info_path=resume_path_json,
72
+ chunk_size=chunk_size,
73
+ retries=retries,
74
+ max_chunks_before_suspension=max_chunks_before_suspension,
75
+ )
76
+ return out
77
+ except Exception:
78
+ key = upload_target.s3_key
79
+ access_key_id = self.credentials.access_key_id[:4] + "..."
80
+ secret = self.credentials.secret_access_key[:4] + "..."
81
+ warnings.warn(
82
+ f"Error uploading {key} to {bucket_name} with\n access_key_id: {access_key_id}\n secret: {secret}\n"
83
+ )
84
+ raise
@@ -16,9 +16,7 @@ def _create_backblaze_s3_client(creds: S3Credentials) -> BaseClient:
16
16
  access_key = creds.access_key_id
17
17
  secret_key = creds.secret_access_key
18
18
  endpoint_url = creds.endpoint_url
19
-
20
- if region_name is not None:
21
- warnings.warn(f"Region name is not used for provider: {creds.provider}")
19
+ region_name = region_name or "https://s3.us-west-002.backblazeb2.com"
22
20
 
23
21
  session = boto3.session.Session() # type: ignore
24
22
  return session.client(
@@ -4,6 +4,7 @@ from pathlib import Path
4
4
 
5
5
 
6
6
  class S3Provider(Enum):
7
+ S3 = "s3" # generic S3
7
8
  BACKBLAZE = "b2"
8
9
  DIGITAL_OCEAN = "DigitalOcean"
9
10
 
@@ -46,6 +47,9 @@ class S3MutliPartUploadConfig:
46
47
  retries: int
47
48
  resume_path_json: Path
48
49
  max_chunks_before_suspension: int | None = None
50
+ mount_path: Path | None = (
51
+ None # If set this will be used to mount the src file, otherwise it's one is chosen automatically
52
+ )
49
53
 
50
54
 
51
55
  class MultiUploadResult(Enum):
@@ -141,6 +141,8 @@ def wait_for_mount(path: Path, mount_process: Any, timeout: int = 10) -> None:
141
141
  # how many files?
142
142
  dircontents = os.listdir(str(path))
143
143
  if len(dircontents) > 0:
144
+ print(f"Mount point {path}, waiting 5 seconds for files to appear.")
145
+ time.sleep(5)
144
146
  return
145
147
  time.sleep(1)
146
148
 
@@ -164,3 +166,10 @@ def split_s3_path(path: str) -> S3PathInfo:
164
166
  assert bucket
165
167
  assert key
166
168
  return S3PathInfo(remote=remote, bucket=bucket, key=key)
169
+
170
+
171
+ def random_str(length: int) -> str:
172
+ import random
173
+ import string
174
+
175
+ return "".join(random.choices(string.ascii_lowercase + string.digits, k=length))
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: rclone_api
3
- Version: 1.0.89
3
+ Version: 1.0.92
4
4
  Summary: rclone api in python
5
5
  Home-page: https://github.com/zackees/rclone-api
6
6
  License: BSD 3-Clause License
@@ -48,6 +48,7 @@ src/rclone_api.egg-info/entry_points.txt
48
48
  src/rclone_api.egg-info/requires.txt
49
49
  src/rclone_api.egg-info/top_level.txt
50
50
  src/rclone_api/assets/example.txt
51
+ src/rclone_api/cmd/copy_large_s3.py
51
52
  src/rclone_api/cmd/list_files.py
52
53
  src/rclone_api/s3/api.py
53
54
  src/rclone_api/s3/basic_ops.py
@@ -1,2 +1,3 @@
1
1
  [console_scripts]
2
+ rclone-api-copylarge-s3 = rclone_api.cmd.copy_large_s3:main
2
3
  rclone-api-listfiles = rclone_api.cmd.list_files:main
@@ -118,7 +118,7 @@ class RcloneCopyResumableFileToS3(unittest.TestCase):
118
118
  )
119
119
  os.environ["RCLONE_API_VERBOSE"] = "1"
120
120
 
121
- @unittest.skipIf(_IS_WINDOWS, "Test not enabled on Windows")
121
+ # @unittest.skipIf(_IS_WINDOWS, "Test not enabled on Windows")
122
122
  def test_upload_chunks(self) -> None:
123
123
  """Test basic Webdav serve functionality."""
124
124
  # config = _generate_rclone_config(PORT)
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes