rclone-api 1.0.40__tar.gz → 1.0.42__tar.gz


Potentially problematic release: this version of rclone-api might be problematic.

Files changed (64)
  1. {rclone_api-1.0.40 → rclone_api-1.0.42}/PKG-INFO +1 -1
  2. {rclone_api-1.0.40 → rclone_api-1.0.42}/pyproject.toml +1 -1
  3. rclone_api-1.0.42/src/rclone_api/group_files.py +145 -0
  4. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/rclone.py +81 -48
  5. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/util.py +0 -12
  6. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/PKG-INFO +1 -1
  7. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/SOURCES.txt +2 -0
  8. rclone_api-1.0.42/tests/test_group_files.py +81 -0
  9. {rclone_api-1.0.40 → rclone_api-1.0.42}/.aiderignore +0 -0
  10. {rclone_api-1.0.40 → rclone_api-1.0.42}/.github/workflows/lint.yml +0 -0
  11. {rclone_api-1.0.40 → rclone_api-1.0.42}/.github/workflows/push_macos.yml +0 -0
  12. {rclone_api-1.0.40 → rclone_api-1.0.42}/.github/workflows/push_ubuntu.yml +0 -0
  13. {rclone_api-1.0.40 → rclone_api-1.0.42}/.github/workflows/push_win.yml +0 -0
  14. {rclone_api-1.0.40 → rclone_api-1.0.42}/.gitignore +0 -0
  15. {rclone_api-1.0.40 → rclone_api-1.0.42}/.pylintrc +0 -0
  16. {rclone_api-1.0.40 → rclone_api-1.0.42}/.vscode/launch.json +0 -0
  17. {rclone_api-1.0.40 → rclone_api-1.0.42}/.vscode/settings.json +0 -0
  18. {rclone_api-1.0.40 → rclone_api-1.0.42}/.vscode/tasks.json +0 -0
  19. {rclone_api-1.0.40 → rclone_api-1.0.42}/LICENSE +0 -0
  20. {rclone_api-1.0.40 → rclone_api-1.0.42}/MANIFEST.in +0 -0
  21. {rclone_api-1.0.40 → rclone_api-1.0.42}/README.md +0 -0
  22. {rclone_api-1.0.40 → rclone_api-1.0.42}/clean +0 -0
  23. {rclone_api-1.0.40 → rclone_api-1.0.42}/install +0 -0
  24. {rclone_api-1.0.40 → rclone_api-1.0.42}/lint +0 -0
  25. {rclone_api-1.0.40 → rclone_api-1.0.42}/requirements.testing.txt +0 -0
  26. {rclone_api-1.0.40 → rclone_api-1.0.42}/setup.cfg +0 -0
  27. {rclone_api-1.0.40 → rclone_api-1.0.42}/setup.py +0 -0
  28. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/__init__.py +0 -0
  29. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/assets/example.txt +0 -0
  30. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/cli.py +0 -0
  31. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/cmd/list_files.py +0 -0
  32. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/completed_process.py +0 -0
  33. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/config.py +0 -0
  34. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/convert.py +0 -0
  35. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/deprecated.py +0 -0
  36. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/diff.py +0 -0
  37. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/dir.py +0 -0
  38. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/dir_listing.py +0 -0
  39. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/exec.py +0 -0
  40. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/file.py +0 -0
  41. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/filelist.py +0 -0
  42. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/process.py +0 -0
  43. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/remote.py +0 -0
  44. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/rpath.py +0 -0
  45. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/walk.py +0 -0
  46. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/dependency_links.txt +0 -0
  47. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/entry_points.txt +0 -0
  48. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/requires.txt +0 -0
  49. {rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/top_level.txt +0 -0
  50. {rclone_api-1.0.40 → rclone_api-1.0.42}/test +0 -0
  51. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_cmd_list_files.py +0 -0
  52. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_copy.py +0 -0
  53. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_diff.py +0 -0
  54. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_is_synced.py +0 -0
  55. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_ls.py +0 -0
  56. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_mount.py +0 -0
  57. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_mount_s3.py +0 -0
  58. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_mount_webdav.py +0 -0
  59. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_obscure.py +0 -0
  60. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_remotes.py +0 -0
  61. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_serve_webdav.py +0 -0
  62. {rclone_api-1.0.40 → rclone_api-1.0.42}/tests/test_walk.py +0 -0
  63. {rclone_api-1.0.40 → rclone_api-1.0.42}/tox.ini +0 -0
  64. {rclone_api-1.0.40 → rclone_api-1.0.42}/upload_package.sh +0 -0
{rclone_api-1.0.40 → rclone_api-1.0.42}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: rclone_api
-Version: 1.0.40
+Version: 1.0.42
 Summary: rclone api in python
 Home-page: https://github.com/zackees/rclone-api
 Maintainer: Zachary Vorhies
{rclone_api-1.0.40 → rclone_api-1.0.42}/pyproject.toml
@@ -15,7 +15,7 @@ dependencies = [
     "python-dotenv>=1.0.0",
 ]
 # Change this with the version number bump.
-version = "1.0.40"
+version = "1.0.42"

 [tool.setuptools]
 package-dir = {"" = "src"}
rclone_api-1.0.42/src/rclone_api/group_files.py (new file)
@@ -0,0 +1,145 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class FilePathParts:
+    """File path dataclass."""
+
+    remote: str
+    parents: list[str]
+    name: str
+
+
+def parse_file(file_path: str) -> FilePathParts:
+    """Parse file path into parts."""
+    assert not file_path.endswith("/"), "This looks like a directory path"
+    parts = file_path.split(":")
+    remote = parts[0]
+    path = parts[1]
+    if path.startswith("/"):
+        path = path[1:]
+    parents = path.split("/")
+    if len(parents) == 1:
+        return FilePathParts(remote=remote, parents=[], name=parents[0])
+    name = parents.pop()
+    return FilePathParts(remote=remote, parents=parents, name=name)
+
+
+class TreeNode:
+    def __init__(
+        self,
+        name: str,
+        child_nodes: dict[str, "TreeNode"] | None = None,
+        files: list[str] | None = None,
+        parent: "TreeNode | None" = None,
+    ):
+        self.name = name
+        self.child_nodes = child_nodes or {}
+        self.files = files or []
+        self.count = 0
+        self.parent = parent
+
+    def add_count(self):
+        self.count += 1
+        if self.parent:
+            self.parent.add_count()
+
+    def get_path(self) -> str:
+        paths_reversed: list[str] = [self.name]
+        node: TreeNode | None = self
+        assert node is not None
+        while node := node.parent:
+            paths_reversed.append(node.name)
+        return "/".join(reversed(paths_reversed))
+
+    def get_child_subpaths(self, parent_path: str | None = None) -> list[str]:
+        paths: list[str] = []
+        for child in self.child_nodes.values():
+            child_paths = child.get_child_subpaths(parent_path=child.name)
+            paths.extend(child_paths)
+        for file in self.files:
+            if parent_path:
+                file = f"{parent_path}/{file}"
+            paths.append(file)
+        return paths
+
+    def __repr__(self, indent: int = 0) -> str:
+        # return f"{self.name}: {self.count}, {len(self.children)}"
+        leftpad = " " * indent
+        msg = f"{leftpad}{self.name}: {self.count}"
+        if self.child_nodes:
+            # msg += f"\n {len(self.children)} children"
+            msg += "\n"
+            for child in self.child_nodes.values():
+                if isinstance(child, TreeNode):
+                    msg += child.__repr__(indent + 2)
+                else:
+                    msg += f"{leftpad} {child}\n"
+        return msg
+
+
+def _merge(node: TreeNode, parent_path: str, out: dict[str, list[str]]) -> None:
+    parent_path = parent_path + "/" + node.name
+    this_count = node.count
+    child_count = 0
+    children_has_files = False
+    if not node.child_nodes and not node.files:
+        return  # done
+
+    if node.files:
+        children_has_files = True
+        filelist = out.setdefault(parent_path, [])
+        # for file in node.files:
+        #     filelist.append(file)
+        # out[parent_path] = filelist
+        paths = node.get_child_subpaths()
+        for path in paths:
+            filelist.append(path)
+        out[parent_path] = filelist
+        return
+
+    for child in node.child_nodes.values():
+        child_count += child.count
+    child_count += len(node.files)
+    for file in node.files:
+        child_count += 1
+
+    if child_count != this_count or children_has_files:
+        # print(
+        #     f"Cannot merge {node.name} because different counts or has children with files"
+        # )
+        filelist = out.setdefault(parent_path, [])
+        for child in node.child_nodes.values():
+            subpaths = child.get_child_subpaths()
+            filelist.extend(subpaths)
+        out[parent_path] = filelist
+    else:
+        for child in node.child_nodes.values():
+            _merge(child, parent_path, out)
+
+
+def group_files(files: list[str]) -> dict[str, list[str]]:
+    """split between filename and parent directory path"""
+    tree: dict[str, TreeNode] = {}
+    for file in files:
+        parts = parse_file(file)
+        remote = parts.remote
+        node: TreeNode = tree.setdefault(remote, TreeNode(remote))
+        for parent in parts.parents:
+            is_last = parent == parts.parents[-1]
+            node = node.child_nodes.setdefault(parent, TreeNode(parent, parent=node))
+            if is_last:
+                node.files.append(parts.name)
+                node.add_count()
+    outpaths: dict[str, list[str]] = {}
+    for _, node in tree.items():
+        _merge(node, "", outpaths)
+    out: dict[str, list[str]] = {}
+    for path, files in outpaths.items():
+        # fixup path
+        assert path.startswith("/"), "Path should start with /"
+        path = path[1:]
+        # replace the first / with :
+        path = path.replace("/", ":", 1)
+        out[path] = files
+    return out
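The new group_files helper is exercised by tests/test_group_files.py (included further down in this diff). A minimal usage sketch, with the expected output taken from those bundled tests:

from rclone_api.group_files import group_files

# Paths are "remote:path/to/file"; files sharing a directory collapse into one key.
groups = group_files(
    [
        "dst:Bucket/subdir/file1.txt",
        "dst:Bucket/subdir/file2.txt",
        "dst:Bucket/subdir2/file3.txt",
    ]
)
# Per the bundled tests:
# {
#     "dst:Bucket/subdir": ["file1.txt", "file2.txt"],
#     "dst:Bucket/subdir2": ["file3.txt"],
# }
print(groups)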
{rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/rclone.py
@@ -5,6 +5,7 @@ Unit test file.
 import subprocess
 import time
 import warnings
+from concurrent.futures import Future, ThreadPoolExecutor
 from enum import Enum
 from fnmatch import fnmatch
 from pathlib import Path
@@ -20,12 +21,20 @@ from rclone_api.diff import DiffItem, diff_stream_from_running_process
 from rclone_api.dir_listing import DirListing
 from rclone_api.exec import RcloneExec
 from rclone_api.file import File
+from rclone_api.group_files import group_files
 from rclone_api.process import Process
 from rclone_api.remote import Remote
 from rclone_api.rpath import RPath
-from rclone_api.util import get_rclone_exe, partition_files, to_path, wait_for_mount
+from rclone_api.util import (
+    get_rclone_exe,
+    get_verbose,
+    to_path,
+    wait_for_mount,
+)
 from rclone_api.walk import walk

+EXECUTOR = ThreadPoolExecutor(16)
+

 class ModTimeStrategy(Enum):
     USE_SERVER_MODTIME = "use-server-modtime"
@@ -197,7 +206,7 @@ class Rclone:
         cmd_list: list[str] = ["copyto", src, dst]
         self._run(cmd_list)

-    def copyfiles(self, files: str | File | list[str] | list[File]) -> None:
+    def copyfiles(self, files: str | File | list[str] | list[File], check=True) -> None:
         """Copy multiple files from source to destination.

         Warning - slow.
@@ -209,31 +218,42 @@
         if len(payload) == 0:
             return

-        datalists: dict[str, list[str]] = partition_files(payload)
+        datalists: dict[str, list[str]] = group_files(payload)
         out: subprocess.CompletedProcess | None = None

+        futures: list[Future] = []
+
         for remote, files in datalists.items():
-            with TemporaryDirectory() as tmpdir:
-                include_files_txt = Path(tmpdir) / "include_files.txt"
-                include_files_txt.write_text("\n".join(files), encoding="utf-8")
-
-                # print(include_files_txt)
-                cmd_list: list[str] = [
-                    "delete",
-                    remote,
-                    "--files-from",
-                    str(include_files_txt),
-                    "--checkers",
-                    "1000",
-                    "--transfers",
-                    "1000",
-                ]
-                out = self._run(cmd_list)
-                if out.returncode != 0:
-                    print(out)
-                    raise ValueError(f"Error deleting files: {out.stderr}")

-        assert out is not None
+            def _task(files=files) -> subprocess.CompletedProcess:
+                with TemporaryDirectory() as tmpdir:
+                    include_files_txt = Path(tmpdir) / "include_files.txt"
+                    include_files_txt.write_text("\n".join(files), encoding="utf-8")
+
+                    # print(include_files_txt)
+                    cmd_list: list[str] = [
+                        "delete",
+                        remote,
+                        "--files-from",
+                        str(include_files_txt),
+                        "--checkers",
+                        "1000",
+                        "--transfers",
+                        "1000",
+                    ]
+                    out = self._run(cmd_list)
+                    return out
+
+            fut: Future = EXECUTOR.submit(_task)
+            futures.append(fut)
+        for fut in futures:
+            out = fut.result()
+            assert out is not None
+            if out.returncode != 0:
+                if check:
+                    raise ValueError(f"Error deleting files: {out.stderr}")
+                else:
+                    warnings.warn(f"Error deleting files: {out.stderr}")

     def copy(self, src: Dir | str, dst: Dir | str) -> CompletedProcess:
         """Copy files from source to destination.
@@ -263,6 +283,7 @@ class Rclone:
         files: str | File | list[str] | list[File],
         check=True,
         rmdirs=False,
+        verbose: bool | None = None,
         other_args: list[str] | None = None,
     ) -> CompletedProcess:
         """Delete a directory"""
@@ -276,41 +297,53 @@
             )
             return CompletedProcess.from_subprocess(cp)

-        datalists: dict[str, list[str]] = partition_files(payload)
-        out: subprocess.CompletedProcess | None = None
-
+        datalists: dict[str, list[str]] = group_files(payload)
         completed_processes: list[subprocess.CompletedProcess] = []
+        verbose = get_verbose(verbose)
+
+        futures: list[Future] = []

         for remote, files in datalists.items():
-            with TemporaryDirectory() as tmpdir:
-                include_files_txt = Path(tmpdir) / "include_files.txt"
-                include_files_txt.write_text("\n".join(files), encoding="utf-8")
-
-                # print(include_files_txt)
-                cmd_list: list[str] = [
-                    "delete",
-                    remote,
-                    "--files-from",
-                    str(include_files_txt),
-                    "--checkers",
-                    "1000",
-                    "--transfers",
-                    "1000",
-                ]
-                if rmdirs:
-                    cmd_list.append("--rmdirs")
-                if other_args:
-                    cmd_list += other_args
-                out = self._run(cmd_list)
-                completed_processes.append(out)
+
+            def _task(files=files, check=check) -> subprocess.CompletedProcess:
+                with TemporaryDirectory() as tmpdir:
+                    include_files_txt = Path(tmpdir) / "include_files.txt"
+                    include_files_txt.write_text("\n".join(files), encoding="utf-8")
+
+                    # print(include_files_txt)
+                    cmd_list: list[str] = [
+                        "delete",
+                        remote,
+                        "--files-from",
+                        str(include_files_txt),
+                        "--checkers",
+                        "1000",
+                        "--transfers",
+                        "1000",
+                    ]
+                    if verbose:
+                        cmd_list.append("-vvvv")
+                    if rmdirs:
+                        cmd_list.append("--rmdirs")
+                    if other_args:
+                        cmd_list += other_args
+                    out = self._run(cmd_list, check=check)
                     if out.returncode != 0:
                         if check:
                             completed_processes.append(out)
                             raise ValueError(f"Error deleting files: {out}")
                         else:
                             warnings.warn(f"Error deleting files: {out}")
+                    return out
+
+            fut: Future = EXECUTOR.submit(_task)
+            futures.append(fut)
+
+        for fut in futures:
+            out = fut.result()
+            assert out is not None
+            completed_processes.append(out)

-        assert out is not None
         return CompletedProcess(completed_processes)

     @deprecated("delete_files")
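The copyfiles and delete_files changes above share one shape: each remote-level group produced by group_files is submitted to the module-level ThreadPoolExecutor, and the resulting Futures are drained afterwards so errors surface per batch. A standalone sketch of that fan-out/gather pattern (illustrative only; run_batch and the pool size here are stand-ins, not part of the rclone_api API):

import subprocess
from concurrent.futures import Future, ThreadPoolExecutor

EXECUTOR = ThreadPoolExecutor(16)  # mirrors the shared pool added in rclone.py

def run_batch(remote: str, files: list[str]) -> subprocess.CompletedProcess:
    # Stand-in for the per-remote rclone command built inside _task().
    return subprocess.run(["echo", remote, *files], capture_output=True, text=True)

def fan_out(groups: dict[str, list[str]], check: bool = True) -> list[subprocess.CompletedProcess]:
    # Submit one task per remote group, then gather results in submission order.
    futures: list[Future] = [
        EXECUTOR.submit(run_batch, remote, files) for remote, files in groups.items()
    ]
    results: list[subprocess.CompletedProcess] = []
    for fut in futures:
        out = fut.result()  # blocks; re-raises any exception from the worker thread
        if out.returncode != 0 and check:
            raise ValueError(f"Batch failed: {out.stderr}")
        results.append(out)
    return results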
{rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api/util.py
@@ -129,15 +129,3 @@ def wait_for_mount(path: Path, mount_process: Any, timeout: int = 60) -> None:
         if path.exists():
             return
     raise TimeoutError(f"Path {path} did not exist after {timeout} seconds")
-
-
-def partition_files(files: list[str]) -> dict[str, list[str]]:
-    """split between filename and parent directory path"""
-    datalists: dict[str, list[str]] = {}
-    for f in files:
-        base = os.path.basename(f)
-        parent_path = os.path.dirname(f)
-        if parent_path not in datalists:
-            datalists[parent_path] = []
-        datalists[parent_path].append(base)
-    return datalists
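For context on the helper swap: the removed partition_files split each path at its last slash with os.path, so every distinct parent directory became its own key. group_files instead builds a per-remote tree and, as the _merge logic above reads, rolls a whole subtree into a single key once a directory itself holds files, keeping deeper entries as relative subpaths. A hedged before/after sketch (the nested-output shape is inferred from _merge rather than covered by the bundled tests):

from rclone_api.group_files import group_files

files = [
    "dst:Bucket/data/file1.txt",
    "dst:Bucket/data/raw/file2.txt",
]

# Old partition_files: one key per literal parent directory,
# i.e. "dst:Bucket/data" and "dst:Bucket/data/raw".
# New group_files (inferred): "data" holds a file, so its subtree is grouped
# under one remote-rooted key with relative subpaths as values:
# {"dst:Bucket/data": ["raw/file2.txt", "file1.txt"]}
print(group_files(files))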
{rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: rclone_api
-Version: 1.0.40
+Version: 1.0.42
 Summary: rclone api in python
 Home-page: https://github.com/zackees/rclone-api
 Maintainer: Zachary Vorhies
{rclone_api-1.0.40 → rclone_api-1.0.42}/src/rclone_api.egg-info/SOURCES.txt
@@ -32,6 +32,7 @@ src/rclone_api/dir_listing.py
 src/rclone_api/exec.py
 src/rclone_api/file.py
 src/rclone_api/filelist.py
+src/rclone_api/group_files.py
 src/rclone_api/process.py
 src/rclone_api/rclone.py
 src/rclone_api/remote.py
@@ -49,6 +50,7 @@ src/rclone_api/cmd/list_files.py
 tests/test_cmd_list_files.py
 tests/test_copy.py
 tests/test_diff.py
+tests/test_group_files.py
 tests/test_is_synced.py
 tests/test_ls.py
 tests/test_mount.py
rclone_api-1.0.42/tests/test_group_files.py (new file)
@@ -0,0 +1,81 @@
+"""
+Unit test file.
+"""
+
+import unittest
+
+from rclone_api.group_files import group_files
+
+
+class GroupFilestest(unittest.TestCase):
+    """Test rclone functionality."""
+
+    def test_simple_group_files(self) -> None:
+        files = [
+            "dst:Bucket/subdir/file1.txt",
+            "dst:Bucket/subdir/file2.txt",
+        ]
+        groups: dict[str, list[str]] = group_files(files)
+        self.assertEqual(len(groups), 1)
+        # dst:/Bucket/subdir should be the key
+        self.assertIn("dst:Bucket/subdir", groups)
+        self.assertEqual(len(groups["dst:Bucket/subdir"]), 2)
+        expected_files = [
+            "file1.txt",
+            "file2.txt",
+        ]
+        self.assertIn(expected_files[0], groups["dst:Bucket/subdir"])
+        self.assertIn(expected_files[1], groups["dst:Bucket/subdir"])
+        print("done")
+
+    def test_different_paths(self) -> None:
+        files = [
+            "dst:Bucket/subdir/file1.txt",
+            "dst:Bucket/subdir2/file2.txt",
+        ]
+        groups: dict[str, list[str]] = group_files(files)
+        self.assertEqual(len(groups), 2)
+        # dst:/Bucket/subdir should be the key
+        self.assertIn("dst:Bucket/subdir", groups)
+        self.assertEqual(len(groups["dst:Bucket/subdir"]), 1)
+        expected_files = [
+            "file1.txt",
+        ]
+        self.assertIn(expected_files[0], groups["dst:Bucket/subdir"])
+        # dst:/Bucket/subdir2 should be the key
+        self.assertIn("dst:Bucket/subdir2", groups)
+        self.assertEqual(len(groups["dst:Bucket/subdir2"]), 1)
+
+    def test_two_big_directories(self) -> None:
+        files = [
+            "dst:Bucket/subdir/file1.txt",
+            "dst:Bucket/subdir/file2.txt",
+            "dst:Bucket/subdir2/file3.txt",
+            "dst:Bucket/subdir2/file4.txt",
+        ]
+
+        groups: dict[str, list[str]] = group_files(files)
+        self.assertEqual(len(groups), 2)
+        # dst:/Bucket/subdir should be the key
+        self.assertIn("dst:Bucket/subdir", groups)
+        self.assertEqual(len(groups["dst:Bucket/subdir"]), 2)
+        expected_files = [
+            "file1.txt",
+            "file2.txt",
+        ]
+        self.assertIn(expected_files[0], groups["dst:Bucket/subdir"])
+        self.assertIn(expected_files[1], groups["dst:Bucket/subdir"])
+        # dst:/Bucket/subdir2 should be the key
+        self.assertIn("dst:Bucket/subdir2", groups)
+        self.assertEqual(len(groups["dst:Bucket/subdir2"]), 2)
+        expected_files = [
+            "file3.txt",
+            "file4.txt",
+        ]
+        self.assertIn(expected_files[0], groups["dst:Bucket/subdir2"])
+        self.assertIn(expected_files[1], groups["dst:Bucket/subdir2"])
+        print("done")
+
+
+if __name__ == "__main__":
+    unittest.main()
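These tests only import unittest and group_files, so they need neither an rclone binary nor a configured remote. A quick local run, assuming the package is importable (for example after pip install -e .):

import unittest

# Discover and run just the new group_files tests from the repo root.
suite = unittest.defaultTestLoader.discover("tests", pattern="test_group_files.py")
unittest.TextTestRunner(verbosity=2).run(suite)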