antsibull-nox 0.0.1__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- antsibull_nox/__init__.py +66 -3
- antsibull_nox/ansible.py +260 -0
- antsibull_nox/collection/__init__.py +56 -0
- antsibull_nox/collection/data.py +106 -0
- antsibull_nox/collection/extract.py +23 -0
- antsibull_nox/collection/install.py +523 -0
- antsibull_nox/collection/search.py +456 -0
- antsibull_nox/config.py +332 -0
- antsibull_nox/data/action-groups.py +199 -0
- antsibull_nox/data/antsibull_nox_data_util.py +91 -0
- antsibull_nox/data/license-check.py +144 -0
- antsibull_nox/data/license-check.py.license +3 -0
- antsibull_nox/data/no-unwanted-files.py +123 -0
- antsibull_nox/data/plugin-yamllint.py +244 -0
- antsibull_nox/data_util.py +38 -0
- antsibull_nox/interpret_config.py +235 -0
- antsibull_nox/paths.py +220 -0
- antsibull_nox/python.py +81 -0
- antsibull_nox/sessions.py +1389 -168
- antsibull_nox/utils.py +85 -0
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/METADATA +14 -4
- antsibull_nox-0.2.0.dist-info/RECORD +25 -0
- antsibull_nox-0.0.1.dist-info/RECORD +0 -7
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/WHEEL +0 -0
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/licenses/LICENSES/GPL-3.0-or-later.txt +0 -0
antsibull_nox/collection/install.py
@@ -0,0 +1,523 @@
# Author: Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
# https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-FileCopyrightText: 2025, Ansible Project

"""
Handle Ansible collections.
"""

from __future__ import annotations

import os
import shutil
import tempfile
import typing as t
from collections.abc import Iterable
from dataclasses import dataclass
from pathlib import Path

from antsibull_fileutils.yaml import load_yaml_file

from ..paths import copy_collection as _paths_copy_collection
from ..paths import remove_path as _remove
from .data import CollectionData, CollectionSource, SetupResult
from .extract import extract_tarball
from .search import (
    CollectionList,
    _update_collection_list,
    get_collection_list,
)

# Function that runs a command (and fails on non-zero return code)
# and returns a tuple (stdout, stderr)
Runner = t.Callable[[list[str]], tuple[bytes, bytes]]

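# [Editorial note: illustrative sketch, not part of the released install.py.]
# A Runner is any callable matching the contract described above: run the
# command, fail on a non-zero return code, and hand back captured output.
# A minimal runner along those lines, assuming plain subprocess semantics
# (simple_runner is a hypothetical name):
#
#     import subprocess
#
#     def simple_runner(command: list[str]) -> tuple[bytes, bytes]:
#         # check=True raises CalledProcessError on non-zero exit codes.
#         completed = subprocess.run(command, check=True, capture_output=True)
#         return completed.stdout, completed.stderr
#
# In antsibull-nox the runner is presumably supplied by the calling session
# code; any callable with this signature works.
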
class _CollectionSources:
    sources: dict[str, CollectionSource]

    def __init__(self):
        self.sources = {}

    def set_source(self, name: str, source: CollectionSource) -> None:
        """
        Set source for collection.
        """
        self.sources[name] = source

    def get_source(self, name: str) -> CollectionSource:
        """
        Get source for collection.
        """
        source = self.sources.get(name)
        if source is None:
            source = CollectionSource(name, name)
        return source


_COLLECTION_SOURCES = _CollectionSources()
_TARBALL_EXTENSION = ".tar.gz"
_INSTALLATION_CONFIG_ENV_VAR = "ANTSIBULL_NOX_INSTALL_COLLECTIONS"


def setup_collection_sources(collection_sources: dict[str, CollectionSource]) -> None:
    """
    Setup collection sources.
    """
    for name, source in collection_sources.items():
        _COLLECTION_SOURCES.set_source(name, source)

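# [Editorial note: illustrative sketch, not part of the released install.py.]
# setup_collection_sources() lets a noxfile redirect where a collection is
# fetched from; get_source() above falls back to the collection name itself.
# A hypothetical override, reusing the CollectionSource(name, source) shape
# seen in _CollectionSources.get_source():
#
#     setup_collection_sources(
#         {
#             "community.dns": CollectionSource(
#                 "community.dns", "community.dns:>=3.0.0"
#             ),
#         }
#     )
#
# Collections without an explicit source are passed to ansible-galaxy as-is.
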
def _download_collections(
    *, destination: Path, sources: list[CollectionSource], runner: Runner
) -> None:
    destination.mkdir(exist_ok=True)
    names = ", ".join(sorted(source.name for source in sources))
    print(f"Downloading {names} to {destination}...")
    for source in sources:
        if source.name != source.source:
            print(f" Installing {source.name} via {source.source}...")
    with tempfile.TemporaryDirectory(prefix="antsibull-nox-galaxy-download") as dest:
        tempdir = Path(dest)
        command = [
            "ansible-galaxy",
            "collection",
            "download",
            "--no-deps",
            "--download-path",
            str(tempdir),
            "--",
            *(source.source for source in sources),
        ]
        runner(command)
        for file in tempdir.iterdir():
            if file.name.endswith(_TARBALL_EXTENSION) and file.is_file():
                destfile = destination / file.name
                _remove(destfile)
                shutil.move(file, destfile)


def _list_downloaded_dir(*, path: Path) -> dict[str, Path]:
    if not path.is_dir():
        return {}
    result: dict[str, Path] = {}
    for file in path.iterdir():
        if not file.name.endswith(_TARBALL_EXTENSION) or not file.is_file():
            continue
        basename = file.name[: -len(_TARBALL_EXTENSION)]
        # Format: community-internal_test_tools-0.15.0, community-aws-10.0.0-dev0
        parts = basename.split("-", 2)
        if len(parts) != 3:
            continue
        full_name = ".".join(parts[:2])
        if full_name in result:
            old_stat = result[full_name].stat()
            new_stat = file.stat()
            if new_stat.st_mtime > old_stat.st_mtime:
                older_file = result[full_name]
                result[full_name] = file
            else:
                older_file = file
            # Clean up older entry
            _remove(older_file)
        else:
            result[full_name] = file
    return result

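# [Editorial note: illustrative sketch, not part of the released install.py.]
# _list_downloaded_dir() keys the download cache by collection name, derived
# from the Galaxy tarball naming scheme namespace-name-version.tar.gz, e.g.:
#
#     community-internal_test_tools-0.15.0.tar.gz -> "community.internal_test_tools"
#     community-aws-10.0.0-dev0.tar.gz            -> "community.aws"
#
# If several tarballs exist for the same collection, only the most recently
# modified one is kept; the older files are removed from the cache directory.
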
def _install_from_download_cache(
    *, full_name: str, tarball: Path, destination: Path
) -> Path:
    destination_dir = destination / full_name
    _remove(destination_dir)
    print(f"Installing {full_name} from {tarball} to {destination_dir}...")
    extract_tarball(tarball=tarball, destination=destination_dir)
    return destination_dir


def _install_missing(
    collections: list[str],
    *,
    runner: Runner,
) -> list[CollectionData]:
    config = os.environ.get(_INSTALLATION_CONFIG_ENV_VAR)
    if config == "never":
        names = ", ".join(sorted(collections))
        plural_s = "" if len(collections) == 1 else "s"
        print(
            f"{_INSTALLATION_CONFIG_ENV_VAR} is set to 'never',"
            f" thus cannot install missing collection{plural_s} {names}..."
        )
        return []
    sources = [_COLLECTION_SOURCES.get_source(name) for name in collections]
    result: list[CollectionData] = []
    with _update_collection_list() as updater:
        global_cache = updater.get_global_cache()
        install: list[str] = []
        download: list[CollectionSource] = []
        download_cache = _list_downloaded_dir(path=global_cache.download_cache)
        for source in sources:
            if cd := updater.find(source.name):
                result.append(cd)
            else:
                install.append(source.name)
                if not download_cache.get(source.name):
                    download.append(source)
        if download:
            _download_collections(
                destination=global_cache.download_cache, sources=download, runner=runner
            )
            download_cache = _list_downloaded_dir(path=global_cache.download_cache)
        if install:
            for name in install:
                if name not in download_cache:
                    raise ValueError(
                        f"Error: cannot find {name} in download cache"
                        f" {global_cache.download_cache} after successful download!"
                    )
                c_dir = _install_from_download_cache(
                    full_name=name,
                    tarball=download_cache[name],
                    destination=global_cache.extracted_cache,
                )
                c_namespace, c_name = name.split(".", 1)
                result.append(
                    updater.add_collection(
                        directory=c_dir, namespace=c_namespace, name=c_name
                    )
                )
    return result

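# [Editorial note: illustrative sketch, not part of the released install.py.]
# _install_missing() honors the ANTSIBULL_NOX_INSTALL_COLLECTIONS environment
# variable: with the value "never" it downloads and installs nothing and
# returns an empty list, so setup_collections() below ends up raising an
# error that lists the collections that are still missing. For example
# (hypothetical invocation and session name):
#
#     ANTSIBULL_NOX_INSTALL_COLLECTIONS=never nox -e lint
#
# would keep this module from fetching anything from Ansible Galaxy.
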
@dataclass(frozen=True, order=True)
class _Source:
    """
    Represents the source of a missing dependency.
    """

    name: str | None = None
    path: Path | None = None
    what: str | None = None

    @classmethod
    def dependency_of(cls, name: str) -> _Source:
        """
        Dependency of collection.
        """
        return cls(name=name)

    @classmethod
    def from_file(cls, path: Path) -> _Source:
        """
        Dependency from collection requirements file.
        """
        return cls(path=path)

    @classmethod
    def from_other(cls, what: str) -> _Source:
        """
        Another source.
        """
        return cls(what=what)

    def nice_str(self) -> str:
        """
        Convert to a nice (human readable) string.
        """
        if self.name:
            return f"dependency of {self.name}"
        if self.path:
            return f"required in {self.path}"
        if self.what:
            return f"required through {self.what}"
        return "(unknown)"


class _MissingDependency:
    """
    Models a missing dependency with a list of sources where it is required from.
    """

    name: str
    sources: set[_Source]

    def __init__(self, name: str, source: _Source) -> None:
        """
        Create missing dependency with source.
        """
        self.name = name
        self.sources = {source}

    def add_source(self, source: _Source) -> None:
        """
        Add source.
        """
        self.sources.add(source)


class _MissingDependencies:
    """
    Models all missing dependencies.
    """

    missing: dict[str, _MissingDependency]

    def __init__(self) -> None:
        self.missing = {}

    def is_empty(self) -> bool:
        """
        Query whether no collections are missing.
        """
        return not self.missing

    def get_missing_names(self) -> list[str]:
        """
        Get a sorted list of missing collections.
        """
        return sorted(self.missing)

    def add(self, name: str, *, source: _Source) -> None:
        """
        Add a missing dependency.
        """
        if name in self.missing:
            self.missing[name].add_source(source)
        else:
            self.missing[name] = _MissingDependency(name, source)

    def remove(self, name: str) -> None:
        """
        Remove a missing dependency (because it was installed).
        """
        self.missing.pop(name)

    def raise_error(self) -> None:
        """
        Raise a human-readable error about missing collections.
        If no collections are missing, simply return.
        """
        if not self.missing:
            return
        collections: list[str] = []
        for collection in sorted(self.missing):
            sources = sorted(self.missing[collection].sources)
            sources_text = ", ".join(source.nice_str() for source in sources)
            collections.append(f"{collection} (required from {sources_text})")
        plural_s = "" if len(collections) == 1 else "s"
        enumeration = "- " + "\n- ".join(collections)
        raise ValueError(
            f"The following collection{plural_s} are missing:\n{enumeration}"
        )


def _add_all_dependencies(
    collections: dict[str, CollectionData],
    missing_dependencies: _MissingDependencies,
    all_collections: CollectionList,
) -> None:
    to_process = list(collections.values())
    while to_process:
        collection = to_process.pop(0)
        for dependency_name in collection.dependencies:
            if dependency_name not in collections:
                dependency_data = all_collections.find(dependency_name)
                if dependency_data is None:
                    missing_dependencies.add(
                        dependency_name,
                        source=_Source.dependency_of(collection.full_name),
                    )
                    continue
                collections[dependency_name] = dependency_data
                to_process.append(dependency_data)


def _install_collection(collection: CollectionData, path: Path) -> None:
    # Compute relative path
    sym_path = collection.path.absolute().relative_to(path.parents[0], walk_up=True)
    # Ensure that path is symlink with this relative path
    if path.is_symlink():
        if path.readlink() == sym_path:
            return
        path.unlink()
    else:
        _remove(path)
    path.symlink_to(sym_path)

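# [Editorial note: illustrative sketch, not part of the released install.py.]
# _install_collection() links a cached collection into the tree via a
# relative symlink and only recreates the link when its target changed.
# With hypothetical paths, a cached collection at
# /cache/extracted/community.dns installed to
# /work/ansible_collections/community/dns gets the link target
#
#     ../../../cache/extracted/community.dns
#
# computed relative to /work/ansible_collections/community (path.parents[0]).
# Path.relative_to(..., walk_up=True) used here exists since Python 3.12.
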
def _install_current_collection(collection: CollectionData, path: Path) -> None:
    if path.exists() and (path.is_symlink() or not path.is_dir()):
        path.unlink()
    path.mkdir(exist_ok=True)
    present = {p.name for p in path.iterdir()}
    for source_entry in collection.path.absolute().iterdir():
        if source_entry.name == ".nox":
            continue
        dest_entry = path / source_entry.name
        # Compute relative path
        sym_path = source_entry.relative_to(path, walk_up=True)
        # Ensure that dest_entry is symlink with this relative path
        if source_entry.name in present:
            present.remove(source_entry.name)
            if dest_entry.is_symlink() and dest_entry.readlink() == sym_path:
                continue
            _remove(dest_entry)
        dest_entry.symlink_to(sym_path)
    for name in present:
        dest_entry = path / name
        _remove(dest_entry)


def _install_collections(
    collections: Iterable[CollectionData], root: Path, *, with_current: bool
) -> None:
    for collection in collections:
        namespace_dir = root / collection.namespace
        namespace_dir.mkdir(exist_ok=True)
        path = namespace_dir / collection.name
        if not collection.current:
            _install_collection(collection, path)
        elif with_current:
            _install_current_collection(collection, path)


def _extract_collections_from_extra_deps_file(path: str | os.PathLike) -> list[str]:
    if not os.path.exists(path):
        return []
    try:
        data = load_yaml_file(path)
        result = []
        if data.get("collections"):
            for index, collection in enumerate(data["collections"]):
                if isinstance(collection, str):
                    result.append(collection)
                    continue
                if not isinstance(collection, dict):
                    raise ValueError(
                        f"Collection entry #{index + 1} must be a string or dictionary"
                    )
                if not isinstance(collection.get("name"), str):
                    raise ValueError(
                        f"Collection entry #{index + 1} does not have a 'name' field of type string"
                    )
                result.append(collection["name"])
        return result
    except Exception as exc:
        raise ValueError(
            f"Error while loading collection dependency file {path}: {exc}"
        ) from exc

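# [Editorial note: illustrative sketch, not part of the released install.py.]
# _extract_collections_from_extra_deps_file() accepts a YAML file whose
# "collections" list may mix plain names and mappings carrying a "name" key.
# A hypothetical requirements file it would parse:
#
#     collections:
#       - community.internal_test_tools
#       - name: community.crypto
#
# Both entries contribute only the collection name; other keys (for example
# version constraints) are not interpreted by this function.
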
def setup_collections(
    destination: str | os.PathLike,
    runner: Runner,
    *,
    extra_collections: list[str] | None = None,
    extra_deps_files: list[str | os.PathLike] | None = None,
    global_cache_dir: Path,
    with_current: bool = True,
) -> SetupResult:
    """
    Setup all collections in a tree structure inside the destination directory.
    """
    all_collections = get_collection_list(
        runner=runner, global_cache_dir=global_cache_dir
    )
    destination_root = Path(destination) / "ansible_collections"
    destination_root.mkdir(exist_ok=True)
    current = all_collections.current
    collections_to_install = {current.full_name: current}
    missing = _MissingDependencies()
    if extra_collections:
        for collection in extra_collections:
            collection_data = all_collections.find(collection)
            if collection_data is None:
                missing.add(collection, source=_Source.from_other("noxfile"))
            else:
                collections_to_install[collection_data.full_name] = collection_data
    if extra_deps_files is not None:
        for extra_deps_file in extra_deps_files:
            path = Path(extra_deps_file)
            for collection in _extract_collections_from_extra_deps_file(path):
                collection_data = all_collections.find(collection)
                if collection_data is None:
                    missing.add(collection, source=_Source.from_file(path))
                else:
                    collections_to_install[collection_data.full_name] = collection_data
    while True:
        _add_all_dependencies(collections_to_install, missing, all_collections)
        if missing.is_empty():
            break
        for collection_data in _install_missing(
            missing.get_missing_names(), runner=runner
        ):
            collections_to_install[collection_data.full_name] = collection_data
            missing.remove(collection_data.full_name)
        missing.raise_error()
    _install_collections(
        collections_to_install.values(), destination_root, with_current=with_current
    )
    return SetupResult(
        root=destination_root,
        current_collection=current,
        current_path=(
            (destination_root / current.namespace / current.name)
            if with_current
            else None
        ),
    )

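# [Editorial note: illustrative sketch, not part of the released install.py.]
# A hypothetical end-to-end call of setup_collections(), assuming a runner
# like the simple_runner sketched near the top of this file and arbitrary
# demo paths:
#
#     result = setup_collections(
#         "/tmp/antsibull-nox-demo",
#         simple_runner,
#         extra_collections=["community.internal_test_tools"],
#         extra_deps_files=["tests/unit/requirements.yml"],
#         global_cache_dir=Path("/tmp/antsibull-nox-cache"),
#         with_current=True,
#     )
#     # result.root is /tmp/antsibull-nox-demo/ansible_collections;
#     # result.current_path is the entry for the current collection,
#     # populated with symlinks by _install_current_collection().
#
# Missing collections are taken from the download/extract caches or fetched
# via "ansible-galaxy collection download" as implemented above.
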
def _copy_collection(collection: CollectionData, path: Path) -> None:
    _paths_copy_collection(collection.path, path)


def _copy_collection_rsync_hard_links(
    collection: CollectionData, path: Path, runner: Runner
) -> None:
    _, __ = runner(
        [
            "rsync",
            "-av",
            "--delete",
            "--exclude",
            ".nox",
            "--link-dest",
            str(collection.path) + "/",
            "--",
            str(collection.path) + "/",
            str(path) + "/",
        ]
    )


def setup_current_tree(
    place: str | os.PathLike, current_collection: CollectionData
) -> SetupResult:
    """
    Setup a tree structure with the current collection in it.
    """

    path = Path(place)
    root = path / "ansible_collections"
    root.mkdir(exist_ok=True)
    namespace = root / current_collection.namespace
    namespace.mkdir(exist_ok=True)
    collection = namespace / current_collection.name
    _copy_collection(current_collection, collection)
    # _copy_collection_rsync_hard_links(current_collection, collection, runner)
    return SetupResult(
        root=root,
        current_collection=current_collection,
        current_path=collection,
    )


__all__ = [
    "get_collection_list",
    "setup_collections",
    "setup_current_tree",
    "setup_collection_sources",
]