antsibull-nox 0.0.1__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as published to the public registry; it is provided for informational purposes only.
- antsibull_nox/__init__.py +19 -3
- antsibull_nox/collection.py +545 -0
- antsibull_nox/data/action-groups.py +199 -0
- antsibull_nox/data/license-check.py +144 -0
- antsibull_nox/data/license-check.py.license +3 -0
- antsibull_nox/data/no-unwanted-files.py +119 -0
- antsibull_nox/data_util.py +115 -0
- antsibull_nox/paths.py +201 -0
- antsibull_nox/sessions.py +507 -158
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.1.0.dist-info}/METADATA +12 -4
- antsibull_nox-0.1.0.dist-info/RECORD +14 -0
- antsibull_nox-0.0.1.dist-info/RECORD +0 -7
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.1.0.dist-info}/WHEEL +0 -0
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.1.0.dist-info}/licenses/LICENSES/GPL-3.0-or-later.txt +0 -0
antsibull_nox/__init__.py
CHANGED
@@ -10,8 +10,24 @@ Antsibull Nox Helper.
 
 from __future__ import annotations
 
-from .sessions import add_lint_sessions
+from .sessions import (
+    ActionGroup,
+    add_build_import_check,
+    add_docs_check,
+    add_extra_checks,
+    add_license_check,
+    add_lint_sessions,
+)
 
-__version__ = "0.0.1"
+__version__ = "0.1.0"
 
-__all__ = ("__version__", "add_lint_sessions")
+# pylint:disable=duplicate-code
+__all__ = (
+    "__version__",
+    "ActionGroup",
+    "add_build_import_check",
+    "add_docs_check",
+    "add_extra_checks",
+    "add_license_check",
+    "add_lint_sessions",
+)
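
For orientation, a noxfile consuming the newly re-exported helpers might look like the following minimal sketch. It assumes add_lint_sessions() can be called with its defaults; the actual keyword arguments are defined in antsibull_nox/sessions.py, whose diff is not expanded here.

# noxfile.py (sketch, not taken from the package)
import antsibull_nox

# Register the lint session(s) with nox using default settings.
antsibull_nox.add_lint_sessions()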
antsibull_nox/collection.py
ADDED
@@ -0,0 +1,545 @@
# Author: Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
# https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-FileCopyrightText: 2025, Ansible Project

"""
Handle Ansible collections.
"""

from __future__ import annotations

import functools
import json
import os
import typing as t
from collections.abc import Collection, Iterable, Iterator, Sequence
from dataclasses import dataclass
from pathlib import Path

from antsibull_fileutils.yaml import load_yaml_file, store_yaml_file

from .paths import copy_collection as _paths_copy_collection
from .paths import remove_path as _remove

# Function that runs a command (and fails on non-zero return code)
# and returns a tuple (stdout, stderr)
Runner = t.Callable[[list[str]], tuple[bytes, bytes]]


@dataclass
class CollectionData:  # pylint: disable=too-many-instance-attributes
    """
    An Ansible collection.
    """

    collections_root_path: Path | None
    path: Path
    namespace: str
    name: str
    full_name: str
    version: str | None
    dependencies: dict[str, str]
    current: bool

    @classmethod
    def create(
        cls,
        *,
        collections_root_path: Path | None = None,
        path: Path,
        full_name: str,
        version: str | None = None,
        dependencies: dict[str, str] | None = None,
        current: bool = False,
    ):
        """
        Create a CollectionData object.
        """
        namespace, name = full_name.split(".", 1)
        return CollectionData(
            collections_root_path=collections_root_path,
            path=path,
            namespace=namespace,
            name=name,
            full_name=full_name,
            version=version,
            dependencies=dependencies or {},
            current=current,
        )


def _load_galaxy_yml(galaxy_yml: Path) -> dict[str, t.Any]:
    try:
        data = load_yaml_file(galaxy_yml)
    except Exception as exc:
        raise ValueError(f"Cannot parse {galaxy_yml}: {exc}") from exc
    if not isinstance(data, dict):
        raise ValueError(f"{galaxy_yml} is not a dictionary")
    return data


def _load_manifest_json_collection_info(manifest_json: Path) -> dict[str, t.Any]:
    try:
        with open(manifest_json, "br") as f:
            data = json.load(f)
    except Exception as exc:
        raise ValueError(f"Cannot parse {manifest_json}: {exc}") from exc
    ci = data.get("collection_info")
    if not isinstance(ci, dict):
        raise ValueError(f"{manifest_json} does not contain collection_info")
    return ci


def load_collection_data_from_disk(
    path: Path,
    *,
    namespace: str | None = None,
    name: str | None = None,
    root: Path | None = None,
    current: bool = False,
    accept_manifest: bool = True,
) -> CollectionData:
    """
    Load collection data from disk.
    """
    galaxy_yml = path / "galaxy.yml"
    manifest_json = path / "MANIFEST.json"
    found: Path
    if galaxy_yml.is_file():
        found = galaxy_yml
        data = _load_galaxy_yml(galaxy_yml)
    elif not accept_manifest:
        raise ValueError(f"Cannot find galaxy.yml in {path}")
    elif manifest_json.is_file():
        found = manifest_json
        data = _load_manifest_json_collection_info(manifest_json)
    else:
        raise ValueError(f"Cannot find galaxy.yml or MANIFEST.json in {path}")

    ns = data.get("namespace")
    if not isinstance(ns, str):
        raise ValueError(f"{found} does not contain a namespace")
    n = data.get("name")
    if not isinstance(n, str):
        raise ValueError(f"{found} does not contain a name")
    v = data.get("version")
    if not isinstance(v, str):
        v = None
    d = data.get("dependencies") or {}
    if not isinstance(d, dict):
        raise ValueError(f"{found}'s dependencies is not a mapping")

    if namespace is not None and ns != namespace:
        raise ValueError(
            f"{found} contains namespace {ns!r}, but was hoping for {namespace!r}"
        )
    if name is not None and n != name:
        raise ValueError(f"{found} contains name {n!r}, but was hoping for {name!r}")
    return CollectionData(
        collections_root_path=root,
        path=path,
        namespace=ns,
        name=n,
        full_name=f"{ns}.{n}",
        version=v,
        dependencies=d,
        current=current,
    )


def force_collection_version(path: Path, *, version: str) -> bool:
    """
    Make sure galaxy.yml contains this version.

    Returns ``True`` if the version was changed, and ``False`` if the version
    was already set to this value.
    """
    galaxy_yml = path / "galaxy.yml"
    try:
        data = load_yaml_file(galaxy_yml)
    except Exception as exc:
        raise ValueError(f"Cannot parse {galaxy_yml}: {exc}") from exc
    if data.get("version") == version:
        return False
    data["version"] = version
    store_yaml_file(galaxy_yml, data)
    return True


def _list_adjacent_collections_ansible_collections_tree(
    root: Path,
    *,
    directories_to_ignore: Collection[Path] | None = None,
) -> Iterator[CollectionData]:
    directories_to_ignore = directories_to_ignore or ()
    for namespace in root.iterdir():  # pylint: disable=too-many-nested-blocks
        try:
            if namespace.is_dir() or namespace.is_symlink():
                for name in namespace.iterdir():
                    if name in directories_to_ignore:
                        continue
                    try:
                        if name.is_dir() or name.is_symlink():
                            yield load_collection_data_from_disk(
                                name,
                                namespace=namespace.name,
                                name=name.name,
                                root=root,
                            )
                    except Exception:  # pylint: disable=broad-exception-caught
                        # If name doesn't happen to be a (symlink to a) directory, ...
                        pass
        except Exception:  # pylint: disable=broad-exception-caught
            # If namespace doesn't happen to be a (symlink to a) directory, ...
            pass


def _list_adjacent_collections_outside_tree(
    directory: Path,
    *,
    directories_to_ignore: Collection[Path] | None = None,
) -> Iterator[CollectionData]:
    directories_to_ignore = directories_to_ignore or ()
    for collection_dir in directory.iterdir():
        if collection_dir in directories_to_ignore:
            continue
        if not collection_dir.is_dir() and not collection_dir.is_symlink():
            continue
        parts = collection_dir.name.split(".")
        if len(parts) != 2:
            continue
        namespace, name = parts
        if not namespace.isidentifier() or not name.isidentifier():
            continue
        try:
            yield load_collection_data_from_disk(
                collection_dir,
                namespace=namespace,
                name=name,
            )
        except Exception:  # pylint: disable=broad-exception-caught
            # If collection_dir doesn't happen to be a (symlink to a) directory, ...
            pass


def _fs_list_local_collections() -> Iterator[CollectionData]:
    root: Path | None = None

    # Determine potential root
    cwd = Path.cwd()
    parents: Sequence[Path] = cwd.parents
    if len(parents) > 2 and parents[1].name == "ansible_collections":
        root = parents[1]

    # Current collection
    try:
        current = load_collection_data_from_disk(cwd, root=root, current=True)
        if root and current.namespace == parents[0].name and current.name == cwd.name:
            yield current
        else:
            root = None
            current = load_collection_data_from_disk(cwd, current=True)
            yield current
    except Exception as exc:
        raise ValueError(
            f"Cannot load current collection's info from {cwd}: {exc}"
        ) from exc

    # Search tree
    if root:
        yield from _list_adjacent_collections_ansible_collections_tree(
            root, directories_to_ignore=(cwd,)
        )
    elif len(parents) > 0:
        yield from _list_adjacent_collections_outside_tree(
            parents[0], directories_to_ignore=(cwd,)
        )


def _galaxy_list_collections(runner: Runner) -> Iterator[CollectionData]:
    try:
        stdout, _ = runner(["ansible-galaxy", "collection", "list", "--format", "json"])
        data = json.loads(stdout)
        for collections_root_path, collections in data.items():
            root = Path(collections_root_path)
            for collection in collections:
                namespace, name = collection.split(".", 1)
                try:
                    yield load_collection_data_from_disk(
                        root / namespace / name,
                        namespace=namespace,
                        name=name,
                        root=root,
                        current=False,
                    )
                except:  # noqa: E722, pylint: disable=bare-except
                    # Looks like Ansible passed crap on to us...
                    pass
    except Exception as exc:
        raise ValueError(f"Error while loading collection list: {exc}") from exc


@dataclass
class CollectionList:
    """
    A list of Ansible collections.
    """

    collections: list[CollectionData]
    collection_map: dict[str, CollectionData]
    current: CollectionData

    @classmethod
    def create(cls, collections_map: dict[str, CollectionData]):
        """
        Given a dictionary mapping collection names to collection data, creates a CollectionList.

        One of the collections must have the ``current`` flag set.
        """
        collections = sorted(collections_map.values(), key=lambda cli: cli.full_name)
        current = next(c for c in collections if c.current)
        return cls(
            collections=collections,
            collection_map=collections_map,
            current=current,
        )

    @classmethod
    def collect(cls, runner: Runner) -> CollectionList:
        """
        Search for a list of collections. The result is not cached.
        """
        found_collections = {}
        for collection_data in _fs_list_local_collections():
            found_collections[collection_data.full_name] = collection_data
        for collection_data in _galaxy_list_collections(runner):
            # Similar to Ansible, we use the first match
            if collection_data.full_name not in found_collections:
                found_collections[collection_data.full_name] = collection_data
        return cls.create(found_collections)

    def find(self, name: str) -> CollectionData | None:
        """
        Find a collection for a given name.
        """
        return self.collection_map.get(name)


@functools.cache
def get_collection_list(runner: Runner) -> CollectionList:
    """
    Search for a list of collections. The result is cached.
    """
    return CollectionList.collect(runner)


def _add_all_dependencies(
    collections: dict[str, CollectionData], all_collections: CollectionList
) -> None:
    to_process = list(collections.values())
    while to_process:
        collection = to_process.pop(0)
        for dependency_name in collection.dependencies:
            if dependency_name not in collections:
                dependency_data = all_collections.find(dependency_name)
                if dependency_data is None:
                    raise ValueError(
                        f"Cannot find collection {dependency_name},"
                        f" a dependency of {collection.full_name}!"
                    )
                collections[dependency_name] = dependency_data
                to_process.append(dependency_data)


def _install_collection(collection: CollectionData, path: Path) -> None:
    if path.is_symlink():
        if path.readlink() == collection.path:
            return
        path.unlink()
    else:
        _remove(path)
    path.symlink_to(collection.path)


def _install_current_collection(collection: CollectionData, path: Path) -> None:
    if path.exists() and (path.is_symlink() or not path.is_dir()):
        path.unlink()
    path.mkdir(exist_ok=True)
    present = {p.name for p in path.iterdir()}
    for source_entry in collection.path.iterdir():
        if source_entry.name == ".nox":
            continue
        dest_entry = path / source_entry.name
        if source_entry.name in present:
            present.remove(source_entry.name)
            if dest_entry.is_symlink() and dest_entry.readlink() == source_entry:
                continue
            _remove(dest_entry)
        dest_entry.symlink_to(source_entry)
    for name in present:
        dest_entry = path / name
        _remove(dest_entry)


def _install_collections(
    collections: Iterable[CollectionData], root: Path, *, with_current: bool
) -> None:
    for collection in collections:
        namespace_dir = root / collection.namespace
        namespace_dir.mkdir(exist_ok=True)
        path = namespace_dir / collection.name
        if not collection.current:
            _install_collection(collection, path)
        elif with_current:
            _install_current_collection(collection, path)


def _extract_collections_from_extra_deps_file(path: str | os.PathLike) -> list[str]:
    if not os.path.exists(path):
        return []
    try:
        data = load_yaml_file(path)
        result = []
        if data.get("collections"):
            for index, collection in enumerate(data["collections"]):
                if isinstance(collection, str):
                    result.append(collection)
                    continue
                if not isinstance(collection, dict):
                    raise ValueError(
                        f"Collection entry #{index + 1} must be a string or dictionary"
                    )
                if not isinstance(collection.get("name"), str):
                    raise ValueError(
                        f"Collection entry #{index + 1} does not have a 'name' field of type string"
                    )
                result.append(collection["name"])
        return result
    except Exception as exc:
        raise ValueError(
            f"Error while loading collection dependency file {path}: {exc}"
        ) from exc


@dataclass
class SetupResult:
    """
    Information on how the collections are set up.
    """

    # The path of the ansible_collections directory.
    root: Path

    # Data on the current collection (as in the repository).
    current_collection: CollectionData

    # If it was installed, the path of the current collection inside the collection tree below root.
    current_path: Path | None


def setup_collections(
    destination: str | os.PathLike,
    runner: Runner,
    *,
    extra_collections: list[str] | None = None,
    extra_deps_files: list[str | os.PathLike] | None = None,
    with_current: bool = True,
) -> SetupResult:
    """
    Setup all collections in a tree structure inside the destination directory.
    """
    all_collections = get_collection_list(runner)
    destination_root = Path(destination) / "ansible_collections"
    destination_root.mkdir(exist_ok=True)
    current = all_collections.current
    collections_to_install = {current.full_name: current}
    if extra_collections:
        for collection in extra_collections:
            collection_data = all_collections.find(collection)
            if collection_data is None:
                raise ValueError(
                    f"Cannot find collection {collection} required by the noxfile!"
                )
            collections_to_install[collection_data.full_name] = collection_data
    if extra_deps_files is not None:
        for extra_deps_file in extra_deps_files:
            for collection in _extract_collections_from_extra_deps_file(
                extra_deps_file
            ):
                collection_data = all_collections.find(collection)
                if collection_data is None:
                    raise ValueError(
                        f"Cannot find collection {collection} required in {extra_deps_file}!"
                    )
                collections_to_install[collection_data.full_name] = collection_data
    _add_all_dependencies(collections_to_install, all_collections)
    _install_collections(
        collections_to_install.values(), destination_root, with_current=with_current
    )
    return SetupResult(
        root=destination_root,
        current_collection=current,
        current_path=(
            (destination_root / current.namespace / current.name)
            if with_current
            else None
        ),
    )


def _copy_collection(collection: CollectionData, path: Path) -> None:
    _paths_copy_collection(collection.path, path)


def _copy_collection_rsync_hard_links(
    collection: CollectionData, path: Path, runner: Runner
) -> None:
    _, __ = runner(
        [
            "rsync",
            "-av",
            "--delete",
            "--exclude",
            ".nox",
            "--link-dest",
            str(collection.path) + "/",
            "--",
            str(collection.path) + "/",
            str(path) + "/",
        ]
    )


def setup_current_tree(
    place: str | os.PathLike, current_collection: CollectionData
) -> SetupResult:
    """
    Setup a tree structure with the current collection in it.
    """

    path = Path(place)
    root = path / "ansible_collections"
    root.mkdir(exist_ok=True)
    namespace = root / current_collection.namespace
    namespace.mkdir(exist_ok=True)
    collection = namespace / current_collection.name
    _copy_collection(current_collection, collection)
    # _copy_collection_rsync_hard_links(current_collection, collection, runner)
    return SetupResult(
        root=root,
        current_collection=current_collection,
        current_path=collection,
    )


__all__ = [
    "CollectionData",
    "CollectionList",
    "SetupResult",
    "get_collection_list",
    "load_collection_data_from_disk",
    "setup_collections",
    "setup_current_tree",
]
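
Taken together, a consumer supplies a Runner (any callable that takes an argument list, fails on a non-zero return code, and returns (stdout, stderr) as bytes) and points setup_collections() at a scratch directory. A minimal sketch follows, using subprocess for the Runner; the destination path and the extra collection name are illustrative, not taken from the package.

import subprocess

from antsibull_nox.collection import setup_collections


def runner(args: list[str]) -> tuple[bytes, bytes]:
    # Run the command, raise on non-zero exit, and hand back (stdout, stderr).
    completed = subprocess.run(args, check=True, capture_output=True)
    return completed.stdout, completed.stderr


# Must be run from within a collection checkout so the "current" collection
# can be detected; ansible-galaxy must be available on the PATH.
result = setup_collections(
    ".nox/collections",  # illustrative destination directory
    runner,
    extra_collections=["community.general"],  # illustrative extra collection
)
print(result.root, result.current_path)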