antsibull-nox 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- antsibull_nox/__init__.py +48 -1
- antsibull_nox/ansible.py +260 -0
- antsibull_nox/collection/__init__.py +56 -0
- antsibull_nox/collection/data.py +106 -0
- antsibull_nox/collection/extract.py +23 -0
- antsibull_nox/collection/install.py +523 -0
- antsibull_nox/{collection.py → collection/search.py} +164 -253
- antsibull_nox/config.py +332 -0
- antsibull_nox/data/action-groups.py +1 -1
- antsibull_nox/data/antsibull_nox_data_util.py +91 -0
- antsibull_nox/data/license-check.py +1 -1
- antsibull_nox/data/no-unwanted-files.py +5 -1
- antsibull_nox/data/plugin-yamllint.py +244 -0
- antsibull_nox/data_util.py +0 -77
- antsibull_nox/interpret_config.py +235 -0
- antsibull_nox/paths.py +19 -0
- antsibull_nox/python.py +81 -0
- antsibull_nox/sessions.py +898 -26
- antsibull_nox/utils.py +85 -0
- {antsibull_nox-0.1.0.dist-info → antsibull_nox-0.2.0.dist-info}/METADATA +3 -1
- antsibull_nox-0.2.0.dist-info/RECORD +25 -0
- antsibull_nox-0.1.0.dist-info/RECORD +0 -14
- {antsibull_nox-0.1.0.dist-info → antsibull_nox-0.2.0.dist-info}/WHEEL +0 -0
- {antsibull_nox-0.1.0.dist-info → antsibull_nox-0.2.0.dist-info}/licenses/LICENSES/GPL-3.0-or-later.txt +0 -0
antsibull_nox/{collection.py → collection/search.py}
@@ -10,63 +10,39 @@ Handle Ansible collections.
 
 from __future__ import annotations
 
-import functools
 import json
 import os
+import threading
 import typing as t
-from collections.abc import Collection,
+from collections.abc import Collection, Iterator, Sequence
+from contextlib import contextmanager
 from dataclasses import dataclass
 from pathlib import Path
 
-from antsibull_fileutils.yaml import load_yaml_file
+from antsibull_fileutils.yaml import load_yaml_file
 
-from .
-from .paths import remove_path as _remove
+from .data import CollectionData
 
 # Function that runs a command (and fails on non-zero return code)
 # and returns a tuple (stdout, stderr)
 Runner = t.Callable[[list[str]], tuple[bytes, bytes]]
 
 
-@dataclass
-class
-
-
-
-
-    collections_root_path: Path | None
-    path: Path
-    namespace: str
-    name: str
-    full_name: str
-    version: str | None
-    dependencies: dict[str, str]
-    current: bool
+@dataclass(frozen=True)
+class _GlobalCache:
+    root: Path
+    download_cache: Path
+    extracted_cache: Path
 
     @classmethod
-    def create(
-        cls,
-        *,
-        collections_root_path: Path | None = None,
-        path: Path,
-        full_name: str,
-        version: str | None = None,
-        dependencies: dict[str, str] | None = None,
-        current: bool = False,
-    ):
+    def create(cls, *, root: Path) -> _GlobalCache:
         """
-        Create a
+        Create a global cache object.
         """
-
-
-
-
-            namespace=namespace,
-            name=name,
-            full_name=full_name,
-            version=version,
-            dependencies=dependencies or {},
-            current=current,
+        return cls(
+            root=root,
+            download_cache=root / "downloaded",
+            extracted_cache=root / "extracted",
         )
 
 
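The new `_GlobalCache` dataclass above pins down the layout of the shared cache directory: downloaded artifacts live under `downloaded/`, unpacked collections under `extracted/` (the directory that `_fs_list_global_cache` scans in a later hunk). A rough, self-contained sketch of that layout, using a made-up cache root:

```python
# Illustrative sketch only, not antsibull-nox code: mirrors the path layout that
# _GlobalCache.create() produces. The cache root below is a made-up example.
from pathlib import Path

root = Path("~/.cache/antsibull-nox").expanduser()
download_cache = root / "downloaded"   # downloaded collection artifacts
extracted_cache = root / "extracted"   # unpacked collections, listed by _fs_list_global_cache

print(download_cache)   # e.g. /home/user/.cache/antsibull-nox/downloaded
print(extracted_cache)  # e.g. /home/user/.cache/antsibull-nox/extracted
```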
@@ -149,25 +125,6 @@ def load_collection_data_from_disk(
     )
 
 
-def force_collection_version(path: Path, *, version: str) -> bool:
-    """
-    Make sure galaxy.yml contains this version.
-
-    Returns ``True`` if the version was changed, and ``False`` if the version
-    was already set to this value.
-    """
-    galaxy_yml = path / "galaxy.yml"
-    try:
-        data = load_yaml_file(galaxy_yml)
-    except Exception as exc:
-        raise ValueError(f"Cannot parse {galaxy_yml}: {exc}") from exc
-    if data.get("version") == version:
-        return False
-    data["version"] = version
-    store_yaml_file(galaxy_yml, data)
-    return True
-
-
 def _list_adjacent_collections_ansible_collections_tree(
     root: Path,
     *,
@@ -189,10 +146,11 @@ def _list_adjacent_collections_ansible_collections_tree(
                         root=root,
                     )
                 except Exception:  # pylint: disable=broad-exception-caught
-                    # If name doesn't happen to be a (symlink to a) directory,
+                    # If name doesn't happen to be a (symlink to a) directory,
+                    # is not readable, ...
                     pass
         except Exception:  # pylint: disable=broad-exception-caught
-            # If namespace doesn't happen to be a (symlink to a) directory, ...
+            # If namespace doesn't happen to be a (symlink to a) directory, is not readable, ...
             pass
 
 
@@ -256,6 +214,16 @@ def _fs_list_local_collections() -> Iterator[CollectionData]:
         yield from _list_adjacent_collections_outside_tree(
             parents[0], directories_to_ignore=(cwd,)
         )
+    else:
+        # Only happens if cwd == "/"
+        pass  # pragma: no cover
+
+
+def _fs_list_global_cache(global_cache_dir: Path) -> Iterator[CollectionData]:
+    if not global_cache_dir.is_dir():
+        return
+
+    yield from _list_adjacent_collections_outside_tree(global_cache_dir)
 
 
 def _galaxy_list_collections(runner: Runner) -> Iterator[CollectionData]:
@@ -307,14 +275,19 @@ class CollectionList:
         )
 
     @classmethod
-    def collect(cls, runner: Runner) -> CollectionList:
+    def collect(cls, *, runner: Runner, global_cache: _GlobalCache) -> CollectionList:
         """
         Search for a list of collections. The result is not cached.
         """
         found_collections = {}
         for collection_data in _fs_list_local_collections():
             found_collections[collection_data.full_name] = collection_data
-
+        if os.environ.get("ANTSIBULL_NOX_IGNORE_INSTALLED_COLLECTIONS") != "true":
+            for collection_data in _galaxy_list_collections(runner):
+                # Similar to Ansible, we use the first match
+                if collection_data.full_name not in found_collections:
+                    found_collections[collection_data.full_name] = collection_data
+        for collection_data in _fs_list_global_cache(global_cache.extracted_cache):
             # Similar to Ansible, we use the first match
             if collection_data.full_name not in found_collections:
                 found_collections[collection_data.full_name] = collection_data
@@ -326,220 +299,158 @@ class CollectionList:
         """
         return self.collection_map.get(name)
 
+    def clone(self) -> CollectionList:
+        """
+        Create a clone of this list.
+        """
+        return CollectionList(
+            collections=list(self.collections),
+            collection_map=dict(self.collection_map),
+            current=self.current,
+        )
 
-
-
-
-
-
-
-
-
-def _add_all_dependencies(
-    collections: dict[str, CollectionData], all_collections: CollectionList
-) -> None:
-    to_process = list(collections.values())
-    while to_process:
-        collection = to_process.pop(0)
-        for dependency_name in collection.dependencies:
-            if dependency_name not in collections:
-                dependency_data = all_collections.find(dependency_name)
-                if dependency_data is None:
-                    raise ValueError(
-                        f"Cannot find collection {dependency_name},"
-                        f" a dependency of {collection.full_name}!"
-                    )
-                collections[dependency_name] = dependency_data
-                to_process.append(dependency_data)
+    def _add(self, collection: CollectionData, *, force: bool = True) -> bool:
+        if not force and collection.full_name in self.collection_map:
+            return False
+        self.collections.append(collection)
+        self.collection_map[collection.full_name] = collection
+        return True
 
 
-
-
-
-
-
-
-        _remove(path)
-    path.symlink_to(collection.path)
+class _CollectionListUpdater:
+    def __init__(
+        self, *, owner: "_CollectionListSingleton", collection_list: CollectionList
+    ) -> None:
+        self._owner = owner
+        self._collection_list = collection_list
 
+    def find(self, name: str) -> CollectionData | None:
+        """
+        Find a collection for a given name.
+        """
+        return self._collection_list.find(name)
 
-def
-
-
-
-
-
-
-
-
-
-            present.remove(source_entry.name)
-            if dest_entry.is_symlink() and dest_entry.readlink() == source_entry:
-                continue
-            _remove(dest_entry)
-        dest_entry.symlink_to(source_entry)
-    for name in present:
-        dest_entry = path / name
-        _remove(dest_entry)
-
-
-def _install_collections(
-    collections: Iterable[CollectionData], root: Path, *, with_current: bool
-) -> None:
-    for collection in collections:
-        namespace_dir = root / collection.namespace
-        namespace_dir.mkdir(exist_ok=True)
-        path = namespace_dir / collection.name
-        if not collection.current:
-            _install_collection(collection, path)
-        elif with_current:
-            _install_current_collection(collection, path)
-
-
-def _extract_collections_from_extra_deps_file(path: str | os.PathLike) -> list[str]:
-    if not os.path.exists(path):
-        return []
-    try:
-        data = load_yaml_file(path)
-        result = []
-        if data.get("collections"):
-            for index, collection in enumerate(data["collections"]):
-                if isinstance(collection, str):
-                    result.append(collection)
-                    continue
-                if not isinstance(collection, dict):
-                    raise ValueError(
-                        f"Collection entry #{index + 1} must be a string or dictionary"
-                    )
-                if not isinstance(collection.get("name"), str):
-                    raise ValueError(
-                        f"Collection entry #{index + 1} does not have a 'name' field of type string"
-                    )
-                result.append(collection["name"])
-        return result
-    except Exception as exc:
-        raise ValueError(
-            f"Error while loading collection dependency file {path}: {exc}"
-        ) from exc
+    def add_collection(
+        self, *, directory: Path, namespace: str, name: str
+    ) -> CollectionData:
+        """
+        Add a new collection to the cache.
+        """
+        # pylint: disable-next=protected-access
+        return self._owner._add_collection(
+            directory=directory, namespace=namespace, name=name
+        )
 
+    def get_global_cache(self) -> _GlobalCache:
+        """
+        Get the global cache object.
+        """
+        return self._owner._get_global_cache()  # pylint: disable=protected-access
 
-@dataclass
-class SetupResult:
-    """
-    Information on how the collections are set up.
-    """
 
-
-
+class _CollectionListSingleton:
+    _lock = threading.Lock()
 
-
-
+    _global_cache_dir: Path | None = None
+    _collection_list: CollectionList | None = None
 
-
-
+    def setup(self, *, global_cache_dir: Path) -> None:
+        """
+        Setup data.
+        """
+        with self._lock:
+            if (
+                self._global_cache_dir is not None
+                and self._global_cache_dir != global_cache_dir
+            ):
+                raise ValueError(
+                    "Setup mismatch: global cache dir cannot be both"
+                    f" {self._global_cache_dir} and {global_cache_dir}"
+                )
+            self._global_cache_dir = global_cache_dir
 
+    def clear(self) -> None:
+        """
+        Clear collection cache.
+        """
+        with self._lock:
+            self._collection_list = None
 
-    def
-
-
-
-
-
-
-    ) ->
-
-
-
-
-
-
-
-
-
-
-
-
+    def get_cached(self) -> CollectionList | None:
+        """
+        Return cached list of collections, if present.
+        Do not modify the result!
+        """
+        return self._collection_list
+
+    def get(self, *, runner: Runner) -> CollectionList:
+        """
+        Search for a list of collections. The result is cached.
+        """
+        with self._lock:
+            if self._global_cache_dir is None:
+                raise ValueError("Internal error: global cache dir not setup")
+            result = self._collection_list
+            if result is None:
+                result = CollectionList.collect(
+                    runner=runner,
+                    global_cache=_GlobalCache.create(root=self._global_cache_dir),
+                )
+                self._collection_list = result
+            return result.clone()
+
+    def _get_global_cache(self) -> _GlobalCache:
+        """
+        Returns the global cache dir.
+        """
+        if self._global_cache_dir is None:
+            raise ValueError("Internal error: global cache dir not setup")
+        return _GlobalCache.create(root=self._global_cache_dir)
+
+    def _add_collection(
+        self, *, directory: Path, namespace: str, name: str
+    ) -> CollectionData:
+        """
+        Add collection in directory if the collection list has been cached.
+        """
+        if not self._collection_list:
+            raise ValueError("Internal error: collections not listed")
+        data = load_collection_data_from_disk(directory, namespace=namespace, name=name)
+        self._collection_list._add(data)  # pylint: disable=protected-access
+        return data
+
+    @contextmanager
+    def _update_collection_list(self) -> t.Iterator[_CollectionListUpdater]:
+        with self._lock:
+            if not self._collection_list or self._global_cache_dir is None:
                 raise ValueError(
-
+                    "Internal error: collections not listed or global cache not setup"
                 )
-
-
-
-    for collection in _extract_collections_from_extra_deps_file(
-        extra_deps_file
-    ):
-        collection_data = all_collections.find(collection)
-        if collection_data is None:
-            raise ValueError(
-                f"Cannot find collection {collection} required in {extra_deps_file}!"
-            )
-        collections_to_install[collection_data.full_name] = collection_data
-    _add_all_dependencies(collections_to_install, all_collections)
-    _install_collections(
-        collections_to_install.values(), destination_root, with_current=with_current
-    )
-    return SetupResult(
-        root=destination_root,
-        current_collection=current,
-        current_path=(
-            (destination_root / current.namespace / current.name)
-            if with_current
-            else None
-        ),
-    )
+            yield _CollectionListUpdater(
+                owner=self, collection_list=self._collection_list
+            )
 
 
-
-    _paths_copy_collection(collection.path, path)
-
-
-def _copy_collection_rsync_hard_links(
-    collection: CollectionData, path: Path, runner: Runner
-) -> None:
-    _, __ = runner(
-        [
-            "rsync",
-            "-av",
-            "--delete",
-            "--exclude",
-            ".nox",
-            "--link-dest",
-            str(collection.path) + "/",
-            "--",
-            str(collection.path) + "/",
-            str(path) + "/",
-        ]
-    )
+_COLLECTION_LIST = _CollectionListSingleton()
 
 
-
-
-
+@contextmanager
+def _update_collection_list() -> t.Iterator[_CollectionListUpdater]:
+    # pylint: disable-next=protected-access
+    with _COLLECTION_LIST._update_collection_list() as result:
+        yield result
+
+
+def get_collection_list(*, runner: Runner, global_cache_dir: Path) -> CollectionList:
     """
-
+    Search for a list of collections. The result is cached.
     """
-
-
-    root = path / "ansible_collections"
-    root.mkdir(exist_ok=True)
-    namespace = root / current_collection.namespace
-    namespace.mkdir(exist_ok=True)
-    collection = namespace / current_collection.name
-    _copy_collection(current_collection, collection)
-    # _copy_collection_rsync_hard_links(current_collection, collection, runner)
-    return SetupResult(
-        root=root,
-        current_collection=current_collection,
-        current_path=collection,
-    )
+    _COLLECTION_LIST.setup(global_cache_dir=global_cache_dir)
+    return _COLLECTION_LIST.get(runner=runner)
 
 
 __all__ = [
-    "CollectionData",
     "CollectionList",
-    "SetupResult",
     "get_collection_list",
     "load_collection_data_from_disk",
-    "setup_collections",
-    "setup_current_tree",
 ]
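With the module-level singleton in place, `get_collection_list()` becomes the public entry point exported in `__all__`: it wires the global cache directory into the singleton and returns a clone of the cached `CollectionList`. A hypothetical caller might look like the sketch below; the import path follows the rename to `antsibull_nox/collection/search.py`, the cache directory and collection name are made up, `run_command` is the sketch Runner from above, and the `path`/`version` attributes are assumed to match the 0.1.0 `CollectionData` fields removed earlier in this diff.

```python
# Hypothetical usage sketch of the new public API; names noted above are assumptions.
from pathlib import Path

from antsibull_nox.collection.search import get_collection_list

collections = get_collection_list(
    runner=run_command,
    global_cache_dir=Path("~/.cache/antsibull-nox").expanduser(),
)
found = collections.find("community.general")
if found is not None:
    print(found.path, found.version)
```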