antsibull-nox 0.0.1__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- antsibull_nox/__init__.py +66 -3
- antsibull_nox/ansible.py +260 -0
- antsibull_nox/collection/__init__.py +56 -0
- antsibull_nox/collection/data.py +106 -0
- antsibull_nox/collection/extract.py +23 -0
- antsibull_nox/collection/install.py +523 -0
- antsibull_nox/collection/search.py +456 -0
- antsibull_nox/config.py +332 -0
- antsibull_nox/data/action-groups.py +199 -0
- antsibull_nox/data/antsibull_nox_data_util.py +91 -0
- antsibull_nox/data/license-check.py +144 -0
- antsibull_nox/data/license-check.py.license +3 -0
- antsibull_nox/data/no-unwanted-files.py +123 -0
- antsibull_nox/data/plugin-yamllint.py +244 -0
- antsibull_nox/data_util.py +38 -0
- antsibull_nox/interpret_config.py +235 -0
- antsibull_nox/paths.py +220 -0
- antsibull_nox/python.py +81 -0
- antsibull_nox/sessions.py +1389 -168
- antsibull_nox/utils.py +85 -0
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/METADATA +14 -4
- antsibull_nox-0.2.0.dist-info/RECORD +25 -0
- antsibull_nox-0.0.1.dist-info/RECORD +0 -7
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/WHEEL +0 -0
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/licenses/LICENSES/GPL-3.0-or-later.txt +0 -0
antsibull_nox/collection/search.py (new file)
@@ -0,0 +1,456 @@
+# Author: Felix Fontein <felix@fontein.de>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
+# https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+# SPDX-FileCopyrightText: 2025, Ansible Project
+
+"""
+Handle Ansible collections.
+"""
+
+from __future__ import annotations
+
+import json
+import os
+import threading
+import typing as t
+from collections.abc import Collection, Iterator, Sequence
+from contextlib import contextmanager
+from dataclasses import dataclass
+from pathlib import Path
+
+from antsibull_fileutils.yaml import load_yaml_file
+
+from .data import CollectionData
+
+# Function that runs a command (and fails on non-zero return code)
+# and returns a tuple (stdout, stderr)
+Runner = t.Callable[[list[str]], tuple[bytes, bytes]]
+
+
+@dataclass(frozen=True)
+class _GlobalCache:
+    root: Path
+    download_cache: Path
+    extracted_cache: Path
+
+    @classmethod
+    def create(cls, *, root: Path) -> _GlobalCache:
+        """
+        Create a global cache object.
+        """
+        return cls(
+            root=root,
+            download_cache=root / "downloaded",
+            extracted_cache=root / "extracted",
+        )
+
+
+def _load_galaxy_yml(galaxy_yml: Path) -> dict[str, t.Any]:
+    try:
+        data = load_yaml_file(galaxy_yml)
+    except Exception as exc:
+        raise ValueError(f"Cannot parse {galaxy_yml}: {exc}") from exc
+    if not isinstance(data, dict):
+        raise ValueError(f"{galaxy_yml} is not a dictionary")
+    return data
+
+
+def _load_manifest_json_collection_info(manifest_json: Path) -> dict[str, t.Any]:
+    try:
+        with open(manifest_json, "br") as f:
+            data = json.load(f)
+    except Exception as exc:
+        raise ValueError(f"Cannot parse {manifest_json}: {exc}") from exc
+    ci = data.get("collection_info")
+    if not isinstance(ci, dict):
+        raise ValueError(f"{manifest_json} does not contain collection_info")
+    return ci
+
+
+def load_collection_data_from_disk(
+    path: Path,
+    *,
+    namespace: str | None = None,
+    name: str | None = None,
+    root: Path | None = None,
+    current: bool = False,
+    accept_manifest: bool = True,
+) -> CollectionData:
+    """
+    Load collection data from disk.
+    """
+    galaxy_yml = path / "galaxy.yml"
+    manifest_json = path / "MANIFEST.json"
+    found: Path
+    if galaxy_yml.is_file():
+        found = galaxy_yml
+        data = _load_galaxy_yml(galaxy_yml)
+    elif not accept_manifest:
+        raise ValueError(f"Cannot find galaxy.yml in {path}")
+    elif manifest_json.is_file():
+        found = manifest_json
+        data = _load_manifest_json_collection_info(manifest_json)
+    else:
+        raise ValueError(f"Cannot find galaxy.yml or MANIFEST.json in {path}")
+
+    ns = data.get("namespace")
+    if not isinstance(ns, str):
+        raise ValueError(f"{found} does not contain a namespace")
+    n = data.get("name")
+    if not isinstance(n, str):
+        raise ValueError(f"{found} does not contain a name")
+    v = data.get("version")
+    if not isinstance(v, str):
+        v = None
+    d = data.get("dependencies") or {}
+    if not isinstance(d, dict):
+        raise ValueError(f"{found}'s dependencies is not a mapping")
+
+    if namespace is not None and ns != namespace:
+        raise ValueError(
+            f"{found} contains namespace {ns!r}, but was hoping for {namespace!r}"
+        )
+    if name is not None and n != name:
+        raise ValueError(f"{found} contains name {n!r}, but was hoping for {name!r}")
+    return CollectionData(
+        collections_root_path=root,
+        path=path,
+        namespace=ns,
+        name=n,
+        full_name=f"{ns}.{n}",
+        version=v,
+        dependencies=d,
+        current=current,
+    )
+
+
+def _list_adjacent_collections_ansible_collections_tree(
+    root: Path,
+    *,
+    directories_to_ignore: Collection[Path] | None = None,
+) -> Iterator[CollectionData]:
+    directories_to_ignore = directories_to_ignore or ()
+    for namespace in root.iterdir():  # pylint: disable=too-many-nested-blocks
+        try:
+            if namespace.is_dir() or namespace.is_symlink():
+                for name in namespace.iterdir():
+                    if name in directories_to_ignore:
+                        continue
+                    try:
+                        if name.is_dir() or name.is_symlink():
+                            yield load_collection_data_from_disk(
+                                name,
+                                namespace=namespace.name,
+                                name=name.name,
+                                root=root,
+                            )
+                    except Exception:  # pylint: disable=broad-exception-caught
+                        # If name doesn't happen to be a (symlink to a) directory,
+                        # is not readable, ...
+                        pass
+        except Exception:  # pylint: disable=broad-exception-caught
+            # If namespace doesn't happen to be a (symlink to a) directory, is not readable, ...
+            pass
+
+
+def _list_adjacent_collections_outside_tree(
+    directory: Path,
+    *,
+    directories_to_ignore: Collection[Path] | None = None,
+) -> Iterator[CollectionData]:
+    directories_to_ignore = directories_to_ignore or ()
+    for collection_dir in directory.iterdir():
+        if collection_dir in directories_to_ignore:
+            continue
+        if not collection_dir.is_dir() and not collection_dir.is_symlink():
+            continue
+        parts = collection_dir.name.split(".")
+        if len(parts) != 2:
+            continue
+        namespace, name = parts
+        if not namespace.isidentifier() or not name.isidentifier():
+            continue
+        try:
+            yield load_collection_data_from_disk(
+                collection_dir,
+                namespace=namespace,
+                name=name,
+            )
+        except Exception:  # pylint: disable=broad-exception-caught
+            # If collection_dir doesn't happen to be a (symlink to a) directory, ...
+            pass
+
+
+def _fs_list_local_collections() -> Iterator[CollectionData]:
+    root: Path | None = None
+
+    # Determine potential root
+    cwd = Path.cwd()
+    parents: Sequence[Path] = cwd.parents
+    if len(parents) > 2 and parents[1].name == "ansible_collections":
+        root = parents[1]
+
+    # Current collection
+    try:
+        current = load_collection_data_from_disk(cwd, root=root, current=True)
+        if root and current.namespace == parents[0].name and current.name == cwd.name:
+            yield current
+        else:
+            root = None
+            current = load_collection_data_from_disk(cwd, current=True)
+            yield current
+    except Exception as exc:
+        raise ValueError(
+            f"Cannot load current collection's info from {cwd}: {exc}"
+        ) from exc
+
+    # Search tree
+    if root:
+        yield from _list_adjacent_collections_ansible_collections_tree(
+            root, directories_to_ignore=(cwd,)
+        )
+    elif len(parents) > 0:
+        yield from _list_adjacent_collections_outside_tree(
+            parents[0], directories_to_ignore=(cwd,)
+        )
+    else:
+        # Only happens if cwd == "/"
+        pass  # pragma: no cover
+
+
+def _fs_list_global_cache(global_cache_dir: Path) -> Iterator[CollectionData]:
+    if not global_cache_dir.is_dir():
+        return
+
+    yield from _list_adjacent_collections_outside_tree(global_cache_dir)
+
+
+def _galaxy_list_collections(runner: Runner) -> Iterator[CollectionData]:
+    try:
+        stdout, _ = runner(["ansible-galaxy", "collection", "list", "--format", "json"])
+        data = json.loads(stdout)
+        for collections_root_path, collections in data.items():
+            root = Path(collections_root_path)
+            for collection in collections:
+                namespace, name = collection.split(".", 1)
+                try:
+                    yield load_collection_data_from_disk(
+                        root / namespace / name,
+                        namespace=namespace,
+                        name=name,
+                        root=root,
+                        current=False,
+                    )
+                except:  # noqa: E722, pylint: disable=bare-except
+                    # Looks like Ansible passed crap on to us...
+                    pass
+    except Exception as exc:
+        raise ValueError(f"Error while loading collection list: {exc}") from exc
+
+
+@dataclass
+class CollectionList:
+    """
+    A list of Ansible collections.
+    """
+
+    collections: list[CollectionData]
+    collection_map: dict[str, CollectionData]
+    current: CollectionData
+
+    @classmethod
+    def create(cls, collections_map: dict[str, CollectionData]):
+        """
+        Given a dictionary mapping collection names to collection data, creates a CollectionList.
+
+        One of the collections must have the ``current`` flag set.
+        """
+        collections = sorted(collections_map.values(), key=lambda cli: cli.full_name)
+        current = next(c for c in collections if c.current)
+        return cls(
+            collections=collections,
+            collection_map=collections_map,
+            current=current,
+        )
+
+    @classmethod
+    def collect(cls, *, runner: Runner, global_cache: _GlobalCache) -> CollectionList:
+        """
+        Search for a list of collections. The result is not cached.
+        """
+        found_collections = {}
+        for collection_data in _fs_list_local_collections():
+            found_collections[collection_data.full_name] = collection_data
+        if os.environ.get("ANTSIBULL_NOX_IGNORE_INSTALLED_COLLECTIONS") != "true":
+            for collection_data in _galaxy_list_collections(runner):
+                # Similar to Ansible, we use the first match
+                if collection_data.full_name not in found_collections:
+                    found_collections[collection_data.full_name] = collection_data
+        for collection_data in _fs_list_global_cache(global_cache.extracted_cache):
+            # Similar to Ansible, we use the first match
+            if collection_data.full_name not in found_collections:
+                found_collections[collection_data.full_name] = collection_data
+        return cls.create(found_collections)
+
+    def find(self, name: str) -> CollectionData | None:
+        """
+        Find a collection for a given name.
+        """
+        return self.collection_map.get(name)
+
+    def clone(self) -> CollectionList:
+        """
+        Create a clone of this list.
+        """
+        return CollectionList(
+            collections=list(self.collections),
+            collection_map=dict(self.collection_map),
+            current=self.current,
+        )
+
+    def _add(self, collection: CollectionData, *, force: bool = True) -> bool:
+        if not force and collection.full_name in self.collection_map:
+            return False
+        self.collections.append(collection)
+        self.collection_map[collection.full_name] = collection
+        return True
+
+
+class _CollectionListUpdater:
+    def __init__(
+        self, *, owner: "_CollectionListSingleton", collection_list: CollectionList
+    ) -> None:
+        self._owner = owner
+        self._collection_list = collection_list
+
+    def find(self, name: str) -> CollectionData | None:
+        """
+        Find a collection for a given name.
+        """
+        return self._collection_list.find(name)
+
+    def add_collection(
+        self, *, directory: Path, namespace: str, name: str
+    ) -> CollectionData:
+        """
+        Add a new collection to the cache.
+        """
+        # pylint: disable-next=protected-access
+        return self._owner._add_collection(
+            directory=directory, namespace=namespace, name=name
+        )
+
+    def get_global_cache(self) -> _GlobalCache:
+        """
+        Get the global cache object.
+        """
+        return self._owner._get_global_cache()  # pylint: disable=protected-access
+
+
+class _CollectionListSingleton:
+    _lock = threading.Lock()
+
+    _global_cache_dir: Path | None = None
+    _collection_list: CollectionList | None = None
+
+    def setup(self, *, global_cache_dir: Path) -> None:
+        """
+        Setup data.
+        """
+        with self._lock:
+            if (
+                self._global_cache_dir is not None
+                and self._global_cache_dir != global_cache_dir
+            ):
+                raise ValueError(
+                    "Setup mismatch: global cache dir cannot be both"
+                    f" {self._global_cache_dir} and {global_cache_dir}"
+                )
+            self._global_cache_dir = global_cache_dir
+
+    def clear(self) -> None:
+        """
+        Clear collection cache.
+        """
+        with self._lock:
+            self._collection_list = None
+
+    def get_cached(self) -> CollectionList | None:
+        """
+        Return cached list of collections, if present.
+        Do not modify the result!
+        """
+        return self._collection_list
+
+    def get(self, *, runner: Runner) -> CollectionList:
+        """
+        Search for a list of collections. The result is cached.
+        """
+        with self._lock:
+            if self._global_cache_dir is None:
+                raise ValueError("Internal error: global cache dir not setup")
+            result = self._collection_list
+            if result is None:
+                result = CollectionList.collect(
+                    runner=runner,
+                    global_cache=_GlobalCache.create(root=self._global_cache_dir),
+                )
+                self._collection_list = result
+            return result.clone()
+
+    def _get_global_cache(self) -> _GlobalCache:
+        """
+        Returns the global cache dir.
+        """
+        if self._global_cache_dir is None:
+            raise ValueError("Internal error: global cache dir not setup")
+        return _GlobalCache.create(root=self._global_cache_dir)
+
+    def _add_collection(
+        self, *, directory: Path, namespace: str, name: str
+    ) -> CollectionData:
+        """
+        Add collection in directory if the collection list has been cached.
+        """
+        if not self._collection_list:
+            raise ValueError("Internal error: collections not listed")
+        data = load_collection_data_from_disk(directory, namespace=namespace, name=name)
+        self._collection_list._add(data)  # pylint: disable=protected-access
+        return data

+    @contextmanager
+    def _update_collection_list(self) -> t.Iterator[_CollectionListUpdater]:
+        with self._lock:
+            if not self._collection_list or self._global_cache_dir is None:
+                raise ValueError(
+                    "Internal error: collections not listed or global cache not setup"
+                )
+            yield _CollectionListUpdater(
+                owner=self, collection_list=self._collection_list
+            )
+
+
+_COLLECTION_LIST = _CollectionListSingleton()
+
+
+@contextmanager
+def _update_collection_list() -> t.Iterator[_CollectionListUpdater]:
+    # pylint: disable-next=protected-access
+    with _COLLECTION_LIST._update_collection_list() as result:
+        yield result
+
+
+def get_collection_list(*, runner: Runner, global_cache_dir: Path) -> CollectionList:
+    """
+    Search for a list of collections. The result is cached.
+    """
+    _COLLECTION_LIST.setup(global_cache_dir=global_cache_dir)
+    return _COLLECTION_LIST.get(runner=runner)
+
+
+__all__ = [
+    "CollectionList",
+    "get_collection_list",
+    "load_collection_data_from_disk",
+]
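
For orientation, a minimal usage sketch (not shipped in the wheel): the Runner type above is just a callable that runs a command, fails on a non-zero return code, and returns (stdout, stderr) as bytes, so a thin subprocess wrapper satisfies it. The public entry point get_collection_list() and the CollectionData/CollectionList attributes are taken from the diff; the cache directory path and the looked-up collection name are assumptions, and the call has to be made from inside a collection checkout, since the current working directory itself must contain a galaxy.yml or MANIFEST.json.

# Usage sketch only; paths and the queried collection name are assumptions.
from __future__ import annotations

import subprocess
from pathlib import Path

from antsibull_nox.collection.search import get_collection_list


def runner(args: list[str]) -> tuple[bytes, bytes]:
    # Matches the Runner contract: run the command, fail on non-zero exit,
    # return (stdout, stderr) as bytes.
    completed = subprocess.run(args, check=True, capture_output=True)
    return completed.stdout, completed.stderr


# Run from within a collection checkout, otherwise the search for the
# "current" collection raises a ValueError.
collections = get_collection_list(
    runner=runner,
    global_cache_dir=Path("~/.cache/antsibull-nox").expanduser(),  # assumed location
)
print("Current collection:", collections.current.full_name)
found = collections.find("community.dns")  # any collection name
if found is not None:
    print("Found", found.full_name, found.version, "at", found.path)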
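
load_collection_data_from_disk() can also be used on its own to read a collection's metadata from galaxy.yml (or, for installed collections, MANIFEST.json). A small sketch, assuming a checkout under ansible_collections/community/dns; passing namespace and name makes the loader fail if the metadata disagrees with the directory layout.

# Sketch only: the checkout path is an assumption.
from pathlib import Path

from antsibull_nox.collection.search import load_collection_data_from_disk

info = load_collection_data_from_disk(
    Path("ansible_collections/community/dns"),
    namespace="community",  # optional cross-check against the metadata
    name="dns",
)
print(info.full_name, info.version or "(no version)", sorted(info.dependencies))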