ansible-core: 2.20.0b2-py3-none-any.whl → 2.20.0rc1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ansible/executor/process/worker.py +17 -11
- ansible/executor/task_queue_manager.py +43 -1
- ansible/galaxy/collection/__init__.py +7 -4
- ansible/galaxy/dependency_resolution/__init__.py +10 -9
- ansible/galaxy/dependency_resolution/dataclasses.py +86 -60
- ansible/galaxy/dependency_resolution/providers.py +32 -18
- ansible/galaxy/dependency_resolution/versioning.py +2 -4
- ansible/module_utils/ansible_release.py +1 -1
- ansible/release.py +1 -1
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/METADATA +2 -2
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/RECORD +38 -38
- ansible_test/_data/completion/docker.txt +7 -7
- ansible_test/_data/requirements/ansible-test.txt +1 -1
- ansible_test/_data/requirements/ansible.txt +1 -1
- ansible_test/_data/requirements/sanity.ansible-doc.txt +2 -2
- ansible_test/_data/requirements/sanity.changelog.txt +1 -1
- ansible_test/_data/requirements/sanity.import.plugin.txt +2 -2
- ansible_test/_data/requirements/sanity.import.txt +1 -1
- ansible_test/_data/requirements/sanity.integration-aliases.txt +1 -1
- ansible_test/_data/requirements/sanity.pylint.txt +5 -5
- ansible_test/_data/requirements/sanity.runtime-metadata.txt +1 -1
- ansible_test/_data/requirements/sanity.validate-modules.txt +2 -2
- ansible_test/_data/requirements/sanity.yamllint.txt +1 -1
- ansible_test/_internal/commands/sanity/pylint.py +11 -0
- ansible_test/_internal/coverage_util.py +1 -1
- ansible_test/_internal/python_requirements.py +1 -1
- ansible_test/_util/controller/sanity/pylint/plugins/deprecated_calls.py +48 -45
- ansible_test/_util/controller/sanity/pylint/plugins/string_format.py +9 -7
- ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py +11 -10
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/WHEEL +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/entry_points.txt +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/licenses/COPYING +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/licenses/licenses/Apache-License.txt +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/licenses/licenses/BSD-3-Clause.txt +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/licenses/licenses/MIT-license.txt +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/licenses/licenses/PSF-license.txt +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/licenses/licenses/simplified_bsd.txt +0 -0
- {ansible_core-2.20.0b2.dist-info → ansible_core-2.20.0rc1.dist-info}/top_level.txt +0 -0
ansible/executor/process/worker.py

@@ -17,6 +17,7 @@

 from __future__ import annotations

+import errno
 import io
 import os
 import signal
@@ -103,11 +104,19 @@ class WorkerProcess(multiprocessing_context.Process):  # type: ignore[name-defined]
         self._cliargs = cliargs

     def _term(self, signum, frame) -> None:
-        """
-        terminate the process group created by calling setsid when
-        a terminate signal is received by the fork
-        """
-        os.killpg(self.pid, signum)
+        """In child termination when notified by the parent"""
+        signal.signal(signum, signal.SIG_DFL)
+
+        try:
+            os.killpg(self.pid, signum)
+            os.kill(self.pid, signum)
+        except OSError as e:
+            if e.errno != errno.ESRCH:
+                signame = signal.strsignal(signum)
+                display.error(f'Unable to send {signame} to child[{self.pid}]: {e}')
+
+        # fallthrough, if we are still here, just die
+        os._exit(1)

     def start(self) -> None:
         """
@@ -121,11 +130,6 @@ class WorkerProcess(multiprocessing_context.Process):  # type: ignore[name-defined]
         # FUTURE: this lock can be removed once a more generalized pre-fork thread pause is in place
         with display._lock:
             super(WorkerProcess, self).start()
-        # Since setsid is called later, if the worker is termed
-        # it won't term the new process group
-        # register a handler to propagate the signal
-        signal.signal(signal.SIGTERM, self._term)
-        signal.signal(signal.SIGINT, self._term)

     def _hard_exit(self, e: str) -> t.NoReturn:
         """
@@ -170,7 +174,6 @@ class WorkerProcess(multiprocessing_context.Process):  # type: ignore[name-defined]
             # to give better errors, and to prevent fd 0 reuse
             sys.stdin.close()
         except Exception as e:
-            display.debug(f'Could not detach from stdio: {traceback.format_exc()}')
             display.error(f'Could not detach from stdio: {e}')
             os._exit(1)

@@ -187,6 +190,9 @@ class WorkerProcess(multiprocessing_context.Process):  # type: ignore[name-defined]
         # Set the queue on Display so calls to Display.display are proxied over the queue
         display.set_queue(self._final_q)
         self._detach()
+        # propagate signals
+        signal.signal(signal.SIGINT, self._term)
+        signal.signal(signal.SIGTERM, self._term)
         try:
             with _task.TaskContext(self._task):
                 return self._run()
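The worker-side change above registers `_term` for SIGINT/SIGTERM inside the child, after `_detach()` has called `setsid`, so each worker forwards a received signal to its own process group before exiting. A rough standalone sketch of that child-side pattern follows; the names here are illustrative, not part of ansible-core:

import errno
import os
import signal


def _forward_and_die(signum, frame):
    # Restore the default disposition first so the forwarded signal
    # is not caught by this handler again.
    signal.signal(signum, signal.SIG_DFL)
    try:
        # Signal our whole (setsid-created) process group, grandchildren included.
        os.killpg(os.getpid(), signum)
    except OSError as e:
        if e.errno != errno.ESRCH:  # "no such process" just means it already exited
            print(f'could not forward {signal.strsignal(signum)}: {e}')
    os._exit(1)  # if we are somehow still alive, die without running cleanup hooks


if __name__ == '__main__':
    try:
        os.setsid()  # become a session/group leader, detaching from the parent's group
    except OSError:
        pass  # already a group leader (e.g. started directly from a shell)
    signal.signal(signal.SIGTERM, _forward_and_die)
    signal.signal(signal.SIGINT, _forward_and_die)
    signal.pause()  # block until a signal arrives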
ansible/executor/task_queue_manager.py

@@ -18,8 +18,10 @@
 from __future__ import annotations

 import dataclasses
+import errno
 import os
 import sys
+import signal
 import tempfile
 import threading
 import time
@@ -185,8 +187,48 @@ class TaskQueueManager:
         # plugins for inter-process locking.
         self._connection_lockfile = tempfile.TemporaryFile()

+        self._workers: list[WorkerProcess | None] = []
+
+        # signal handlers to propagate signals to workers
+        signal.signal(signal.SIGTERM, self._signal_handler)
+        signal.signal(signal.SIGINT, self._signal_handler)
+
     def _initialize_processes(self, num: int) -> None:
-        self._workers = [None] * num
+        # mutable update to ensure the reference stays the same
+        self._workers[:] = [None] * num
+
+    def _signal_handler(self, signum, frame) -> None:
+        """
+        terminate all running process groups created as a result of calling
+        setsid from within a WorkerProcess.
+
+        Since the children become process leaders, signals will not
+        automatically propagate to them.
+        """
+        signal.signal(signum, signal.SIG_DFL)
+
+        for worker in self._workers:
+            if worker is None or not worker.is_alive():
+                continue
+            if worker.pid:
+                try:
+                    # notify workers
+                    os.kill(worker.pid, signum)
+                except OSError as e:
+                    if e.errno != errno.ESRCH:
+                        signame = signal.strsignal(signum)
+                        display.error(f'Unable to send {signame} to child[{worker.pid}]: {e}')
+
+        if signum == signal.SIGINT:
+            # Defer to CLI handling
+            raise KeyboardInterrupt()
+
+        pid = os.getpid()
+        try:
+            os.kill(pid, signum)
+        except OSError as e:
+            signame = signal.strsignal(signum)
+            display.error(f'Unable to send {signame} to {pid}: {e}')

     def load_callbacks(self):
         """
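Because each WorkerProcess now calls `setsid` and becomes a process-group leader, a SIGINT or SIGTERM delivered to the controller no longer reaches workers implicitly; the new `_signal_handler` re-sends the signal to each live worker pid and then lets default handling run. A minimal sketch of the same parent-side fan-out, under the assumption of a plain multiprocessing setup (all names illustrative):

from __future__ import annotations

import errno
import os
import signal
from multiprocessing import Process

workers: list[Process | None] = []


def _propagate(signum, frame):
    signal.signal(signum, signal.SIG_DFL)  # avoid recursing when we re-raise below
    for worker in workers:
        if worker is None or not worker.is_alive() or not worker.pid:
            continue
        try:
            os.kill(worker.pid, signum)  # setsid'd children won't see the signal otherwise
        except OSError as e:
            if e.errno != errno.ESRCH:  # the worker may have just exited on its own
                raise
    if signum == signal.SIGINT:
        raise KeyboardInterrupt()  # defer to the caller's Ctrl-C handling
    os.kill(os.getpid(), signum)  # replay the now-default fatal signal at ourselves


signal.signal(signal.SIGTERM, _propagate)
signal.signal(signal.SIGINT, _propagate)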
ansible/galaxy/collection/__init__.py

@@ -1839,10 +1839,13 @@ def _resolve_depenency_map(
         offline=offline,
     )
     try:
-        return collection_dep_resolver.resolve(
-            requested_requirements,
-            max_rounds=2000000,  # NOTE: same constant pip uses
-        ).mapping
+        return t.cast(
+            dict[str, Candidate],
+            collection_dep_resolver.resolve(
+                requested_requirements,
+                max_rounds=2000000,  # NOTE: same constant pip uses
+            ).mapping,
+        )
     except CollectionDependencyResolutionImpossible as dep_exc:
         conflict_causes = (
             '* {req.fqcn!s}:{req.ver!s} ({dep_origin!s})'.format(
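The only change here is wrapping the resolver result in `typing.cast`, which asserts the `dict[str, Candidate]` shape for type checkers without changing runtime behaviour: `cast` simply returns its second argument. A tiny illustration of the idiom, with a made-up stand-in result:

import typing as t


def resolve_versions() -> dict[str, str]:
    result: t.Any = {'ns.coll': '1.2.3'}  # stand-in for a loosely typed library result
    return t.cast(dict[str, str], result)  # no runtime conversion, only a type assertion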
ansible/galaxy/dependency_resolution/__init__.py

@@ -5,6 +5,7 @@

 from __future__ import annotations

+import collections.abc as _c
 import typing as t

 if t.TYPE_CHECKING:
@@ -21,15 +22,15 @@ from ansible.galaxy.dependency_resolution.resolvers import CollectionDependencyResolver


 def build_collection_dependency_resolver(
-    galaxy_apis,
-    concrete_artifacts_manager,
-    preferred_candidates=None,
-    with_deps=True,
-    with_pre_releases=False,
-    upgrade=False,
-    include_signatures=True,
-    offline=False,
-):
+    galaxy_apis: _c.Iterable[GalaxyAPI],
+    concrete_artifacts_manager: ConcreteArtifactsManager,
+    preferred_candidates: _c.Iterable[Candidate] | None = None,
+    with_deps: bool = True,
+    with_pre_releases: bool = False,
+    upgrade: bool = False,
+    include_signatures: bool = True,
+    offline: bool = False,
+) -> CollectionDependencyResolver:
     """Return a collection dependency resolver.

     The returned instance will have a ``resolve()`` method for
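This hunk converts the builder from bare parameters to inline annotations, taking abstract container types from `collections.abc` (aliased `_c` to keep it out of the module's public namespace). Since Python 3.9 those ABCs are subscriptable directly in annotations; a small sketch of the same convention, with an illustrative function that is not part of ansible-core:

from __future__ import annotations

import collections.abc as _c


def newest(versions: _c.Iterable[str], allow_prerelease: bool = False) -> str | None:
    # Illustrative only: treat anything with a '-' as a pre-release and
    # pick the lexicographically largest remaining "version".
    candidates = [v for v in versions if allow_prerelease or '-' not in v]
    return max(candidates, default=None)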
ansible/galaxy/dependency_resolution/dataclasses.py

@@ -6,12 +6,12 @@

 from __future__ import annotations

+import collections.abc as _c
 import os
 import pathlib
 import typing as t

 from collections import namedtuple
-from collections.abc import MutableSequence, MutableMapping
 from glob import iglob
 from urllib.parse import urlparse
 from yaml import safe_load
@@ -43,7 +43,12 @@ _SOURCE_METADATA_FILE = b'GALAXY.yml'
 display = Display()


-def get_validated_source_info(b_source_info_path, namespace, name, version):
+def get_validated_source_info(
+    b_source_info_path: bytes,
+    namespace: str,
+    name: str,
+    version: str,
+) -> dict[str, object] | None:
     source_info_path = to_text(b_source_info_path, errors='surrogate_or_strict')

     if not os.path.isfile(b_source_info_path):
@@ -58,7 +63,7 @@ def get_validated_source_info(b_source_info_path, namespace, name, version):
         )
         return None

-    if not isinstance(metadata, MutableMapping):
+    if not isinstance(metadata, dict):
         display.warning(f"Error getting collection source information at '{source_info_path}': expected a YAML dictionary")
         return None

@@ -72,7 +77,12 @@ def get_validated_source_info(b_source_info_path, namespace, name, version):
     return metadata


-def _validate_v1_source_info_schema(namespace, name, version, provided_arguments):
+def _validate_v1_source_info_schema(
+    namespace: str,
+    name: str,
+    version: str,
+    provided_arguments: dict[str, object],
+) -> list[str]:
     argument_spec_data = dict(
         format_version=dict(choices=["1.0.0"]),
         download_url=dict(),
@@ -102,24 +112,24 @@ def _validate_v1_source_info_schema(namespace, name, version, provided_arguments):
     return validation_result.error_messages


-def _is_collection_src_dir(dir_path):
+def _is_collection_src_dir(dir_path: bytes | str) -> bool:
     b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
     return os.path.isfile(os.path.join(b_dir_path, _GALAXY_YAML))


-def _is_installed_collection_dir(dir_path):
+def _is_installed_collection_dir(dir_path: bytes | str) -> bool:
     b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
     return os.path.isfile(os.path.join(b_dir_path, _MANIFEST_JSON))


-def _is_collection_dir(dir_path):
+def _is_collection_dir(dir_path: bytes | str) -> bool:
     return (
         _is_installed_collection_dir(dir_path) or
         _is_collection_src_dir(dir_path)
     )


-def _find_collections_in_subdirs(dir_path):
+def _find_collections_in_subdirs(dir_path: str) -> _c.Iterator[bytes]:
     b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')

     subdir_glob_pattern = os.path.join(
@@ -135,23 +145,23 @@ def _find_collections_in_subdirs(dir_path):
         yield subdir


-def _is_collection_namespace_dir(tested_str):
+def _is_collection_namespace_dir(tested_str: str) -> bool:
     return any(_find_collections_in_subdirs(tested_str))


-def _is_file_path(tested_str):
+def _is_file_path(tested_str: str) -> bool:
     return os.path.isfile(to_bytes(tested_str, errors='surrogate_or_strict'))


-def _is_http_url(tested_str):
+def _is_http_url(tested_str: str) -> bool:
     return urlparse(tested_str).scheme.lower() in {'http', 'https'}


-def _is_git_url(tested_str):
+def _is_git_url(tested_str: str) -> bool:
     return tested_str.startswith(('git+', 'git@'))


-def _is_concrete_artifact_pointer(tested_str):
+def _is_concrete_artifact_pointer(tested_str: str) -> bool:
     return any(
         predicate(tested_str)
         for predicate in (
@@ -168,7 +178,7 @@ def _is_concrete_artifact_pointer(tested_str):
 class _ComputedReqKindsMixin:
     UNIQUE_ATTRS = ('fqcn', 'ver', 'src', 'type')

-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         if not self.may_have_offline_galaxy_info:
             self._source_info = None
         else:
@@ -181,18 +191,18 @@ class _ComputedReqKindsMixin:
             self.ver
         )

-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(tuple(getattr(self, attr) for attr in _ComputedReqKindsMixin.UNIQUE_ATTRS))

-    def __eq__(self, candidate):
+    def __eq__(self, candidate: _c.Hashable) -> bool:
         return hash(self) == hash(candidate)

     @classmethod
-    def from_dir_path_as_unknown(
-        cls,
-        dir_path,
-        art_mgr,
-    ):
+    def from_dir_path_as_unknown(
+        cls,
+        dir_path: bytes,
+        art_mgr: ConcreteArtifactsManager,
+    ) -> t.Self:
         """Make collection from an unspecified dir type.

         This alternative constructor attempts to grab metadata from the
@@ -215,11 +225,11 @@ class _ComputedReqKindsMixin:
         return cls.from_dir_path_implicit(dir_path)

     @classmethod
-    def from_dir_path(
-        cls,
-        dir_path,
-        art_mgr,
-    ):
+    def from_dir_path(
+        cls,
+        dir_path: bytes,
+        art_mgr: ConcreteArtifactsManager,
+    ) -> t.Self:
         """Make collection from an directory with metadata."""
         if dir_path.endswith(to_bytes(os.path.sep)):
             dir_path = dir_path.rstrip(to_bytes(os.path.sep))
@@ -262,10 +272,10 @@ class _ComputedReqKindsMixin:
         return cls(req_name, req_version, dir_path, 'dir', None)

     @classmethod
-    def from_dir_path_implicit(
-        cls,
-        dir_path,
-    ):
+    def from_dir_path_implicit(
+        cls,
+        dir_path: bytes,
+    ) -> t.Self:
         """Construct a collection instance based on an arbitrary dir.

         This alternative constructor infers the FQCN based on the parent
@@ -278,11 +288,16 @@ class _ComputedReqKindsMixin:
         u_dir_path = to_text(dir_path, errors='surrogate_or_strict')
         path_list = u_dir_path.split(os.path.sep)
         req_name = '.'.join(path_list[-2:])
-        return cls(req_name, '*', dir_path, 'dir', None)
+        return cls(req_name, '*', dir_path, 'dir', None)

     @classmethod
-    def from_string(cls, collection_input, artifacts_manager, supplemental_signatures):
-        req = {}
+    def from_string(
+        cls,
+        collection_input: str,
+        artifacts_manager: ConcreteArtifactsManager,
+        supplemental_signatures: list[str] | None,
+    ) -> t.Self:
+        req: dict[str, str | list[str] | None] = {}
         if _is_concrete_artifact_pointer(collection_input) or AnsibleCollectionRef.is_valid_collection_name(collection_input):
             # Arg is a file path or URL to a collection, or just a collection
             req['name'] = collection_input
@@ -307,7 +322,14 @@ class _ComputedReqKindsMixin:
         return cls.from_requirement_dict(req, artifacts_manager)

     @classmethod
-    def from_requirement_dict(cls, collection_req, art_mgr, validate_signature_options=True):
+    def from_requirement_dict(
+        cls,
+        # NOTE: The actual `collection_req` shape is supposed to be
+        # NOTE: `dict[str, str | list[str] | None]`
+        collection_req: dict[str, t.Any],
+        art_mgr: ConcreteArtifactsManager,
+        validate_signature_options: bool = True,
+    ) -> t.Self:
         req_name = collection_req.get('name', None)
         req_version = collection_req.get('version', '*')
         req_type = collection_req.get('type')
@@ -320,7 +342,7 @@ class _ComputedReqKindsMixin:
                     f"Signatures were provided to verify {req_name} but no keyring was configured."
                 )

-            if not isinstance(req_signature_sources, MutableSequence):
+            if not isinstance(req_signature_sources, _c.MutableSequence):
                 req_signature_sources = [req_signature_sources]
             req_signature_sources = frozenset(req_signature_sources)

@@ -434,7 +456,11 @@ class _ComputedReqKindsMixin:
                 format(not_url=req_source.api_server),
             )

-        if req_type == 'dir' and req_source.endswith(os.path.sep):
+        if (
+            req_type == 'dir'
+            and isinstance(req_source, str)
+            and req_source.endswith(os.path.sep)
+        ):
             req_source = req_source.rstrip(os.path.sep)

         tmp_inst_req = cls(req_name, req_version, req_source, req_type, req_signature_sources)
@@ -451,16 +477,16 @@ class _ComputedReqKindsMixin:
             req_signature_sources,
         )

-    def __repr__(self):
+    def __repr__(self) -> str:
         return (
             '<{self!s} of type {coll_type!r} from {src!s}>'.
             format(self=self, coll_type=self.type, src=self.src or 'Galaxy')
         )

-    def __str__(self):
+    def __str__(self) -> str:
         return to_native(self.__unicode__())

-    def __unicode__(self):
+    def __unicode__(self) -> str:
         if self.fqcn is None:
             return (
                 f'{self.type} collection from a Git repo' if self.is_scm
@@ -473,7 +499,7 @@ class _ComputedReqKindsMixin:
         )

     @property
-    def may_have_offline_galaxy_info(self):
+    def may_have_offline_galaxy_info(self) -> bool:
         if self.fqcn is None:
             # Virtual collection
             return False
@@ -482,7 +508,7 @@ class _ComputedReqKindsMixin:
             return False
         return True

-    def construct_galaxy_info_path(self, b_collection_path):
+    def construct_galaxy_info_path(self, b_collection_path: bytes) -> bytes:
         if not self.may_have_offline_galaxy_info and not self.type == 'galaxy':
             raise TypeError('Only installed collections from a Galaxy server have offline Galaxy info')

@@ -502,21 +528,21 @@ class _ComputedReqKindsMixin:
         return self.fqcn.split('.')

     @property
-    def namespace(self):
+    def namespace(self) -> str:
         if self.is_virtual:
             raise TypeError(f'{self.type} collections do not have a namespace')

         return self._get_separate_ns_n_name()[0]

     @property
-    def name(self):
+    def name(self) -> str:
         if self.is_virtual:
             raise TypeError(f'{self.type} collections do not have a name')

         return self._get_separate_ns_n_name()[-1]

     @property
-    def canonical_package_id(self):
+    def canonical_package_id(self) -> str:
         if not self.is_virtual:
             return to_native(self.fqcn)

@@ -526,46 +552,46 @@ class _ComputedReqKindsMixin:
     )

     @property
-    def is_virtual(self):
+    def is_virtual(self) -> bool:
         return self.is_scm or self.is_subdirs

     @property
-    def is_file(self):
+    def is_file(self) -> bool:
         return self.type == 'file'

     @property
-    def is_dir(self):
+    def is_dir(self) -> bool:
         return self.type == 'dir'

     @property
-    def namespace_collection_paths(self):
+    def namespace_collection_paths(self) -> list[str]:
         return [
             to_native(path)
             for path in _find_collections_in_subdirs(self.src)
         ]

     @property
-    def is_subdirs(self):
+    def is_subdirs(self) -> bool:
         return self.type == 'subdirs'

     @property
-    def is_url(self):
+    def is_url(self) -> bool:
         return self.type == 'url'

     @property
-    def is_scm(self):
+    def is_scm(self) -> bool:
         return self.type == 'git'

     @property
-    def is_concrete_artifact(self):
+    def is_concrete_artifact(self) -> bool:
         return self.type in {'git', 'url', 'file', 'dir', 'subdirs'}

     @property
-    def is_online_index_pointer(self):
+    def is_online_index_pointer(self) -> bool:
         return not self.is_concrete_artifact

     @property
-    def is_pinned(self):
+    def is_pinned(self) -> bool:
         """Indicate if the version set is considered pinned.

         This essentially computes whether the version field of the current
@@ -585,7 +611,7 @@ class _ComputedReqKindsMixin:
         )

     @property
-    def source_info(self):
+    def source_info(self) -> dict[str, object] | None:
         return self._source_info


@@ -601,11 +627,11 @@ class Requirement(
 ):
     """An abstract requirement request."""

-    def __new__(cls, *args, **kwargs):
+    def __new__(cls, *args: object, **kwargs: object) -> t.Self:
         self = RequirementNamedTuple.__new__(cls, *args, **kwargs)
         return self

-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: object, **kwargs: object) -> None:
         super(Requirement, self).__init__()


@@ -615,14 +641,14 @@ class Candidate(
 ):
     """A concrete collection candidate with its version resolved."""

-    def __new__(cls, *args, **kwargs):
+    def __new__(cls, *args: object, **kwargs: object) -> t.Self:
         self = CandidateNamedTuple.__new__(cls, *args, **kwargs)
         return self

-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: object, **kwargs: object) -> None:
         super(Candidate, self).__init__()

-    def with_signatures_repopulated(self):
+    def with_signatures_repopulated(self) -> Candidate:
         """Populate a new Candidate instance with Galaxy signatures.
         :raises AnsibleAssertionError: If the supplied candidate is not sourced from a Galaxy-like index.
         """
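Most of the dataclasses.py hunks only add annotations. Notably, the alternative constructors now return `t.Self` (PEP 673, available since Python 3.11), so a subclass calling an inherited classmethod constructor is inferred as the subclass rather than the base. A standalone sketch of why that matters, with made-up class names:

from __future__ import annotations

import typing as t


class Req:
    def __init__(self, fqcn: str, ver: str) -> None:
        self.fqcn = fqcn
        self.ver = ver

    @classmethod
    def from_string(cls, text: str) -> t.Self:
        fqcn, _, ver = text.partition(':')
        return cls(fqcn, ver or '*')


class PinnedReq(Req):
    pass


# Annotating `-> t.Self` (instead of `-> Req`) lets type checkers infer
# PinnedReq here, not the Req base class.
pinned = PinnedReq.from_string('ns.coll:1.2.3')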
ansible/galaxy/dependency_resolution/providers.py

@@ -16,6 +16,8 @@ if t.TYPE_CHECKING:
     from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
     from ansible.galaxy.api import GalaxyAPI

+    from resolvelib.structs import RequirementInformation
+
 from ansible.galaxy.collection.gpg import get_signature_from_source
 from ansible.galaxy.dependency_resolution.dataclasses import (
     Candidate,
@@ -48,14 +50,14 @@ class CollectionDependencyProvider(AbstractProvider):

     def __init__(
         self,
-        apis,
-        concrete_artifacts_manager,
-        preferred_candidates=None,
-        with_deps=True,
-        with_pre_releases=False,
-        upgrade=False,
-        include_signatures=True,
-    ):
+        apis: MultiGalaxyAPIProxy,
+        concrete_artifacts_manager: ConcreteArtifactsManager,
+        preferred_candidates: _c.Iterable[Candidate] | None = None,
+        with_deps: bool = True,
+        with_pre_releases: bool = False,
+        upgrade: bool = False,
+        include_signatures: bool = True,
+    ) -> None:
         r"""Initialize helper attributes.

         :param api: An instance of the multiple Galaxy APIs wrapper.
@@ -91,8 +93,10 @@ class CollectionDependencyProvider(AbstractProvider):
         self._upgrade = upgrade
         self._include_signatures = include_signatures

-    def identify(self, requirement_or_candidate):
-        # type: (Candidate | Requirement) -> str
+    def identify(
+        self,
+        requirement_or_candidate: Candidate | Requirement,
+    ) -> str:
         """Given requirement or candidate, return an identifier for it.

         This is used to identify a requirement or candidate, e.g.
@@ -108,8 +112,13 @@ class CollectionDependencyProvider(AbstractProvider):
         identifier: str,
         resolutions: _c.Mapping[str, Candidate],
         candidates: _c.Mapping[str, _c.Iterator[Candidate]],
-        information: _c.Mapping[str, _c.Iterator[t.Any]],
-        backtrack_causes: _c.Sequence[t.Any],
+        information: _c.Mapping[
+            str,
+            _c.Iterator[RequirementInformation[Requirement, Candidate]],
+        ],
+        backtrack_causes: _c.Sequence[
+            RequirementInformation[Requirement, Candidate],
+        ],
     ) -> float | int:
         """Return sort key function return value for given requirement.

@@ -205,7 +214,10 @@ class CollectionDependencyProvider(AbstractProvider):
             all(self.is_satisfied_by(requirement, candidate) for requirement in requirements)
         }
         try:
-            coll_versions = [] if preinstalled_candidates else self._api_proxy.get_collection_versions(first_req)
+            coll_versions: _c.Iterable[tuple[str, GalaxyAPI]] = (
+                [] if preinstalled_candidates
+                else self._api_proxy.get_collection_versions(first_req)
+            )
         except TypeError as exc:
             if first_req.is_concrete_artifact:
                 # Non hashable versions will cause a TypeError
@@ -248,7 +260,7 @@ class CollectionDependencyProvider(AbstractProvider):

         latest_matches = []
         signatures = []
-        extra_signature_sources = []  # type: list[str]
+        extra_signature_sources: list[str] = []

         discarding_pre_releases_acceptable = any(
             not is_pre_release(candidate_version)
@@ -353,8 +365,11 @@ class CollectionDependencyProvider(AbstractProvider):

         return list(preinstalled_candidates) + latest_matches

-    def is_satisfied_by(self, requirement, candidate):
-        # type: (Requirement, Candidate) -> bool
+    def is_satisfied_by(
+        self,
+        requirement: Requirement,
+        candidate: Candidate,
+    ) -> bool:
         r"""Whether the given requirement is satisfiable by a candidate.

         :param requirement: A requirement that produced the `candidate`.
@@ -380,8 +395,7 @@ class CollectionDependencyProvider(AbstractProvider):
             requirements=requirement.ver,
         )

-    def get_dependencies(self, candidate):
-        # type: (Candidate) -> list[Candidate]
+    def get_dependencies(self, candidate: Candidate) -> list[Requirement]:
         r"""Get direct dependencies of a candidate.

         :returns: A collection of requirements that `candidate` \
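`RequirementInformation` is imported under `if t.TYPE_CHECKING:` because it is only needed in annotations; with `from __future__ import annotations` in effect, the annotations are never evaluated at runtime, so no runtime import order changes. A minimal sketch of the idiom (the `describe` helper is hypothetical, not ansible-core code):

from __future__ import annotations

import typing as t

if t.TYPE_CHECKING:
    # Seen by type checkers only; no runtime import of resolvelib happens here.
    from resolvelib.structs import RequirementInformation


def describe(info: RequirementInformation) -> str:
    # In resolvelib, RequirementInformation is a (requirement, parent) named tuple.
    return f'{info.requirement} (required by {info.parent})'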
|