dependence 0.3.6__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dependence/_utilities.py +966 -39
- dependence/freeze.py +86 -45
- dependence/update.py +194 -86
- dependence-1.0.0.dist-info/METADATA +179 -0
- dependence-1.0.0.dist-info/RECORD +10 -0
- {dependence-0.3.6.dist-info → dependence-1.0.0.dist-info}/WHEEL +1 -2
- dependence/utilities.py +0 -1034
- dependence-0.3.6.dist-info/METADATA +0 -136
- dependence-0.3.6.dist-info/RECORD +0 -12
- dependence-0.3.6.dist-info/top_level.txt +0 -1
- {dependence-0.3.6.dist-info → dependence-1.0.0.dist-info}/entry_points.txt +0 -0
dependence/_utilities.py
CHANGED
|
@@ -1,23 +1,47 @@
|
|
|
1
|
+
import functools
|
|
2
|
+
import json
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
1
5
|
import sys
|
|
2
|
-
from
|
|
6
|
+
from collections import deque
|
|
7
|
+
from configparser import ConfigParser, SectionProxy
|
|
8
|
+
from enum import Enum, auto
|
|
9
|
+
from glob import iglob
|
|
10
|
+
from importlib.metadata import Distribution, PackageNotFoundError
|
|
11
|
+
from importlib.metadata import distribution as _get_distribution
|
|
12
|
+
from importlib.metadata import distributions as _get_distributions
|
|
3
13
|
from itertools import chain
|
|
4
14
|
from pathlib import Path
|
|
5
|
-
from
|
|
15
|
+
from shutil import rmtree
|
|
16
|
+
from subprocess import DEVNULL, PIPE, CalledProcessError, list2cmdline, run
|
|
6
17
|
from traceback import format_exception
|
|
7
18
|
from typing import (
|
|
19
|
+
IO,
|
|
20
|
+
AbstractSet,
|
|
8
21
|
Any,
|
|
9
22
|
Callable,
|
|
23
|
+
Container,
|
|
10
24
|
Dict,
|
|
11
25
|
Hashable,
|
|
12
26
|
Iterable,
|
|
13
27
|
List,
|
|
28
|
+
MutableSet,
|
|
29
|
+
Optional,
|
|
14
30
|
Set,
|
|
15
31
|
Tuple,
|
|
32
|
+
TypedDict,
|
|
16
33
|
Union,
|
|
34
|
+
cast,
|
|
17
35
|
)
|
|
18
36
|
from warnings import warn
|
|
19
37
|
|
|
20
38
|
import tomli
|
|
39
|
+
from jsonpointer import resolve_pointer # type: ignore
|
|
40
|
+
from packaging.requirements import InvalidRequirement, Requirement
|
|
41
|
+
from packaging.utils import canonicalize_name
|
|
42
|
+
|
|
43
|
+
_BUILTIN_DISTRIBUTION_NAMES: Tuple[str] = ("distribute",)
|
|
44
|
+
_UNSAFE_CHARACTERS_PATTERN: re.Pattern = re.compile("[^A-Za-z0-9.]+")
|
|
21
45
|
|
|
22
46
|
|
|
23
47
|
def iter_distinct(items: Iterable[Hashable]) -> Iterable:
|
|
@@ -32,17 +56,6 @@ def iter_distinct(items: Iterable[Hashable]) -> Iterable:
|
|
|
32
56
|
yield item
|
|
33
57
|
|
|
34
58
|
|
|
35
|
-
@lru_cache()
|
|
36
|
-
def pyproject_toml_defines_project(pyproject_toml_path: str) -> bool:
|
|
37
|
-
pyproject: Dict[str, Any]
|
|
38
|
-
try:
|
|
39
|
-
with open(pyproject_toml_path, "r") as pyproject_io:
|
|
40
|
-
pyproject = tomli.loads(pyproject_io.read())
|
|
41
|
-
except FileNotFoundError:
|
|
42
|
-
return False
|
|
43
|
-
return bool(pyproject.get("project", {}).get("name"))
|
|
44
|
-
|
|
45
|
-
|
|
46
59
|
def get_exception_text() -> str:
|
|
47
60
|
"""
|
|
48
61
|
When called within an exception, this function returns a text
|
|
@@ -53,29 +66,6 @@ def get_exception_text() -> str:
|
|
|
53
66
|
return "".join(format_exception(*sys.exc_info()))
|
|
54
67
|
|
|
55
68
|
|
|
56
|
-
def append_exception_text(error: Exception, message: str) -> None:
|
|
57
|
-
"""
|
|
58
|
-
Cause `message` to be appended to an error's exception text.
|
|
59
|
-
"""
|
|
60
|
-
last_attribute_name: str
|
|
61
|
-
for last_attribute_name in ("strerror", "msg"):
|
|
62
|
-
last_attribute_value = getattr(error, last_attribute_name, "")
|
|
63
|
-
if last_attribute_value:
|
|
64
|
-
setattr(
|
|
65
|
-
error, last_attribute_name, f"{last_attribute_value}{message}"
|
|
66
|
-
)
|
|
67
|
-
break
|
|
68
|
-
if not last_attribute_value:
|
|
69
|
-
index: int
|
|
70
|
-
arg: Any
|
|
71
|
-
reversed_args: List[Any] = list(reversed(error.args)) or [""]
|
|
72
|
-
for index, value in enumerate(reversed_args):
|
|
73
|
-
if isinstance(value, str):
|
|
74
|
-
reversed_args[index] = f"{value}{message}"
|
|
75
|
-
break
|
|
76
|
-
error.args = tuple(reversed(reversed_args))
|
|
77
|
-
|
|
78
|
-
|
|
79
69
|
def _iter_parse_delimited_value(value: str, delimiter: str) -> Iterable[str]:
|
|
80
70
|
return value.split(delimiter)
|
|
81
71
|
|
|
@@ -149,10 +139,9 @@ def deprecated(message: str = "") -> Callable[..., Callable[..., Any]]:
|
|
|
149
139
|
"""
|
|
150
140
|
|
|
151
141
|
def decorating_function(
|
|
152
|
-
function: Callable[..., Any]
|
|
142
|
+
function: Callable[..., Any],
|
|
153
143
|
) -> Callable[..., Any]:
|
|
154
|
-
|
|
155
|
-
@wraps(function)
|
|
144
|
+
@functools.wraps(function)
|
|
156
145
|
def wrapper(*args: Any, **kwargs: Any) -> Any:
|
|
157
146
|
name: str = get_qualified_name(function)
|
|
158
147
|
warn(
|
|
@@ -173,3 +162,941 @@ def deprecated(message: str = "") -> Callable[..., Callable[..., Any]]:
|
|
|
173
162
|
return wrapper
|
|
174
163
|
|
|
175
164
|
return decorating_function
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def split_dot(path: str) -> Tuple[str, ...]:
|
|
168
|
+
return tuple(path.split("."))
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def tuple_starts_with(
|
|
172
|
+
a: Tuple[str, ...],
|
|
173
|
+
b: Tuple[str, ...],
|
|
174
|
+
) -> bool:
|
|
175
|
+
"""
|
|
176
|
+
Determine if tuple `a` starts with tuple `b`
|
|
177
|
+
"""
|
|
178
|
+
return a[: len(b)] == b
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def tuple_starts_with_any(
|
|
182
|
+
a: Tuple[str, ...],
|
|
183
|
+
bs: Tuple[Tuple[str, ...], ...],
|
|
184
|
+
) -> bool:
|
|
185
|
+
"""
|
|
186
|
+
Determine if tuple `a` starts with any tuple in `bs`
|
|
187
|
+
"""
|
|
188
|
+
b: Tuple[str, ...]
|
|
189
|
+
return any(tuple_starts_with(a, b) for b in bs)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def iter_find_qualified_lists(
|
|
193
|
+
data: Union[Dict[str, Any], list],
|
|
194
|
+
item_condition: Callable[[Any], bool],
|
|
195
|
+
exclude_object_ids: AbstractSet[int] = frozenset(),
|
|
196
|
+
) -> Iterable[list]:
|
|
197
|
+
"""
|
|
198
|
+
Recursively yield all lists where all items in the list
|
|
199
|
+
satisfy the provided condition.
|
|
200
|
+
|
|
201
|
+
Parameters:
|
|
202
|
+
data: A dictionary or list to search
|
|
203
|
+
item_condition: A function that returns True if the list item
|
|
204
|
+
is the type we are looking for
|
|
205
|
+
|
|
206
|
+
>>> tuple(
|
|
207
|
+
... iter_find_qualified_lists(
|
|
208
|
+
... {
|
|
209
|
+
... "a": [
|
|
210
|
+
... 1,
|
|
211
|
+
... 2,
|
|
212
|
+
... 3,
|
|
213
|
+
... ],
|
|
214
|
+
... "b": [
|
|
215
|
+
... "four",
|
|
216
|
+
... "five",
|
|
217
|
+
... "six",
|
|
218
|
+
... ],
|
|
219
|
+
... "c": [
|
|
220
|
+
... 7,
|
|
221
|
+
... 8,
|
|
222
|
+
... 9,
|
|
223
|
+
... ],
|
|
224
|
+
... "d": [
|
|
225
|
+
... "ten",
|
|
226
|
+
... "eleven",
|
|
227
|
+
... "twelve",
|
|
228
|
+
... ],
|
|
229
|
+
... "e": {
|
|
230
|
+
... "aa": [
|
|
231
|
+
... 13,
|
|
232
|
+
... 14,
|
|
233
|
+
... 15,
|
|
234
|
+
... ],
|
|
235
|
+
... "bb": [
|
|
236
|
+
... "sixteen",
|
|
237
|
+
... "seventeen",
|
|
238
|
+
... "eighteen",
|
|
239
|
+
... ],
|
|
240
|
+
... },
|
|
241
|
+
... "f": [
|
|
242
|
+
... [
|
|
243
|
+
... 19,
|
|
244
|
+
... 20,
|
|
245
|
+
... 21,
|
|
246
|
+
... ],
|
|
247
|
+
... [
|
|
248
|
+
... "twenty-two",
|
|
249
|
+
... "twenty-three",
|
|
250
|
+
... "twenty-four",
|
|
251
|
+
... ],
|
|
252
|
+
... ],
|
|
253
|
+
... },
|
|
254
|
+
... lambda item: isinstance(
|
|
255
|
+
... item,
|
|
256
|
+
... int,
|
|
257
|
+
... ),
|
|
258
|
+
... )
|
|
259
|
+
... )
|
|
260
|
+
([1, 2, 3], [7, 8, 9], [13, 14, 15], [19, 20, 21])
|
|
261
|
+
"""
|
|
262
|
+
if id(data) in exclude_object_ids:
|
|
263
|
+
return
|
|
264
|
+
if isinstance(data, dict):
|
|
265
|
+
_key: str
|
|
266
|
+
value: Any
|
|
267
|
+
for _key, value in data.items():
|
|
268
|
+
if isinstance(value, (list, dict)):
|
|
269
|
+
yield from iter_find_qualified_lists(
|
|
270
|
+
value, item_condition, exclude_object_ids
|
|
271
|
+
)
|
|
272
|
+
elif isinstance(data, list) and data:
|
|
273
|
+
matched: bool = True
|
|
274
|
+
item: Any
|
|
275
|
+
for item in data:
|
|
276
|
+
if not item_condition(item):
|
|
277
|
+
matched = False
|
|
278
|
+
if isinstance(item, (list, dict)):
|
|
279
|
+
yield from iter_find_qualified_lists(
|
|
280
|
+
item, item_condition, exclude_object_ids
|
|
281
|
+
)
|
|
282
|
+
if matched:
|
|
283
|
+
yield data
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
def normalize_name(name: str) -> str:
|
|
287
|
+
"""
|
|
288
|
+
Normalize a project/distribution name
|
|
289
|
+
"""
|
|
290
|
+
return _UNSAFE_CHARACTERS_PATTERN.sub("-", canonicalize_name(name)).lower()
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
class ConfigurationFileType(Enum):
|
|
294
|
+
REQUIREMENTS_TXT = auto()
|
|
295
|
+
SETUP_CFG = auto()
|
|
296
|
+
TOX_INI = auto()
|
|
297
|
+
PYPROJECT_TOML = auto()
|
|
298
|
+
TOML = auto()
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
@functools.lru_cache
|
|
302
|
+
def get_configuration_file_type(path: str) -> ConfigurationFileType:
|
|
303
|
+
if not os.path.isfile(path):
|
|
304
|
+
raise FileNotFoundError(path)
|
|
305
|
+
basename: str = os.path.basename(path).lower()
|
|
306
|
+
if basename == "setup.cfg":
|
|
307
|
+
return ConfigurationFileType.SETUP_CFG
|
|
308
|
+
elif basename == "tox.ini":
|
|
309
|
+
return ConfigurationFileType.TOX_INI
|
|
310
|
+
elif basename == "pyproject.toml":
|
|
311
|
+
return ConfigurationFileType.PYPROJECT_TOML
|
|
312
|
+
elif basename.endswith(".txt"):
|
|
313
|
+
return ConfigurationFileType.REQUIREMENTS_TXT
|
|
314
|
+
elif basename.endswith(".toml"):
|
|
315
|
+
return ConfigurationFileType.TOML
|
|
316
|
+
else:
|
|
317
|
+
raise ValueError(
|
|
318
|
+
f"{path} is not a recognized type of configuration file."
|
|
319
|
+
)
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
def is_configuration_file(path: str) -> bool:
|
|
323
|
+
try:
|
|
324
|
+
get_configuration_file_type(path)
|
|
325
|
+
except (FileNotFoundError, ValueError):
|
|
326
|
+
return False
|
|
327
|
+
return True
|
|
328
|
+
|
|
329
|
+
|
|
330
|
+
class _EditablePackageMetadata(TypedDict):
|
|
331
|
+
name: str
|
|
332
|
+
version: str
|
|
333
|
+
editable_project_location: str
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
def _iter_editable_distribution_locations() -> Iterable[Tuple[str, str]]:
|
|
337
|
+
metadata: _EditablePackageMetadata
|
|
338
|
+
for metadata in json.loads(
|
|
339
|
+
check_output(
|
|
340
|
+
(
|
|
341
|
+
sys.executable,
|
|
342
|
+
"-m",
|
|
343
|
+
"pip",
|
|
344
|
+
"list",
|
|
345
|
+
"--editable",
|
|
346
|
+
"--format=json",
|
|
347
|
+
)
|
|
348
|
+
)
|
|
349
|
+
):
|
|
350
|
+
yield (
|
|
351
|
+
normalize_name(metadata["name"]),
|
|
352
|
+
metadata["editable_project_location"],
|
|
353
|
+
)
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
@functools.lru_cache
|
|
357
|
+
def get_editable_distributions_locations() -> Dict[str, str]:
|
|
358
|
+
"""
|
|
359
|
+
Get a mapping of (normalized) editable distribution names to their
|
|
360
|
+
locations.
|
|
361
|
+
"""
|
|
362
|
+
return dict(_iter_editable_distribution_locations())
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
def cache_clear() -> None:
|
|
366
|
+
"""
|
|
367
|
+
Clear distribution metadata caches
|
|
368
|
+
"""
|
|
369
|
+
get_installed_distributions.cache_clear()
|
|
370
|
+
get_editable_distributions_locations.cache_clear()
|
|
371
|
+
is_editable.cache_clear()
|
|
372
|
+
is_installed.cache_clear()
|
|
373
|
+
get_requirement_string_distribution_name.cache_clear()
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
def refresh_editable_distributions() -> None:
|
|
377
|
+
"""
|
|
378
|
+
Update distribution information for editable installs
|
|
379
|
+
"""
|
|
380
|
+
name: str
|
|
381
|
+
location: str
|
|
382
|
+
for name, location in get_editable_distributions_locations().items():
|
|
383
|
+
_install_requirement_string(location, name=name, editable=True)
|
|
384
|
+
|
|
385
|
+
|
|
386
|
+
@functools.lru_cache
|
|
387
|
+
def get_installed_distributions() -> Dict[str, Distribution]:
|
|
388
|
+
"""
|
|
389
|
+
Return a dictionary of installed distributions.
|
|
390
|
+
"""
|
|
391
|
+
refresh_editable_distributions()
|
|
392
|
+
installed: Dict[str, Distribution] = {}
|
|
393
|
+
for distribution in _get_distributions():
|
|
394
|
+
installed[normalize_name(distribution.metadata["Name"])] = distribution
|
|
395
|
+
return installed
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
def get_distribution(name: str) -> Distribution:
|
|
399
|
+
return get_installed_distributions()[normalize_name(name)]
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
@functools.lru_cache
|
|
403
|
+
def is_installed(distribution_name: str) -> bool:
|
|
404
|
+
return normalize_name(distribution_name) in get_installed_distributions()
|
|
405
|
+
|
|
406
|
+
|
|
407
|
+
def get_requirement_distribution_name(requirement: Requirement) -> str:
|
|
408
|
+
return normalize_name(requirement.name)
|
|
409
|
+
|
|
410
|
+
|
|
411
|
+
@functools.lru_cache
|
|
412
|
+
def get_requirement_string_distribution_name(requirement_string: str) -> str:
|
|
413
|
+
return get_requirement_distribution_name(
|
|
414
|
+
get_requirement(requirement_string)
|
|
415
|
+
)
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
@functools.lru_cache
|
|
419
|
+
def is_requirement_string(requirement_string: str) -> bool:
|
|
420
|
+
try:
|
|
421
|
+
Requirement(requirement_string)
|
|
422
|
+
except InvalidRequirement:
|
|
423
|
+
return False
|
|
424
|
+
return True
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
def _iter_file_requirement_strings(path: str) -> Iterable[str]:
|
|
428
|
+
lines: List[str]
|
|
429
|
+
requirement_file_io: IO[str]
|
|
430
|
+
with open(path) as requirement_file_io:
|
|
431
|
+
lines = requirement_file_io.readlines()
|
|
432
|
+
return filter(is_requirement_string, lines)
|
|
433
|
+
|
|
434
|
+
|
|
435
|
+
def _iter_setup_cfg_requirement_strings(path: str) -> Iterable[str]:
|
|
436
|
+
parser: ConfigParser = ConfigParser()
|
|
437
|
+
parser.read(path)
|
|
438
|
+
requirement_strings: Iterable[str] = ()
|
|
439
|
+
if ("options" in parser) and ("install_requires" in parser["options"]):
|
|
440
|
+
requirement_strings = chain(
|
|
441
|
+
requirement_strings,
|
|
442
|
+
filter(
|
|
443
|
+
is_requirement_string,
|
|
444
|
+
parser["options"]["install_requires"].split("\n"),
|
|
445
|
+
),
|
|
446
|
+
)
|
|
447
|
+
if "options.extras_require" in parser:
|
|
448
|
+
extras_require: SectionProxy = parser["options.extras_require"]
|
|
449
|
+
extra_requirements_string: str
|
|
450
|
+
for extra_requirements_string in extras_require.values():
|
|
451
|
+
requirement_strings = chain(
|
|
452
|
+
requirement_strings,
|
|
453
|
+
filter(
|
|
454
|
+
is_requirement_string,
|
|
455
|
+
extra_requirements_string.split("\n"),
|
|
456
|
+
),
|
|
457
|
+
)
|
|
458
|
+
return iter_distinct(requirement_strings)
|
|
459
|
+
|
|
460
|
+
|
|
461
|
+
def _iter_tox_ini_requirement_strings(
|
|
462
|
+
path: Union[str, Path, ConfigParser] = "",
|
|
463
|
+
string: str = "",
|
|
464
|
+
) -> Iterable[str]:
|
|
465
|
+
"""
|
|
466
|
+
Parse a tox.ini file and yield the requirements found in the `deps`
|
|
467
|
+
options of each section.
|
|
468
|
+
|
|
469
|
+
Parameters:
|
|
470
|
+
|
|
471
|
+
- path (str|Path) = "": The path to a tox.ini file
|
|
472
|
+
- string (str) = "": The contents of a tox.ini file
|
|
473
|
+
"""
|
|
474
|
+
parser: ConfigParser = ConfigParser()
|
|
475
|
+
if path:
|
|
476
|
+
assert (
|
|
477
|
+
not string
|
|
478
|
+
), "Either `path` or `string` arguments may be provided, but not both"
|
|
479
|
+
parser.read(path)
|
|
480
|
+
else:
|
|
481
|
+
assert string, "Either a `path` or `string` argument must be provided"
|
|
482
|
+
parser.read_string(string)
|
|
483
|
+
|
|
484
|
+
def get_section_option_requirements(
|
|
485
|
+
section_name: str, option_name: str
|
|
486
|
+
) -> Iterable[str]:
|
|
487
|
+
if parser.has_option(section_name, option_name):
|
|
488
|
+
return filter(
|
|
489
|
+
is_requirement_string,
|
|
490
|
+
parser.get(section_name, option_name).split("\n"),
|
|
491
|
+
)
|
|
492
|
+
return ()
|
|
493
|
+
|
|
494
|
+
def get_section_requirements(section_name: str) -> Iterable[str]:
|
|
495
|
+
requirements: Iterable[str] = get_section_option_requirements(
|
|
496
|
+
section_name, "deps"
|
|
497
|
+
)
|
|
498
|
+
if section_name == "tox":
|
|
499
|
+
requirements = chain(
|
|
500
|
+
requirements,
|
|
501
|
+
get_section_option_requirements(section_name, "requires"),
|
|
502
|
+
)
|
|
503
|
+
return requirements
|
|
504
|
+
|
|
505
|
+
return iter_distinct(
|
|
506
|
+
chain(("tox",), *map(get_section_requirements, parser.sections()))
|
|
507
|
+
)
|
|
508
|
+
|
|
509
|
+
|
|
510
|
+
def _is_installed_requirement_string(item: Any) -> bool:
|
|
511
|
+
"""
|
|
512
|
+
Determine if an item is a valid requirement string for an installed
|
|
513
|
+
package.
|
|
514
|
+
|
|
515
|
+
Parameters:
|
|
516
|
+
item: An item to evaluate.
|
|
517
|
+
"""
|
|
518
|
+
if not isinstance(item, str):
|
|
519
|
+
return False
|
|
520
|
+
try:
|
|
521
|
+
requirement: Requirement = Requirement(item)
|
|
522
|
+
except InvalidRequirement:
|
|
523
|
+
return False
|
|
524
|
+
return is_installed(requirement.name)
|
|
525
|
+
|
|
526
|
+
|
|
527
|
+
def iter_find_requirements_lists(
|
|
528
|
+
document: Union[Dict[str, Any], list],
|
|
529
|
+
include_pointers: Tuple[str, ...] = (),
|
|
530
|
+
exclude_pointers: Tuple[str, ...] = (),
|
|
531
|
+
) -> Iterable[List[str]]:
|
|
532
|
+
"""
|
|
533
|
+
Recursively yield all lists of valid requirement strings for installed
|
|
534
|
+
packages. Exclusions are resolved before inclusions.
|
|
535
|
+
|
|
536
|
+
Parameters:
|
|
537
|
+
document: A dictionary or list of JSON-compatible data elements.
|
|
538
|
+
include_pointers: JSON pointers of elements to include.
|
|
539
|
+
exclude_pointers: JSON pointers of elements to exclude.
|
|
540
|
+
"""
|
|
541
|
+
exclude_object_ids: AbstractSet[int]
|
|
542
|
+
if exclude_pointers:
|
|
543
|
+
exclude_object_ids = set(
|
|
544
|
+
map(
|
|
545
|
+
id,
|
|
546
|
+
filter(
|
|
547
|
+
None,
|
|
548
|
+
map(
|
|
549
|
+
functools.partial(
|
|
550
|
+
resolve_pointer, document, default=None
|
|
551
|
+
),
|
|
552
|
+
exclude_pointers,
|
|
553
|
+
),
|
|
554
|
+
),
|
|
555
|
+
)
|
|
556
|
+
)
|
|
557
|
+
else:
|
|
558
|
+
exclude_object_ids = frozenset()
|
|
559
|
+
if include_pointers:
|
|
560
|
+
included_element: Any
|
|
561
|
+
for included_element in filter(
|
|
562
|
+
None,
|
|
563
|
+
map(
|
|
564
|
+
functools.partial(resolve_pointer, document, default=None),
|
|
565
|
+
include_pointers,
|
|
566
|
+
),
|
|
567
|
+
):
|
|
568
|
+
if isinstance(included_element, (list, dict)):
|
|
569
|
+
yield from iter_find_qualified_lists(
|
|
570
|
+
included_element,
|
|
571
|
+
item_condition=_is_installed_requirement_string,
|
|
572
|
+
exclude_object_ids=exclude_object_ids,
|
|
573
|
+
)
|
|
574
|
+
else:
|
|
575
|
+
yield from iter_find_qualified_lists(
|
|
576
|
+
document,
|
|
577
|
+
item_condition=_is_installed_requirement_string,
|
|
578
|
+
exclude_object_ids=exclude_object_ids,
|
|
579
|
+
)
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
def _iter_toml_requirement_strings(
|
|
583
|
+
path: str,
|
|
584
|
+
include_pointers: Tuple[str, ...] = (),
|
|
585
|
+
exclude_pointers: Tuple[str, ...] = (),
|
|
586
|
+
) -> Iterable[str]:
|
|
587
|
+
"""
|
|
588
|
+
Read a TOML file and yield the requirements found.
|
|
589
|
+
|
|
590
|
+
Parameters:
|
|
591
|
+
Path: The path to a TOML file.
|
|
592
|
+
include_pointers: A tuple of JSON pointers indicating elements to
|
|
593
|
+
include (defaults to all elements).
|
|
594
|
+
exclude_pointers: A tuple of JSON pointers indicating elements to
|
|
595
|
+
exclude (defaults to no exclusions).
|
|
596
|
+
"""
|
|
597
|
+
# Parse pyproject.toml
|
|
598
|
+
try:
|
|
599
|
+
with open(path, "rb") as pyproject_io:
|
|
600
|
+
document: Dict[str, Any] = tomli.load(pyproject_io)
|
|
601
|
+
except FileNotFoundError:
|
|
602
|
+
return
|
|
603
|
+
# Find requirements
|
|
604
|
+
yield from iter_distinct(
|
|
605
|
+
chain(
|
|
606
|
+
*iter_find_requirements_lists(
|
|
607
|
+
document,
|
|
608
|
+
include_pointers=include_pointers,
|
|
609
|
+
exclude_pointers=exclude_pointers,
|
|
610
|
+
)
|
|
611
|
+
)
|
|
612
|
+
)
|
|
613
|
+
|
|
614
|
+
|
|
615
|
+
def iter_configuration_file_requirement_strings(
|
|
616
|
+
path: str,
|
|
617
|
+
*,
|
|
618
|
+
include_pointers: Tuple[str, ...] = (),
|
|
619
|
+
exclude_pointers: Tuple[str, ...] = (),
|
|
620
|
+
) -> Iterable[str]:
|
|
621
|
+
"""
|
|
622
|
+
Read a configuration file and yield the parsed requirements.
|
|
623
|
+
|
|
624
|
+
Parameters:
|
|
625
|
+
path: The path to a configuration file
|
|
626
|
+
include_pointers: A tuple of JSON pointers indicating elements to
|
|
627
|
+
include (defaults to all elements).
|
|
628
|
+
exclude_pointers: A tuple of JSON pointers indicating elements to
|
|
629
|
+
exclude (defaults to no exclusions).
|
|
630
|
+
"""
|
|
631
|
+
configuration_file_type: ConfigurationFileType = (
|
|
632
|
+
get_configuration_file_type(path)
|
|
633
|
+
)
|
|
634
|
+
if configuration_file_type == ConfigurationFileType.SETUP_CFG:
|
|
635
|
+
return _iter_setup_cfg_requirement_strings(path)
|
|
636
|
+
elif configuration_file_type in (
|
|
637
|
+
ConfigurationFileType.PYPROJECT_TOML,
|
|
638
|
+
ConfigurationFileType.TOML,
|
|
639
|
+
):
|
|
640
|
+
return _iter_toml_requirement_strings(
|
|
641
|
+
path,
|
|
642
|
+
include_pointers=include_pointers,
|
|
643
|
+
exclude_pointers=exclude_pointers,
|
|
644
|
+
)
|
|
645
|
+
elif configuration_file_type == ConfigurationFileType.TOX_INI:
|
|
646
|
+
return _iter_tox_ini_requirement_strings(path=path)
|
|
647
|
+
else:
|
|
648
|
+
assert (
|
|
649
|
+
configuration_file_type == ConfigurationFileType.REQUIREMENTS_TXT
|
|
650
|
+
)
|
|
651
|
+
return _iter_file_requirement_strings(path)
|
|
652
|
+
|
|
653
|
+
|
|
654
|
+
@functools.lru_cache
|
|
655
|
+
def is_editable(name: str) -> bool:
|
|
656
|
+
"""
|
|
657
|
+
Return `True` if the indicated distribution is an editable installation.
|
|
658
|
+
"""
|
|
659
|
+
return bool(normalize_name(name) in get_editable_distributions_locations())
|
|
660
|
+
|
|
661
|
+
|
|
662
|
+
def _get_setup_cfg_metadata(path: str, key: str) -> str:
|
|
663
|
+
if os.path.basename(path).lower() != "setup.cfg":
|
|
664
|
+
if not os.path.isdir(path):
|
|
665
|
+
path = os.path.dirname(path)
|
|
666
|
+
path = os.path.join(path, "setup.cfg")
|
|
667
|
+
if os.path.isfile(path):
|
|
668
|
+
parser: ConfigParser = ConfigParser()
|
|
669
|
+
parser.read(path)
|
|
670
|
+
if "metadata" in parser:
|
|
671
|
+
return parser.get("metadata", key, fallback="")
|
|
672
|
+
else:
|
|
673
|
+
warn(
|
|
674
|
+
f"No `metadata` section found in: {path}",
|
|
675
|
+
stacklevel=2,
|
|
676
|
+
)
|
|
677
|
+
return ""
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
def _get_setup_py_metadata(path: str, args: Tuple[str, ...]) -> str:
|
|
681
|
+
"""
|
|
682
|
+
Execute a setup.py script with `args` and return the response.
|
|
683
|
+
|
|
684
|
+
Parameters:
|
|
685
|
+
|
|
686
|
+
- path (str)
|
|
687
|
+
- args ([str])
|
|
688
|
+
"""
|
|
689
|
+
value: str = ""
|
|
690
|
+
current_directory: str = os.path.abspath(os.curdir)
|
|
691
|
+
directory: str = path
|
|
692
|
+
try:
|
|
693
|
+
if os.path.basename(path).lower() == "setup.py":
|
|
694
|
+
directory = os.path.dirname(path)
|
|
695
|
+
os.chdir(directory)
|
|
696
|
+
else:
|
|
697
|
+
if not os.path.isdir(path):
|
|
698
|
+
directory = os.path.dirname(path)
|
|
699
|
+
os.chdir(directory)
|
|
700
|
+
path = os.path.join(directory, "setup.py")
|
|
701
|
+
if os.path.isfile(path):
|
|
702
|
+
command: Tuple[str, ...] = (sys.executable, path) + args
|
|
703
|
+
try:
|
|
704
|
+
value = check_output(command).strip().split("\n")[-1]
|
|
705
|
+
except CalledProcessError:
|
|
706
|
+
warn(
|
|
707
|
+
f"A package name could not be found in {path}, "
|
|
708
|
+
"attempting to refresh egg info"
|
|
709
|
+
f"\nError ignored: {get_exception_text()}",
|
|
710
|
+
stacklevel=2,
|
|
711
|
+
)
|
|
712
|
+
# re-write egg info and attempt to get the name again
|
|
713
|
+
setup_egg_info(directory)
|
|
714
|
+
try:
|
|
715
|
+
value = check_output(command).strip().split("\n")[-1]
|
|
716
|
+
except Exception:
|
|
717
|
+
warn(
|
|
718
|
+
f"A package name could not be found in {path}"
|
|
719
|
+
f"\nError ignored: {get_exception_text()}",
|
|
720
|
+
stacklevel=2,
|
|
721
|
+
)
|
|
722
|
+
finally:
|
|
723
|
+
os.chdir(current_directory)
|
|
724
|
+
return value
|
|
725
|
+
|
|
726
|
+
|
|
727
|
+
def _get_pyproject_toml_project_metadata(path: str, key: str) -> str:
|
|
728
|
+
if os.path.basename(path).lower() != "pyproject.toml":
|
|
729
|
+
if not os.path.isdir(path):
|
|
730
|
+
path = os.path.dirname(path)
|
|
731
|
+
path = os.path.join(path, "pyproject.toml")
|
|
732
|
+
if os.path.isfile(path):
|
|
733
|
+
pyproject_io: IO[str]
|
|
734
|
+
with open(path) as pyproject_io:
|
|
735
|
+
pyproject: Dict[str, Any] = tomli.loads(pyproject_io.read())
|
|
736
|
+
if "project" in pyproject:
|
|
737
|
+
return pyproject["project"].get(key, "")
|
|
738
|
+
return ""
|
|
739
|
+
|
|
740
|
+
|
|
741
|
+
def get_setup_distribution_name(path: str) -> str:
|
|
742
|
+
"""
|
|
743
|
+
Get a distribution's name from setup.py, setup.cfg or pyproject.toml
|
|
744
|
+
"""
|
|
745
|
+
return normalize_name(
|
|
746
|
+
_get_setup_cfg_metadata(path, "name")
|
|
747
|
+
or _get_pyproject_toml_project_metadata(path, "name")
|
|
748
|
+
or _get_setup_py_metadata(path, ("--name",))
|
|
749
|
+
)
|
|
750
|
+
|
|
751
|
+
|
|
752
|
+
def get_setup_distribution_version(path: str) -> str:
|
|
753
|
+
"""
|
|
754
|
+
Get a distribution's version from setup.py, setup.cfg or pyproject.toml
|
|
755
|
+
"""
|
|
756
|
+
return (
|
|
757
|
+
_get_setup_cfg_metadata(path, "version")
|
|
758
|
+
or _get_pyproject_toml_project_metadata(path, "version")
|
|
759
|
+
or _get_setup_py_metadata(path, ("--version",))
|
|
760
|
+
)
|
|
761
|
+
|
|
762
|
+
|
|
763
|
+
def _setup(arguments: Tuple[str, ...]) -> None:
|
|
764
|
+
try:
|
|
765
|
+
check_output((sys.executable, "setup.py") + arguments)
|
|
766
|
+
except CalledProcessError:
|
|
767
|
+
warn(f"Ignoring error: {get_exception_text()}", stacklevel=2)
|
|
768
|
+
|
|
769
|
+
|
|
770
|
+
def _setup_location(
|
|
771
|
+
location: Union[str, Path], arguments: Iterable[Tuple[str, ...]]
|
|
772
|
+
) -> None:
|
|
773
|
+
if isinstance(location, str):
|
|
774
|
+
location = Path(location)
|
|
775
|
+
# If there is no setup.py file, we can't update egg info
|
|
776
|
+
if not location.joinpath("setup.py").is_file():
|
|
777
|
+
return
|
|
778
|
+
if isinstance(arguments, str):
|
|
779
|
+
arguments = (arguments,)
|
|
780
|
+
current_directory: Path = Path(os.curdir).absolute()
|
|
781
|
+
os.chdir(location)
|
|
782
|
+
try:
|
|
783
|
+
deque(map(_setup, arguments), maxlen=0)
|
|
784
|
+
finally:
|
|
785
|
+
os.chdir(current_directory)
|
|
786
|
+
|
|
787
|
+
|
|
788
|
+
def get_editable_distribution_location(name: str) -> str:
|
|
789
|
+
return get_editable_distributions_locations().get(normalize_name(name), "")
|
|
790
|
+
|
|
791
|
+
|
|
792
|
+
def setup_egg_info(directory: Union[str, Path], egg_base: str = "") -> None:
|
|
793
|
+
"""
|
|
794
|
+
Refresh egg-info for the editable package installed in
|
|
795
|
+
`directory` (only applicable for packages using a `setup.py` script)
|
|
796
|
+
"""
|
|
797
|
+
if isinstance(directory, str):
|
|
798
|
+
directory = Path(directory)
|
|
799
|
+
directory = directory.absolute()
|
|
800
|
+
if not directory.is_dir():
|
|
801
|
+
directory = directory.parent
|
|
802
|
+
# If there is a setup.py, and a *.dist-info directory, but that
|
|
803
|
+
# *.dist-info directory has no RECORD, we need to remove the *.dist-info
|
|
804
|
+
# directory
|
|
805
|
+
if directory.joinpath("setup.py").is_file():
|
|
806
|
+
dist_info: str
|
|
807
|
+
for dist_info in iglob(str(directory.joinpath("*.dist-info"))):
|
|
808
|
+
dist_info_path: Path = Path(dist_info)
|
|
809
|
+
if not dist_info_path.joinpath("RECORD").is_file():
|
|
810
|
+
rmtree(dist_info_path)
|
|
811
|
+
_setup_location(
|
|
812
|
+
directory,
|
|
813
|
+
(("-q", "egg_info") + (("--egg-base", egg_base) if egg_base else ()),),
|
|
814
|
+
)
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
def get_requirement(
|
|
818
|
+
requirement_string: str,
|
|
819
|
+
) -> Requirement:
|
|
820
|
+
try:
|
|
821
|
+
return Requirement(requirement_string)
|
|
822
|
+
except InvalidRequirement:
|
|
823
|
+
# Try to parse the requirement as an installation target location,
|
|
824
|
+
# such as can be used with `pip install`
|
|
825
|
+
location: str = requirement_string
|
|
826
|
+
extras: str = ""
|
|
827
|
+
if "[" in requirement_string and requirement_string.endswith("]"):
|
|
828
|
+
parts: List[str] = requirement_string.split("[")
|
|
829
|
+
location = "[".join(parts[:-1])
|
|
830
|
+
extras = f"[{parts[-1]}"
|
|
831
|
+
location = os.path.abspath(location)
|
|
832
|
+
name: str = get_setup_distribution_name(location)
|
|
833
|
+
assert name, f"No distribution found in {location}"
|
|
834
|
+
return Requirement(f"{name}{extras}")
|
|
835
|
+
|
|
836
|
+
|
|
837
|
+
def get_required_distribution_names(
|
|
838
|
+
requirement_string: str,
|
|
839
|
+
exclude: Iterable[str] = (),
|
|
840
|
+
recursive: bool = True,
|
|
841
|
+
echo: bool = False,
|
|
842
|
+
depth: Optional[int] = None,
|
|
843
|
+
) -> MutableSet[str]:
|
|
844
|
+
"""
|
|
845
|
+
Return a `tuple` of all distribution names which are required by the
|
|
846
|
+
distribution specified in `requirement_string`.
|
|
847
|
+
|
|
848
|
+
Parameters:
|
|
849
|
+
|
|
850
|
+
- requirement_string (str): A distribution name, or a requirement string
|
|
851
|
+
indicating both a distribution name and extras.
|
|
852
|
+
- exclude ([str]): The name of one or more distributions to *exclude*
|
|
853
|
+
from requirements lookup. Please note that excluding a distribution will
|
|
854
|
+
also halt recursive lookup of requirements for that distribution.
|
|
855
|
+
- recursive (bool): If `True` (the default), required distributions will
|
|
856
|
+
be obtained recursively.
|
|
857
|
+
- echo (bool) = False: If `True`, commands and responses executed in
|
|
858
|
+
subprocesses will be printed to `sys.stdout`
|
|
859
|
+
- depth (int|None) = None: The maximum depth of recursion to follow
|
|
860
|
+
requirements. If `None` (the default), recursion is not restricted.
|
|
861
|
+
"""
|
|
862
|
+
if isinstance(exclude, str):
|
|
863
|
+
exclude = {normalize_name(exclude)}
|
|
864
|
+
else:
|
|
865
|
+
exclude = set(map(normalize_name, exclude))
|
|
866
|
+
return set(
|
|
867
|
+
_iter_requirement_names(
|
|
868
|
+
get_requirement(requirement_string),
|
|
869
|
+
exclude=exclude,
|
|
870
|
+
recursive=recursive,
|
|
871
|
+
echo=echo,
|
|
872
|
+
depth=depth,
|
|
873
|
+
)
|
|
874
|
+
)
|
|
875
|
+
|
|
876
|
+
|
|
877
|
+
def _get_requirement_name(requirement: Requirement) -> str:
|
|
878
|
+
return normalize_name(requirement.name)
|
|
879
|
+
|
|
880
|
+
|
|
881
|
+
def install_requirement(
|
|
882
|
+
requirement: Union[str, Requirement],
|
|
883
|
+
echo: bool = True,
|
|
884
|
+
) -> None:
|
|
885
|
+
"""
|
|
886
|
+
Install a requirement
|
|
887
|
+
|
|
888
|
+
Parameters:
|
|
889
|
+
|
|
890
|
+
- requirement (str)
|
|
891
|
+
- echo (bool) = True: If `True` (default), the `pip install`
|
|
892
|
+
commands will be echoed to `sys.stdout`
|
|
893
|
+
"""
|
|
894
|
+
if isinstance(requirement, str):
|
|
895
|
+
requirement = Requirement(requirement)
|
|
896
|
+
return _install_requirement(requirement)
|
|
897
|
+
|
|
898
|
+
|
|
899
|
+
def _install_requirement_string(
    requirement_string: str,
    name: str = "",
    editable: bool = False,
) -> None:
    """
    Install a requirement string with no dependencies, compilation, build
    isolation, etc.
    """
    # Build the pip command; dependencies and bytecode compilation are
    # skipped because this installer resolves requirements itself
    command: Tuple[str, ...] = (
        sys.executable,
        "-m",
        "pip",
        "install",
        "--no-deps",
        "--no-compile",
    )
    if editable:
        command += ("-e", requirement_string)
    else:
        command += (requirement_string,)
    try:
        check_output(command)
    except CalledProcessError as error:
        # Compose a failure message appropriate to how much naming
        # information we have
        message: str
        if not name:
            message = (
                f"\n{list2cmdline(command)}"
                f"\nCould not install {requirement_string}"
            )
        elif name == requirement_string:
            message = (
                f"\nCould not install {name}:"
                f"\n$ {list2cmdline(command)}"
                f"\n{error.output.decode()}"
            )
        else:
            message = (
                f"\nCould not install {name} from "
                f"{requirement_string}:"
                f"\n$ {list2cmdline(command)}"
                f"\n{error.output.decode()}"
            )
        if not editable:
            # Non-editable installs are not retried
            print(message)
            raise error
        # Editable installs get one retry, forcing re-installation
        try:
            check_output(command + ("--force-reinstall",))
        except CalledProcessError as retry_error:
            print(message)
            raise retry_error
|
|
953
|
+
|
|
954
|
+
|
|
955
|
+
def _install_requirement(
    requirement: Requirement,
) -> None:
    """
    Install `requirement`. If the distribution is already installed in
    editable (development) mode, re-install it from its editable location,
    preserving any extras from the requirement.
    """
    specifier: str = str(requirement)
    installed: Optional[Distribution] = None
    editable_location: str = ""
    try:
        installed = _get_distribution(requirement.name)
        editable_location = get_editable_distribution_location(
            installed.metadata["Name"]
        )
    except (PackageNotFoundError, KeyError):
        # Not currently installed (or no "Name" metadata) — fall back to
        # installing from the requirement specifier as-is
        pass
    if installed and editable_location:
        # Assemble a requirement specifier for the editable install
        specifier = editable_location
        if requirement.extras:
            extras: str = ",".join(requirement.extras)
            specifier = f"{specifier}[{extras}]"
    _install_requirement_string(
        requirement_string=specifier,
        name=normalize_name(requirement.name),
        editable=bool(editable_location),
    )
    # Refresh the metadata
    cache_clear()
|
|
985
|
+
|
|
986
|
+
|
|
987
|
+
def _get_requirement_distribution(
    requirement: Requirement,
    name: str,
    reinstall: bool = True,
    echo: bool = False,
) -> Optional[Distribution]:
    """
    Look up the installed distribution matching `name`. Returns `None` for
    built-in distributions. If the distribution is missing and `reinstall`
    is true, attempt one installation and retry the lookup; raises
    `KeyError` when the distribution still cannot be found.
    """
    if name in _BUILTIN_DISTRIBUTION_NAMES:
        # Built-ins have no separately installed distribution
        return None
    distributions: Dict[str, Distribution] = get_installed_distributions()
    if name in distributions:
        return distributions[name]
    if not reinstall:
        # Second lookup after an install attempt also failed
        raise KeyError(name)
    if echo:
        warn(
            f'The required distribution "{name}" was not installed, '
            "attempting to install it now...",
            stacklevel=2,
        )
    # Attempt to install the requirement...
    install_requirement(requirement, echo=echo)
    # ...then retry exactly once, without a further install attempt
    return _get_requirement_distribution(
        requirement, name, reinstall=False, echo=echo
    )
|
|
1011
|
+
|
|
1012
|
+
|
|
1013
|
+
def _iter_distribution_requirements(
    distribution: Distribution,
    extras: Tuple[str, ...] = (),
    exclude: Container[str] = (),
) -> Iterable[Requirement]:
    """
    Yield `distribution`'s requirements which apply given `extras`,
    skipping any whose normalized name is in `exclude`.
    """
    if not distribution.requires:
        return
    requirement_string: str
    for requirement_string in distribution.requires:
        requirement: Requirement = Requirement(requirement_string)
        if normalize_name(requirement.name) in exclude:
            continue
        # A requirement with no marker always applies; one with a marker
        # applies when the marker evaluates true for any of `extras`
        if requirement.marker is None or any(
            requirement.marker.evaluate({"extra": extra}) for extra in extras
        ):
            yield requirement
|
|
1030
|
+
|
|
1031
|
+
|
|
1032
|
+
def _iter_requirement_names(
    requirement: Requirement,
    exclude: MutableSet[str],
    recursive: bool = True,
    echo: bool = False,
    depth: Optional[int] = None,
) -> Iterable[str]:
    """
    Yield the (normalized) names of distributions required by
    `requirement`, recursing into each required distribution's own
    requirements when `recursive` is true (bounded by `depth`, where
    `None` means unbounded).

    NOTE: `exclude` is mutated in place — visited names are added to it —
    and `lateral_exclude` is populated lazily as the returned iterable is
    consumed, so consumption order affects which names are excluded from
    sibling branches.
    """
    name: str = normalize_name(requirement.name)
    extras: Tuple[str, ...] = tuple(requirement.extras)
    if name in exclude:
        return ()
    # Ensure we don't follow the same requirement again, causing cyclic
    # recursion
    exclude.add(name)
    # Resolve the installed distribution (installing it if missing);
    # `None` indicates a built-in distribution with nothing to traverse
    distribution: Optional[Distribution] = _get_requirement_distribution(
        requirement, name, echo=echo
    )
    if distribution is None:
        return ()
    # The distribution's applicable direct requirements, de-duplicated
    requirements: Tuple[Requirement, ...] = tuple(
        iter_distinct(
            _iter_distribution_requirements(
                distribution,
                extras=extras,
                exclude=exclude,
            ),
        )
    )
    # Names already yielded from this level, used to prune sibling
    # sub-traversals (filled in by `not_excluded` below as the lazy
    # `filter` is consumed)
    lateral_exclude: MutableSet[str] = set()

    def iter_requirement_names_(
        requirement_: Requirement,
        depth_: Optional[int] = None,
    ) -> Iterable[str]:
        # Recurse one level down; a negative remaining depth stops the
        # traversal, `None` means unbounded
        if (depth_ is None) or depth_ >= 0:
            yield from _iter_requirement_names(
                requirement_,
                # Exclude everything seen so far, except the requirement
                # being descended into itself
                exclude=cast(
                    MutableSet[str],
                    exclude
                    | (
                        lateral_exclude - {_get_requirement_name(requirement_)}
                    ),
                ),
                recursive=recursive,
                echo=echo,
                depth=None if (depth_ is None) else depth_ - 1,
            )

    def not_excluded(name: str) -> bool:
        # Keep a name only if not globally excluded, and record it so
        # sibling branches skip it
        if name not in exclude:
            # Add this to the exclusions
            lateral_exclude.add(name)
            return True
        return False

    requirement_names: Iterable[str] = filter(
        not_excluded, map(_get_requirement_name, requirements)
    )
    if recursive:
        requirement_: Requirement
        # Chain the direct requirement names with each sub-traversal;
        # the `*` unpacking creates the sub-generators now, but they are
        # only consumed (and `lateral_exclude` only updated) lazily
        requirement_names = chain(
            requirement_names,
            *(
                iter_requirement_names_(
                    requirement_, None if (depth is None) else depth - 1
                )
                for requirement_ in requirements
            ),
        )
    return requirement_names
|