dependence 1.0.3.tar.gz → 1.0.5.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dependence-1.0.3 → dependence-1.0.5}/PKG-INFO +7 -6
- {dependence-1.0.3 → dependence-1.0.5}/dependence/__main__.py +8 -5
- {dependence-1.0.3 → dependence-1.0.5}/dependence/_utilities.py +96 -99
- {dependence-1.0.3 → dependence-1.0.5}/dependence/freeze.py +22 -18
- {dependence-1.0.3 → dependence-1.0.5}/dependence/update.py +50 -47
- {dependence-1.0.3 → dependence-1.0.5}/pyproject.toml +57 -19
- {dependence-1.0.3 → dependence-1.0.5}/.gitignore +0 -0
- {dependence-1.0.3 → dependence-1.0.5}/README.md +0 -0
- {dependence-1.0.3 → dependence-1.0.5}/dependence/__init__.py +0 -0
- {dependence-1.0.3 → dependence-1.0.5}/dependence/py.typed +0 -0
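
Most of the Python changes below follow one pattern: 1.0.5 drops Python 3.8 support (requires-python moves to ~=3.9), adds `from __future__ import annotations`, and replaces the `typing.Tuple`/`Dict`/`List`/`Optional`/`Union` aliases with builtin generics, `collections.abc` imports, and PEP 604 unions. A minimal, hypothetical sketch of that conversion (illustrative only, not code taken from the package):

# Before (1.0.3 style, typing aliases):
#   from typing import Optional, Tuple
#   def join_command(args: Tuple[str, ...], cwd: Optional[str] = None) -> str: ...

# After (1.0.5 style):
from __future__ import annotations  # annotations become lazily evaluated strings

from collections.abc import Iterable  # ABCs now come from collections.abc


def join_command(args: Iterable[str], cwd: str | None = None) -> str:
    # Hypothetical helper, used only to illustrate builtin generics
    # ("tuple[str, ...]" elsewhere in the diff) and PEP 604 unions
    # ("str | None") on Python 3.9.
    command = " ".join(args)
    return command if cwd is None else f"cd {cwd} && {command}"
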
{dependence-1.0.3 → dependence-1.0.5}/PKG-INFO

@@ -1,17 +1,18 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: dependence
-Version: 1.0.3
+Version: 1.0.5
 Summary: A dependency management tool for python projects
-Project-URL:
+Project-URL: Documentation, https://dependence.enorganic.org
+Project-URL: Repository, https://github.com/enorganic/dependence
 Author-email: david@belais.me
-License: MIT
+License-Expression: MIT
 Keywords: dependencies,requirements
-Requires-Python: ~=3.
+Requires-Python: ~=3.9
 Requires-Dist: jsonpointer
 Requires-Dist: packaging
 Requires-Dist: pip
 Requires-Dist: setuptools>63
-Requires-Dist: tomli-w~=1.
+Requires-Dist: tomli-w~=1.2
 Requires-Dist: tomli~=2.2
 Description-Content-Type: text/markdown
 

{dependence-1.0.3 → dependence-1.0.5}/dependence/__main__.py

@@ -1,13 +1,16 @@
 import sys
 from importlib import import_module
-from
+from typing import TYPE_CHECKING
 
-
-from
+if TYPE_CHECKING:
+    from types import ModuleType
+
+from dependence import __name__ as _module_name
+from dependence._utilities import get_exception_text
 
 
 def _print_help() -> None:
-    print(
+    print(  # noqa: T201
         "Usage:\n"
         " dependence <command> [options]\n\n"
         "Commands:\n"
@@ -47,7 +50,7 @@ def main() -> None:
         module = import_module(f"{_module_name}.{command}")
         module.main()  # type: ignore
     except ImportError:
-        print(get_exception_text())
+        print(get_exception_text())  # noqa: T201
         _print_help()
 
 
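
The `__main__` hunks above also move the `ModuleType` import behind `typing.TYPE_CHECKING`, so it is only imported during static type checking and costs nothing at runtime. A generic sketch of that guard (a hypothetical module, not the package's actual `__main__`):

from __future__ import annotations  # keeps the ModuleType annotation unevaluated at runtime

from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:  # evaluated by type checkers only, never when the module runs
    from types import ModuleType


def load(name: str) -> ModuleType:
    # The return annotation stays a plain string at runtime, so the guarded
    # import above is enough for mypy without paying for the import on startup.
    return import_module(name)
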

{dependence-1.0.3 → dependence-1.0.5}/dependence/_utilities.py

@@ -1,9 +1,13 @@
+from __future__ import annotations
+
 import functools
 import json
 import os
 import re
 import sys
 from collections import deque
+from collections.abc import Container, Hashable, Iterable, MutableSet
+from collections.abc import Set as AbstractSet
 from configparser import ConfigParser, SectionProxy
 from enum import Enum, auto
 from glob import iglob
@@ -17,20 +21,9 @@ from subprocess import DEVNULL, PIPE, CalledProcessError, list2cmdline, run
 from traceback import format_exception
 from typing import (
     IO,
-    AbstractSet,
     Any,
     Callable,
-    Container,
-    Dict,
-    Hashable,
-    Iterable,
-    List,
-    MutableSet,
-    Optional,
-    Set,
-    Tuple,
     TypedDict,
-    Union,
     cast,
 )
 from warnings import warn
@@ -40,7 +33,7 @@ from jsonpointer import resolve_pointer  # type: ignore
 from packaging.requirements import InvalidRequirement, Requirement
 from packaging.utils import canonicalize_name
 
-_BUILTIN_DISTRIBUTION_NAMES:
+_BUILTIN_DISTRIBUTION_NAMES: tuple[str] = ("distribute",)
 _UNSAFE_CHARACTERS_PATTERN: re.Pattern = re.compile("[^A-Za-z0-9.]+")
 
 
@@ -48,7 +41,7 @@ def iter_distinct(items: Iterable[Hashable]) -> Iterable:
     """
     Yield distinct elements, preserving order
     """
-    visited:
+    visited: set[Hashable] = set()
     item: Hashable
     for item in items:
         if item not in visited:
@@ -88,8 +81,9 @@ def iter_parse_delimited_values(
 
 
 def check_output(
-    args:
-    cwd:
+    args: tuple[str, ...],
+    cwd: str | Path = "",
+    *,
     echo: bool = False,
 ) -> str:
     """
@@ -98,13 +92,13 @@ def check_output(
 
     Parameters:
 
-    - command (
+    - command (tuple[str, ...]): The command to run
     """
     if echo:
         if cwd:
-            print("$", "cd", cwd, "&&", list2cmdline(args))
+            print("$", "cd", cwd, "&&", list2cmdline(args))  # noqa: T201
         else:
-            print("$", list2cmdline(args))
+            print("$", list2cmdline(args))  # noqa: T201
     output: str = run(
         args,
         stdout=PIPE,
@@ -113,7 +107,7 @@ def check_output(
         cwd=cwd or None,
     ).stdout.decode("utf-8", errors="ignore")
     if echo:
-        print(output)
+        print(output)  # noqa: T201
     return output
 
 
@@ -164,13 +158,13 @@ def deprecated(message: str = "") -> Callable[..., Callable[..., Any]]:
     return decorating_function
 
 
-def split_dot(path: str) ->
+def split_dot(path: str) -> tuple[str, ...]:
     return tuple(path.split("."))
 
 
 def tuple_starts_with(
-    a:
-    b:
+    a: tuple[str, ...],
+    b: tuple[str, ...],
 ) -> bool:
     """
     Determine if tuple `a` starts with tuple `b`
@@ -179,18 +173,18 @@ def tuple_starts_with(
 
 
 def tuple_starts_with_any(
-    a:
-    bs:
+    a: tuple[str, ...],
+    bs: tuple[tuple[str, ...], ...],
 ) -> bool:
     """
     Determine if tuple `a` starts with any tuple in `bs`
     """
-    b:
+    b: tuple[str, ...]
     return any(tuple_starts_with(a, b) for b in bs)
 
 
 def iter_find_qualified_lists(
-    data:
+    data: dict[str, Any] | list,
     item_condition: Callable[[Any], bool],
     exclude_object_ids: AbstractSet[int] = frozenset(),
 ) -> Iterable[list]:
@@ -262,9 +256,8 @@ def iter_find_qualified_lists(
     if id(data) in exclude_object_ids:
         return
     if isinstance(data, dict):
-        _key: str
         value: Any
-        for
+        for value in data.values():
            if isinstance(value, (list, dict)):
                yield from iter_find_qualified_lists(
                    value, item_condition, exclude_object_ids
@@ -305,18 +298,16 @@ def get_configuration_file_type(path: str) -> ConfigurationFileType:
    basename: str = os.path.basename(path).lower()
    if basename == "setup.cfg":
        return ConfigurationFileType.SETUP_CFG
-
+    if basename == "tox.ini":
        return ConfigurationFileType.TOX_INI
-
+    if basename == "pyproject.toml":
        return ConfigurationFileType.PYPROJECT_TOML
-
+    if basename.endswith(".txt"):
        return ConfigurationFileType.REQUIREMENTS_TXT
-
+    if basename.endswith(".toml"):
        return ConfigurationFileType.TOML
-
-
-        f"{path} is not a recognized type of configuration file."
-    )
+    message: str = f"{path} is not a recognized type of configuration file."
+    raise ValueError(message)
 
 
 def is_configuration_file(path: str) -> bool:
@@ -333,7 +324,7 @@ class _EditablePackageMetadata(TypedDict):
     editable_project_location: str
 
 
-def _iter_editable_distribution_locations() -> Iterable[
+def _iter_editable_distribution_locations() -> Iterable[tuple[str, str]]:
     metadata: _EditablePackageMetadata
     for metadata in json.loads(
         check_output(
@@ -354,7 +345,7 @@ def _iter_editable_distribution_locations() -> Iterable[Tuple[str, str]]:
 
 
 @functools.lru_cache
-def get_editable_distributions_locations() ->
+def get_editable_distributions_locations() -> dict[str, str]:
     """
     Get a mapping of (normalized) editable distribution names to their
     locations.
@@ -384,12 +375,12 @@ def refresh_editable_distributions() -> None:
 
 
 @functools.lru_cache
-def get_installed_distributions() ->
+def get_installed_distributions() -> dict[str, Distribution]:
     """
     Return a dictionary of installed distributions.
     """
     refresh_editable_distributions()
-    installed:
+    installed: dict[str, Distribution] = {}
     for distribution in _get_distributions():
         installed[normalize_name(distribution.metadata["Name"])] = distribution
     return installed
@@ -425,7 +416,7 @@ def is_requirement_string(requirement_string: str) -> bool:
 
 
 def _iter_file_requirement_strings(path: str) -> Iterable[str]:
-    lines:
+    lines: list[str]
     requirement_file_io: IO[str]
     with open(path) as requirement_file_io:
         lines = requirement_file_io.readlines()
@@ -459,7 +450,7 @@ def _iter_setup_cfg_requirement_strings(path: str) -> Iterable[str]:
 
 
 def _iter_tox_ini_requirement_strings(
-    path:
+    path: str | Path | ConfigParser = "",
     string: str = "",
 ) -> Iterable[str]:
     """
@@ -472,13 +463,19 @@ def _iter_tox_ini_requirement_strings(
     - string (str) = "": The contents of a tox.ini file
     """
     parser: ConfigParser = ConfigParser()
+    message: str
     if path:
-
-
-
+        if string:
+            message = (
+                "Either `path` or `string` arguments may be provided, but not "
+                "both"
+            )
+            raise ValueError(message)
         parser.read(path)
     else:
-
+        if not string:
+            message = "Either a `path` or `string` argument must be provided"
+            raise ValueError(message)
         parser.read_string(string)
 
     def get_section_option_requirements(
@@ -525,10 +522,10 @@ def _is_installed_requirement_string(item: Any) -> bool:
 
 
 def iter_find_requirements_lists(
-    document:
-    include_pointers:
-    exclude_pointers:
-) -> Iterable[
+    document: dict[str, Any] | list,
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
+) -> Iterable[list[str]]:
     """
     Recursively yield all lists of valid requirement strings for installed
     packages. Exclusions are resolved before inclusions.
@@ -581,8 +578,8 @@ def iter_find_requirements_lists(
 
 def _iter_toml_requirement_strings(
     path: str,
-    include_pointers:
-    exclude_pointers:
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> Iterable[str]:
     """
     Read a TOML file and yield the requirements found.
@@ -597,7 +594,7 @@ def _iter_toml_requirement_strings(
     # Parse pyproject.toml
     try:
         with open(path, "rb") as pyproject_io:
-            document:
+            document: dict[str, Any] = tomli.load(pyproject_io)
     except FileNotFoundError:
         return
     # Find requirements
@@ -615,8 +612,8 @@ def _iter_toml_requirement_strings(
 def iter_configuration_file_requirement_strings(
     path: str,
     *,
-    include_pointers:
-    exclude_pointers:
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> Iterable[str]:
     """
     Read a configuration file and yield the parsed requirements.
@@ -633,7 +630,7 @@ def iter_configuration_file_requirement_strings(
     )
     if configuration_file_type == ConfigurationFileType.SETUP_CFG:
         return _iter_setup_cfg_requirement_strings(path)
-
+    if configuration_file_type in (
         ConfigurationFileType.PYPROJECT_TOML,
         ConfigurationFileType.TOML,
     ):
@@ -642,13 +639,11 @@ def iter_configuration_file_requirement_strings(
             include_pointers=include_pointers,
             exclude_pointers=exclude_pointers,
         )
-
+    if configuration_file_type == ConfigurationFileType.TOX_INI:
         return _iter_tox_ini_requirement_strings(path=path)
-
-
-
-        )
-    return _iter_file_requirement_strings(path)
+    if configuration_file_type != ConfigurationFileType.REQUIREMENTS_TXT:
+        raise ValueError(configuration_file_type)
+    return _iter_file_requirement_strings(path)
 
 
 @functools.lru_cache
@@ -669,15 +664,14 @@ def _get_setup_cfg_metadata(path: str, key: str) -> str:
     parser.read(path)
     if "metadata" in parser:
         return parser.get("metadata", key, fallback="")
-
-
-
-
-    )
+    warn(
+        f"No `metadata` section found in: {path}",
+        stacklevel=2,
+    )
     return ""
 
 
-def _get_setup_py_metadata(path: str, args:
+def _get_setup_py_metadata(path: str, args: tuple[str, ...]) -> str:
     """
     Execute a setup.py script with `args` and return the response.
 
@@ -699,7 +693,7 @@ def _get_setup_py_metadata(path: str, args: Tuple[str, ...]) -> str:
     os.chdir(directory)
     path = os.path.join(directory, "setup.py")
     if os.path.isfile(path):
-        command:
+        command: tuple[str, ...] = (sys.executable, path, *args)
         try:
             value = check_output(command).strip().split("\n")[-1]
         except CalledProcessError:
@@ -713,7 +707,7 @@ def _get_setup_py_metadata(path: str, args: Tuple[str, ...]) -> str:
             setup_egg_info(directory)
             try:
                 value = check_output(command).strip().split("\n")[-1]
-            except Exception:
+            except Exception:  # noqa: BLE001
                 warn(
                     f"A package name could not be found in {path}"
                     f"\nError ignored: {get_exception_text()}",
@@ -732,7 +726,7 @@ def _get_pyproject_toml_project_metadata(path: str, key: str) -> str:
     if os.path.isfile(path):
         pyproject_io: IO[str]
         with open(path) as pyproject_io:
-            pyproject:
+            pyproject: dict[str, Any] = tomli.loads(pyproject_io.read())
         if "project" in pyproject:
             return pyproject["project"].get(key, "")
     return ""
@@ -760,15 +754,15 @@ def get_setup_distribution_version(path: str) -> str:
     )
 
 
-def _setup(arguments:
+def _setup(arguments: tuple[str, ...]) -> None:
     try:
-        check_output((sys.executable, "setup.py"
+        check_output((sys.executable, "setup.py", *arguments))
     except CalledProcessError:
         warn(f"Ignoring error: {get_exception_text()}", stacklevel=2)
 
 
 def _setup_location(
-    location:
+    location: str | Path, arguments: Iterable[tuple[str, ...]]
 ) -> None:
     if isinstance(location, str):
         location = Path(location)
@@ -789,7 +783,7 @@ def get_editable_distribution_location(name: str) -> str:
     return get_editable_distributions_locations().get(normalize_name(name), "")
 
 
-def setup_egg_info(directory:
+def setup_egg_info(directory: str | Path, egg_base: str = "") -> None:
     """
     Refresh egg-info for the editable package installed in
     `directory` (only applicable for packages using a `setup.py` script)
@@ -819,27 +813,30 @@ def get_requirement(
 ) -> Requirement:
     try:
         return Requirement(requirement_string)
-    except InvalidRequirement:
+    except InvalidRequirement as error:
         # Try to parse the requirement as an installation target location,
         # such as can be used with `pip install`
         location: str = requirement_string
         extras: str = ""
         if "[" in requirement_string and requirement_string.endswith("]"):
-            parts:
+            parts: list[str] = requirement_string.split("[")
            location = "[".join(parts[:-1])
            extras = f"[{parts[-1]}"
        location = os.path.abspath(location)
        name: str = get_setup_distribution_name(location)
-
+        if not name:
+            message: str = f"No distribution found in {location}"
+            raise FileNotFoundError(message) from error
        return Requirement(f"{name}{extras}")
 
 
 def get_required_distribution_names(
     requirement_string: str,
+    *,
     exclude: Iterable[str] = (),
     recursive: bool = True,
     echo: bool = False,
-    depth:
+    depth: int | None = None,
 ) -> MutableSet[str]:
     """
     Return a `tuple` of all distribution names which are required by the
@@ -878,10 +875,7 @@ def _get_requirement_name(requirement: Requirement) -> str:
     return normalize_name(requirement.name)
 
 
-def install_requirement(
-    requirement: Union[str, Requirement],
-    echo: bool = True,
-) -> None:
+def install_requirement(requirement: str | Requirement) -> None:
     """
     Install a requirement
 
@@ -899,13 +893,14 @@ def install_requirement(
 def _install_requirement_string(
     requirement_string: str,
     name: str = "",
+    *,
     editable: bool = False,
 ) -> None:
     """
     Install a requirement string with no dependencies, compilation, build
     isolation, etc.
     """
-    command:
+    command: tuple[str, ...] = (
         sys.executable,
         "-m",
         "pip",
@@ -943,13 +938,13 @@ def _install_requirement_string(
            )
        )
        if not editable:
-            print(message)
-            raise
+            print(message)  # noqa: T201
+            raise
        try:
-            check_output(command
-        except CalledProcessError
-            print(message)
-            raise
+            check_output((*command, "--force-reinstall"))
+        except CalledProcessError:
+            print(message)  # noqa: T201
+            raise
 
 
 def _install_requirement(
@@ -957,7 +952,7 @@ def _install_requirement(
 ) -> None:
     requirement_string: str = str(requirement)
     # Get the distribution name
-    distribution:
+    distribution: Distribution | None = None
     editable_location: str = ""
     try:
         distribution = _get_distribution(requirement.name)
@@ -987,9 +982,10 @@ def _install_requirement(
 def _get_requirement_distribution(
     requirement: Requirement,
     name: str,
+    *,
     reinstall: bool = True,
     echo: bool = False,
-) ->
+) -> Distribution | None:
     if name in _BUILTIN_DISTRIBUTION_NAMES:
         return None
     try:
@@ -1004,7 +1000,7 @@ def _get_requirement_distribution(
             stacklevel=2,
         )
         # Attempt to install the requirement...
-        install_requirement(requirement
+        install_requirement(requirement)
         return _get_requirement_distribution(
             requirement, name, reinstall=False, echo=echo
         )
@@ -1012,7 +1008,7 @@ def _get_requirement_distribution(
 
 def _iter_distribution_requirements(
     distribution: Distribution,
-    extras:
+    extras: tuple[str, ...] = (),
     exclude: Container[str] = (),
 ) -> Iterable[Requirement]:
     if not distribution.requires:
@@ -1031,24 +1027,25 @@ def _iter_distribution_requirements(
 
 def _iter_requirement_names(
     requirement: Requirement,
+    *,
     exclude: MutableSet[str],
     recursive: bool = True,
     echo: bool = False,
-    depth:
+    depth: int | None = None,
 ) -> Iterable[str]:
     name: str = normalize_name(requirement.name)
-    extras:
+    extras: tuple[str, ...] = tuple(requirement.extras)
     if name in exclude:
         return ()
     # Ensure we don't follow the same requirement again, causing cyclic
     # recursion
     exclude.add(name)
-    distribution:
+    distribution: Distribution | None = _get_requirement_distribution(
         requirement, name, echo=echo
     )
     if distribution is None:
         return ()
-    requirements:
+    requirements: tuple[Requirement, ...] = tuple(
         iter_distinct(
             _iter_distribution_requirements(
                 distribution,
@@ -1061,7 +1058,7 @@ def _iter_requirement_names(
 
     def iter_requirement_names_(
         requirement_: Requirement,
-        depth_:
+        depth_: int | None = None,
     ) -> Iterable[str]:
         if (depth_ is None) or depth_ >= 0:
             yield from _iter_requirement_names(
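
Several of the `_utilities` signatures above also gain a bare `*` marker (`check_output`, `get_required_distribution_names`, `_install_requirement_string`, `_get_requirement_distribution`, `_iter_requirement_names`), which makes the trailing flags keyword-only. Under 1.0.3 those flags could be passed positionally; under 1.0.5 callers must name them. A hedged sketch of the call-site impact, using the `get_required_distribution_names` signature shown in the hunk above (the call itself is hypothetical, and the module is internal):

from dependence._utilities import get_required_distribution_names

# A 1.0.3-era positional call such as
#   get_required_distribution_names("packaging", ("pip",))
# now raises TypeError; the flags must be spelled out as keywords:
names = get_required_distribution_names(
    "packaging",
    exclude=("pip",),
    recursive=True,
    echo=False,
)
print(sorted(names))
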

{dependence-1.0.3 → dependence-1.0.5}/dependence/freeze.py

@@ -1,12 +1,15 @@
+from __future__ import annotations
+
 import argparse
+from collections.abc import Iterable, MutableSet
 from fnmatch import fnmatch
 from functools import partial
 from importlib.metadata import Distribution
 from importlib.metadata import distribution as _get_distribution
 from itertools import chain
-from typing import
+from typing import cast
 
-from ._utilities import (
+from dependence._utilities import (
     get_distribution,
     get_required_distribution_names,
     get_requirement_string_distribution_name,
@@ -30,18 +33,18 @@ def _iter_sort_dependents_last(requirements: Iterable[str]) -> Iterable[str]:
     """
     requirements = list(requirements)
     distribution_name: str
-    distribution_requirement:
+    distribution_requirement: dict[str, str] = {
         get_requirement_string_distribution_name(requirement): requirement
         for requirement in requirements
     }
-    dependent_dependencies:
+    dependent_dependencies: dict[str, MutableSet[str]] = {
         distribution_name: get_required_distribution_names(requirement)
         for distribution_name, requirement in distribution_requirement.items()
     }
     while dependent_dependencies:
         dependent: str
         dependencies: MutableSet[str]
-        item:
+        item: tuple[str, MutableSet[str]]
         for dependent, dependencies in sorted(  # noqa: C414
             tuple(dependent_dependencies.items()),
             key=lambda item: item[0].lower(),
@@ -73,15 +76,16 @@ def _iter_sort_dependents_last(requirements: Iterable[str]) -> Iterable[str]:
 
 def get_frozen_requirements(
     requirements: Iterable[str] = (),
+    *,
     exclude: Iterable[str] = (),
     exclude_recursive: Iterable[str] = (),
     no_version: Iterable[str] = (),
     dependency_order: bool = False,
     reverse: bool = False,
-    depth:
-    include_pointers:
-    exclude_pointers:
-) ->
+    depth: int | None = None,
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
+) -> tuple[str, ...]:
     """
     Get the (frozen) requirements for one or more specified distributions or
     configuration files.
@@ -183,7 +187,7 @@ def _iter_frozen_requirements(
     exclude: MutableSet[str],
     exclude_recursive: MutableSet[str],
     no_version: Iterable[str] = (),
-    depth:
+    depth: int | None = None,
 ) -> Iterable[str]:
     def get_requirement_string(distribution_name: str) -> str:
         def distribution_name_matches_pattern(pattern: str) -> bool:
@@ -198,13 +202,13 @@ def _iter_frozen_requirements(
             distribution = get_distribution(distribution_name)
         except KeyError:
             # If the distribution is missing, install it
-            install_requirement(distribution_name
+            install_requirement(distribution_name)
             distribution = _get_distribution(distribution_name)
         return f"{distribution.metadata['Name']}=={distribution.version}"
 
     def get_required_distribution_names_(
         requirement_string: str,
-        depth_:
+        depth_: int | None = None,
     ) -> MutableSet[str]:
         name: str = get_requirement_string_distribution_name(
             requirement_string
@@ -234,20 +238,20 @@ def _iter_frozen_requirements(
             )
         ),
     )
-
-    return requirements
+    return map(get_requirement_string, requirements)
 
 
 def freeze(
     requirements: Iterable[str] = (),
+    *,
     exclude: Iterable[str] = (),
     exclude_recursive: Iterable[str] = (),
     no_version: Iterable[str] = (),
     dependency_order: bool = False,
     reverse: bool = False,
-    depth:
-    include_pointers:
-    exclude_pointers:
+    depth: int | None = None,
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> None:
     """
     Print the (frozen) requirements for one or more specified requirements or
@@ -273,7 +277,7 @@ def freeze(
     exclude_pointers: If not empty, these TOML tables will *not* be
         inspected (for pyproject.toml files)
     """
-    print(
+    print(  # noqa: T201
         "\n".join(
             get_frozen_requirements(
                 requirements=requirements,

{dependence-1.0.3 → dependence-1.0.5}/dependence/update.py

@@ -1,23 +1,18 @@
+from __future__ import annotations
+
 import argparse
 import re
 from collections import deque
 from configparser import ConfigParser, SectionProxy
 from copy import deepcopy
 from dataclasses import dataclass
-from importlib.metadata import Distribution
 from io import StringIO
 from itertools import chain
 from typing import (
     IO,
+    TYPE_CHECKING,
     Any,
     Callable,
-    Dict,
-    Iterable,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    Union,
 )
 
 import tomli
@@ -27,7 +22,7 @@ from packaging.specifiers import Specifier, SpecifierSet
 from packaging.version import Version
 from packaging.version import parse as parse_version
 
-from ._utilities import (
+from dependence._utilities import (
     ConfigurationFileType,
     get_configuration_file_type,
     get_installed_distributions,
@@ -38,6 +33,10 @@ from ._utilities import (
     normalize_name,
 )
 
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from importlib.metadata import Distribution
+
 
 @dataclass
 class _Version:
@@ -48,7 +47,7 @@ class _Version:
     """
 
     epoch: int
-    release:
+    release: tuple[int, ...]
     pre: Any
     post: Any
     dev: Any
@@ -64,13 +63,14 @@ def _update_requirement_specifiers(
     """
     installed_version: Version = parse_version(installed_version_string)
     specifier: Specifier
-    updated_specifier_strings:
+    updated_specifier_strings: list[str] = []
     for specifier in requirement.specifier:  # type: ignore
         # Only update requirement to match our installed version
         # if the requirement is *inclusive*
         if ("=" in specifier.operator) and ("!" not in specifier.operator):
             specifier_version: Version = parse_version(specifier.version)
-
+            if installed_version.release is None:
+                raise ValueError(installed_version)
             if specifier_version.release is None:
                 updated_specifier_strings.append(f"{specifier.operator}")
             else:
@@ -117,7 +117,7 @@ def _update_requirement_specifiers(
 
 
 def _get_updated_requirement_string(
-    requirement_string: str, ignore:
+    requirement_string: str, ignore: set[str]
 ) -> str:
     """
     This function updates version numbers in a requirement string to match
@@ -139,8 +139,8 @@ def _get_updated_requirement_string(
     return str(requirement)
 
 
-def _normalize_ignore_argument(ignore: Iterable[str]) ->
-    ignore_set:
+def _normalize_ignore_argument(ignore: Iterable[str]) -> set[str]:
+    ignore_set: set[str]
     # Normalize/harmonize excluded project names
     if isinstance(ignore, str):
         ignore = (ignore,)
@@ -161,7 +161,7 @@ def _get_updated_requirements_txt(
     - data (str): The contents of a *requirements.txt* file
     - ignore ([str]): One or more project names to leave as-is
     """
-    ignore_set:
+    ignore_set: set[str] = _normalize_ignore_argument(ignore)
 
     def get_updated_requirement_string(requirement: str) -> str:
         return _get_updated_requirement_string(requirement, ignore=ignore_set)
@@ -184,7 +184,7 @@ def _get_updated_setup_cfg(
     - all_extra_name (str): An (optional) extra name which will
         consolidate requirements from all other extras
     """
-    ignore_set:
+    ignore_set: set[str] = _normalize_ignore_argument(ignore)
 
     def get_updated_requirement_string(requirement: str) -> str:
         return _get_updated_requirement_string(requirement, ignore=ignore_set)
@@ -202,10 +202,10 @@ def _get_updated_setup_cfg(
     )
     if "options.extras_require" in parser:
         extras_require: SectionProxy = parser["options.extras_require"]
-        all_extra_requirements:
+        all_extra_requirements: list[str] = []
         extra_name: str
         extra_requirements_string: str
-        extra_requirements:
+        extra_requirements: list[str]
         for extra_name, extra_requirements_string in extras_require.items():
             if extra_name != all_extra_name:
                 extra_requirements = list(
@@ -223,7 +223,7 @@ def _get_updated_setup_cfg(
             # We pre-pend an empty requirement string in order to]
             # force new-line creation at the beginning of the extra
             extras_require[all_extra_name] = "\n".join(
-                iter_distinct([""
+                iter_distinct(["", *all_extra_requirements])
             )
     # Return as a string
     setup_cfg: str
@@ -246,10 +246,10 @@ def _get_updated_tox_ini(data: str, ignore: Iterable[str] = ()) -> str:
     - data (str): The contents of a **tox.ini** file
     - ignore ([str]): One or more project names to leave as-is
     """
-    ignore_set:
+    ignore_set: set[str] = _normalize_ignore_argument(ignore)
 
     def get_updated_requirement_string(requirement: str) -> str:
-        prefix:
+        prefix: str | None = None
         if ":" in requirement:
             prefix, requirement = requirement.split(":", maxsplit=1)
         requirement = _get_updated_requirement_string(
@@ -294,18 +294,18 @@ def _get_updated_tox_ini(data: str, ignore: Iterable[str] = ()) -> str:
 
 
 def _update_document_requirements(
-    document:
+    document: dict[str, Any],
     ignore: Iterable[str] = (),
-    include_pointers:
-    exclude_pointers:
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> None:
-    ignore_set:
+    ignore_set: set[str] = _normalize_ignore_argument(ignore)
 
     def get_updated_requirement_string(requirement: str) -> str:
         return _get_updated_requirement_string(requirement, ignore=ignore_set)
 
     # Find and update requirements
-    requirements_list:
+    requirements_list: list[str]
     for requirements_list in iter_find_requirements_lists(
         document,
         include_pointers=include_pointers,
@@ -323,8 +323,8 @@ def _get_updated_pyproject_toml(
     data: str,
     ignore: Iterable[str] = (),
     all_extra_name: str = "",
-    include_pointers:
-    exclude_pointers:
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> str:
     """
     Return the contents of a *pyproject.toml* file, updated to reflect the
@@ -345,8 +345,8 @@ def _get_updated_pyproject_toml(
     The contents of the updated pyproject.toml file.
     """
     # Parse pyproject.toml
-    original_pyproject:
-    updated_pyproject:
+    original_pyproject: dict[str, Any] = tomli.loads(data)
+    updated_pyproject: dict[str, Any] = deepcopy(original_pyproject)
     # Find and update requirements
     _update_document_requirements(
         updated_pyproject,
@@ -355,13 +355,13 @@ def _get_updated_pyproject_toml(
         exclude_pointers=exclude_pointers,
     )
     # Update consolidated optional requirements
-    project_optional_dependencies:
+    project_optional_dependencies: dict[str, list[str]] = (
         updated_pyproject.get("project", {}).get("optional-dependencies", {})
     )
     # Update an extra indicated to encompass all other extras
     if project_optional_dependencies and all_extra_name:
         key: str
-        dependencies:
+        dependencies: list[str]
         project_optional_dependencies[all_extra_name] = list(
             iter_distinct(
                 chain(
@@ -384,8 +384,8 @@ def _get_updated_pyproject_toml(
 def _get_updated_toml(
     data: str,
     ignore: Iterable[str] = (),
-    include_pointers:
-    exclude_pointers:
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> str:
     """
     Return the contents of a TOML file, updated to reflect the
@@ -407,8 +407,8 @@ def _get_updated_toml(
     The contents of the updated TOML file.
     """
     # Parse pyproject.toml
-    original_pyproject:
-    updated_pyproject:
+    original_pyproject: dict[str, Any] = tomli.loads(data)
+    updated_pyproject: dict[str, Any] = deepcopy(original_pyproject)
     # Find and update requirements
     _update_document_requirements(
         updated_pyproject,
@@ -426,12 +426,12 @@ def _update(
     path: str,
     ignore: Iterable[str] = (),
     all_extra_name: str = "",
-    include_pointers:
-    exclude_pointers:
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> None:
     data: str
     update_function: Callable[[str], str]
-    kwargs:
+    kwargs: dict[str, str | Iterable[str]] = {}
     configuration_file_type: ConfigurationFileType = (
         get_configuration_file_type(path)
     )
@@ -457,18 +457,21 @@ def _update(
     elif configuration_file_type == ConfigurationFileType.REQUIREMENTS_TXT:
         update_function = _get_updated_requirements_txt
     else:
-
-
-        )
+        msg = f"Updating requirements for {path} is not supported"
+        raise NotImplementedError(msg)
     kwargs["ignore"] = ignore
     file_io: IO[str]
     with open(path) as file_io:
         data = file_io.read()
     updated_data: str = update_function(data, **kwargs)
     if updated_data == data:
-        print(
+        print(  # noqa: T201
+            f"All requirements were already up-to-date in {path}"
+        )
     else:
-        print(
+        print(  # noqa: T201
+            f"Updating requirements in {path}"
+        )
         with open(path, "w") as file_io:
             file_io.write(updated_data)
 
@@ -477,8 +480,8 @@ def update(
     paths: Iterable[str],
     ignore: Iterable[str] = (),
     all_extra_name: str = "",
-    include_pointers:
-    exclude_pointers:
+    include_pointers: tuple[str, ...] = (),
+    exclude_pointers: tuple[str, ...] = (),
 ) -> None:
     """
     Update requirement versions in the specified files.

{dependence-1.0.3 → dependence-1.0.5}/pyproject.toml

@@ -6,11 +6,11 @@ build-backend = "hatchling.build"
 
 [project]
 name = "dependence"
-version = "1.0.3"
+version = "1.0.5"
 description = "A dependency management tool for python projects"
 readme = "README.md"
 license = "MIT"
-requires-python = "~=3.
+requires-python = "~=3.9"
 authors = [
     { email = "david@belais.me" },
 ]
@@ -22,7 +22,7 @@ dependencies = [
     "packaging",
     "pip",
     "setuptools>63",
-    "tomli-w~=1.
+    "tomli-w~=1.2",
     "tomli~=2.2",
     "jsonpointer",
 ]
@@ -31,33 +31,45 @@ dependencies = [
 dependence = "dependence.__main__:main"
 
 [project.urls]
-
+Documentation = "https://dependence.enorganic.org"
+Repository = "https://github.com/enorganic/dependence"
 
 [tool.hatch.build.targets.sdist]
 packages = [
     "src/dependence",
 ]
+sources = [
+    "src",
+]
 
 [tool.hatch.build.targets.wheel]
 packages = [
     "src/dependence",
 ]
+sources = [
+    "src",
+]
 
 [tool.hatch.envs.default]
-python = "3.
+python = "3.9"
 dependencies = [
     "mypy",
     "pytest",
     "ruff",
 ]
+pre-install-commands = [
+    "pip install --upgrade pip setuptools",
+]
+post-install-commands = [
+    "hatch run mypy --install-types --non-interactive || echo",
+]
 
 [tool.hatch.envs.default.scripts]
 lint = "ruff check . && ruff format --check . && mypy"
 
-[tool.hatch.envs.default.env-vars]
-PIP_CONSTRAINT = "default_requirements.txt"
 
 [tool.hatch.envs.docs]
+template = "docs"
 python = "3.13"
 dependencies = [
     "mkdocs-material",
@@ -65,26 +77,28 @@ dependencies = [
     "black",
 ]
 
-[tool.hatch.envs.
-
-
-
-
-
+[tool.hatch.envs.hatch-test]
+template = "hatch-test"
+extra-dependencies = []
+
+[[tool.hatch.envs.hatch-test.matrix]]
+python = [
+    "3.9",
+    "3.10",
+    "3.11",
+    "3.12",
+    "3.13",
 ]
 
-[tool.hatch.envs.test.env-vars]
-PIP_CONSTRAINT = "test_requirements.txt"
-
-[tool.hatch.envs.test.scripts]
-test = "py.test"
 
 [tool.ruff]
 line-length = 79
 
+
 [tool.ruff.lint]
 ignore = [
     "F842",
+    "INP001",
 ]
 extend-select = [
     "E",
@@ -107,10 +121,34 @@ docstring-code-line-length = 20
 [tool.black]
 line-length = 79
 target-version = [
-    "py38",
     "py39",
     "py310",
     "py311",
     "py312",
     "py313",
 ]
+
+[tool.mypy]
+python_version = "3.9"
+files = [
+    "src",
+    "tests",
+]
+exclude = [
+    "tests/test_projects",
+]
+disallow_untyped_defs = true
+disallow_incomplete_defs = true
+
+[tool.coverage.run]
+omit = [
+    "tests/test_projects",
+]
+
+[tool.coverage.paths]
+source = [
+    "src/",
+]
+
+[tool.coverage.report]
+fail_under = 80

Files without changes: .gitignore, README.md, dependence/__init__.py, dependence/py.typed