pip 25.3 (py3-none-any.whl) → 26.0 (py3-none-any.whl)

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (104)
  1. pip/__init__.py +1 -1
  2. pip/_internal/build_env.py +194 -5
  3. pip/_internal/cli/base_command.py +11 -0
  4. pip/_internal/cli/cmdoptions.py +157 -0
  5. pip/_internal/cli/index_command.py +20 -0
  6. pip/_internal/cli/main.py +11 -6
  7. pip/_internal/cli/main_parser.py +3 -1
  8. pip/_internal/cli/parser.py +93 -33
  9. pip/_internal/cli/progress_bars.py +4 -2
  10. pip/_internal/cli/req_command.py +99 -23
  11. pip/_internal/commands/cache.py +24 -0
  12. pip/_internal/commands/completion.py +2 -1
  13. pip/_internal/commands/download.py +8 -4
  14. pip/_internal/commands/index.py +13 -6
  15. pip/_internal/commands/install.py +36 -29
  16. pip/_internal/commands/list.py +14 -16
  17. pip/_internal/commands/lock.py +16 -8
  18. pip/_internal/commands/wheel.py +8 -13
  19. pip/_internal/exceptions.py +76 -3
  20. pip/_internal/index/collector.py +2 -3
  21. pip/_internal/index/package_finder.py +84 -18
  22. pip/_internal/locations/__init__.py +1 -2
  23. pip/_internal/locations/_sysconfig.py +4 -1
  24. pip/_internal/models/link.py +18 -14
  25. pip/_internal/models/release_control.py +92 -0
  26. pip/_internal/models/selection_prefs.py +6 -3
  27. pip/_internal/network/auth.py +6 -2
  28. pip/_internal/network/download.py +4 -5
  29. pip/_internal/network/session.py +14 -10
  30. pip/_internal/operations/install/wheel.py +1 -2
  31. pip/_internal/operations/prepare.py +2 -3
  32. pip/_internal/req/constructors.py +3 -1
  33. pip/_internal/req/pep723.py +41 -0
  34. pip/_internal/req/req_file.py +10 -1
  35. pip/_internal/resolution/resolvelib/factory.py +12 -1
  36. pip/_internal/resolution/resolvelib/requirements.py +7 -3
  37. pip/_internal/self_outdated_check.py +6 -13
  38. pip/_internal/utils/datetime.py +18 -0
  39. pip/_internal/utils/filesystem.py +40 -1
  40. pip/_internal/utils/logging.py +34 -2
  41. pip/_internal/utils/misc.py +18 -12
  42. pip/_internal/utils/pylock.py +116 -0
  43. pip/_internal/utils/unpacking.py +1 -1
  44. pip/_internal/vcs/versioncontrol.py +3 -1
  45. pip/_vendor/cachecontrol/__init__.py +6 -3
  46. pip/_vendor/cachecontrol/adapter.py +0 -1
  47. pip/_vendor/cachecontrol/controller.py +1 -1
  48. pip/_vendor/cachecontrol/filewrapper.py +3 -1
  49. pip/_vendor/certifi/__init__.py +1 -1
  50. pip/_vendor/certifi/cacert.pem +0 -332
  51. pip/_vendor/idna/LICENSE.md +1 -1
  52. pip/_vendor/idna/codec.py +1 -1
  53. pip/_vendor/idna/core.py +1 -1
  54. pip/_vendor/idna/idnadata.py +72 -6
  55. pip/_vendor/idna/package_data.py +1 -1
  56. pip/_vendor/idna/uts46data.py +891 -731
  57. pip/_vendor/packaging/__init__.py +1 -1
  58. pip/_vendor/packaging/_elffile.py +0 -1
  59. pip/_vendor/packaging/_manylinux.py +36 -36
  60. pip/_vendor/packaging/_musllinux.py +1 -1
  61. pip/_vendor/packaging/_parser.py +22 -10
  62. pip/_vendor/packaging/_structures.py +8 -0
  63. pip/_vendor/packaging/_tokenizer.py +23 -25
  64. pip/_vendor/packaging/licenses/__init__.py +13 -11
  65. pip/_vendor/packaging/licenses/_spdx.py +41 -1
  66. pip/_vendor/packaging/markers.py +64 -38
  67. pip/_vendor/packaging/metadata.py +143 -27
  68. pip/_vendor/packaging/pylock.py +635 -0
  69. pip/_vendor/packaging/requirements.py +5 -10
  70. pip/_vendor/packaging/specifiers.py +219 -170
  71. pip/_vendor/packaging/tags.py +15 -20
  72. pip/_vendor/packaging/utils.py +19 -24
  73. pip/_vendor/packaging/version.py +315 -105
  74. pip/_vendor/platformdirs/version.py +2 -2
  75. pip/_vendor/platformdirs/windows.py +7 -1
  76. pip/_vendor/vendor.txt +5 -5
  77. {pip-25.3.dist-info → pip-26.0.dist-info}/METADATA +2 -2
  78. {pip-25.3.dist-info → pip-26.0.dist-info}/RECORD +103 -100
  79. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/AUTHORS.txt +18 -0
  80. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/idna/LICENSE.md +1 -1
  81. pip/_internal/models/pylock.py +0 -188
  82. {pip-25.3.dist-info → pip-26.0.dist-info}/WHEEL +0 -0
  83. {pip-25.3.dist-info → pip-26.0.dist-info}/entry_points.txt +0 -0
  84. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/LICENSE.txt +0 -0
  85. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/cachecontrol/LICENSE.txt +0 -0
  86. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/certifi/LICENSE +0 -0
  87. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/dependency_groups/LICENSE.txt +0 -0
  88. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/distlib/LICENSE.txt +0 -0
  89. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/distro/LICENSE +0 -0
  90. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/msgpack/COPYING +0 -0
  91. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/packaging/LICENSE +0 -0
  92. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/packaging/LICENSE.APACHE +0 -0
  93. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/packaging/LICENSE.BSD +0 -0
  94. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/pkg_resources/LICENSE +0 -0
  95. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/platformdirs/LICENSE +0 -0
  96. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/pygments/LICENSE +0 -0
  97. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/pyproject_hooks/LICENSE +0 -0
  98. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/requests/LICENSE +0 -0
  99. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/resolvelib/LICENSE +0 -0
  100. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/rich/LICENSE +0 -0
  101. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/tomli/LICENSE +0 -0
  102. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/tomli_w/LICENSE +0 -0
  103. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/truststore/LICENSE +0 -0
  104. {pip-25.3.dist-info → pip-26.0.dist-info}/licenses/src/pip/_vendor/urllib3/LICENSE.txt +0 -0

pip/_vendor/packaging/__init__.py

@@ -6,7 +6,7 @@ __title__ = "packaging"
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "25.0"
+__version__ = "26.0"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"

pip/_vendor/packaging/_elffile.py

@@ -4,7 +4,6 @@ ELF file parser.
 This provides a class ``ELFFile`` that parses an ELF executable in a similar
 interface to ``ZipFile``. Only the read interface is implemented.
 
-Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
 ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
 """
 

pip/_vendor/packaging/_manylinux.py

@@ -15,6 +15,16 @@ EF_ARM_ABIMASK = 0xFF000000
 EF_ARM_ABI_VER5 = 0x05000000
 EF_ARM_ABI_FLOAT_HARD = 0x00000400
 
+_ALLOWED_ARCHS = {
+    "x86_64",
+    "aarch64",
+    "ppc64",
+    "ppc64le",
+    "s390x",
+    "loongarch64",
+    "riscv64",
+}
+
 
 # `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
 # as the type for `path` until then.
@@ -57,16 +67,7 @@ def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
         return _is_linux_armhf(executable)
     if "i686" in archs:
         return _is_linux_i686(executable)
-    allowed_archs = {
-        "x86_64",
-        "aarch64",
-        "ppc64",
-        "ppc64le",
-        "s390x",
-        "loongarch64",
-        "riscv64",
-    }
-    return any(arch in allowed_archs for arch in archs)
+    return any(arch in _ALLOWED_ARCHS for arch in archs)
 
 
 # If glibc ever changes its major version, we need to know what the last
@@ -106,7 +107,7 @@ def _glibc_version_string_ctypes() -> str | None:
     Fallback implementation of glibc_version_string using ctypes.
     """
     try:
-        import ctypes
+        import ctypes  # noqa: PLC0415
     except ImportError:
         return None
 
@@ -150,7 +151,7 @@ def _glibc_version_string() -> str | None:
     return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
 
 
-def _parse_glibc_version(version_str: str) -> tuple[int, int]:
+def _parse_glibc_version(version_str: str) -> _GLibCVersion:
     """Parse glibc version.
 
     We use a regexp instead of str.split because we want to discard any
@@ -165,15 +166,15 @@ def _parse_glibc_version(version_str: str) -> tuple[int, int]:
             RuntimeWarning,
             stacklevel=2,
         )
-        return -1, -1
-    return int(m.group("major")), int(m.group("minor"))
+        return _GLibCVersion(-1, -1)
+    return _GLibCVersion(int(m.group("major")), int(m.group("minor")))
 
 
 @functools.lru_cache
-def _get_glibc_version() -> tuple[int, int]:
+def _get_glibc_version() -> _GLibCVersion:
     version_str = _glibc_version_string()
     if version_str is None:
-        return (-1, -1)
+        return _GLibCVersion(-1, -1)
     return _parse_glibc_version(version_str)
 
 
@@ -184,7 +185,7 @@ def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
         return False
     # Check for presence of _manylinux module.
     try:
-        import _manylinux
+        import _manylinux  # noqa: PLC0415
     except ImportError:
         return True
     if hasattr(_manylinux, "manylinux_compatible"):
@@ -192,25 +193,26 @@ def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
         if result is not None:
             return bool(result)
         return True
-    if version == _GLibCVersion(2, 5):
-        if hasattr(_manylinux, "manylinux1_compatible"):
-            return bool(_manylinux.manylinux1_compatible)
-    if version == _GLibCVersion(2, 12):
-        if hasattr(_manylinux, "manylinux2010_compatible"):
-            return bool(_manylinux.manylinux2010_compatible)
-    if version == _GLibCVersion(2, 17):
-        if hasattr(_manylinux, "manylinux2014_compatible"):
-            return bool(_manylinux.manylinux2014_compatible)
+    if version == _GLibCVersion(2, 5) and hasattr(_manylinux, "manylinux1_compatible"):
+        return bool(_manylinux.manylinux1_compatible)
+    if version == _GLibCVersion(2, 12) and hasattr(
+        _manylinux, "manylinux2010_compatible"
+    ):
+        return bool(_manylinux.manylinux2010_compatible)
+    if version == _GLibCVersion(2, 17) and hasattr(
+        _manylinux, "manylinux2014_compatible"
+    ):
+        return bool(_manylinux.manylinux2014_compatible)
     return True
 
 
-_LEGACY_MANYLINUX_MAP = {
+_LEGACY_MANYLINUX_MAP: dict[_GLibCVersion, str] = {
     # CentOS 7 w/ glibc 2.17 (PEP 599)
-    (2, 17): "manylinux2014",
+    _GLibCVersion(2, 17): "manylinux2014",
     # CentOS 6 w/ glibc 2.12 (PEP 571)
-    (2, 12): "manylinux2010",
+    _GLibCVersion(2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
-    (2, 5): "manylinux1",
+    _GLibCVersion(2, 5): "manylinux1",
 }
 
 
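Note on the _LEGACY_MANYLINUX_MAP retyping above: _GLibCVersion is a named tuple in this module, so keys written as _GLibCVersion(2, 17) still hash and compare equal to the bare (2, 17) tuples used before; the annotation documents intent rather than changing lookups. A minimal sketch of that property (illustrative names, not the vendored code):

    from typing import NamedTuple


    class GLibCVersion(NamedTuple):
        """Stand-in for the module's internal _GLibCVersion named tuple."""

        major: int
        minor: int


    legacy_map = {
        GLibCVersion(2, 17): "manylinux2014",
        GLibCVersion(2, 12): "manylinux2010",
        GLibCVersion(2, 5): "manylinux1",
    }

    # Named-tuple keys are still found by equivalent plain tuples,
    # because NamedTuple instances hash and compare like tuples.
    assert legacy_map[(2, 17)] == "manylinux2014"
    assert legacy_map.get(GLibCVersion(2, 16)) is None
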
@@ -252,11 +254,9 @@ def platform_tags(archs: Sequence[str]) -> Iterator[str]:
                min_minor = -1
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
-                tag = "manylinux_{}_{}".format(*glibc_version)
                if _is_compatible(arch, glibc_version):
-                    yield f"{tag}_{arch}"
-                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
-                if glibc_version in _LEGACY_MANYLINUX_MAP:
-                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
-                    if _is_compatible(arch, glibc_version):
+                    yield "manylinux_{}_{}_{}".format(*glibc_version, arch)
+
+                    # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+                    if legacy_tag := _LEGACY_MANYLINUX_MAP.get(glibc_version):
                        yield f"{legacy_tag}_{arch}"
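
The platform_tags hunk above folds the legacy-tag handling into the _is_compatible branch: a manylinux2014/manylinux2010/manylinux1 alias is now emitted only when the corresponding PEP 600 manylinux_X_Y tag was itself just yielded, and the duplicated _is_compatible call disappears. A rough, self-contained sketch of the resulting ordering (illustrative code, not the vendored implementation; assumes an x86_64 interpreter whose glibc supports up to 2.17):

    from typing import Callable, Iterator, NamedTuple


    class GLibCVersion(NamedTuple):
        major: int
        minor: int


    LEGACY = {
        GLibCVersion(2, 17): "manylinux2014",
        GLibCVersion(2, 12): "manylinux2010",
        GLibCVersion(2, 5): "manylinux1",
    }


    def tags_for(
        arch: str,
        max_minor: int,
        is_compatible: Callable[[GLibCVersion], bool],
    ) -> Iterator[str]:
        # Walk glibc 2.x minor versions from newest to oldest, as the real loop does.
        for minor in range(max_minor, -1, -1):
            version = GLibCVersion(2, minor)
            if is_compatible(version):
                yield "manylinux_{}_{}_{}".format(*version, arch)
                # Legacy aliases now ride along only when the PEP 600 tag is emitted.
                if legacy := LEGACY.get(version):
                    yield f"{legacy}_{arch}"


    tags = list(tags_for("x86_64", 17, lambda v: True))
    # ['manylinux_2_17_x86_64', 'manylinux2014_x86_64', 'manylinux_2_16_x86_64', ...,
    #  'manylinux_2_12_x86_64', 'manylinux2010_x86_64', ..., 'manylinux1_x86_64', ...]
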

pip/_vendor/packaging/_musllinux.py

@@ -49,7 +49,7 @@ def _get_musl_version(executable: str) -> _MuslVersion | None:
         return None
     if ld is None or "musl" not in ld:
         return None
-    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
+    proc = subprocess.run([ld], check=False, stderr=subprocess.PIPE, text=True)
     return _parse_musl_version(proc.stderr)
 
 

pip/_vendor/packaging/_parser.py

@@ -7,12 +7,14 @@ the implementation.
 from __future__ import annotations
 
 import ast
-from typing import NamedTuple, Sequence, Tuple, Union
+from typing import List, Literal, NamedTuple, Sequence, Tuple, Union
 
 from ._tokenizer import DEFAULT_RULES, Tokenizer
 
 
 class Node:
+    __slots__ = ("value",)
+
     def __init__(self, value: str) -> None:
         self.value = value
 
@@ -20,31 +22,38 @@ class Node:
         return self.value
 
     def __repr__(self) -> str:
-        return f"<{self.__class__.__name__}('{self}')>"
+        return f"<{self.__class__.__name__}({self.value!r})>"
 
     def serialize(self) -> str:
         raise NotImplementedError
 
 
 class Variable(Node):
+    __slots__ = ()
+
     def serialize(self) -> str:
         return str(self)
 
 
 class Value(Node):
+    __slots__ = ()
+
     def serialize(self) -> str:
         return f'"{self}"'
 
 
 class Op(Node):
+    __slots__ = ()
+
     def serialize(self) -> str:
         return str(self)
 
 
+MarkerLogical = Literal["and", "or"]
 MarkerVar = Union[Variable, Value]
 MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
 MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
-MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]]
+MarkerList = List[Union["MarkerList", MarkerAtom, MarkerLogical]]
 
 
 class ParsedRequirement(NamedTuple):
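
For context on the MarkerList retyping above: the parser produces a nested list in which (lhs, op, rhs) triples are joined by the literal strings "and"/"or", and the new Literal["and", "or"] alias plus a concrete List spell that out instead of the looser str and Sequence. A schematic sketch of the shape, using plain strings in place of the Variable/Op/Value nodes (illustrative only):

    from typing import List, Literal, Tuple, Union

    MarkerLogical = Literal["and", "or"]
    # Simplified stand-ins: the real items hold Variable/Op/Value nodes, not str.
    MarkerItem = Tuple[str, str, str]
    MarkerList = List[Union["MarkerList", MarkerItem, MarkerLogical]]

    # python_version >= "3.9" and (os_name == "posix" or extra == "test")
    marker: MarkerList = [
        ("python_version", ">=", "3.9"),
        "and",
        [
            ("os_name", "==", "posix"),
            "or",
            ("extra", "==", "test"),
        ],
    ]
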
@@ -111,7 +120,9 @@ def _parse_requirement_details(
             return (url, specifier, marker)
 
         marker = _parse_requirement_marker(
-            tokenizer, span_start=url_start, after="URL and whitespace"
+            tokenizer,
+            span_start=url_start,
+            expected="semicolon (after URL and whitespace)",
         )
     else:
         specifier_start = tokenizer.position
@@ -124,10 +135,10 @@ def _parse_requirement_details(
         marker = _parse_requirement_marker(
             tokenizer,
             span_start=specifier_start,
-            after=(
-                "version specifier"
+            expected=(
+                "comma (within version specifier), semicolon (after version specifier)"
                 if specifier
-                else "name and no valid version specifier"
+                else "semicolon (after name with no version specifier)"
             ),
         )
 
@@ -135,7 +146,7 @@
 
 
 def _parse_requirement_marker(
-    tokenizer: Tokenizer, *, span_start: int, after: str
+    tokenizer: Tokenizer, *, span_start: int, expected: str
 ) -> MarkerList:
     """
     requirement_marker = SEMICOLON marker WS?
@@ -143,8 +154,9 @@
 
     if not tokenizer.check("SEMICOLON"):
         tokenizer.raise_syntax_error(
-            f"Expected end or semicolon (after {after})",
+            f"Expected {expected} or end",
             span_start=span_start,
+            span_end=None,
         )
     tokenizer.read()
 
@@ -307,7 +319,7 @@ def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
     return (marker_var_left, marker_op, marker_var_right)
 
 
-def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
+def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:  # noqa: RET503
     """
     marker_var = VARIABLE | QUOTED_STRING
     """

pip/_vendor/packaging/_structures.py

@@ -2,8 +2,13 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
+import typing
 
+
+@typing.final
 class InfinityType:
+    __slots__ = ()
+
     def __repr__(self) -> str:
         return "Infinity"
 
@@ -32,7 +37,10 @@ class InfinityType:
 Infinity = InfinityType()
 
 
+@typing.final
 class NegativeInfinityType:
+    __slots__ = ()
+
     def __repr__(self) -> str:
         return "-Infinity"
 
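The _structures.py hunks mark the version-ordering sentinels as @typing.final and slot-less. typing.final has no effect at runtime (it only tells type checkers the classes must not be subclassed), and __slots__ = () removes the per-instance __dict__. A minimal sketch of the same pattern, with a hypothetical sentinel name:

    import typing


    @typing.final  # type checkers reject subclassing; no runtime behaviour change
    class _TopSentinelType:
        __slots__ = ()  # no instance __dict__, so no accidental attributes

        def __repr__(self) -> str:
            return "TopSentinel"

        def __gt__(self, other: object) -> bool:
            # Always sorts above everything, like packaging's Infinity sentinel.
            return True


    TopSentinel = _TopSentinelType()

    assert TopSentinel > 10**9
    try:
        TopSentinel.note = "x"  # rejected: __slots__ leaves nowhere to store it
    except AttributeError:
        pass
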

pip/_vendor/packaging/_tokenizer.py

@@ -3,7 +3,7 @@ from __future__ import annotations
 import contextlib
 import re
 from dataclasses import dataclass
-from typing import Iterator, NoReturn
+from typing import Generator, Mapping, NoReturn
 
 from .specifiers import Specifier
 
@@ -33,16 +33,16 @@ class ParserSyntaxError(Exception):
 
     def __str__(self) -> str:
         marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
-        return "\n    ".join([self.message, self.source, marker])
+        return f"{self.message}\n    {self.source}\n    {marker}"
 
 
-DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
-    "LEFT_PARENTHESIS": r"\(",
-    "RIGHT_PARENTHESIS": r"\)",
-    "LEFT_BRACKET": r"\[",
-    "RIGHT_BRACKET": r"\]",
-    "SEMICOLON": r";",
-    "COMMA": r",",
+DEFAULT_RULES: dict[str, re.Pattern[str]] = {
+    "LEFT_PARENTHESIS": re.compile(r"\("),
+    "RIGHT_PARENTHESIS": re.compile(r"\)"),
+    "LEFT_BRACKET": re.compile(r"\["),
+    "RIGHT_BRACKET": re.compile(r"\]"),
+    "SEMICOLON": re.compile(r";"),
+    "COMMA": re.compile(r","),
     "QUOTED_STRING": re.compile(
         r"""
            (
@@ -53,10 +53,10 @@ DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
         """,
         re.VERBOSE,
     ),
-    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
-    "BOOLOP": r"\b(or|and)\b",
-    "IN": r"\bin\b",
-    "NOT": r"\bnot\b",
+    "OP": re.compile(r"(===|==|~=|!=|<=|>=|<|>)"),
+    "BOOLOP": re.compile(r"\b(or|and)\b"),
+    "IN": re.compile(r"\bin\b"),
+    "NOT": re.compile(r"\bnot\b"),
     "VARIABLE": re.compile(
         r"""
            \b(
@@ -78,13 +78,13 @@ DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
         Specifier._operator_regex_str + Specifier._version_regex_str,
         re.VERBOSE | re.IGNORECASE,
     ),
-    "AT": r"\@",
-    "URL": r"[^ \t]+",
-    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
-    "VERSION_PREFIX_TRAIL": r"\.\*",
-    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
-    "WS": r"[ \t]+",
-    "END": r"$",
+    "AT": re.compile(r"\@"),
+    "URL": re.compile(r"[^ \t]+"),
+    "IDENTIFIER": re.compile(r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b"),
+    "VERSION_PREFIX_TRAIL": re.compile(r"\.\*"),
+    "VERSION_LOCAL_LABEL_TRAIL": re.compile(r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*"),
+    "WS": re.compile(r"[ \t]+"),
+    "END": re.compile(r"$"),
 }
 
 
@@ -99,12 +99,10 @@ class Tokenizer:
         self,
         source: str,
         *,
-        rules: dict[str, str | re.Pattern[str]],
+        rules: Mapping[str, re.Pattern[str]],
     ) -> None:
         self.source = source
-        self.rules: dict[str, re.Pattern[str]] = {
-            name: re.compile(pattern) for name, pattern in rules.items()
-        }
+        self.rules = rules
         self.next_token: Token | None = None
         self.position = 0
 
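With DEFAULT_RULES now holding re.Pattern objects, the Tokenizer above simply stores the mapping it is handed instead of compiling raw strings in every constructor call, so the patterns are compiled once at import time and shared. A rough sketch of the new shape, assuming a cut-down rule set (not the vendored class):

    import re
    from typing import Mapping

    # Compiled once at module import and shared by every tokenizer instance.
    RULES: Mapping[str, re.Pattern[str]] = {
        "IDENTIFIER": re.compile(r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b"),
        "WS": re.compile(r"[ \t]+"),
    }


    class MiniTokenizer:
        def __init__(self, source: str, *, rules: Mapping[str, re.Pattern[str]]) -> None:
            self.source = source
            self.rules = rules  # no per-instance re.compile() pass any more
            self.position = 0

        def check(self, name: str) -> bool:
            return self.rules[name].match(self.source, self.position) is not None


    assert MiniTokenizer("requests", rules=RULES).check("IDENTIFIER")
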
@@ -174,7 +172,7 @@
     @contextlib.contextmanager
     def enclosing_tokens(
         self, open_token: str, close_token: str, *, around: str
-    ) -> Iterator[None]:
+    ) -> Generator[None, None, None]:
         if self.check(open_token):
             open_position = self.position
             self.read()

pip/_vendor/packaging/licenses/__init__.py

@@ -34,7 +34,7 @@ from __future__ import annotations
 import re
 from typing import NewType, cast
 
-from pip._vendor.packaging.licenses._spdx import EXCEPTIONS, LICENSES
+from ._spdx import EXCEPTIONS, LICENSES
 
 __all__ = [
     "InvalidLicenseExpression",
@@ -80,16 +80,21 @@ def canonicalize_license_expression(
 
     tokens = license_expression.split()
 
-    # Rather than implementing boolean logic, we create an expression that Python can
-    # parse. Everything that is not involved with the grammar itself is treated as
-    # `False` and the expression should evaluate as such.
+    # Rather than implementing a parenthesis/boolean logic parser, create an
+    # expression that Python can parse. Everything that is not involved with the
+    # grammar itself is replaced with the placeholder `False` and the resultant
+    # expression should become a valid Python expression.
     python_tokens = []
     for token in tokens:
         if token not in {"or", "and", "with", "(", ")"}:
             python_tokens.append("False")
         elif token == "with":
             python_tokens.append("or")
-        elif token == "(" and python_tokens and python_tokens[-1] not in {"or", "and"}:
+        elif (
+            token == "("
+            and python_tokens
+            and python_tokens[-1] not in {"or", "and", "("}
+        ) or (token == ")" and python_tokens and python_tokens[-1] == "("):
             message = f"Invalid license expression: {raw_license_expression!r}"
             raise InvalidLicenseExpression(message)
         else:
@@ -97,11 +102,8 @@ def canonicalize_license_expression(
 
     python_expression = " ".join(python_tokens)
     try:
-        invalid = eval(python_expression, globals(), locals())
-    except Exception:
-        invalid = True
-
-    if invalid is not False:
+        compile(python_expression, "", "eval")
+    except SyntaxError:
         message = f"Invalid license expression: {raw_license_expression!r}"
         raise InvalidLicenseExpression(message) from None
 
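The validation above no longer eval()s the placeholder expression: every non-grammar token is replaced with `False`, `with` is treated as a binary operator, and compile(..., "eval") merely checks that the result parses, while the token-level checks earlier in the loop reject empty or misplaced parentheses. A standalone sketch of the placeholder idea (simplified; not the vendored function, which also canonicalizes license identifiers):

    def looks_structurally_valid(license_expression: str) -> bool:
        """Map every non-grammar token to `False` and let Python's parser judge
        the resulting boolean expression."""
        python_tokens = []
        for token in license_expression.lower().split():
            if token not in {"or", "and", "with", "(", ")"}:
                python_tokens.append("False")  # license or exception identifier
            elif token == "with":
                python_tokens.append("or")  # WITH acts like a binary operator here
            else:
                python_tokens.append(token)
        try:
            compile(" ".join(python_tokens), "<license>", "eval")
        except SyntaxError:
            return False
        return True


    assert looks_structurally_valid("MIT OR Apache-2.0")
    assert not looks_structurally_valid("MIT AND OR GPL-2.0-only")  # dangling operator
    assert not looks_structurally_valid("( MIT")  # unbalanced parenthesis
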
@@ -140,6 +142,6 @@ def canonicalize_license_expression(
     normalized_expression = " ".join(normalized_tokens)
 
     return cast(
-        NormalizedLicenseExpression,
+        "NormalizedLicenseExpression",
         normalized_expression.replace("( ", "(").replace(" )", ")"),
     )