pip 25.1.1__py3-none-any.whl → 25.2__py3-none-any.whl

This diff compares the published contents of two package versions as released to one of the supported registries. It is provided for informational purposes only.
Files changed (202)
  1. pip/__init__.py +3 -3
  2. pip/_internal/__init__.py +2 -2
  3. pip/_internal/build_env.py +118 -94
  4. pip/_internal/cache.py +16 -14
  5. pip/_internal/cli/autocompletion.py +13 -4
  6. pip/_internal/cli/base_command.py +18 -7
  7. pip/_internal/cli/cmdoptions.py +14 -9
  8. pip/_internal/cli/command_context.py +4 -3
  9. pip/_internal/cli/index_command.py +11 -9
  10. pip/_internal/cli/main.py +3 -2
  11. pip/_internal/cli/main_parser.py +4 -3
  12. pip/_internal/cli/parser.py +26 -22
  13. pip/_internal/cli/progress_bars.py +19 -12
  14. pip/_internal/cli/req_command.py +16 -12
  15. pip/_internal/cli/spinners.py +81 -5
  16. pip/_internal/commands/__init__.py +5 -3
  17. pip/_internal/commands/cache.py +18 -15
  18. pip/_internal/commands/check.py +1 -2
  19. pip/_internal/commands/completion.py +1 -2
  20. pip/_internal/commands/configuration.py +26 -18
  21. pip/_internal/commands/debug.py +8 -6
  22. pip/_internal/commands/download.py +2 -3
  23. pip/_internal/commands/freeze.py +2 -3
  24. pip/_internal/commands/hash.py +1 -2
  25. pip/_internal/commands/help.py +1 -2
  26. pip/_internal/commands/index.py +15 -9
  27. pip/_internal/commands/inspect.py +4 -4
  28. pip/_internal/commands/install.py +44 -39
  29. pip/_internal/commands/list.py +35 -26
  30. pip/_internal/commands/lock.py +1 -2
  31. pip/_internal/commands/search.py +14 -12
  32. pip/_internal/commands/show.py +14 -11
  33. pip/_internal/commands/uninstall.py +1 -2
  34. pip/_internal/commands/wheel.py +2 -3
  35. pip/_internal/configuration.py +39 -25
  36. pip/_internal/distributions/base.py +6 -4
  37. pip/_internal/distributions/installed.py +8 -4
  38. pip/_internal/distributions/sdist.py +20 -13
  39. pip/_internal/distributions/wheel.py +6 -4
  40. pip/_internal/exceptions.py +58 -39
  41. pip/_internal/index/collector.py +24 -29
  42. pip/_internal/index/package_finder.py +70 -61
  43. pip/_internal/index/sources.py +17 -14
  44. pip/_internal/locations/__init__.py +18 -16
  45. pip/_internal/locations/_distutils.py +12 -11
  46. pip/_internal/locations/_sysconfig.py +5 -4
  47. pip/_internal/locations/base.py +4 -3
  48. pip/_internal/main.py +2 -2
  49. pip/_internal/metadata/__init__.py +8 -6
  50. pip/_internal/metadata/_json.py +5 -4
  51. pip/_internal/metadata/base.py +22 -27
  52. pip/_internal/metadata/importlib/_compat.py +6 -4
  53. pip/_internal/metadata/importlib/_dists.py +12 -17
  54. pip/_internal/metadata/importlib/_envs.py +9 -6
  55. pip/_internal/metadata/pkg_resources.py +11 -14
  56. pip/_internal/models/direct_url.py +24 -21
  57. pip/_internal/models/format_control.py +5 -5
  58. pip/_internal/models/installation_report.py +4 -3
  59. pip/_internal/models/link.py +39 -34
  60. pip/_internal/models/pylock.py +27 -22
  61. pip/_internal/models/search_scope.py +6 -7
  62. pip/_internal/models/selection_prefs.py +3 -3
  63. pip/_internal/models/target_python.py +10 -9
  64. pip/_internal/models/wheel.py +7 -5
  65. pip/_internal/network/auth.py +20 -22
  66. pip/_internal/network/cache.py +22 -6
  67. pip/_internal/network/download.py +169 -141
  68. pip/_internal/network/lazy_wheel.py +10 -7
  69. pip/_internal/network/session.py +32 -27
  70. pip/_internal/network/utils.py +2 -2
  71. pip/_internal/network/xmlrpc.py +2 -2
  72. pip/_internal/operations/build/build_tracker.py +10 -8
  73. pip/_internal/operations/build/wheel.py +3 -2
  74. pip/_internal/operations/build/wheel_editable.py +3 -2
  75. pip/_internal/operations/build/wheel_legacy.py +9 -8
  76. pip/_internal/operations/check.py +21 -26
  77. pip/_internal/operations/freeze.py +12 -9
  78. pip/_internal/operations/install/editable_legacy.py +5 -3
  79. pip/_internal/operations/install/wheel.py +49 -41
  80. pip/_internal/operations/prepare.py +35 -30
  81. pip/_internal/pyproject.py +7 -10
  82. pip/_internal/req/__init__.py +12 -10
  83. pip/_internal/req/constructors.py +33 -31
  84. pip/_internal/req/req_dependency_group.py +7 -11
  85. pip/_internal/req/req_file.py +32 -35
  86. pip/_internal/req/req_install.py +37 -34
  87. pip/_internal/req/req_set.py +4 -5
  88. pip/_internal/req/req_uninstall.py +20 -17
  89. pip/_internal/resolution/base.py +3 -3
  90. pip/_internal/resolution/legacy/resolver.py +21 -20
  91. pip/_internal/resolution/resolvelib/base.py +16 -13
  92. pip/_internal/resolution/resolvelib/candidates.py +29 -26
  93. pip/_internal/resolution/resolvelib/factory.py +41 -50
  94. pip/_internal/resolution/resolvelib/found_candidates.py +11 -9
  95. pip/_internal/resolution/resolvelib/provider.py +15 -20
  96. pip/_internal/resolution/resolvelib/reporter.py +5 -3
  97. pip/_internal/resolution/resolvelib/requirements.py +8 -6
  98. pip/_internal/resolution/resolvelib/resolver.py +39 -23
  99. pip/_internal/self_outdated_check.py +8 -6
  100. pip/_internal/utils/appdirs.py +1 -2
  101. pip/_internal/utils/compat.py +7 -1
  102. pip/_internal/utils/compatibility_tags.py +17 -16
  103. pip/_internal/utils/deprecation.py +11 -9
  104. pip/_internal/utils/direct_url_helpers.py +2 -2
  105. pip/_internal/utils/egg_link.py +6 -5
  106. pip/_internal/utils/entrypoints.py +3 -2
  107. pip/_internal/utils/filesystem.py +8 -5
  108. pip/_internal/utils/filetypes.py +4 -6
  109. pip/_internal/utils/glibc.py +6 -5
  110. pip/_internal/utils/hashes.py +9 -6
  111. pip/_internal/utils/logging.py +8 -5
  112. pip/_internal/utils/misc.py +37 -45
  113. pip/_internal/utils/packaging.py +3 -2
  114. pip/_internal/utils/retry.py +7 -4
  115. pip/_internal/utils/setuptools_build.py +12 -10
  116. pip/_internal/utils/subprocess.py +20 -17
  117. pip/_internal/utils/temp_dir.py +10 -12
  118. pip/_internal/utils/unpacking.py +6 -4
  119. pip/_internal/utils/urls.py +1 -1
  120. pip/_internal/utils/virtualenv.py +3 -2
  121. pip/_internal/utils/wheel.py +3 -4
  122. pip/_internal/vcs/bazaar.py +26 -8
  123. pip/_internal/vcs/git.py +59 -24
  124. pip/_internal/vcs/mercurial.py +34 -11
  125. pip/_internal/vcs/subversion.py +27 -16
  126. pip/_internal/vcs/versioncontrol.py +56 -51
  127. pip/_internal/wheel_builder.py +14 -12
  128. pip/_vendor/cachecontrol/__init__.py +1 -1
  129. pip/_vendor/certifi/__init__.py +1 -1
  130. pip/_vendor/certifi/cacert.pem +102 -221
  131. pip/_vendor/certifi/core.py +1 -32
  132. pip/_vendor/distlib/__init__.py +2 -2
  133. pip/_vendor/distlib/scripts.py +1 -1
  134. pip/_vendor/msgpack/__init__.py +2 -2
  135. pip/_vendor/pkg_resources/__init__.py +1 -1
  136. pip/_vendor/platformdirs/version.py +2 -2
  137. pip/_vendor/pygments/__init__.py +1 -1
  138. pip/_vendor/requests/__version__.py +2 -2
  139. pip/_vendor/requests/compat.py +12 -0
  140. pip/_vendor/requests/models.py +3 -1
  141. pip/_vendor/requests/utils.py +6 -16
  142. pip/_vendor/resolvelib/__init__.py +3 -3
  143. pip/_vendor/resolvelib/reporters.py +1 -1
  144. pip/_vendor/resolvelib/resolvers/__init__.py +4 -4
  145. pip/_vendor/resolvelib/resolvers/resolution.py +91 -10
  146. pip/_vendor/rich/__main__.py +12 -40
  147. pip/_vendor/rich/_inspect.py +1 -1
  148. pip/_vendor/rich/_ratio.py +1 -7
  149. pip/_vendor/rich/align.py +1 -7
  150. pip/_vendor/rich/box.py +1 -7
  151. pip/_vendor/rich/console.py +25 -20
  152. pip/_vendor/rich/control.py +1 -7
  153. pip/_vendor/rich/diagnose.py +1 -0
  154. pip/_vendor/rich/emoji.py +1 -6
  155. pip/_vendor/rich/live.py +32 -7
  156. pip/_vendor/rich/live_render.py +1 -7
  157. pip/_vendor/rich/logging.py +1 -1
  158. pip/_vendor/rich/panel.py +3 -4
  159. pip/_vendor/rich/progress.py +15 -15
  160. pip/_vendor/rich/spinner.py +7 -13
  161. pip/_vendor/rich/syntax.py +24 -5
  162. pip/_vendor/rich/traceback.py +32 -17
  163. pip/_vendor/truststore/_api.py +1 -1
  164. pip/_vendor/vendor.txt +9 -10
  165. {pip-25.1.1.dist-info → pip-25.2.dist-info}/METADATA +26 -4
  166. {pip-25.1.1.dist-info → pip-25.2.dist-info}/RECORD +193 -180
  167. {pip-25.1.1.dist-info → pip-25.2.dist-info}/WHEEL +1 -1
  168. {pip-25.1.1.dist-info → pip-25.2.dist-info}/licenses/AUTHORS.txt +12 -0
  169. pip-25.2.dist-info/licenses/src/pip/_vendor/cachecontrol/LICENSE.txt +13 -0
  170. pip-25.2.dist-info/licenses/src/pip/_vendor/certifi/LICENSE +20 -0
  171. pip-25.2.dist-info/licenses/src/pip/_vendor/dependency_groups/LICENSE.txt +9 -0
  172. pip-25.2.dist-info/licenses/src/pip/_vendor/distlib/LICENSE.txt +284 -0
  173. pip-25.2.dist-info/licenses/src/pip/_vendor/distro/LICENSE +202 -0
  174. pip-25.2.dist-info/licenses/src/pip/_vendor/idna/LICENSE.md +31 -0
  175. pip-25.2.dist-info/licenses/src/pip/_vendor/msgpack/COPYING +14 -0
  176. pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE +3 -0
  177. pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.APACHE +177 -0
  178. pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.BSD +23 -0
  179. pip-25.2.dist-info/licenses/src/pip/_vendor/pkg_resources/LICENSE +17 -0
  180. pip-25.2.dist-info/licenses/src/pip/_vendor/platformdirs/LICENSE +21 -0
  181. pip-25.2.dist-info/licenses/src/pip/_vendor/pygments/LICENSE +25 -0
  182. pip-25.2.dist-info/licenses/src/pip/_vendor/pyproject_hooks/LICENSE +21 -0
  183. pip-25.2.dist-info/licenses/src/pip/_vendor/requests/LICENSE +175 -0
  184. pip-25.2.dist-info/licenses/src/pip/_vendor/resolvelib/LICENSE +13 -0
  185. pip-25.2.dist-info/licenses/src/pip/_vendor/rich/LICENSE +19 -0
  186. pip-25.2.dist-info/licenses/src/pip/_vendor/tomli/LICENSE +21 -0
  187. pip-25.2.dist-info/licenses/src/pip/_vendor/tomli/LICENSE-HEADER +3 -0
  188. pip-25.2.dist-info/licenses/src/pip/_vendor/tomli_w/LICENSE +21 -0
  189. pip-25.2.dist-info/licenses/src/pip/_vendor/truststore/LICENSE +21 -0
  190. pip-25.2.dist-info/licenses/src/pip/_vendor/urllib3/LICENSE.txt +21 -0
  191. pip/_vendor/distlib/database.py +0 -1329
  192. pip/_vendor/distlib/index.py +0 -508
  193. pip/_vendor/distlib/locators.py +0 -1295
  194. pip/_vendor/distlib/manifest.py +0 -384
  195. pip/_vendor/distlib/markers.py +0 -162
  196. pip/_vendor/distlib/metadata.py +0 -1031
  197. pip/_vendor/distlib/version.py +0 -750
  198. pip/_vendor/distlib/wheel.py +0 -1100
  199. pip/_vendor/typing_extensions.py +0 -4584
  200. {pip-25.1.1.dist-info → pip-25.2.dist-info}/entry_points.txt +0 -0
  201. {pip-25.1.1.dist-info → pip-25.2.dist-info}/licenses/LICENSE.txt +0 -0
  202. {pip-25.1.1.dist-info → pip-25.2.dist-info}/top_level.txt +0 -0
pip/_internal/models/wheel.py

@@ -2,8 +2,10 @@
 name that have meaning.
 """
 
+from __future__ import annotations
+
 import re
-from typing import Dict, Iterable, List, Optional
+from collections.abc import Iterable
 
 from pip._vendor.packaging.tags import Tag
 from pip._vendor.packaging.utils import BuildTag, parse_wheel_filename
@@ -31,7 +33,7 @@ class Wheel:
         # To make mypy happy specify type hints that can come from either
         # parse_wheel_filename or the legacy_wheel_file_re match.
         self.name: str
-        self._build_tag: Optional[BuildTag] = None
+        self._build_tag: BuildTag | None = None
 
         try:
             wheel_info = parse_wheel_filename(filename)
@@ -88,11 +90,11 @@ class Wheel:
 
         return self._build_tag
 
-    def get_formatted_file_tags(self) -> List[str]:
+    def get_formatted_file_tags(self) -> list[str]:
         """Return the wheel's tags as a sorted list of strings."""
         return sorted(str(tag) for tag in self.file_tags)
 
-    def support_index_min(self, tags: List[Tag]) -> int:
+    def support_index_min(self, tags: list[Tag]) -> int:
         """Return the lowest index that one of the wheel's file_tag combinations
         achieves in the given list of supported tags.
 
@@ -111,7 +113,7 @@ class Wheel:
         raise ValueError()
 
     def find_most_preferred_tag(
-        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
+        self, tags: list[Tag], tag_to_priority: dict[Tag, int]
     ) -> int:
         """Return the priority of the most preferred tag that one of the wheel's file
         tag combinations achieves in the given list of supported tags using the given
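The common thread in these wheel.py hunks (and in most of the hunks below) is annotation modernization: `from __future__ import annotations` is added so built-in generics (`list[str]`, `dict[Tag, int]`) and `X | None` unions can replace `typing.List`, `Dict`, `Optional`, and `Tuple`. With the `__future__` import, annotations are stored as strings instead of being evaluated at import time, so these spellings work on every Python version pip still supports. A minimal sketch of the same pattern, using a hypothetical helper that is not part of pip:

    from __future__ import annotations  # annotations are no longer evaluated at runtime

    # Old style, as removed in the diff:
    #   from typing import Dict, List, Optional
    #   def count_tags(tags: List[str], default: Optional[str] = None) -> Dict[str, int]: ...

    def count_tags(tags: list[str], default: str | None = None) -> dict[str, int]:
        """Count occurrences of each tag, using built-in generics in the annotations."""
        counts: dict[str, int] = {}
        for tag in tags:
            counts[tag] = counts.get(tag, 0) + 1
        if default is not None:
            counts.setdefault(default, 0)
        return counts

    print(count_tags(["py3-none-any", "py3-none-any", "cp313-abi3"]))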
pip/_internal/network/auth.py

@@ -4,6 +4,8 @@ Contains interface (MultiDomainBasicAuth) and associated glue code for
 providing credentials in the context of network requests.
 """
 
+from __future__ import annotations
+
 import logging
 import os
 import shutil
@@ -12,10 +14,10 @@ import sysconfig
 import typing
 import urllib.parse
 from abc import ABC, abstractmethod
-from functools import lru_cache
+from functools import cache
 from os.path import commonprefix
 from pathlib import Path
-from typing import Any, Dict, List, NamedTuple, Optional, Tuple
+from typing import Any, NamedTuple
 
 from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
 from pip._vendor.requests.models import Request, Response
@@ -48,9 +50,7 @@ class KeyRingBaseProvider(ABC):
     has_keyring: bool
 
     @abstractmethod
-    def get_auth_info(
-        self, url: str, username: Optional[str]
-    ) -> Optional[AuthInfo]: ...
+    def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None: ...
 
     @abstractmethod
     def save_auth_info(self, url: str, username: str, password: str) -> None: ...
@@ -61,7 +61,7 @@ class KeyRingNullProvider(KeyRingBaseProvider):
 
     has_keyring = False
 
-    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+    def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None:
         return None
 
     def save_auth_info(self, url: str, username: str, password: str) -> None:
@@ -78,7 +78,7 @@ class KeyRingPythonProvider(KeyRingBaseProvider):
 
         self.keyring = keyring
 
-    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+    def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None:
         # Support keyring's get_credential interface which supports getting
         # credentials without a username. This is only available for
         # keyring>=15.2.0.
@@ -114,7 +114,7 @@ class KeyRingCliProvider(KeyRingBaseProvider):
     def __init__(self, cmd: str) -> None:
        self.keyring = cmd
 
-    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+    def get_auth_info(self, url: str, username: str | None) -> AuthInfo | None:
         # This is the default implementation of keyring.get_credential
         # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
         if username is not None:
@@ -126,7 +126,7 @@ class KeyRingCliProvider(KeyRingBaseProvider):
     def save_auth_info(self, url: str, username: str, password: str) -> None:
         return self._set_password(url, username, password)
 
-    def _get_password(self, service_name: str, username: str) -> Optional[str]:
+    def _get_password(self, service_name: str, username: str) -> str | None:
         """Mirror the implementation of keyring.get_password using cli"""
         if self.keyring is None:
             return None
@@ -159,7 +159,7 @@ class KeyRingCliProvider(KeyRingBaseProvider):
         return None
 
 
-@lru_cache(maxsize=None)
+@cache
 def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
     logger.verbose("Keyring provider requested: %s", provider)
 
@@ -225,19 +225,19 @@ class MultiDomainBasicAuth(AuthBase):
     def __init__(
         self,
         prompting: bool = True,
-        index_urls: Optional[List[str]] = None,
+        index_urls: list[str] | None = None,
         keyring_provider: str = "auto",
     ) -> None:
         self.prompting = prompting
         self.index_urls = index_urls
-        self.keyring_provider = keyring_provider  # type: ignore[assignment]
-        self.passwords: Dict[str, AuthInfo] = {}
+        self.keyring_provider = keyring_provider
+        self.passwords: dict[str, AuthInfo] = {}
         # When the user is prompted to enter credentials and keyring is
         # available, we will offer to save them. If the user accepts,
         # this value is set to the credentials they entered. After the
         # request authenticates, the caller should call
         # ``save_credentials`` to save these.
-        self._credentials_to_save: Optional[Credentials] = None
+        self._credentials_to_save: Credentials | None = None
 
     @property
     def keyring_provider(self) -> KeyRingBaseProvider:
@@ -260,9 +260,9 @@ class MultiDomainBasicAuth(AuthBase):
 
     def _get_keyring_auth(
         self,
-        url: Optional[str],
-        username: Optional[str],
-    ) -> Optional[AuthInfo]:
+        url: str | None,
+        username: str | None,
+    ) -> AuthInfo | None:
         """Return the tuple auth for a given url from keyring."""
         # Do nothing if no url was provided
         if not url:
@@ -284,7 +284,7 @@ class MultiDomainBasicAuth(AuthBase):
             get_keyring_provider.cache_clear()
             return None
 
-    def _get_index_url(self, url: str) -> Optional[str]:
+    def _get_index_url(self, url: str) -> str | None:
         """Return the original index URL matching the requested URL.
 
         Cached or dynamically generated credentials may work against
@@ -391,7 +391,7 @@ class MultiDomainBasicAuth(AuthBase):
 
     def _get_url_and_credentials(
         self, original_url: str
-    ) -> Tuple[str, Optional[str], Optional[str]]:
+    ) -> tuple[str, str | None, str | None]:
         """Return the credentials to use for the provided URL.
 
         If allowed, netrc and keyring may be used to obtain the
@@ -454,9 +454,7 @@ class MultiDomainBasicAuth(AuthBase):
         return req
 
     # Factored out to allow for easy patching in tests
-    def _prompt_for_password(
-        self, netloc: str
-    ) -> Tuple[Optional[str], Optional[str], bool]:
+    def _prompt_for_password(self, netloc: str) -> tuple[str | None, str | None, bool]:
         username = ask_input(f"User for {netloc}: ") if self.prompting else None
         if not username:
             return None, None, False
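Besides the same `Optional`/`Tuple` cleanup, auth.py swaps `@lru_cache(maxsize=None)` for `@cache`. `functools.cache` (available since Python 3.9) is simply an unbounded `lru_cache` with the same `cache_clear`/`cache_info` API, which is why the existing `get_keyring_provider.cache_clear()` call keeps working. A small illustration, not pip code:

    from functools import cache


    @cache  # equivalent to @lru_cache(maxsize=None)
    def get_provider(name: str) -> str:
        print(f"computing provider for {name!r}")
        return name.upper()


    get_provider("auto")        # prints once
    get_provider("auto")        # cache hit, nothing printed
    get_provider.cache_clear()  # same API that pip calls on get_keyring_provider
    get_provider("auto")        # recomputed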
pip/_internal/network/cache.py

@@ -1,9 +1,13 @@
 """HTTP cache implementation."""
 
+from __future__ import annotations
+
 import os
+import shutil
+from collections.abc import Generator
 from contextlib import contextmanager
 from datetime import datetime
-from typing import BinaryIO, Generator, Optional, Union
+from typing import Any, BinaryIO, Callable
 
 from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
 from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
@@ -59,7 +63,7 @@ class SafeFileCache(SeparateBodyBaseCache):
         parts = list(hashed[:5]) + [hashed]
         return os.path.join(self.directory, *parts)
 
-    def get(self, key: str) -> Optional[bytes]:
+    def get(self, key: str) -> bytes | None:
         # The cache entry is only valid if both metadata and body exist.
         metadata_path = self._get_cache_path(key)
         body_path = metadata_path + ".body"
@@ -69,12 +73,13 @@ class SafeFileCache(SeparateBodyBaseCache):
         with open(metadata_path, "rb") as f:
             return f.read()
 
-    def _write(self, path: str, data: bytes) -> None:
+    def _write_to_file(self, path: str, writer_func: Callable[[BinaryIO], Any]) -> None:
+        """Common file writing logic with proper permissions and atomic replacement."""
         with suppressed_cache_errors():
             ensure_dir(os.path.dirname(path))
 
             with adjacent_tmp_file(path) as f:
-                f.write(data)
+                writer_func(f)
                 # Inherit the read/write permissions of the cache directory
                 # to enable multi-user cache use-cases.
                 mode = (
@@ -90,8 +95,14 @@ class SafeFileCache(SeparateBodyBaseCache):
 
             replace(f.name, path)
 
+    def _write(self, path: str, data: bytes) -> None:
+        self._write_to_file(path, lambda f: f.write(data))
+
+    def _write_from_io(self, path: str, source_file: BinaryIO) -> None:
+        self._write_to_file(path, lambda f: shutil.copyfileobj(source_file, f))
+
     def set(
-        self, key: str, value: bytes, expires: Union[int, datetime, None] = None
+        self, key: str, value: bytes, expires: int | datetime | None = None
     ) -> None:
         path = self._get_cache_path(key)
         self._write(path, value)
@@ -103,7 +114,7 @@ class SafeFileCache(SeparateBodyBaseCache):
         with suppressed_cache_errors():
             os.remove(path + ".body")
 
-    def get_body(self, key: str) -> Optional[BinaryIO]:
+    def get_body(self, key: str) -> BinaryIO | None:
         # The cache entry is only valid if both metadata and body exist.
         metadata_path = self._get_cache_path(key)
         body_path = metadata_path + ".body"
@@ -115,3 +126,8 @@ class SafeFileCache(SeparateBodyBaseCache):
     def set_body(self, key: str, body: bytes) -> None:
         path = self._get_cache_path(key) + ".body"
         self._write(path, body)
+
+    def set_body_from_io(self, key: str, body_file: BinaryIO) -> None:
+        """Set the body of the cache entry from a file object."""
+        path = self._get_cache_path(key) + ".body"
+        self._write_from_io(path, body_file)
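The cache refactor funnels every write through `_write_to_file`, which takes a writer callback, so a body can now be streamed straight from an open file (`set_body_from_io` via `shutil.copyfileobj`) instead of being read into memory first. A standalone sketch of that callback-plus-atomic-replace idea, with plain `tempfile`/`os.replace` standing in for pip's `adjacent_tmp_file` and `replace` helpers:

    import os
    import shutil
    import tempfile
    from typing import Any, BinaryIO, Callable


    def write_atomically(path: str, writer_func: Callable[[BinaryIO], Any]) -> None:
        """Write through a callback to a temp file beside `path`, then rename into place."""
        directory = os.path.dirname(path) or "."
        os.makedirs(directory, exist_ok=True)
        fd, tmp = tempfile.mkstemp(dir=directory)
        try:
            with os.fdopen(fd, "wb") as f:
                writer_func(f)
            os.replace(tmp, path)  # atomic rename, so readers never see a partial file
        except BaseException:
            os.unlink(tmp)
            raise


    # Small payloads can stay as bytes in memory...
    write_atomically("cache/meta", lambda f: f.write(b"serialized headers"))

    # ...while large bodies are streamed from a file without loading them fully.
    with open("cache/meta", "rb") as src:
        write_atomically("cache/body", lambda f: shutil.copyfileobj(src, f))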
pip/_internal/network/download.py

@@ -1,35 +1,42 @@
 """Download files with progress indicators."""
 
+from __future__ import annotations
+
 import email.message
 import logging
 import mimetypes
 import os
+from collections.abc import Iterable, Mapping
+from dataclasses import dataclass
 from http import HTTPStatus
-from typing import BinaryIO, Iterable, Optional, Tuple
+from typing import BinaryIO
 
+from pip._vendor.requests import PreparedRequest
 from pip._vendor.requests.models import Response
+from pip._vendor.urllib3 import HTTPResponse as URLlib3Response
+from pip._vendor.urllib3._collections import HTTPHeaderDict
 from pip._vendor.urllib3.exceptions import ReadTimeoutError
 
-from pip._internal.cli.progress_bars import get_download_progress_renderer
+from pip._internal.cli.progress_bars import BarType, get_download_progress_renderer
 from pip._internal.exceptions import IncompleteDownloadError, NetworkConnectionError
 from pip._internal.models.index import PyPI
 from pip._internal.models.link import Link
-from pip._internal.network.cache import is_from_cache
-from pip._internal.network.session import PipSession
+from pip._internal.network.cache import SafeFileCache, is_from_cache
+from pip._internal.network.session import CacheControlAdapter, PipSession
 from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
 from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext
 
 logger = logging.getLogger(__name__)
 
 
-def _get_http_response_size(resp: Response) -> Optional[int]:
+def _get_http_response_size(resp: Response) -> int | None:
     try:
         return int(resp.headers["content-length"])
     except (ValueError, KeyError, TypeError):
         return None
 
 
-def _get_http_response_etag_or_last_modified(resp: Response) -> Optional[str]:
+def _get_http_response_etag_or_last_modified(resp: Response) -> str | None:
     """
     Return either the ETag or Last-Modified header (or None if neither exists).
     The return value can be used in an If-Range header.
@@ -37,12 +44,12 @@ def _get_http_response_etag_or_last_modified(resp: Response) -> Optional[str]:
     return resp.headers.get("etag", resp.headers.get("last-modified"))
 
 
-def _prepare_download(
+def _log_download(
     resp: Response,
     link: Link,
-    progress_bar: str,
-    total_length: Optional[int],
-    range_start: Optional[int] = 0,
+    progress_bar: BarType,
+    total_length: int | None,
+    range_start: int | None = 0,
 ) -> Iterable[bytes]:
     if link.netloc == PyPI.file_storage_domain:
         url = link.show_url
@@ -119,7 +126,7 @@ def _get_http_response_filename(resp: Response, link: Link) -> str:
     content_disposition = resp.headers.get("content-disposition")
     if content_disposition:
         filename = parse_content_disposition(content_disposition, filename)
-    ext: Optional[str] = splitext(filename)[1]
+    ext: str | None = splitext(filename)[1]
     if not ext:
         ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
         if ext:
@@ -131,35 +138,35 @@ def _get_http_response_filename(resp: Response, link: Link) -> str:
     return filename
 
 
-def _http_get_download(
-    session: PipSession,
-    link: Link,
-    range_start: Optional[int] = 0,
-    if_range: Optional[str] = None,
-) -> Response:
-    target_url = link.url.split("#", 1)[0]
-    headers = HEADERS.copy()
-    # request a partial download
-    if range_start:
-        headers["Range"] = f"bytes={range_start}-"
-    # make sure the file hasn't changed
-    if if_range:
-        headers["If-Range"] = if_range
-    try:
-        resp = session.get(target_url, headers=headers, stream=True)
-        raise_for_status(resp)
-    except NetworkConnectionError as e:
-        assert e.response is not None
-        logger.critical("HTTP error %s while getting %s", e.response.status_code, link)
-        raise
-    return resp
+@dataclass
+class _FileDownload:
+    """Stores the state of a single link download."""
+
+    link: Link
+    output_file: BinaryIO
+    size: int | None
+    bytes_received: int = 0
+    reattempts: int = 0
+
+    def is_incomplete(self) -> bool:
+        return bool(self.size is not None and self.bytes_received < self.size)
+
+    def write_chunk(self, data: bytes) -> None:
+        self.bytes_received += len(data)
+        self.output_file.write(data)
+
+    def reset_file(self) -> None:
+        """Delete any saved data and reset progress to zero."""
+        self.output_file.seek(0)
+        self.output_file.truncate()
+        self.bytes_received = 0
 
 
 class Downloader:
     def __init__(
         self,
         session: PipSession,
-        progress_bar: str,
+        progress_bar: BarType,
         resume_retries: int,
     ) -> None:
         assert (
@@ -169,146 +176,167 @@ class Downloader:
         self._progress_bar = progress_bar
         self._resume_retries = resume_retries
 
-    def __call__(self, link: Link, location: str) -> Tuple[str, str]:
-        """Download the file given by link into location."""
-        resp = _http_get_download(self._session, link)
-        # NOTE: The original download size needs to be passed down everywhere
-        # so if the download is resumed (with a HTTP Range request) the progress
-        # bar will report the right size.
-        total_length = _get_http_response_size(resp)
-        content_type = resp.headers.get("Content-Type", "")
+    def batch(
+        self, links: Iterable[Link], location: str
+    ) -> Iterable[tuple[Link, tuple[str, str]]]:
+        """Convenience method to download multiple links."""
+        for link in links:
+            filepath, content_type = self(link, location)
+            yield link, (filepath, content_type)
 
-        filename = _get_http_response_filename(resp, link)
-        filepath = os.path.join(location, filename)
+    def __call__(self, link: Link, location: str) -> tuple[str, str]:
+        """Download a link and save it under location."""
+        resp = self._http_get(link)
+        download_size = _get_http_response_size(resp)
 
+        filepath = os.path.join(location, _get_http_response_filename(resp, link))
         with open(filepath, "wb") as content_file:
-            bytes_received = self._process_response(
-                resp, link, content_file, 0, total_length
-            )
-            # If possible, check for an incomplete download and attempt resuming.
-            if total_length and bytes_received < total_length:
-                self._attempt_resume(
-                    resp, link, content_file, total_length, bytes_received
-                )
+            download = _FileDownload(link, content_file, download_size)
+            self._process_response(download, resp)
+            if download.is_incomplete():
+                self._attempt_resumes_or_redownloads(download, resp)
 
+        content_type = resp.headers.get("Content-Type", "")
         return filepath, content_type
 
-    def _process_response(
-        self,
-        resp: Response,
-        link: Link,
-        content_file: BinaryIO,
-        bytes_received: int,
-        total_length: Optional[int],
-    ) -> int:
-        """Process the response and write the chunks to the file."""
-        chunks = _prepare_download(
-            resp, link, self._progress_bar, total_length, range_start=bytes_received
-        )
-        return self._write_chunks_to_file(
-            chunks, content_file, allow_partial=bool(total_length)
+    def _process_response(self, download: _FileDownload, resp: Response) -> None:
+        """Download and save chunks from a response."""
+        chunks = _log_download(
+            resp,
+            download.link,
+            self._progress_bar,
+            download.size,
+            range_start=download.bytes_received,
         )
-
-    def _write_chunks_to_file(
-        self, chunks: Iterable[bytes], content_file: BinaryIO, *, allow_partial: bool
-    ) -> int:
-        """Write the chunks to the file and return the number of bytes received."""
-        bytes_received = 0
         try:
             for chunk in chunks:
-                bytes_received += len(chunk)
-                content_file.write(chunk)
+                download.write_chunk(chunk)
         except ReadTimeoutError as e:
-            # If partial downloads are OK (the download will be retried), don't bail.
-            if not allow_partial:
+            # If the download size is not known, then give up downloading the file.
+            if download.size is None:
                 raise e
 
-            # Ensuring bytes_received is returned to attempt resume
             logger.warning("Connection timed out while downloading.")
 
-        return bytes_received
-
-    def _attempt_resume(
-        self,
-        resp: Response,
-        link: Link,
-        content_file: BinaryIO,
-        total_length: Optional[int],
-        bytes_received: int,
+    def _attempt_resumes_or_redownloads(
+        self, download: _FileDownload, first_resp: Response
    ) -> None:
-        """Attempt to resume the download if connection was dropped."""
-        etag_or_last_modified = _get_http_response_etag_or_last_modified(resp)
-
-        attempts_left = self._resume_retries
-        while total_length and attempts_left and bytes_received < total_length:
-            attempts_left -= 1
+        """Attempt to resume/restart the download if connection was dropped."""
 
+        while download.reattempts < self._resume_retries and download.is_incomplete():
+            assert download.size is not None
+            download.reattempts += 1
             logger.warning(
                 "Attempting to resume incomplete download (%s/%s, attempt %d)",
-                format_size(bytes_received),
-                format_size(total_length),
-                (self._resume_retries - attempts_left),
+                format_size(download.bytes_received),
+                format_size(download.size),
+                download.reattempts,
             )
 
             try:
-                # Try to resume the download using a HTTP range request.
-                resume_resp = _http_get_download(
-                    self._session,
-                    link,
-                    range_start=bytes_received,
-                    if_range=etag_or_last_modified,
-                )
-
+                resume_resp = self._http_get_resume(download, should_match=first_resp)
                 # Fallback: if the server responded with 200 (i.e., the file has
                 # since been modified or range requests are unsupported) or any
                 # other unexpected status, restart the download from the beginning.
                 must_restart = resume_resp.status_code != HTTPStatus.PARTIAL_CONTENT
                 if must_restart:
-                    bytes_received, total_length, etag_or_last_modified = (
-                        self._reset_download_state(resume_resp, content_file)
-                    )
+                    download.reset_file()
+                    download.size = _get_http_response_size(resume_resp)
+                    first_resp = resume_resp
 
-                bytes_received += self._process_response(
-                    resume_resp, link, content_file, bytes_received, total_length
-                )
+                self._process_response(download, resume_resp)
             except (ConnectionError, ReadTimeoutError, OSError):
                 continue
 
         # No more resume attempts. Raise an error if the download is still incomplete.
-        if total_length and bytes_received < total_length:
-            os.remove(content_file.name)
-            raise IncompleteDownloadError(
-                link, bytes_received, total_length, retries=self._resume_retries
-            )
+        if download.is_incomplete():
+            os.remove(download.output_file.name)
+            raise IncompleteDownloadError(download)
 
-    def _reset_download_state(
-        self,
-        resp: Response,
-        content_file: BinaryIO,
-    ) -> Tuple[int, Optional[int], Optional[str]]:
-        """Reset the download state to restart downloading from the beginning."""
-        content_file.seek(0)
-        content_file.truncate()
-        bytes_received = 0
-        total_length = _get_http_response_size(resp)
-        etag_or_last_modified = _get_http_response_etag_or_last_modified(resp)
+        # If we successfully completed the download via resume, manually cache it
+        # as a complete response to enable future caching
+        if download.reattempts > 0:
+            self._cache_resumed_download(download, first_resp)
 
-        return bytes_received, total_length, etag_or_last_modified
+    def _cache_resumed_download(
+        self, download: _FileDownload, original_response: Response
+    ) -> None:
+        """
+        Manually cache a file that was successfully downloaded via resume retries.
+
+        cachecontrol doesn't cache 206 (Partial Content) responses, since they
+        are not complete files. This method manually adds the final file to the
+        cache as though it was downloaded in a single request, so that future
+        requests can use the cache.
+        """
+        url = download.link.url_without_fragment
+        adapter = self._session.get_adapter(url)
+
+        # Check if the adapter is the CacheControlAdapter (i.e. caching is enabled)
+        if not isinstance(adapter, CacheControlAdapter):
+            logger.debug(
+                "Skipping resume download caching: no cache controller for %s", url
+            )
+            return
+
+        # Check SafeFileCache is being used
+        assert isinstance(
+            adapter.cache, SafeFileCache
+        ), "separate body cache not in use!"
+
+        synthetic_request = PreparedRequest()
+        synthetic_request.prepare(method="GET", url=url, headers={})
+
+        synthetic_response_headers = HTTPHeaderDict()
+        for key, value in original_response.headers.items():
+            if key.lower() not in ["content-range", "content-length"]:
+                synthetic_response_headers[key] = value
+        synthetic_response_headers["content-length"] = str(download.size)
+
+        synthetic_response = URLlib3Response(
+            body="",
+            headers=synthetic_response_headers,
+            status=200,
+            preload_content=False,
+        )
 
+        # Save metadata and then stream the file contents to cache.
+        cache_url = adapter.controller.cache_url(url)
+        metadata_blob = adapter.controller.serializer.dumps(
+            synthetic_request, synthetic_response, b""
+        )
+        adapter.cache.set(cache_url, metadata_blob)
+        download.output_file.flush()
+        with open(download.output_file.name, "rb") as f:
+            adapter.cache.set_body_from_io(cache_url, f)
 
-class BatchDownloader:
-    def __init__(
-        self,
-        session: PipSession,
-        progress_bar: str,
-        resume_retries: int,
-    ) -> None:
-        self._downloader = Downloader(session, progress_bar, resume_retries)
+        logger.debug(
+            "Cached resumed download as complete response for future use: %s", url
+        )
 
-    def __call__(
-        self, links: Iterable[Link], location: str
-    ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
-        """Download the files given by links into location."""
-        for link in links:
-            filepath, content_type = self._downloader(link, location)
-            yield link, (filepath, content_type)
+    def _http_get_resume(
+        self, download: _FileDownload, should_match: Response
+    ) -> Response:
+        """Issue a HTTP range request to resume the download."""
+        # To better understand the download resumption logic, see the mdn web docs:
+        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/Range_requests
+        headers = HEADERS.copy()
+        headers["Range"] = f"bytes={download.bytes_received}-"
+        # If possible, use a conditional range request to avoid corrupted
+        # downloads caused by the remote file changing in-between.
+        if identifier := _get_http_response_etag_or_last_modified(should_match):
+            headers["If-Range"] = identifier
+        return self._http_get(download.link, headers)
+
+    def _http_get(self, link: Link, headers: Mapping[str, str] = HEADERS) -> Response:
+        target_url = link.url_without_fragment
+        try:
+            resp = self._session.get(target_url, headers=headers, stream=True)
+            raise_for_status(resp)
+        except NetworkConnectionError as e:
+            assert e.response is not None
+            logger.critical(
+                "HTTP error %s while getting %s", e.response.status_code, link
+            )
+            raise
+        return resp
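The download.py rewrite gathers per-file state into the `_FileDownload` dataclass and splits the HTTP plumbing into `_http_get` and `_http_get_resume`: a resume asks for `Range: bytes=<received>-` and pins the file's identity with `If-Range` (ETag or Last-Modified), anything other than a 206 Partial Content reply means the file changed or ranges are unsupported so the download restarts from scratch, and a file completed via resume is then re-inserted into the HTTP cache as a synthetic 200 response. A condensed, standalone sketch of the resume protocol only, written against plain `requests` rather than pip's `PipSession`, progress bars, or cache:

    import os
    from typing import BinaryIO

    import requests


    def _write_body(resp: requests.Response, out: BinaryIO) -> int:
        """Stream the response body to `out`, returning bytes written even on errors."""
        written = 0
        try:
            for chunk in resp.iter_content(chunk_size=64 * 1024):
                out.write(chunk)
                written += len(chunk)
        except requests.exceptions.RequestException:
            pass  # dropped connection: report the partial count so the caller can resume
        return written


    def download_with_resume(url: str, dest: str, max_retries: int = 5) -> None:
        resp = requests.get(url, stream=True, timeout=30)
        resp.raise_for_status()
        total = int(resp.headers.get("content-length", 0)) or None
        validator = resp.headers.get("etag") or resp.headers.get("last-modified")

        with open(dest, "wb") as out:
            received = _write_body(resp, out)
            attempts = 0
            while total and received < total and attempts < max_retries:
                attempts += 1
                headers = {"Range": f"bytes={received}-"}
                if validator:
                    headers["If-Range"] = validator  # resume only if the file is unchanged
                resp = requests.get(url, headers=headers, stream=True, timeout=30)
                resp.raise_for_status()
                if resp.status_code != 206:  # file changed or ranges unsupported: restart
                    out.seek(0)
                    out.truncate()
                    received = 0
                    total = int(resp.headers.get("content-length", 0)) or None
                    validator = resp.headers.get("etag") or resp.headers.get("last-modified")
                received += _write_body(resp, out)

        if total and received < total:
            os.remove(dest)
            raise RuntimeError(f"incomplete download: {received}/{total} bytes")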