pip 25.1.1__py3-none-any.whl → 25.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pip/__init__.py +3 -3
- pip/_internal/__init__.py +2 -2
- pip/_internal/build_env.py +186 -94
- pip/_internal/cache.py +17 -15
- pip/_internal/cli/autocompletion.py +13 -4
- pip/_internal/cli/base_command.py +18 -7
- pip/_internal/cli/cmdoptions.py +57 -80
- pip/_internal/cli/command_context.py +4 -3
- pip/_internal/cli/index_command.py +11 -9
- pip/_internal/cli/main.py +3 -2
- pip/_internal/cli/main_parser.py +4 -3
- pip/_internal/cli/parser.py +24 -20
- pip/_internal/cli/progress_bars.py +19 -12
- pip/_internal/cli/req_command.py +57 -33
- pip/_internal/cli/spinners.py +81 -5
- pip/_internal/commands/__init__.py +5 -3
- pip/_internal/commands/cache.py +18 -15
- pip/_internal/commands/check.py +1 -2
- pip/_internal/commands/completion.py +1 -2
- pip/_internal/commands/configuration.py +26 -18
- pip/_internal/commands/debug.py +8 -6
- pip/_internal/commands/download.py +6 -10
- pip/_internal/commands/freeze.py +2 -3
- pip/_internal/commands/hash.py +1 -2
- pip/_internal/commands/help.py +1 -2
- pip/_internal/commands/index.py +15 -9
- pip/_internal/commands/inspect.py +4 -4
- pip/_internal/commands/install.py +63 -53
- pip/_internal/commands/list.py +35 -26
- pip/_internal/commands/lock.py +4 -8
- pip/_internal/commands/search.py +14 -12
- pip/_internal/commands/show.py +14 -11
- pip/_internal/commands/uninstall.py +1 -2
- pip/_internal/commands/wheel.py +7 -13
- pip/_internal/configuration.py +40 -27
- pip/_internal/distributions/base.py +6 -4
- pip/_internal/distributions/installed.py +8 -4
- pip/_internal/distributions/sdist.py +33 -27
- pip/_internal/distributions/wheel.py +6 -4
- pip/_internal/exceptions.py +78 -42
- pip/_internal/index/collector.py +24 -29
- pip/_internal/index/package_finder.py +73 -64
- pip/_internal/index/sources.py +17 -14
- pip/_internal/locations/__init__.py +18 -16
- pip/_internal/locations/_distutils.py +12 -11
- pip/_internal/locations/_sysconfig.py +5 -4
- pip/_internal/locations/base.py +4 -3
- pip/_internal/main.py +2 -2
- pip/_internal/metadata/__init__.py +14 -7
- pip/_internal/metadata/_json.py +5 -4
- pip/_internal/metadata/base.py +22 -27
- pip/_internal/metadata/importlib/_compat.py +6 -4
- pip/_internal/metadata/importlib/_dists.py +20 -19
- pip/_internal/metadata/importlib/_envs.py +9 -6
- pip/_internal/metadata/pkg_resources.py +11 -14
- pip/_internal/models/direct_url.py +24 -21
- pip/_internal/models/format_control.py +5 -5
- pip/_internal/models/installation_report.py +4 -3
- pip/_internal/models/link.py +39 -34
- pip/_internal/models/pylock.py +27 -22
- pip/_internal/models/search_scope.py +6 -7
- pip/_internal/models/selection_prefs.py +3 -3
- pip/_internal/models/target_python.py +10 -9
- pip/_internal/models/wheel.py +12 -71
- pip/_internal/network/auth.py +20 -22
- pip/_internal/network/cache.py +28 -17
- pip/_internal/network/download.py +169 -141
- pip/_internal/network/lazy_wheel.py +15 -10
- pip/_internal/network/session.py +32 -27
- pip/_internal/network/utils.py +2 -2
- pip/_internal/network/xmlrpc.py +2 -2
- pip/_internal/operations/build/build_tracker.py +10 -8
- pip/_internal/operations/build/wheel.py +7 -6
- pip/_internal/operations/build/wheel_editable.py +7 -6
- pip/_internal/operations/check.py +21 -26
- pip/_internal/operations/freeze.py +12 -9
- pip/_internal/operations/install/wheel.py +49 -41
- pip/_internal/operations/prepare.py +42 -31
- pip/_internal/pyproject.py +7 -69
- pip/_internal/req/__init__.py +12 -12
- pip/_internal/req/constructors.py +68 -62
- pip/_internal/req/req_dependency_group.py +7 -11
- pip/_internal/req/req_file.py +32 -36
- pip/_internal/req/req_install.py +64 -170
- pip/_internal/req/req_set.py +4 -5
- pip/_internal/req/req_uninstall.py +20 -17
- pip/_internal/resolution/base.py +3 -3
- pip/_internal/resolution/legacy/resolver.py +21 -20
- pip/_internal/resolution/resolvelib/base.py +16 -13
- pip/_internal/resolution/resolvelib/candidates.py +49 -37
- pip/_internal/resolution/resolvelib/factory.py +72 -50
- pip/_internal/resolution/resolvelib/found_candidates.py +11 -9
- pip/_internal/resolution/resolvelib/provider.py +24 -20
- pip/_internal/resolution/resolvelib/reporter.py +26 -11
- pip/_internal/resolution/resolvelib/requirements.py +8 -6
- pip/_internal/resolution/resolvelib/resolver.py +41 -29
- pip/_internal/self_outdated_check.py +19 -9
- pip/_internal/utils/appdirs.py +1 -2
- pip/_internal/utils/compat.py +7 -1
- pip/_internal/utils/compatibility_tags.py +17 -16
- pip/_internal/utils/deprecation.py +11 -9
- pip/_internal/utils/direct_url_helpers.py +2 -2
- pip/_internal/utils/egg_link.py +6 -5
- pip/_internal/utils/entrypoints.py +3 -2
- pip/_internal/utils/filesystem.py +20 -5
- pip/_internal/utils/filetypes.py +4 -6
- pip/_internal/utils/glibc.py +6 -5
- pip/_internal/utils/hashes.py +9 -6
- pip/_internal/utils/logging.py +8 -5
- pip/_internal/utils/misc.py +37 -45
- pip/_internal/utils/packaging.py +3 -2
- pip/_internal/utils/retry.py +7 -4
- pip/_internal/utils/subprocess.py +20 -17
- pip/_internal/utils/temp_dir.py +10 -12
- pip/_internal/utils/unpacking.py +31 -4
- pip/_internal/utils/urls.py +1 -1
- pip/_internal/utils/virtualenv.py +3 -2
- pip/_internal/utils/wheel.py +3 -4
- pip/_internal/vcs/bazaar.py +26 -8
- pip/_internal/vcs/git.py +59 -24
- pip/_internal/vcs/mercurial.py +34 -11
- pip/_internal/vcs/subversion.py +27 -16
- pip/_internal/vcs/versioncontrol.py +56 -51
- pip/_internal/wheel_builder.py +30 -101
- pip/_vendor/README.rst +180 -0
- pip/_vendor/cachecontrol/LICENSE.txt +13 -0
- pip/_vendor/cachecontrol/__init__.py +1 -1
- pip/_vendor/certifi/LICENSE +20 -0
- pip/_vendor/certifi/__init__.py +1 -1
- pip/_vendor/certifi/cacert.pem +164 -261
- pip/_vendor/certifi/core.py +1 -32
- pip/_vendor/dependency_groups/LICENSE.txt +9 -0
- pip/_vendor/distlib/LICENSE.txt +284 -0
- pip/_vendor/distlib/__init__.py +2 -2
- pip/_vendor/distlib/scripts.py +1 -1
- pip/_vendor/distro/LICENSE +202 -0
- pip/_vendor/idna/LICENSE.md +31 -0
- pip/_vendor/msgpack/COPYING +14 -0
- pip/_vendor/msgpack/__init__.py +2 -2
- pip/_vendor/packaging/LICENSE +3 -0
- pip/_vendor/packaging/LICENSE.APACHE +177 -0
- pip/_vendor/packaging/LICENSE.BSD +23 -0
- pip/_vendor/pkg_resources/LICENSE +17 -0
- pip/_vendor/pkg_resources/__init__.py +1 -1
- pip/_vendor/platformdirs/LICENSE +21 -0
- pip/_vendor/platformdirs/api.py +1 -1
- pip/_vendor/platformdirs/macos.py +10 -8
- pip/_vendor/platformdirs/version.py +16 -3
- pip/_vendor/pygments/LICENSE +25 -0
- pip/_vendor/pygments/__init__.py +1 -1
- pip/_vendor/pyproject_hooks/LICENSE +21 -0
- pip/_vendor/requests/LICENSE +175 -0
- pip/_vendor/requests/__version__.py +2 -2
- pip/_vendor/requests/adapters.py +17 -40
- pip/_vendor/requests/compat.py +12 -0
- pip/_vendor/requests/models.py +3 -1
- pip/_vendor/requests/sessions.py +1 -1
- pip/_vendor/requests/utils.py +6 -16
- pip/_vendor/resolvelib/LICENSE +13 -0
- pip/_vendor/resolvelib/__init__.py +3 -3
- pip/_vendor/resolvelib/reporters.py +1 -1
- pip/_vendor/resolvelib/resolvers/__init__.py +4 -4
- pip/_vendor/resolvelib/resolvers/abstract.py +3 -3
- pip/_vendor/resolvelib/resolvers/resolution.py +96 -10
- pip/_vendor/rich/LICENSE +19 -0
- pip/_vendor/rich/__main__.py +12 -40
- pip/_vendor/rich/_inspect.py +1 -1
- pip/_vendor/rich/_ratio.py +1 -7
- pip/_vendor/rich/align.py +1 -7
- pip/_vendor/rich/box.py +1 -7
- pip/_vendor/rich/console.py +25 -20
- pip/_vendor/rich/control.py +1 -7
- pip/_vendor/rich/diagnose.py +1 -0
- pip/_vendor/rich/emoji.py +1 -6
- pip/_vendor/rich/live.py +32 -7
- pip/_vendor/rich/live_render.py +1 -7
- pip/_vendor/rich/logging.py +1 -1
- pip/_vendor/rich/panel.py +3 -4
- pip/_vendor/rich/progress.py +15 -15
- pip/_vendor/rich/spinner.py +7 -13
- pip/_vendor/rich/style.py +7 -11
- pip/_vendor/rich/syntax.py +24 -5
- pip/_vendor/rich/traceback.py +32 -17
- pip/_vendor/tomli/LICENSE +21 -0
- pip/_vendor/tomli/__init__.py +1 -1
- pip/_vendor/tomli/_parser.py +28 -21
- pip/_vendor/tomli/_re.py +8 -5
- pip/_vendor/tomli_w/LICENSE +21 -0
- pip/_vendor/truststore/LICENSE +21 -0
- pip/_vendor/truststore/__init__.py +1 -1
- pip/_vendor/truststore/_api.py +15 -7
- pip/_vendor/truststore/_openssl.py +3 -1
- pip/_vendor/urllib3/LICENSE.txt +21 -0
- pip/_vendor/vendor.txt +11 -12
- {pip-25.1.1.dist-info → pip-25.3.dist-info}/METADATA +32 -11
- {pip-25.1.1.dist-info → pip-25.3.dist-info}/RECORD +221 -192
- {pip-25.1.1.dist-info → pip-25.3.dist-info}/WHEEL +1 -2
- pip-25.3.dist-info/entry_points.txt +4 -0
- {pip-25.1.1.dist-info → pip-25.3.dist-info}/licenses/AUTHORS.txt +21 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/cachecontrol/LICENSE.txt +13 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/certifi/LICENSE +20 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/dependency_groups/LICENSE.txt +9 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/distlib/LICENSE.txt +284 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/distro/LICENSE +202 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/idna/LICENSE.md +31 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/msgpack/COPYING +14 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/packaging/LICENSE +3 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.APACHE +177 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.BSD +23 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/pkg_resources/LICENSE +17 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/platformdirs/LICENSE +21 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/pygments/LICENSE +25 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/pyproject_hooks/LICENSE +21 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/requests/LICENSE +175 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/resolvelib/LICENSE +13 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/rich/LICENSE +19 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/tomli/LICENSE +21 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/tomli_w/LICENSE +21 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/truststore/LICENSE +21 -0
- pip-25.3.dist-info/licenses/src/pip/_vendor/urllib3/LICENSE.txt +21 -0
- pip/_internal/operations/build/metadata_legacy.py +0 -73
- pip/_internal/operations/build/wheel_legacy.py +0 -118
- pip/_internal/operations/install/editable_legacy.py +0 -46
- pip/_internal/utils/setuptools_build.py +0 -147
- pip/_vendor/distlib/database.py +0 -1329
- pip/_vendor/distlib/index.py +0 -508
- pip/_vendor/distlib/locators.py +0 -1295
- pip/_vendor/distlib/manifest.py +0 -384
- pip/_vendor/distlib/markers.py +0 -162
- pip/_vendor/distlib/metadata.py +0 -1031
- pip/_vendor/distlib/version.py +0 -750
- pip/_vendor/distlib/wheel.py +0 -1100
- pip/_vendor/typing_extensions.py +0 -4584
- pip-25.1.1.dist-info/entry_points.txt +0 -3
- pip-25.1.1.dist-info/top_level.txt +0 -1
- {pip-25.1.1.dist-info → pip-25.3.dist-info}/licenses/LICENSE.txt +0 -0
pip/_internal/network/download.py CHANGED
@@ -1,35 +1,42 @@
 """Download files with progress indicators."""

+from __future__ import annotations
+
 import email.message
 import logging
 import mimetypes
 import os
+from collections.abc import Iterable, Mapping
+from dataclasses import dataclass
 from http import HTTPStatus
-from typing import BinaryIO
+from typing import BinaryIO

+from pip._vendor.requests import PreparedRequest
 from pip._vendor.requests.models import Response
+from pip._vendor.urllib3 import HTTPResponse as URLlib3Response
+from pip._vendor.urllib3._collections import HTTPHeaderDict
 from pip._vendor.urllib3.exceptions import ReadTimeoutError

-from pip._internal.cli.progress_bars import get_download_progress_renderer
+from pip._internal.cli.progress_bars import BarType, get_download_progress_renderer
 from pip._internal.exceptions import IncompleteDownloadError, NetworkConnectionError
 from pip._internal.models.index import PyPI
 from pip._internal.models.link import Link
-from pip._internal.network.cache import is_from_cache
-from pip._internal.network.session import PipSession
+from pip._internal.network.cache import SafeFileCache, is_from_cache
+from pip._internal.network.session import CacheControlAdapter, PipSession
 from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
 from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext

 logger = logging.getLogger(__name__)


-def _get_http_response_size(resp: Response) ->
+def _get_http_response_size(resp: Response) -> int | None:
     try:
         return int(resp.headers["content-length"])
     except (ValueError, KeyError, TypeError):
         return None


-def _get_http_response_etag_or_last_modified(resp: Response) ->
+def _get_http_response_etag_or_last_modified(resp: Response) -> str | None:
     """
     Return either the ETag or Last-Modified header (or None if neither exists).
     The return value can be used in an If-Range header.
@@ -37,12 +44,12 @@ def _get_http_response_etag_or_last_modified(resp: Response) -> Optional[str]:
     return resp.headers.get("etag", resp.headers.get("last-modified"))


-def
+def _log_download(
     resp: Response,
     link: Link,
-    progress_bar:
-    total_length:
-    range_start:
+    progress_bar: BarType,
+    total_length: int | None,
+    range_start: int | None = 0,
 ) -> Iterable[bytes]:
     if link.netloc == PyPI.file_storage_domain:
         url = link.show_url
@@ -119,7 +126,7 @@ def _get_http_response_filename(resp: Response, link: Link) -> str:
     content_disposition = resp.headers.get("content-disposition")
     if content_disposition:
         filename = parse_content_disposition(content_disposition, filename)
-    ext:
+    ext: str | None = splitext(filename)[1]
     if not ext:
         ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
     if ext:
@@ -131,35 +138,35 @@ def _get_http_response_filename(resp: Response, link: Link) -> str:
     return filename


-[old lines 134-155 not captured in this view]
+@dataclass
+class _FileDownload:
+    """Stores the state of a single link download."""
+
+    link: Link
+    output_file: BinaryIO
+    size: int | None
+    bytes_received: int = 0
+    reattempts: int = 0
+
+    def is_incomplete(self) -> bool:
+        return bool(self.size is not None and self.bytes_received < self.size)
+
+    def write_chunk(self, data: bytes) -> None:
+        self.bytes_received += len(data)
+        self.output_file.write(data)
+
+    def reset_file(self) -> None:
+        """Delete any saved data and reset progress to zero."""
+        self.output_file.seek(0)
+        self.output_file.truncate()
+        self.bytes_received = 0


 class Downloader:
     def __init__(
         self,
         session: PipSession,
-        progress_bar:
+        progress_bar: BarType,
         resume_retries: int,
     ) -> None:
         assert (
@@ -169,146 +176,167 @@ class Downloader:
         self._progress_bar = progress_bar
         self._resume_retries = resume_retries

-[old lines 172-178 not captured in this view]
-        content_type = resp.headers.get("Content-Type", "")
+    def batch(
+        self, links: Iterable[Link], location: str
+    ) -> Iterable[tuple[Link, tuple[str, str]]]:
+        """Convenience method to download multiple links."""
+        for link in links:
+            filepath, content_type = self(link, location)
+            yield link, (filepath, content_type)

-[old lines 181-182 not captured in this view]
+    def __call__(self, link: Link, location: str) -> tuple[str, str]:
+        """Download a link and save it under location."""
+        resp = self._http_get(link)
+        download_size = _get_http_response_size(resp)

+        filepath = os.path.join(location, _get_http_response_filename(resp, link))
         with open(filepath, "wb") as content_file:
-[old lines 185-188 not captured in this view]
-            if total_length and bytes_received < total_length:
-                self._attempt_resume(
-                    resp, link, content_file, total_length, bytes_received
-                )
+            download = _FileDownload(link, content_file, download_size)
+            self._process_response(download, resp)
+            if download.is_incomplete():
+                self._attempt_resumes_or_redownloads(download, resp)

+        content_type = resp.headers.get("Content-Type", "")
         return filepath, content_type

-    def _process_response(
-[old lines 197-203 not captured in this view]
-        """Process the response and write the chunks to the file."""
-        chunks = _prepare_download(
-            resp, link, self._progress_bar, total_length, range_start=bytes_received
-        )
-        return self._write_chunks_to_file(
-            chunks, content_file, allow_partial=bool(total_length)
+    def _process_response(self, download: _FileDownload, resp: Response) -> None:
+        """Download and save chunks from a response."""
+        chunks = _log_download(
+            resp,
+            download.link,
+            self._progress_bar,
+            download.size,
+            range_start=download.bytes_received,
         )
-
-    def _write_chunks_to_file(
-        self, chunks: Iterable[bytes], content_file: BinaryIO, *, allow_partial: bool
-    ) -> int:
-        """Write the chunks to the file and return the number of bytes received."""
-        bytes_received = 0
         try:
             for chunk in chunks:
-                content_file.write(chunk)
+                download.write_chunk(chunk)
         except ReadTimeoutError as e:
-            # If
-            if
+            # If the download size is not known, then give up downloading the file.
+            if download.size is None:
                 raise e

-            # Ensuring bytes_received is returned to attempt resume
             logger.warning("Connection timed out while downloading.")

-[old lines 229-230 not captured in this view]
-    def _attempt_resume(
-        self,
-        resp: Response,
-        link: Link,
-        content_file: BinaryIO,
-        total_length: Optional[int],
-        bytes_received: int,
+    def _attempt_resumes_or_redownloads(
+        self, download: _FileDownload, first_resp: Response
     ) -> None:
-        """Attempt to resume the download if connection was dropped."""
-        etag_or_last_modified = _get_http_response_etag_or_last_modified(resp)
-        attempts_left = self._resume_retries
-        while total_length and attempts_left and bytes_received < total_length:
-            attempts_left -= 1
+        """Attempt to resume/restart the download if connection was dropped."""

+        while download.reattempts < self._resume_retries and download.is_incomplete():
+            assert download.size is not None
+            download.reattempts += 1
             logger.warning(
                 "Attempting to resume incomplete download (%s/%s, attempt %d)",
-                format_size(bytes_received),
-                format_size(
+                format_size(download.bytes_received),
+                format_size(download.size),
+                download.reattempts,
             )

             try:
-                resume_resp = _http_get_download(
-                    self._session,
-                    link,
-                    range_start=bytes_received,
-                    if_range=etag_or_last_modified,
-                )
+                resume_resp = self._http_get_resume(download, should_match=first_resp)
                 # Fallback: if the server responded with 200 (i.e., the file has
                 # since been modified or range requests are unsupported) or any
                 # other unexpected status, restart the download from the beginning.
                 must_restart = resume_resp.status_code != HTTPStatus.PARTIAL_CONTENT
                 if must_restart:
-[old lines 267-269 not captured in this view]
+                    download.reset_file()
+                    download.size = _get_http_response_size(resume_resp)
+                    first_resp = resume_resp

-                    resume_resp, link, content_file, bytes_received, total_length
-                )
+                self._process_response(download, resume_resp)
             except (ConnectionError, ReadTimeoutError, OSError):
                 continue

         # No more resume attempts. Raise an error if the download is still incomplete.
-        if
-            os.remove(
-            raise IncompleteDownloadError(
-                link, bytes_received, total_length, retries=self._resume_retries
-            )
+        if download.is_incomplete():
+            os.remove(download.output_file.name)
+            raise IncompleteDownloadError(download)

-[old lines 284-287 not captured in this view]
-    ) -> Tuple[int, Optional[int], Optional[str]]:
-        """Reset the download state to restart downloading from the beginning."""
-        content_file.seek(0)
-        content_file.truncate()
-        bytes_received = 0
-        total_length = _get_http_response_size(resp)
-        etag_or_last_modified = _get_http_response_etag_or_last_modified(resp)
+        # If we successfully completed the download via resume, manually cache it
+        # as a complete response to enable future caching
+        if download.reattempts > 0:
+            self._cache_resumed_download(download, first_resp)

+    def _cache_resumed_download(
+        self, download: _FileDownload, original_response: Response
+    ) -> None:
+        """
+        Manually cache a file that was successfully downloaded via resume retries.
+
+        cachecontrol doesn't cache 206 (Partial Content) responses, since they
+        are not complete files. This method manually adds the final file to the
+        cache as though it was downloaded in a single request, so that future
+        requests can use the cache.
+        """
+        url = download.link.url_without_fragment
+        adapter = self._session.get_adapter(url)
+
+        # Check if the adapter is the CacheControlAdapter (i.e. caching is enabled)
+        if not isinstance(adapter, CacheControlAdapter):
+            logger.debug(
+                "Skipping resume download caching: no cache controller for %s", url
+            )
+            return
+
+        # Check SafeFileCache is being used
+        assert isinstance(
+            adapter.cache, SafeFileCache
+        ), "separate body cache not in use!"
+
+        synthetic_request = PreparedRequest()
+        synthetic_request.prepare(method="GET", url=url, headers={})
+
+        synthetic_response_headers = HTTPHeaderDict()
+        for key, value in original_response.headers.items():
+            if key.lower() not in ["content-range", "content-length"]:
+                synthetic_response_headers[key] = value
+        synthetic_response_headers["content-length"] = str(download.size)
+
+        synthetic_response = URLlib3Response(
+            body="",
+            headers=synthetic_response_headers,
+            status=200,
+            preload_content=False,
+        )

+        # Save metadata and then stream the file contents to cache.
+        cache_url = adapter.controller.cache_url(url)
+        metadata_blob = adapter.controller.serializer.dumps(
+            synthetic_request, synthetic_response, b""
+        )
+        adapter.cache.set(cache_url, metadata_blob)
+        download.output_file.flush()
+        with open(download.output_file.name, "rb") as f:
+            adapter.cache.set_body_from_io(cache_url, f)

-[old lines 299-301 not captured in this view]
-        session: PipSession,
-        progress_bar: str,
-        resume_retries: int,
-    ) -> None:
-        self._downloader = Downloader(session, progress_bar, resume_retries)
+        logger.debug(
+            "Cached resumed download as complete response for future use: %s", url
+        )

-[old lines 308-314 not captured in this view]
+    def _http_get_resume(
+        self, download: _FileDownload, should_match: Response
+    ) -> Response:
+        """Issue a HTTP range request to resume the download."""
+        # To better understand the download resumption logic, see the mdn web docs:
+        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/Range_requests
+        headers = HEADERS.copy()
+        headers["Range"] = f"bytes={download.bytes_received}-"
+        # If possible, use a conditional range request to avoid corrupted
+        # downloads caused by the remote file changing in-between.
+        if identifier := _get_http_response_etag_or_last_modified(should_match):
+            headers["If-Range"] = identifier
+        return self._http_get(download.link, headers)
+
+    def _http_get(self, link: Link, headers: Mapping[str, str] = HEADERS) -> Response:
+        target_url = link.url_without_fragment
+        try:
+            resp = self._session.get(target_url, headers=headers, stream=True)
+            raise_for_status(resp)
+        except NetworkConnectionError as e:
+            assert e.response is not None
+            logger.critical(
+                "HTTP error %s while getting %s", e.response.status_code, link
+            )
+            raise
+        return resp
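The new download path above centers on two ideas: resume an interrupted transfer with a ranged GET (`Range: bytes=N-`, plus `If-Range` carrying the ETag or Last-Modified of the response being resumed), and restart from scratch whenever the server answers anything other than 206 Partial Content. The following sketch mirrors that resume-or-restart loop using plain requests against an arbitrary URL; it is an illustration of the pattern only, not pip's vendored code, and the helper names (`download_with_resume`, `_write_chunks`) are invented for the example.

# Illustrative sketch only: resume-or-restart download loop with plain `requests`.
import requests


def _write_chunks(resp: requests.Response, f) -> int:
    """Write the body to f and return bytes written; swallow mid-stream drops."""
    received = 0
    try:
        for chunk in resp.iter_content(64 * 1024):
            f.write(chunk)
            received += len(chunk)
    except requests.RequestException:
        pass  # connection dropped; the caller decides whether to resume
    return received


def download_with_resume(url: str, dest: str, max_retries: int = 5) -> None:
    with open(dest, "wb") as f:
        resp = requests.get(url, stream=True)
        resp.raise_for_status()
        size = int(resp.headers.get("Content-Length", 0)) or None
        validator = resp.headers.get("ETag", resp.headers.get("Last-Modified"))
        received = _write_chunks(resp, f)

        attempts = 0
        while size is not None and received < size and attempts < max_retries:
            attempts += 1
            headers = {"Range": f"bytes={received}-"}
            if validator:
                # Only serve the requested range if the remote file is unchanged;
                # otherwise the server replies 200 with the full (new) body.
                headers["If-Range"] = validator
            resp = requests.get(url, headers=headers, stream=True)
            if resp.status_code != 206:  # not Partial Content: restart cleanly
                f.seek(0)
                f.truncate()
                received = 0
                size = int(resp.headers.get("Content-Length", 0)) or None
                validator = resp.headers.get("ETag", resp.headers.get("Last-Modified"))
            received += _write_chunks(resp, f)

        if size is not None and received < size:
            raise RuntimeError(f"incomplete download: {received}/{size} bytes")

pip's actual implementation additionally re-inserts a successfully resumed file into its HTTP cache (see `_cache_resumed_download` above), since cachecontrol will not cache 206 responses on its own.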
pip/_internal/network/lazy_wheel.py CHANGED
@@ -1,14 +1,17 @@
 """Lazy ZIP over HTTP"""

+from __future__ import annotations
+
 __all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]

 from bisect import bisect_left, bisect_right
+from collections.abc import Generator
 from contextlib import contextmanager
 from tempfile import NamedTemporaryFile
-from typing import Any
+from typing import Any
 from zipfile import BadZipFile, ZipFile

-from pip._vendor.packaging.utils import
+from pip._vendor.packaging.utils import NormalizedName
 from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

 from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
@@ -20,7 +23,9 @@ class HTTPRangeRequestUnsupported(Exception):
     pass


-def dist_from_wheel_url(
+def dist_from_wheel_url(
+    name: NormalizedName, url: str, session: PipSession
+) -> BaseDistribution:
     """Return a distribution object from the given wheel URL.

     This uses HTTP range requests to only fetch the portion of the wheel
@@ -34,7 +39,7 @@ def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistrib
         wheel = MemoryWheel(zf.name, zf)  # type: ignore
         # After context manager exit, wheel.name
         # is an invalid file by intention.
-        return get_wheel_distribution(wheel,
+        return get_wheel_distribution(wheel, name)


 class LazyZipOverHTTP:
@@ -56,8 +61,8 @@ class LazyZipOverHTTP:
         self._length = int(head.headers["Content-Length"])
         self._file = NamedTemporaryFile()
         self.truncate(self._length)
-        self._left:
-        self._right:
+        self._left: list[int] = []
+        self._right: list[int] = []
         if "bytes" not in head.headers.get("Accept-Ranges", "none"):
             raise HTTPRangeRequestUnsupported("range request is not supported")
         self._check_zip()
@@ -117,7 +122,7 @@ class LazyZipOverHTTP:
         """Return the current position."""
         return self._file.tell()

-    def truncate(self, size:
+    def truncate(self, size: int | None = None) -> int:
         """Resize the stream to the given size in bytes.

         If size is unspecified resize to the current position.
@@ -131,7 +136,7 @@ class LazyZipOverHTTP:
         """Return False."""
         return False

-    def __enter__(self) ->
+    def __enter__(self) -> LazyZipOverHTTP:
         self._file.__enter__()
         return self

@@ -166,7 +171,7 @@ class LazyZipOverHTTP:
             break

     def _stream_response(
-        self, start: int, end: int, base_headers:
+        self, start: int, end: int, base_headers: dict[str, str] = HEADERS
     ) -> Response:
         """Return HTTP response to a range request from start to end."""
         headers = base_headers.copy()
@@ -177,7 +182,7 @@ class LazyZipOverHTTP:

     def _merge(
         self, start: int, end: int, left: int, right: int
-    ) -> Generator[
+    ) -> Generator[tuple[int, int], None, None]:
         """Return a generator of intervals to be fetched.

         Args:
pip/_internal/network/session.py CHANGED
@@ -2,6 +2,8 @@
 network request configuration and behavior.
 """

+from __future__ import annotations
+
 import email.utils
 import functools
 import io
@@ -16,16 +18,11 @@ import subprocess
 import sys
 import urllib.parse
 import warnings
+from collections.abc import Generator, Mapping, Sequence
 from typing import (
     TYPE_CHECKING,
     Any,
-    Dict,
-    Generator,
-    List,
-    Mapping,
     Optional,
-    Sequence,
-    Tuple,
     Union,
 )

@@ -54,18 +51,19 @@ if TYPE_CHECKING:
     from ssl import SSLContext

     from pip._vendor.urllib3.poolmanager import PoolManager
+    from pip._vendor.urllib3.proxymanager import ProxyManager


 logger = logging.getLogger(__name__)

-SecureOrigin =
+SecureOrigin = tuple[str, str, Optional[Union[int, str]]]


 # Ignore warning raised when using --trusted-host.
 warnings.filterwarnings("ignore", category=InsecureRequestWarning)


-SECURE_ORIGINS:
+SECURE_ORIGINS: list[SecureOrigin] = [
     # protocol, hostname, port
     # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
     ("https", "*", "*"),
@@ -112,7 +110,7 @@ def user_agent() -> str:
     """
     Return a string representing the user agent.
     """
-    data:
+    data: dict[str, Any] = {
         "installer": {"name": "pip", "version": __version__},
         "python": platform.python_version(),
         "implementation": {
@@ -140,7 +138,7 @@ def user_agent() -> str:
         from pip._vendor import distro

         linux_distribution = distro.name(), distro.version(), distro.codename()
-        distro_infos:
+        distro_infos: dict[str, Any] = dict(
             filter(
                 lambda x: x[1],
                 zip(["name", "version", "id"], linux_distribution),
@@ -214,10 +212,10 @@ class LocalFSAdapter(BaseAdapter):
         self,
         request: PreparedRequest,
         stream: bool = False,
-        timeout:
-        verify:
-        cert:
-        proxies:
+        timeout: float | tuple[float, float] | None = None,
+        verify: bool | str = True,
+        cert: str | tuple[str, str] | None = None,
+        proxies: Mapping[str, str] | None = None,
     ) -> Response:
         pathname = url_to_path(request.url)

@@ -264,7 +262,7 @@ class _SSLContextAdapterMixin:
     def __init__(
         self,
         *,
-        ssl_context:
+        ssl_context: SSLContext | None = None,
         **kwargs: Any,
     ) -> None:
         self._ssl_context = ssl_context
@@ -276,7 +274,7 @@ class _SSLContextAdapterMixin:
         maxsize: int,
         block: bool = DEFAULT_POOLBLOCK,
         **pool_kwargs: Any,
-    ) ->
+    ) -> PoolManager:
         if self._ssl_context is not None:
             pool_kwargs.setdefault("ssl_context", self._ssl_context)
         return super().init_poolmanager(  # type: ignore[misc]
@@ -286,6 +284,13 @@ class _SSLContextAdapterMixin:
             **pool_kwargs,
         )

+    def proxy_manager_for(self, proxy: str, **proxy_kwargs: Any) -> ProxyManager:
+        # Proxy manager replaces the pool manager, so inject our SSL
+        # context here too. https://github.com/pypa/pip/issues/13288
+        if self._ssl_context is not None:
+            proxy_kwargs.setdefault("ssl_context", self._ssl_context)
+        return super().proxy_manager_for(proxy, **proxy_kwargs)  # type: ignore[misc]
+

 class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
     pass
@@ -300,8 +305,8 @@ class InsecureHTTPAdapter(HTTPAdapter):
         self,
         conn: ConnectionPool,
         url: str,
-        verify:
-        cert:
+        verify: bool | str,
+        cert: str | tuple[str, str] | None,
     ) -> None:
         super().cert_verify(conn=conn, url=url, verify=False, cert=cert)

@@ -311,23 +316,23 @@ class InsecureCacheControlAdapter(CacheControlAdapter):
         self,
         conn: ConnectionPool,
         url: str,
-        verify:
-        cert:
+        verify: bool | str,
+        cert: str | tuple[str, str] | None,
     ) -> None:
         super().cert_verify(conn=conn, url=url, verify=False, cert=cert)


 class PipSession(requests.Session):
-    timeout:
+    timeout: int | None = None

     def __init__(
         self,
         *args: Any,
         retries: int = 0,
-        cache:
+        cache: str | None = None,
         trusted_hosts: Sequence[str] = (),
-        index_urls:
-        ssl_context:
+        index_urls: list[str] | None = None,
+        ssl_context: SSLContext | None = None,
         **kwargs: Any,
     ) -> None:
         """
@@ -338,7 +343,7 @@ class PipSession(requests.Session):

         # Namespace the attribute with "pip_" just in case to prevent
         # possible conflicts with the base class.
-        self.pip_trusted_origins:
+        self.pip_trusted_origins: list[tuple[str, int | None]] = []
         self.pip_proxy = None

         # Attach our User Agent to the request
@@ -401,7 +406,7 @@ class PipSession(requests.Session):
         for host in trusted_hosts:
             self.add_trusted_host(host, suppress_logging=True)

-    def update_index_urls(self, new_index_urls:
+    def update_index_urls(self, new_index_urls: list[str]) -> None:
         """
         :param new_index_urls: New index urls to update the authentication
             handler with.
@@ -409,7 +414,7 @@ class PipSession(requests.Session):
         self.auth.index_urls = new_index_urls

     def add_trusted_host(
-        self, host: str, source:
+        self, host: str, source: str | None = None, suppress_logging: bool = False
     ) -> None:
         """
         :param host: It is okay to provide a host that has previously been