pip 25.1-py3-none-any.whl → 25.2-py3-none-any.whl
This diff compares two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
- pip/__init__.py +3 -3
- pip/_internal/__init__.py +2 -2
- pip/_internal/build_env.py +118 -94
- pip/_internal/cache.py +16 -14
- pip/_internal/cli/autocompletion.py +13 -4
- pip/_internal/cli/base_command.py +18 -7
- pip/_internal/cli/cmdoptions.py +14 -9
- pip/_internal/cli/command_context.py +4 -3
- pip/_internal/cli/index_command.py +11 -9
- pip/_internal/cli/main.py +3 -2
- pip/_internal/cli/main_parser.py +4 -3
- pip/_internal/cli/parser.py +26 -22
- pip/_internal/cli/progress_bars.py +19 -12
- pip/_internal/cli/req_command.py +16 -12
- pip/_internal/cli/spinners.py +81 -5
- pip/_internal/commands/__init__.py +5 -3
- pip/_internal/commands/cache.py +18 -15
- pip/_internal/commands/check.py +1 -2
- pip/_internal/commands/completion.py +1 -2
- pip/_internal/commands/configuration.py +26 -18
- pip/_internal/commands/debug.py +8 -6
- pip/_internal/commands/download.py +2 -3
- pip/_internal/commands/freeze.py +2 -3
- pip/_internal/commands/hash.py +1 -2
- pip/_internal/commands/help.py +1 -2
- pip/_internal/commands/index.py +15 -9
- pip/_internal/commands/inspect.py +4 -4
- pip/_internal/commands/install.py +45 -40
- pip/_internal/commands/list.py +35 -26
- pip/_internal/commands/lock.py +1 -2
- pip/_internal/commands/search.py +14 -12
- pip/_internal/commands/show.py +14 -11
- pip/_internal/commands/uninstall.py +1 -2
- pip/_internal/commands/wheel.py +2 -3
- pip/_internal/configuration.py +39 -25
- pip/_internal/distributions/base.py +6 -4
- pip/_internal/distributions/installed.py +8 -4
- pip/_internal/distributions/sdist.py +20 -13
- pip/_internal/distributions/wheel.py +6 -4
- pip/_internal/exceptions.py +58 -39
- pip/_internal/index/collector.py +24 -29
- pip/_internal/index/package_finder.py +70 -61
- pip/_internal/index/sources.py +17 -14
- pip/_internal/locations/__init__.py +18 -16
- pip/_internal/locations/_distutils.py +12 -11
- pip/_internal/locations/_sysconfig.py +5 -4
- pip/_internal/locations/base.py +4 -3
- pip/_internal/main.py +2 -2
- pip/_internal/metadata/__init__.py +8 -6
- pip/_internal/metadata/_json.py +5 -4
- pip/_internal/metadata/base.py +22 -27
- pip/_internal/metadata/importlib/_compat.py +6 -4
- pip/_internal/metadata/importlib/_dists.py +12 -17
- pip/_internal/metadata/importlib/_envs.py +9 -6
- pip/_internal/metadata/pkg_resources.py +11 -14
- pip/_internal/models/direct_url.py +24 -21
- pip/_internal/models/format_control.py +5 -5
- pip/_internal/models/installation_report.py +4 -3
- pip/_internal/models/link.py +39 -34
- pip/_internal/models/pylock.py +27 -22
- pip/_internal/models/search_scope.py +6 -7
- pip/_internal/models/selection_prefs.py +3 -3
- pip/_internal/models/target_python.py +10 -9
- pip/_internal/models/wheel.py +7 -5
- pip/_internal/network/auth.py +20 -22
- pip/_internal/network/cache.py +22 -6
- pip/_internal/network/download.py +169 -141
- pip/_internal/network/lazy_wheel.py +10 -7
- pip/_internal/network/session.py +32 -27
- pip/_internal/network/utils.py +2 -2
- pip/_internal/network/xmlrpc.py +2 -2
- pip/_internal/operations/build/build_tracker.py +10 -8
- pip/_internal/operations/build/wheel.py +3 -2
- pip/_internal/operations/build/wheel_editable.py +3 -2
- pip/_internal/operations/build/wheel_legacy.py +9 -8
- pip/_internal/operations/check.py +21 -26
- pip/_internal/operations/freeze.py +12 -9
- pip/_internal/operations/install/editable_legacy.py +5 -3
- pip/_internal/operations/install/wheel.py +53 -44
- pip/_internal/operations/prepare.py +35 -30
- pip/_internal/pyproject.py +7 -10
- pip/_internal/req/__init__.py +12 -10
- pip/_internal/req/constructors.py +33 -31
- pip/_internal/req/req_dependency_group.py +9 -8
- pip/_internal/req/req_file.py +32 -35
- pip/_internal/req/req_install.py +37 -34
- pip/_internal/req/req_set.py +4 -5
- pip/_internal/req/req_uninstall.py +20 -17
- pip/_internal/resolution/base.py +3 -3
- pip/_internal/resolution/legacy/resolver.py +21 -20
- pip/_internal/resolution/resolvelib/base.py +16 -13
- pip/_internal/resolution/resolvelib/candidates.py +29 -26
- pip/_internal/resolution/resolvelib/factory.py +41 -50
- pip/_internal/resolution/resolvelib/found_candidates.py +11 -9
- pip/_internal/resolution/resolvelib/provider.py +15 -20
- pip/_internal/resolution/resolvelib/reporter.py +5 -3
- pip/_internal/resolution/resolvelib/requirements.py +8 -6
- pip/_internal/resolution/resolvelib/resolver.py +39 -23
- pip/_internal/self_outdated_check.py +8 -6
- pip/_internal/utils/appdirs.py +1 -2
- pip/_internal/utils/compat.py +7 -1
- pip/_internal/utils/compatibility_tags.py +17 -16
- pip/_internal/utils/deprecation.py +11 -9
- pip/_internal/utils/direct_url_helpers.py +2 -2
- pip/_internal/utils/egg_link.py +6 -5
- pip/_internal/utils/entrypoints.py +3 -2
- pip/_internal/utils/filesystem.py +8 -5
- pip/_internal/utils/filetypes.py +4 -6
- pip/_internal/utils/glibc.py +6 -5
- pip/_internal/utils/hashes.py +9 -6
- pip/_internal/utils/logging.py +8 -5
- pip/_internal/utils/misc.py +54 -44
- pip/_internal/utils/packaging.py +3 -2
- pip/_internal/utils/retry.py +7 -4
- pip/_internal/utils/setuptools_build.py +12 -10
- pip/_internal/utils/subprocess.py +20 -17
- pip/_internal/utils/temp_dir.py +10 -12
- pip/_internal/utils/unpacking.py +6 -4
- pip/_internal/utils/urls.py +1 -1
- pip/_internal/utils/virtualenv.py +3 -2
- pip/_internal/utils/wheel.py +3 -4
- pip/_internal/vcs/bazaar.py +26 -8
- pip/_internal/vcs/git.py +59 -24
- pip/_internal/vcs/mercurial.py +34 -11
- pip/_internal/vcs/subversion.py +27 -16
- pip/_internal/vcs/versioncontrol.py +56 -51
- pip/_internal/wheel_builder.py +14 -12
- pip/_vendor/cachecontrol/__init__.py +1 -1
- pip/_vendor/certifi/__init__.py +1 -1
- pip/_vendor/certifi/cacert.pem +102 -221
- pip/_vendor/certifi/core.py +1 -32
- pip/_vendor/dependency_groups/_implementation.py +7 -11
- pip/_vendor/distlib/__init__.py +2 -2
- pip/_vendor/distlib/scripts.py +1 -1
- pip/_vendor/msgpack/__init__.py +2 -2
- pip/_vendor/pkg_resources/__init__.py +1 -1
- pip/_vendor/platformdirs/version.py +2 -2
- pip/_vendor/pygments/__init__.py +1 -1
- pip/_vendor/requests/__version__.py +2 -2
- pip/_vendor/requests/compat.py +12 -0
- pip/_vendor/requests/models.py +3 -1
- pip/_vendor/requests/utils.py +6 -16
- pip/_vendor/resolvelib/__init__.py +3 -3
- pip/_vendor/resolvelib/reporters.py +1 -1
- pip/_vendor/resolvelib/resolvers/__init__.py +4 -4
- pip/_vendor/resolvelib/resolvers/resolution.py +91 -10
- pip/_vendor/rich/__main__.py +12 -40
- pip/_vendor/rich/_inspect.py +1 -1
- pip/_vendor/rich/_ratio.py +1 -7
- pip/_vendor/rich/align.py +1 -7
- pip/_vendor/rich/box.py +1 -7
- pip/_vendor/rich/console.py +25 -20
- pip/_vendor/rich/control.py +1 -7
- pip/_vendor/rich/diagnose.py +1 -0
- pip/_vendor/rich/emoji.py +1 -6
- pip/_vendor/rich/live.py +32 -7
- pip/_vendor/rich/live_render.py +1 -7
- pip/_vendor/rich/logging.py +1 -1
- pip/_vendor/rich/panel.py +3 -4
- pip/_vendor/rich/progress.py +15 -15
- pip/_vendor/rich/spinner.py +7 -13
- pip/_vendor/rich/syntax.py +24 -5
- pip/_vendor/rich/traceback.py +32 -17
- pip/_vendor/truststore/_api.py +1 -1
- pip/_vendor/vendor.txt +10 -11
- {pip-25.1.dist-info → pip-25.2.dist-info}/METADATA +26 -4
- {pip-25.1.dist-info → pip-25.2.dist-info}/RECORD +194 -181
- {pip-25.1.dist-info → pip-25.2.dist-info}/WHEEL +1 -1
- {pip-25.1.dist-info → pip-25.2.dist-info}/licenses/AUTHORS.txt +12 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/cachecontrol/LICENSE.txt +13 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/certifi/LICENSE +20 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/dependency_groups/LICENSE.txt +9 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/distlib/LICENSE.txt +284 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/distro/LICENSE +202 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/idna/LICENSE.md +31 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/msgpack/COPYING +14 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE +3 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.APACHE +177 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/packaging/LICENSE.BSD +23 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/pkg_resources/LICENSE +17 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/platformdirs/LICENSE +21 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/pygments/LICENSE +25 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/pyproject_hooks/LICENSE +21 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/requests/LICENSE +175 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/resolvelib/LICENSE +13 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/rich/LICENSE +19 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/tomli/LICENSE +21 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/tomli/LICENSE-HEADER +3 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/tomli_w/LICENSE +21 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/truststore/LICENSE +21 -0
- pip-25.2.dist-info/licenses/src/pip/_vendor/urllib3/LICENSE.txt +21 -0
- pip/_vendor/distlib/database.py +0 -1329
- pip/_vendor/distlib/index.py +0 -508
- pip/_vendor/distlib/locators.py +0 -1295
- pip/_vendor/distlib/manifest.py +0 -384
- pip/_vendor/distlib/markers.py +0 -162
- pip/_vendor/distlib/metadata.py +0 -1031
- pip/_vendor/distlib/version.py +0 -750
- pip/_vendor/distlib/wheel.py +0 -1100
- pip/_vendor/typing_extensions.py +0 -4584
- {pip-25.1.dist-info → pip-25.2.dist-info}/entry_points.txt +0 -0
- {pip-25.1.dist-info → pip-25.2.dist-info}/licenses/LICENSE.txt +0 -0
- {pip-25.1.dist-info → pip-25.2.dist-info}/top_level.txt +0 -0
pip/_vendor/distlib/wheel.py
DELETED
@@ -1,1100 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2013-2023 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-from __future__ import unicode_literals
-
-import base64
-import codecs
-import datetime
-from email import message_from_file
-import hashlib
-import json
-import logging
-import os
-import posixpath
-import re
-import shutil
-import sys
-import tempfile
-import zipfile
-
-from . import __version__, DistlibException
-from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
-from .database import InstalledDistribution
-from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME
-from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base,
-                   read_exports, tempdir, get_platform)
-from .version import NormalizedVersion, UnsupportedVersionError
-
-logger = logging.getLogger(__name__)
-
-cache = None  # created when needed
-
-if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
-    IMP_PREFIX = 'pp'
-elif sys.platform.startswith('java'):  # pragma: no cover
-    IMP_PREFIX = 'jy'
-elif sys.platform == 'cli':  # pragma: no cover
-    IMP_PREFIX = 'ip'
-else:
-    IMP_PREFIX = 'cp'
-
-VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
-if not VER_SUFFIX:  # pragma: no cover
-    VER_SUFFIX = '%s%s' % sys.version_info[:2]
-PYVER = 'py' + VER_SUFFIX
-IMPVER = IMP_PREFIX + VER_SUFFIX
-
-ARCH = get_platform().replace('-', '_').replace('.', '_')
-
-ABI = sysconfig.get_config_var('SOABI')
-if ABI and ABI.startswith('cpython-'):
-    ABI = ABI.replace('cpython-', 'cp').split('-')[0]
-else:
-
-    def _derive_abi():
-        parts = ['cp', VER_SUFFIX]
-        if sysconfig.get_config_var('Py_DEBUG'):
-            parts.append('d')
-        if IMP_PREFIX == 'cp':
-            vi = sys.version_info[:2]
-            if vi < (3, 8):
-                wpm = sysconfig.get_config_var('WITH_PYMALLOC')
-                if wpm is None:
-                    wpm = True
-                if wpm:
-                    parts.append('m')
-                if vi < (3, 3):
-                    us = sysconfig.get_config_var('Py_UNICODE_SIZE')
-                    if us == 4 or (us is None and sys.maxunicode == 0x10FFFF):
-                        parts.append('u')
-        return ''.join(parts)
-
-    ABI = _derive_abi()
-    del _derive_abi
-
-FILENAME_RE = re.compile(
-    r'''
-(?P<nm>[^-]+)
--(?P<vn>\d+[^-]*)
-(-(?P<bn>\d+[^-]*))?
--(?P<py>\w+\d+(\.\w+\d+)*)
--(?P<bi>\w+)
--(?P<ar>\w+(\.\w+)*)
-\.whl$
-''', re.IGNORECASE | re.VERBOSE)
-
-NAME_VERSION_RE = re.compile(r'''
-(?P<nm>[^-]+)
--(?P<vn>\d+[^-]*)
-(-(?P<bn>\d+[^-]*))?$
-''', re.IGNORECASE | re.VERBOSE)
-
-SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
-SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
-SHEBANG_PYTHON = b'#!python'
-SHEBANG_PYTHONW = b'#!pythonw'
-
-if os.sep == '/':
-    to_posix = lambda o: o
-else:
-    to_posix = lambda o: o.replace(os.sep, '/')
-
-if sys.version_info[0] < 3:
-    import imp
-else:
-    imp = None
-    import importlib.machinery
-    import importlib.util
-
-
-def _get_suffixes():
-    if imp:
-        return [s[0] for s in imp.get_suffixes()]
-    else:
-        return importlib.machinery.EXTENSION_SUFFIXES
-
-
-def _load_dynamic(name, path):
-    # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
-    if imp:
-        return imp.load_dynamic(name, path)
-    else:
-        spec = importlib.util.spec_from_file_location(name, path)
-        module = importlib.util.module_from_spec(spec)
-        sys.modules[name] = module
-        spec.loader.exec_module(module)
-        return module
-
-
-class Mounter(object):
-
-    def __init__(self):
-        self.impure_wheels = {}
-        self.libs = {}
-
-    def add(self, pathname, extensions):
-        self.impure_wheels[pathname] = extensions
-        self.libs.update(extensions)
-
-    def remove(self, pathname):
-        extensions = self.impure_wheels.pop(pathname)
-        for k, v in extensions:
-            if k in self.libs:
-                del self.libs[k]
-
-    def find_module(self, fullname, path=None):
-        if fullname in self.libs:
-            result = self
-        else:
-            result = None
-        return result
-
-    def load_module(self, fullname):
-        if fullname in sys.modules:
-            result = sys.modules[fullname]
-        else:
-            if fullname not in self.libs:
-                raise ImportError('unable to find extension for %s' % fullname)
-            result = _load_dynamic(fullname, self.libs[fullname])
-            result.__loader__ = self
-            parts = fullname.rsplit('.', 1)
-            if len(parts) > 1:
-                result.__package__ = parts[0]
-        return result
-
-
-_hook = Mounter()
-
-
-class Wheel(object):
-    """
-    Class to build and install from Wheel files (PEP 427).
-    """
-
-    wheel_version = (1, 1)
-    hash_kind = 'sha256'
-
-    def __init__(self, filename=None, sign=False, verify=False):
-        """
-        Initialise an instance using a (valid) filename.
-        """
-        self.sign = sign
-        self.should_verify = verify
-        self.buildver = ''
-        self.pyver = [PYVER]
-        self.abi = ['none']
-        self.arch = ['any']
-        self.dirname = os.getcwd()
-        if filename is None:
-            self.name = 'dummy'
-            self.version = '0.1'
-            self._filename = self.filename
-        else:
-            m = NAME_VERSION_RE.match(filename)
-            if m:
-                info = m.groupdict('')
-                self.name = info['nm']
-                # Reinstate the local version separator
-                self.version = info['vn'].replace('_', '-')
-                self.buildver = info['bn']
-                self._filename = self.filename
-            else:
-                dirname, filename = os.path.split(filename)
-                m = FILENAME_RE.match(filename)
-                if not m:
-                    raise DistlibException('Invalid name or '
-                                           'filename: %r' % filename)
-                if dirname:
-                    self.dirname = os.path.abspath(dirname)
-                self._filename = filename
-                info = m.groupdict('')
-                self.name = info['nm']
-                self.version = info['vn']
-                self.buildver = info['bn']
-                self.pyver = info['py'].split('.')
-                self.abi = info['bi'].split('.')
-                self.arch = info['ar'].split('.')
-
-    @property
-    def filename(self):
-        """
-        Build and return a filename from the various components.
-        """
-        if self.buildver:
-            buildver = '-' + self.buildver
-        else:
-            buildver = ''
-        pyver = '.'.join(self.pyver)
-        abi = '.'.join(self.abi)
-        arch = '.'.join(self.arch)
-        # replace - with _ as a local version separator
-        version = self.version.replace('-', '_')
-        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, abi, arch)
-
-    @property
-    def exists(self):
-        path = os.path.join(self.dirname, self.filename)
-        return os.path.isfile(path)
-
-    @property
-    def tags(self):
-        for pyver in self.pyver:
-            for abi in self.abi:
-                for arch in self.arch:
-                    yield pyver, abi, arch
-
-    @cached_property
-    def metadata(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        wrapper = codecs.getreader('utf-8')
-        with ZipFile(pathname, 'r') as zf:
-            self.get_wheel_metadata(zf)
-            # wv = wheel_metadata['Wheel-Version'].split('.', 1)
-            # file_version = tuple([int(i) for i in wv])
-            # if file_version < (1, 1):
-            # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
-            # LEGACY_METADATA_FILENAME]
-            # else:
-            # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
-            fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
-            result = None
-            for fn in fns:
-                try:
-                    metadata_filename = posixpath.join(info_dir, fn)
-                    with zf.open(metadata_filename) as bf:
-                        wf = wrapper(bf)
-                        result = Metadata(fileobj=wf)
-                        if result:
-                            break
-                except KeyError:
-                    pass
-            if not result:
-                raise ValueError('Invalid wheel, because metadata is '
-                                 'missing: looked in %s' % ', '.join(fns))
-        return result
-
-    def get_wheel_metadata(self, zf):
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        metadata_filename = posixpath.join(info_dir, 'WHEEL')
-        with zf.open(metadata_filename) as bf:
-            wf = codecs.getreader('utf-8')(bf)
-            message = message_from_file(wf)
-        return dict(message)
-
-    @cached_property
-    def info(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        with ZipFile(pathname, 'r') as zf:
-            result = self.get_wheel_metadata(zf)
-        return result
-
-    def process_shebang(self, data):
-        m = SHEBANG_RE.match(data)
-        if m:
-            end = m.end()
-            shebang, data_after_shebang = data[:end], data[end:]
-            # Preserve any arguments after the interpreter
-            if b'pythonw' in shebang.lower():
-                shebang_python = SHEBANG_PYTHONW
-            else:
-                shebang_python = SHEBANG_PYTHON
-            m = SHEBANG_DETAIL_RE.match(shebang)
-            if m:
-                args = b' ' + m.groups()[-1]
-            else:
-                args = b''
-            shebang = shebang_python + args
-            data = shebang + data_after_shebang
-        else:
-            cr = data.find(b'\r')
-            lf = data.find(b'\n')
-            if cr < 0 or cr > lf:
-                term = b'\n'
-            else:
-                if data[cr:cr + 2] == b'\r\n':
-                    term = b'\r\n'
-                else:
-                    term = b'\r'
-            data = SHEBANG_PYTHON + term + data
-        return data
-
-    def get_hash(self, data, hash_kind=None):
-        if hash_kind is None:
-            hash_kind = self.hash_kind
-        try:
-            hasher = getattr(hashlib, hash_kind)
-        except AttributeError:
-            raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
-        result = hasher(data).digest()
-        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
-        return hash_kind, result
-
-    def write_record(self, records, record_path, archive_record_path):
-        records = list(records)  # make a copy, as mutated
-        records.append((archive_record_path, '', ''))
-        with CSVWriter(record_path) as writer:
-            for row in records:
-                writer.writerow(row)
-
-    def write_records(self, info, libdir, archive_paths):
-        records = []
-        distinfo, info_dir = info
-        # hasher = getattr(hashlib, self.hash_kind)
-        for ap, p in archive_paths:
-            with open(p, 'rb') as f:
-                data = f.read()
-            digest = '%s=%s' % self.get_hash(data)
-            size = os.path.getsize(p)
-            records.append((ap, digest, size))
-
-        p = os.path.join(distinfo, 'RECORD')
-        ap = to_posix(os.path.join(info_dir, 'RECORD'))
-        self.write_record(records, p, ap)
-        archive_paths.append((ap, p))
-
-    def build_zip(self, pathname, archive_paths):
-        with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
-            for ap, p in archive_paths:
-                logger.debug('Wrote %s to %s in wheel', p, ap)
-                zf.write(p, ap)
-
-    def build(self, paths, tags=None, wheel_version=None):
-        """
-        Build a wheel from files in specified paths, and use any specified tags
-        when determining the name of the wheel.
-        """
-        if tags is None:
-            tags = {}
-
-        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
-        if libkey == 'platlib':
-            is_pure = 'false'
-            default_pyver = [IMPVER]
-            default_abi = [ABI]
-            default_arch = [ARCH]
-        else:
-            is_pure = 'true'
-            default_pyver = [PYVER]
-            default_abi = ['none']
-            default_arch = ['any']
-
-        self.pyver = tags.get('pyver', default_pyver)
-        self.abi = tags.get('abi', default_abi)
-        self.arch = tags.get('arch', default_arch)
-
-        libdir = paths[libkey]
-
-        name_ver = '%s-%s' % (self.name, self.version)
-        data_dir = '%s.data' % name_ver
-        info_dir = '%s.dist-info' % name_ver
-
-        archive_paths = []
-
-        # First, stuff which is not in site-packages
-        for key in ('data', 'headers', 'scripts'):
-            if key not in paths:
-                continue
-            path = paths[key]
-            if os.path.isdir(path):
-                for root, dirs, files in os.walk(path):
-                    for fn in files:
-                        p = fsdecode(os.path.join(root, fn))
-                        rp = os.path.relpath(p, path)
-                        ap = to_posix(os.path.join(data_dir, key, rp))
-                        archive_paths.append((ap, p))
-                        if key == 'scripts' and not p.endswith('.exe'):
-                            with open(p, 'rb') as f:
-                                data = f.read()
-                            data = self.process_shebang(data)
-                            with open(p, 'wb') as f:
-                                f.write(data)
-
-        # Now, stuff which is in site-packages, other than the
-        # distinfo stuff.
-        path = libdir
-        distinfo = None
-        for root, dirs, files in os.walk(path):
-            if root == path:
-                # At the top level only, save distinfo for later
-                # and skip it for now
-                for i, dn in enumerate(dirs):
-                    dn = fsdecode(dn)
-                    if dn.endswith('.dist-info'):
-                        distinfo = os.path.join(root, dn)
-                        del dirs[i]
-                        break
-                assert distinfo, '.dist-info directory expected, not found'
-
-            for fn in files:
-                # comment out next suite to leave .pyc files in
-                if fsdecode(fn).endswith(('.pyc', '.pyo')):
-                    continue
-                p = os.path.join(root, fn)
-                rp = to_posix(os.path.relpath(p, path))
-                archive_paths.append((rp, p))
-
-        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
-        files = os.listdir(distinfo)
-        for fn in files:
-            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
-                p = fsdecode(os.path.join(distinfo, fn))
-                ap = to_posix(os.path.join(info_dir, fn))
-                archive_paths.append((ap, p))
-
-        wheel_metadata = [
-            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
-            'Generator: distlib %s' % __version__,
-            'Root-Is-Purelib: %s' % is_pure,
-        ]
-        for pyver, abi, arch in self.tags:
-            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
-        p = os.path.join(distinfo, 'WHEEL')
-        with open(p, 'w') as f:
-            f.write('\n'.join(wheel_metadata))
-        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
-        archive_paths.append((ap, p))
-
-        # sort the entries by archive path. Not needed by any spec, but it
-        # keeps the archive listing and RECORD tidier than they would otherwise
-        # be. Use the number of path segments to keep directory entries together,
-        # and keep the dist-info stuff at the end.
-        def sorter(t):
-            ap = t[0]
-            n = ap.count('/')
-            if '.dist-info' in ap:
-                n += 10000
-            return (n, ap)
-
-        archive_paths = sorted(archive_paths, key=sorter)
-
-        # Now, at last, RECORD.
-        # Paths in here are archive paths - nothing else makes sense.
-        self.write_records((distinfo, info_dir), libdir, archive_paths)
-        # Now, ready to build the zip file
-        pathname = os.path.join(self.dirname, self.filename)
-        self.build_zip(pathname, archive_paths)
-        return pathname
-
-    def skip_entry(self, arcname):
-        """
-        Determine whether an archive entry should be skipped when verifying
-        or installing.
-        """
-        # The signature file won't be in RECORD,
-        # and we don't currently don't do anything with it
-        # We also skip directories, as they won't be in RECORD
-        # either. See:
-        #
-        # https://github.com/pypa/wheel/issues/294
-        # https://github.com/pypa/wheel/issues/287
-        # https://github.com/pypa/wheel/pull/289
-        #
-        return arcname.endswith(('/', '/RECORD.jws'))
-
-    def install(self, paths, maker, **kwargs):
-        """
-        Install a wheel to the specified paths. If kwarg ``warner`` is
-        specified, it should be a callable, which will be called with two
-        tuples indicating the wheel version of this software and the wheel
-        version in the file, if there is a discrepancy in the versions.
-        This can be used to issue any warnings to raise any exceptions.
-        If kwarg ``lib_only`` is True, only the purelib/platlib files are
-        installed, and the headers, scripts, data and dist-info metadata are
-        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
-        bytecode will try to use file-hash based invalidation (PEP-552) on
-        supported interpreter versions (CPython 3.7+).
-
-        The return value is a :class:`InstalledDistribution` instance unless
-        ``options.lib_only`` is True, in which case the return value is ``None``.
-        """
-
-        dry_run = maker.dry_run
-        warner = kwargs.get('warner')
-        lib_only = kwargs.get('lib_only', False)
-        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
-
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        data_dir = '%s.data' % name_ver
-        info_dir = '%s.dist-info' % name_ver
-
-        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
-        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
-        record_name = posixpath.join(info_dir, 'RECORD')
-
-        wrapper = codecs.getreader('utf-8')
-
-        with ZipFile(pathname, 'r') as zf:
-            with zf.open(wheel_metadata_name) as bwf:
-                wf = wrapper(bwf)
-                message = message_from_file(wf)
-            wv = message['Wheel-Version'].split('.', 1)
-            file_version = tuple([int(i) for i in wv])
-            if (file_version != self.wheel_version) and warner:
-                warner(self.wheel_version, file_version)
-
-            if message['Root-Is-Purelib'] == 'true':
-                libdir = paths['purelib']
-            else:
-                libdir = paths['platlib']
-
-            records = {}
-            with zf.open(record_name) as bf:
-                with CSVReader(stream=bf) as reader:
-                    for row in reader:
-                        p = row[0]
-                        records[p] = row
-
-            data_pfx = posixpath.join(data_dir, '')
-            info_pfx = posixpath.join(info_dir, '')
-            script_pfx = posixpath.join(data_dir, 'scripts', '')
-
-            # make a new instance rather than a copy of maker's,
-            # as we mutate it
-            fileop = FileOperator(dry_run=dry_run)
-            fileop.record = True  # so we can rollback if needed
-
-            bc = not sys.dont_write_bytecode  # Double negatives. Lovely!
-
-            outfiles = []  # for RECORD writing
-
-            # for script copying/shebang processing
-            workdir = tempfile.mkdtemp()
-            # set target dir later
-            # we default add_launchers to False, as the
-            # Python Launcher should be used instead
-            maker.source_dir = workdir
-            maker.target_dir = None
-            try:
-                for zinfo in zf.infolist():
-                    arcname = zinfo.filename
-                    if isinstance(arcname, text_type):
-                        u_arcname = arcname
-                    else:
-                        u_arcname = arcname.decode('utf-8')
-                    if self.skip_entry(u_arcname):
-                        continue
-                    row = records[u_arcname]
-                    if row[2] and str(zinfo.file_size) != row[2]:
-                        raise DistlibException('size mismatch for '
-                                               '%s' % u_arcname)
-                    if row[1]:
-                        kind, value = row[1].split('=', 1)
-                        with zf.open(arcname) as bf:
-                            data = bf.read()
-                        _, digest = self.get_hash(data, kind)
-                        if digest != value:
-                            raise DistlibException('digest mismatch for '
-                                                   '%s' % arcname)
-
-                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
-                        logger.debug('lib_only: skipping %s', u_arcname)
-                        continue
-                    is_script = (u_arcname.startswith(script_pfx) and not u_arcname.endswith('.exe'))
-
-                    if u_arcname.startswith(data_pfx):
-                        _, where, rp = u_arcname.split('/', 2)
-                        outfile = os.path.join(paths[where], convert_path(rp))
-                    else:
-                        # meant for site-packages.
-                        if u_arcname in (wheel_metadata_name, record_name):
-                            continue
-                        outfile = os.path.join(libdir, convert_path(u_arcname))
-                    if not is_script:
-                        with zf.open(arcname) as bf:
-                            fileop.copy_stream(bf, outfile)
-                        # Issue #147: permission bits aren't preserved. Using
-                        # zf.extract(zinfo, libdir) should have worked, but didn't,
-                        # see https://www.thetopsites.net/article/53834422.shtml
-                        # So ... manually preserve permission bits as given in zinfo
-                        if os.name == 'posix':
-                            # just set the normal permission bits
-                            os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF)
-                        outfiles.append(outfile)
-                        # Double check the digest of the written file
-                        if not dry_run and row[1]:
-                            with open(outfile, 'rb') as bf:
-                                data = bf.read()
-                            _, newdigest = self.get_hash(data, kind)
-                            if newdigest != digest:
-                                raise DistlibException('digest mismatch '
-                                                       'on write for '
-                                                       '%s' % outfile)
-                        if bc and outfile.endswith('.py'):
-                            try:
-                                pyc = fileop.byte_compile(outfile, hashed_invalidation=bc_hashed_invalidation)
-                                outfiles.append(pyc)
-                            except Exception:
-                                # Don't give up if byte-compilation fails,
-                                # but log it and perhaps warn the user
-                                logger.warning('Byte-compilation failed', exc_info=True)
-                    else:
-                        fn = os.path.basename(convert_path(arcname))
-                        workname = os.path.join(workdir, fn)
-                        with zf.open(arcname) as bf:
-                            fileop.copy_stream(bf, workname)
-
-                        dn, fn = os.path.split(outfile)
-                        maker.target_dir = dn
-                        filenames = maker.make(fn)
-                        fileop.set_executable_mode(filenames)
-                        outfiles.extend(filenames)
-
-                if lib_only:
-                    logger.debug('lib_only: returning None')
-                    dist = None
-                else:
-                    # Generate scripts
-
-                    # Try to get pydist.json so we can see if there are
-                    # any commands to generate. If this fails (e.g. because
-                    # of a legacy wheel), log a warning but don't give up.
-                    commands = None
-                    file_version = self.info['Wheel-Version']
-                    if file_version == '1.0':
-                        # Use legacy info
-                        ep = posixpath.join(info_dir, 'entry_points.txt')
-                        try:
-                            with zf.open(ep) as bwf:
-                                epdata = read_exports(bwf)
-                            commands = {}
-                            for key in ('console', 'gui'):
-                                k = '%s_scripts' % key
-                                if k in epdata:
-                                    commands['wrap_%s' % key] = d = {}
-                                    for v in epdata[k].values():
-                                        s = '%s:%s' % (v.prefix, v.suffix)
-                                        if v.flags:
-                                            s += ' [%s]' % ','.join(v.flags)
-                                        d[v.name] = s
-                        except Exception:
-                            logger.warning('Unable to read legacy script '
-                                           'metadata, so cannot generate '
-                                           'scripts')
-                    else:
-                        try:
-                            with zf.open(metadata_name) as bwf:
-                                wf = wrapper(bwf)
-                                commands = json.load(wf).get('extensions')
-                                if commands:
-                                    commands = commands.get('python.commands')
-                        except Exception:
-                            logger.warning('Unable to read JSON metadata, so '
-                                           'cannot generate scripts')
-                    if commands:
-                        console_scripts = commands.get('wrap_console', {})
-                        gui_scripts = commands.get('wrap_gui', {})
-                        if console_scripts or gui_scripts:
-                            script_dir = paths.get('scripts', '')
-                            if not os.path.isdir(script_dir):
-                                raise ValueError('Valid script path not '
-                                                 'specified')
-                            maker.target_dir = script_dir
-                            for k, v in console_scripts.items():
-                                script = '%s = %s' % (k, v)
-                                filenames = maker.make(script)
-                                fileop.set_executable_mode(filenames)
-
-                            if gui_scripts:
-                                options = {'gui': True}
-                                for k, v in gui_scripts.items():
-                                    script = '%s = %s' % (k, v)
-                                    filenames = maker.make(script, options)
-                                    fileop.set_executable_mode(filenames)
-
-                    p = os.path.join(libdir, info_dir)
-                    dist = InstalledDistribution(p)
-
-                    # Write SHARED
-                    paths = dict(paths)  # don't change passed in dict
-                    del paths['purelib']
-                    del paths['platlib']
-                    paths['lib'] = libdir
-                    p = dist.write_shared_locations(paths, dry_run)
-                    if p:
-                        outfiles.append(p)
-
-                    # Write RECORD
-                    dist.write_installed_files(outfiles, paths['prefix'], dry_run)
-                return dist
-            except Exception:  # pragma: no cover
-                logger.exception('installation failed.')
-                fileop.rollback()
-                raise
-            finally:
-                shutil.rmtree(workdir)
-
-    def _get_dylib_cache(self):
-        global cache
-        if cache is None:
-            # Use native string to avoid issues on 2.x: see Python #20140.
-            base = os.path.join(get_cache_base(), str('dylib-cache'), '%s.%s' % sys.version_info[:2])
-            cache = Cache(base)
-        return cache
-
-    def _get_extensions(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        arcname = posixpath.join(info_dir, 'EXTENSIONS')
-        wrapper = codecs.getreader('utf-8')
-        result = []
-        with ZipFile(pathname, 'r') as zf:
-            try:
-                with zf.open(arcname) as bf:
-                    wf = wrapper(bf)
-                    extensions = json.load(wf)
-                    cache = self._get_dylib_cache()
-                    prefix = cache.prefix_to_dir(self.filename, use_abspath=False)
-                    cache_base = os.path.join(cache.base, prefix)
-                    if not os.path.isdir(cache_base):
-                        os.makedirs(cache_base)
-                    for name, relpath in extensions.items():
-                        dest = os.path.join(cache_base, convert_path(relpath))
-                        if not os.path.exists(dest):
-                            extract = True
-                        else:
-                            file_time = os.stat(dest).st_mtime
-                            file_time = datetime.datetime.fromtimestamp(file_time)
-                            info = zf.getinfo(relpath)
-                            wheel_time = datetime.datetime(*info.date_time)
-                            extract = wheel_time > file_time
-                        if extract:
-                            zf.extract(relpath, cache_base)
-                        result.append((name, dest))
-            except KeyError:
-                pass
-        return result
-
-    def is_compatible(self):
-        """
-        Determine if a wheel is compatible with the running system.
-        """
-        return is_compatible(self)
-
-    def is_mountable(self):
-        """
-        Determine if a wheel is asserted as mountable by its metadata.
-        """
-        return True  # for now - metadata details TBD
-
-    def mount(self, append=False):
-        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
-        if not self.is_compatible():
-            msg = 'Wheel %s not compatible with this Python.' % pathname
-            raise DistlibException(msg)
-        if not self.is_mountable():
-            msg = 'Wheel %s is marked as not mountable.' % pathname
-            raise DistlibException(msg)
-        if pathname in sys.path:
-            logger.debug('%s already in path', pathname)
-        else:
-            if append:
-                sys.path.append(pathname)
-            else:
-                sys.path.insert(0, pathname)
-            extensions = self._get_extensions()
-            if extensions:
-                if _hook not in sys.meta_path:
-                    sys.meta_path.append(_hook)
-                _hook.add(pathname, extensions)
-
-    def unmount(self):
-        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
-        if pathname not in sys.path:
-            logger.debug('%s not in path', pathname)
-        else:
-            sys.path.remove(pathname)
-            if pathname in _hook.impure_wheels:
-                _hook.remove(pathname)
-            if not _hook.impure_wheels:
-                if _hook in sys.meta_path:
-                    sys.meta_path.remove(_hook)
-
-    def verify(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        # data_dir = '%s.data' % name_ver
-        info_dir = '%s.dist-info' % name_ver
-
-        # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
-        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
-        record_name = posixpath.join(info_dir, 'RECORD')
-
-        wrapper = codecs.getreader('utf-8')
-
-        with ZipFile(pathname, 'r') as zf:
-            with zf.open(wheel_metadata_name) as bwf:
-                wf = wrapper(bwf)
-                message_from_file(wf)
-            # wv = message['Wheel-Version'].split('.', 1)
-            # file_version = tuple([int(i) for i in wv])
-            # TODO version verification
-
-            records = {}
-            with zf.open(record_name) as bf:
-                with CSVReader(stream=bf) as reader:
-                    for row in reader:
-                        p = row[0]
-                        records[p] = row
-
-            for zinfo in zf.infolist():
-                arcname = zinfo.filename
-                if isinstance(arcname, text_type):
-                    u_arcname = arcname
-                else:
-                    u_arcname = arcname.decode('utf-8')
-                # See issue #115: some wheels have .. in their entries, but
-                # in the filename ... e.g. __main__..py ! So the check is
-                # updated to look for .. in the directory portions
-                p = u_arcname.split('/')
-                if '..' in p:
-                    raise DistlibException('invalid entry in '
-                                           'wheel: %r' % u_arcname)
-
-                if self.skip_entry(u_arcname):
-                    continue
-                row = records[u_arcname]
-                if row[2] and str(zinfo.file_size) != row[2]:
-                    raise DistlibException('size mismatch for '
-                                           '%s' % u_arcname)
-                if row[1]:
-                    kind, value = row[1].split('=', 1)
-                    with zf.open(arcname) as bf:
-                        data = bf.read()
-                    _, digest = self.get_hash(data, kind)
-                    if digest != value:
-                        raise DistlibException('digest mismatch for '
-                                               '%s' % arcname)
-
-    def update(self, modifier, dest_dir=None, **kwargs):
-        """
-        Update the contents of a wheel in a generic way. The modifier should
-        be a callable which expects a dictionary argument: its keys are
-        archive-entry paths, and its values are absolute filesystem paths
-        where the contents the corresponding archive entries can be found. The
-        modifier is free to change the contents of the files pointed to, add
-        new entries and remove entries, before returning. This method will
-        extract the entire contents of the wheel to a temporary location, call
-        the modifier, and then use the passed (and possibly updated)
-        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
-        wheel is written there -- otherwise, the original wheel is overwritten.
-
-        The modifier should return True if it updated the wheel, else False.
-        This method returns the same value the modifier returns.
-        """
-
-        def get_version(path_map, info_dir):
-            version = path = None
-            key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
-            if key not in path_map:
-                key = '%s/PKG-INFO' % info_dir
-            if key in path_map:
-                path = path_map[key]
-                version = Metadata(path=path).version
-            return version, path
-
-        def update_version(version, path):
-            updated = None
-            try:
-                NormalizedVersion(version)
-                i = version.find('-')
-                if i < 0:
-                    updated = '%s+1' % version
-                else:
-                    parts = [int(s) for s in version[i + 1:].split('.')]
-                    parts[-1] += 1
-                    updated = '%s+%s' % (version[:i], '.'.join(str(i) for i in parts))
-            except UnsupportedVersionError:
-                logger.debug('Cannot update non-compliant (PEP-440) '
-                             'version %r', version)
-            if updated:
-                md = Metadata(path=path)
-                md.version = updated
-                legacy = path.endswith(LEGACY_METADATA_FILENAME)
-                md.write(path=path, legacy=legacy)
-                logger.debug('Version updated from %r to %r', version, updated)
-
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        record_name = posixpath.join(info_dir, 'RECORD')
-        with tempdir() as workdir:
-            with ZipFile(pathname, 'r') as zf:
-                path_map = {}
-                for zinfo in zf.infolist():
-                    arcname = zinfo.filename
-                    if isinstance(arcname, text_type):
-                        u_arcname = arcname
-                    else:
-                        u_arcname = arcname.decode('utf-8')
-                    if u_arcname == record_name:
-                        continue
-                    if '..' in u_arcname:
-                        raise DistlibException('invalid entry in '
-                                               'wheel: %r' % u_arcname)
-                    zf.extract(zinfo, workdir)
-                    path = os.path.join(workdir, convert_path(u_arcname))
-                    path_map[u_arcname] = path
-
-            # Remember the version.
-            original_version, _ = get_version(path_map, info_dir)
-            # Files extracted. Call the modifier.
-            modified = modifier(path_map, **kwargs)
-            if modified:
-                # Something changed - need to build a new wheel.
-                current_version, path = get_version(path_map, info_dir)
-                if current_version and (current_version == original_version):
-                    # Add or update local version to signify changes.
-                    update_version(current_version, path)
-                # Decide where the new wheel goes.
-                if dest_dir is None:
-                    fd, newpath = tempfile.mkstemp(suffix='.whl', prefix='wheel-update-', dir=workdir)
-                    os.close(fd)
-                else:
-                    if not os.path.isdir(dest_dir):
-                        raise DistlibException('Not a directory: %r' % dest_dir)
-                    newpath = os.path.join(dest_dir, self.filename)
-                archive_paths = list(path_map.items())
-                distinfo = os.path.join(workdir, info_dir)
-                info = distinfo, info_dir
-                self.write_records(info, workdir, archive_paths)
-                self.build_zip(newpath, archive_paths)
-                if dest_dir is None:
-                    shutil.copyfile(newpath, pathname)
-        return modified
-
-
-def _get_glibc_version():
-    import platform
-    ver = platform.libc_ver()
-    result = []
-    if ver[0] == 'glibc':
-        for s in ver[1].split('.'):
-            result.append(int(s) if s.isdigit() else 0)
-        result = tuple(result)
-    return result
-
-
-def compatible_tags():
-    """
-    Return (pyver, abi, arch) tuples compatible with this Python.
-    """
-    class _Version:
-        def __init__(self, major, minor):
-            self.major = major
-            self.major_minor = (major, minor)
-            self.string = ''.join((str(major), str(minor)))
-
-        def __str__(self):
-            return self.string
-
-
-    versions = [
-        _Version(sys.version_info.major, minor_version)
-        for minor_version in range(sys.version_info.minor, -1, -1)
-    ]
-    abis = []
-    for suffix in _get_suffixes():
-        if suffix.startswith('.abi'):
-            abis.append(suffix.split('.', 2)[1])
-    abis.sort()
-    if ABI != 'none':
-        abis.insert(0, ABI)
-    abis.append('none')
-    result = []
-
-    arches = [ARCH]
-    if sys.platform == 'darwin':
-        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
-        if m:
-            name, major, minor, arch = m.groups()
-            minor = int(minor)
-            matches = [arch]
-            if arch in ('i386', 'ppc'):
-                matches.append('fat')
-            if arch in ('i386', 'ppc', 'x86_64'):
-                matches.append('fat3')
-            if arch in ('ppc64', 'x86_64'):
-                matches.append('fat64')
-            if arch in ('i386', 'x86_64'):
-                matches.append('intel')
-            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
-                matches.append('universal')
-            while minor >= 0:
-                for match in matches:
-                    s = '%s_%s_%s_%s' % (name, major, minor, match)
-                    if s != ARCH:  # already there
-                        arches.append(s)
-                minor -= 1
-
-    # Most specific - our Python version, ABI and arch
-    for i, version_object in enumerate(versions):
-        version = str(version_object)
-        add_abis = []
-
-        if i == 0:
-            add_abis = abis
-
-        if IMP_PREFIX == 'cp' and version_object.major_minor >= (3, 2):
-            limited_api_abi = 'abi' + str(version_object.major)
-            if limited_api_abi not in add_abis:
-                add_abis.append(limited_api_abi)
-
-        for abi in add_abis:
-            for arch in arches:
-                result.append((''.join((IMP_PREFIX, version)), abi, arch))
-                # manylinux
-                if abi != 'none' and sys.platform.startswith('linux'):
-                    arch = arch.replace('linux_', '')
-                    parts = _get_glibc_version()
-                    if len(parts) == 2:
-                        if parts >= (2, 5):
-                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux1_%s' % arch))
-                        if parts >= (2, 12):
-                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2010_%s' % arch))
-                        if parts >= (2, 17):
-                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2014_%s' % arch))
-                        result.append((''.join(
-                            (IMP_PREFIX, version)), abi, 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch)))
-
-    # where no ABI / arch dependency, but IMP_PREFIX dependency
-    for i, version_object in enumerate(versions):
-        version = str(version_object)
-        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
-        if i == 0:
-            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
-
-    # no IMP_PREFIX, ABI or arch dependency
-    for i, version_object in enumerate(versions):
-        version = str(version_object)
-        result.append((''.join(('py', version)), 'none', 'any'))
-        if i == 0:
-            result.append((''.join(('py', version[0])), 'none', 'any'))
-
-    return set(result)
-
-
-COMPATIBLE_TAGS = compatible_tags()
-
-del compatible_tags
-
-
-def is_compatible(wheel, tags=None):
-    if not isinstance(wheel, Wheel):
-        wheel = Wheel(wheel)  # assume it's a filename
-    result = False
-    if tags is None:
-        tags = COMPATIBLE_TAGS
-    for ver, abi, arch in tags:
-        if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
-            result = True
-            break
-    return result
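
As the summary above shows, pip 25.2 removes this module along with several other pip/_vendor/distlib submodules and pip/_vendor/typing_extensions.py. Importing pip's vendored modules has never been a supported API, but code that did so under 25.1 will fail after upgrading. A minimal sketch (the module list below is an illustrative subset drawn from the file list above, and it assumes pip is importable in the target environment) for spotting such unsupported imports before they break at runtime:

import importlib.util

# A few vendored modules dropped in pip 25.2 (per the file list above).
# Any code relying on them should switch to the standalone distlib /
# typing_extensions distributions instead of pip._vendor.
removed = [
    "pip._vendor.distlib.wheel",
    "pip._vendor.distlib.database",
    "pip._vendor.typing_extensions",
]

for name in removed:
    # find_spec returns None when the submodule is no longer shipped.
    present = importlib.util.find_spec(name) is not None
    print(f"{name}: {'still present' if present else 'removed'}")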