pex 2.64.1__py2.py3-none-any.whl → 2.69.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of pex might be problematic.
- pex/bin/pex.py +2 -1
- pex/build_backend/configuration.py +5 -5
- pex/build_backend/wrap.py +2 -19
- pex/cli/commands/lock.py +4 -2
- pex/cli/commands/run.py +10 -11
- pex/cli/pex.py +11 -4
- pex/dist_metadata.py +29 -2
- pex/docs/html/_pagefind/fragment/en_4250138.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_7125dad.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_785d562.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_8e94bb8.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/{en_17782b6.pf_fragment → en_a0396bb.pf_fragment} +0 -0
- pex/docs/html/_pagefind/fragment/en_a8a3588.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_c07d988.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_d718411.pf_fragment +0 -0
- pex/docs/html/_pagefind/index/en_a2e3c5e.pf_index +0 -0
- pex/docs/html/_pagefind/pagefind-entry.json +1 -1
- pex/docs/html/_pagefind/pagefind.en_4ce1afa9e3.pf_meta +0 -0
- pex/docs/html/_static/documentation_options.js +1 -1
- pex/docs/html/api/vars.html +5 -5
- pex/docs/html/buildingpex.html +5 -5
- pex/docs/html/genindex.html +5 -5
- pex/docs/html/index.html +5 -5
- pex/docs/html/recipes.html +5 -5
- pex/docs/html/scie.html +5 -5
- pex/docs/html/search.html +5 -5
- pex/docs/html/whatispex.html +5 -5
- pex/hashing.py +71 -9
- pex/interpreter_constraints.py +1 -1
- pex/jobs.py +13 -6
- pex/pep_376.py +21 -6
- pex/pep_427.py +30 -8
- pex/pex_builder.py +1 -4
- pex/pip/local_project.py +6 -14
- pex/pip/tool.py +3 -3
- pex/pip/vcs.py +93 -44
- pex/pip/version.py +7 -0
- pex/resolve/configured_resolve.py +13 -5
- pex/resolve/lock_downloader.py +1 -0
- pex/resolve/locker.py +30 -14
- pex/resolve/lockfile/create.py +2 -7
- pex/resolve/pre_resolved_resolver.py +1 -7
- pex/resolve/project.py +233 -47
- pex/resolve/resolver_configuration.py +1 -1
- pex/resolve/resolver_options.py +14 -9
- pex/resolve/venv_resolver.py +221 -65
- pex/resolver.py +59 -55
- pex/scie/__init__.py +40 -1
- pex/scie/model.py +2 -0
- pex/scie/science.py +25 -3
- pex/sdist.py +219 -0
- pex/version.py +1 -1
- pex/wheel.py +16 -12
- {pex-2.64.1.dist-info → pex-2.69.0.dist-info}/METADATA +4 -4
- {pex-2.64.1.dist-info → pex-2.69.0.dist-info}/RECORD +60 -59
- {pex-2.64.1.dist-info → pex-2.69.0.dist-info}/entry_points.txt +1 -0
- pex/docs/html/_pagefind/fragment/en_1048255.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_3f7efc3.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_40667cd.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_55ee2f4.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_d6d92dd.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_d834316.pf_fragment +0 -0
- pex/docs/html/_pagefind/fragment/en_ec2ce54.pf_fragment +0 -0
- pex/docs/html/_pagefind/index/en_17effb2.pf_index +0 -0
- pex/docs/html/_pagefind/pagefind.en_49ec86cf86.pf_meta +0 -0
- {pex-2.64.1.dist-info → pex-2.69.0.dist-info}/WHEEL +0 -0
- {pex-2.64.1.dist-info → pex-2.69.0.dist-info}/licenses/LICENSE +0 -0
- {pex-2.64.1.dist-info → pex-2.69.0.dist-info}/pylock/pylock.toml +0 -0
- {pex-2.64.1.dist-info → pex-2.69.0.dist-info}/top_level.txt +0 -0
pex/resolve/configured_resolve.py
CHANGED

@@ -20,7 +20,7 @@ from pex.resolve.resolver_configuration import (
     VenvRepositoryConfiguration,
 )
 from pex.resolve.resolvers import ResolveResult
-from pex.resolve.venv_resolver import
+from pex.resolve.venv_resolver import resolve_from_venvs
 from pex.resolver import resolve as resolve_via_pip
 from pex.result import try_
 from pex.targets import Targets
@@ -143,14 +143,22 @@ def resolve(
         )
     elif isinstance(resolver_configuration, VenvRepositoryConfiguration):
         with TRACER.timed(
-            "Resolving requirements from
-
+            "Resolving requirements from {count} {venvs} at:{paths}.".format(
+                count=len(resolver_configuration.venvs),
+                venvs=pluralize(resolver_configuration.venvs, "venv"),
+                paths=(
+                    " {venv}".format(venv=resolver_configuration.venvs[0].venv_dir)
+                    if len(resolver_configuration.venvs) == 1
+                    else "\n {venvs}".format(
+                        venvs="\n ".join(venv.venv_dir for venv in resolver_configuration.venvs)
+                    )
+                ),
             )
         ):
             return try_(
-
+                resolve_from_venvs(
                     targets=targets,
-
+                    venvs=resolver_configuration.venvs,
                     requirement_configuration=requirement_configuration,
                     pip_configuration=resolver_configuration.pip_configuration,
                     compile=compile_pyc,
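The message added above distinguishes a single venv from several. As a rough, standalone illustration of that formatting (the pluralize() below is an assumed stand-in for pex.common.pluralize, not its actual implementation):

    def pluralize(subject, noun):
        # Assumption: return the plural form when the collection holds more than one item.
        return noun if len(subject) == 1 else noun + "s"


    def describe_venvs(venv_dirs):
        # Mirrors the TRACER.timed message in the hunk above: inline a single path,
        # or list several paths one per line.
        paths = (
            " {venv}".format(venv=venv_dirs[0])
            if len(venv_dirs) == 1
            else "\n    {venvs}".format(venvs="\n    ".join(venv_dirs))
        )
        return "Resolving requirements from {count} {venvs} at:{paths}.".format(
            count=len(venv_dirs), venvs=pluralize(venv_dirs, "venv"), paths=paths
        )


    print(describe_venvs(["/opt/venvs/py312"]))
    print(describe_venvs(["/opt/venvs/py312", "/opt/venvs/py313"]))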
pex/resolve/lock_downloader.py
CHANGED
@@ -154,6 +154,7 @@ class VCSArtifactDownloadManager(DownloadManager[VCSArtifact]):
         local_distribution = downloaded_vcs.local_distributions[0]
         filename = os.path.basename(local_distribution.path)
         digest_vcs_archive(
+            project_name=project_name,
             archive_path=local_distribution.path,
             vcs=artifact.vcs,
             digest=digest,
pex/resolve/locker.py
CHANGED
@@ -30,6 +30,7 @@ from pex.resolve.pep_691.model import Endpoint
 from pex.resolve.resolved_requirement import PartialArtifact, Pin, ResolvedRequirement
 from pex.resolve.resolvers import Resolver
 from pex.resolve.target_system import UniversalTarget
+from pex.result import try_
 from pex.targets import Target
 from pex.typing import TYPE_CHECKING

@@ -291,6 +292,16 @@ class Locker(LogAnalyzer):
     def _maybe_record_wheel(self, url):
         # type: (str) -> ArtifactURL
         artifact_url = self.parse_url_and_maybe_record_fingerprint(url)
+
+        # N.B.: Lock resolves driven by `pip install --dry-run --report` will only consult PEP-658
+        # `.whl.metadata` side-car files in the happy path; so we must use these as a proxy for the
+        # `.whl` file they are paired with.
+        # See: https://peps.python.org/pep-0658/
+        if not self._lock_is_via_pip_download and artifact_url.url_info.path.endswith(".metadata"):
+            artifact_url = ArtifactURL.from_url_info(
+                artifact_url.url_info._replace(path=artifact_url.url_info.path[:-9])
+            )
+
         if artifact_url.is_wheel:
             pin, partial_artifact = self._extract_resolve_data(artifact_url)

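The new branch above maps a PEP 658 metadata URL back to the wheel it describes: indexes serve a wheel's METADATA at the wheel URL plus a ".metadata" suffix (nine characters, hence path[:-9]). A minimal standalone sketch of that mapping (the helper name is illustrative, not pex API):

    def metadata_url_to_wheel_url(url):
        # PEP 658: "<wheel URL>.metadata" carries the wheel's METADATA file, so
        # stripping the suffix recovers the wheel artifact the lock should record.
        suffix = ".metadata"  # len(".metadata") == 9
        return url[: -len(suffix)] if url.endswith(suffix) else url


    assert metadata_url_to_wheel_url(
        "https://example.org/packages/cowsay-6.1-py3-none-any.whl.metadata"
    ).endswith(".whl")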
@@ -371,8 +382,8 @@
         if isinstance(artifact_url.scheme, VCSScheme):
             source_fingerprint, archive_path = fingerprint_downloaded_vcs_archive(
                 download_dir=self._download_dir,
-                project_name=
-                version=
+                project_name=build_result.pin.project_name,
+                version=build_result.pin.version,
                 vcs=artifact_url.scheme.vcs,
             )
             verified = True
@@ -407,12 +418,14 @@
                     os.path.basename(artifact_url.path)
                 ] = build_result.pin
             else:
-
-
-
-
-
-
+                try_(
+                    digest_local_project(
+                        directory=artifact_url.path,
+                        digest=digest,
+                        pip_version=self._pip_version,
+                        target=self._target,
+                        resolver=self._resolver,
+                    )
                 )
                 self._local_projects.add(artifact_url.path)
                 self._saved.add(build_result.pin)
@@ -445,6 +458,7 @@
         if isinstance(artifact_url.scheme, VCSScheme):
             digest = Sha256()
             digest_vcs_repo(
+                project_name=build_result.pin.project_name,
                 repo_path=build_result.path,
                 vcs=artifact_url.scheme.vcs,
                 digest=digest,
@@ -479,12 +493,14 @@
                     os.path.basename(artifact_url.path)
                 ] = build_result.pin
             else:
-
-
-
-
-
-
+                try_(
+                    digest_local_project(
+                        directory=artifact_url.path,
+                        digest=digest,
+                        pip_version=self._pip_version,
+                        target=self._target,
+                        resolver=self._resolver,
+                    )
                 )
                 self._local_projects.add(artifact_url.path)
                 self._saved.add(build_result.pin)
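Both hunks above funnel digest_local_project through try_ from pex.result. As a hedged sketch of that value-or-error convention (the Error and try_ below are simplified stand-ins, not pex's implementations):

    class Error(str):
        """A returned, rather than raised, error message."""


    def try_(result):
        # Assumed contract: pass successful values through; convert a returned Error
        # into an exception at the call site, as the locker code above does.
        if isinstance(result, Error):
            raise RuntimeError(str(result))
        return result


    def digest_directory(path):
        # Hypothetical fallible helper standing in for digest_local_project.
        return "sha256:0f3a..." if path else Error("no project directory given")


    print(try_(digest_directory("projects/app")))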
pex/resolve/lockfile/create.py
CHANGED
@@ -357,8 +357,6 @@ class LockObserver(ResolveObserver):
                     target=local_distribution.download_target,
                     source_path=local_distribution.path,
                     subdirectory=local_distribution.subdirectory,
-                    resolver=self.resolver,
-                    pip_version=self.package_index_configuration.pip_version,
                 )
             )

@@ -369,10 +367,7 @@ class LockObserver(ResolveObserver):
         lock_result = analysis.analyzer.lock_result
         build_requests.update(
             BuildRequest.for_directory(
-                target=analysis.download_target,
-                source_path=local_project,
-                resolver=self.resolver,
-                pip_version=self.package_index_configuration.pip_version,
+                target=analysis.download_target, source_path=local_project
             )
             for local_project in lock_result.local_projects
         )

@@ -403,7 +398,7 @@ class LockObserver(ResolveObserver):
                 targets_and_project_directories,
                 # MyPy just can't figure out the next two args types; they're OK.
                 self._spawn_prepare_metadata,  # type: ignore[arg-type]
-                error_handler=Retain[str](),  # type: ignore[arg-type]
+                error_handler=Retain["Tuple[Target, str]"](),  # type: ignore[arg-type]
                 max_jobs=self.max_parallel_jobs,
             ),
         ):
pex/resolve/pre_resolved_resolver.py
CHANGED

@@ -101,19 +101,13 @@ def resolve_from_dists(
         extra_pip_requirements=pip_configuration.extra_requirements,
         keyring_provider=pip_configuration.keyring_provider,
     )
-    resolver = ConfiguredResolver(pip_configuration=pip_configuration)
     build_requests = [
         BuildRequest.for_file(target=target, source_path=sdist)
         for sdist in sdists
         for target in unique_targets
     ]
     build_requests.extend(
-        BuildRequest.for_directory(
-            target=target,
-            source_path=local_project.path,
-            resolver=resolver,
-            pip_version=pip_configuration.version,
-        )
+        BuildRequest.for_directory(target=target, source_path=local_project.path)
         for local_project in local_projects
         for target in unique_targets
     )
pex/resolve/project.py
CHANGED
@@ -3,35 +3,44 @@

 from __future__ import absolute_import

+import hashlib
 import os.path
 from argparse import Namespace, _ActionsContainer

-from pex import requirements, toml
+from pex import requirements, sdist, toml
 from pex.build_system import pep_517
-from pex.common import pluralize
+from pex.common import pluralize, safe_mkdtemp
 from pex.compatibility import string
 from pex.dependency_configuration import DependencyConfiguration
-from pex.dist_metadata import DistMetadata, Requirement, RequirementParseError
+from pex.dist_metadata import DistMetadata, Requirement, RequirementParseError, is_wheel
 from pex.fingerprinted_distribution import FingerprintedDistribution
 from pex.interpreter import PythonInterpreter
-from pex.jobs import
+from pex.jobs import Job, Retain, SpawnedJob, execute_parallel
 from pex.orderedset import OrderedSet
 from pex.pep_427 import InstallableType
 from pex.pep_503 import ProjectName
+from pex.pip.tool import PackageIndexConfiguration
 from pex.pip.version import PipVersionValue
-from pex.requirements import LocalProjectRequirement, ParseError
+from pex.requirements import LocalProjectRequirement, ParseError, URLRequirement
 from pex.resolve.configured_resolve import resolve
+from pex.resolve.configured_resolver import ConfiguredResolver
 from pex.resolve.requirement_configuration import RequirementConfiguration
 from pex.resolve.resolver_configuration import PipConfiguration
-from pex.resolve.resolvers import Resolver
+from pex.resolve.resolvers import Resolver
+from pex.resolver import BuildAndInstallRequest, BuildRequest, InstallRequest
+from pex.result import Error, ResultError
 from pex.sorted_tuple import SortedTuple
 from pex.targets import LocalInterpreter, Target, Targets
+from pex.tracer import TRACER
 from pex.typing import TYPE_CHECKING
+from pex.util import CacheHelper

 if TYPE_CHECKING:
-    from typing import Any, Iterable, Iterator, List, Mapping, Optional, Set, Tuple, Union
+    from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Set, Tuple, Union

     import attr  # vendor:skip
+
+    from pex.requirements import ParsedRequirement
 else:
     from pex.third_party import attr

@@ -69,10 +78,14 @@ class BuiltProject(object):


 @attr.s(frozen=True)
-class
-    path = attr.ib()  # type: str
+class ProjectDirectory(object):
     requirement = attr.ib()  # type: LocalProjectRequirement

+    @property
+    def path(self):
+        # type: () -> str
+        return self.requirement.path
+
     @property
     def requirement_str(self):
         # type: () -> str
@@ -80,9 +93,30 @@ class Project(object):
         return str(self.requirement.line.processed_text)


+@attr.s(frozen=True)
+class ProjectArchive(object):
+    requirement = attr.ib()  # type: URLRequirement
+
+    @property
+    def path(self):
+        # type: () -> str
+        return self.requirement.url.path
+
+    @property
+    def is_wheel(self):
+        # type: () -> bool
+        return is_wheel(self.path)
+
+    @property
+    def subdirectory(self):
+        # type: () -> Optional[str]
+        return self.requirement.subdirectory
+
+
 @attr.s(frozen=True)
 class Projects(object):
-
+    project_directories = attr.ib(default=())  # type: Tuple[ProjectDirectory, ...]
+    project_archives = attr.ib(default=())  # type: Tuple[ProjectArchive, ...]

     def build(
         self,
@@ -95,24 +129,93 @@
     ):
         # type: (...) -> Iterator[BuiltProject]

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if self.project_directories:
+            resolve_result = resolve(
+                targets=targets,
+                requirement_configuration=RequirementConfiguration(
+                    requirements=[project.requirement_str for project in self.project_directories]
+                ),
+                resolver_configuration=attr.evolve(pip_configuration, transitive=False),
+                compile_pyc=compile_pyc,
+                ignore_errors=ignore_errors,
+                result_type=result_type,
+                dependency_configuration=dependency_config,
+            )
+            for resolved_distribution in resolve_result.distributions:
+                yield BuiltProject(
+                    target=resolved_distribution.target,
+                    fingerprinted_distribution=resolved_distribution.fingerprinted_distribution,
+                    satisfied_direct_requirements=resolved_distribution.direct_requirements,
+                )
+
+        if self.project_archives:
+            build_requests = []  # type: List[BuildRequest]
+            install_requests = []  # type: List[InstallRequest]
+            direct_requirements = []  # type: List[ParsedRequirement]
+            for project_archive in self.project_archives:
+                fingerprint = CacheHelper.hash(project_archive.path, hasher=hashlib.sha256)
+                direct_requirements.append(project_archive.requirement)
+                for target in targets.unique_targets():
+                    if project_archive.is_wheel:
+                        install_requests.append(
+                            InstallRequest(
+                                download_target=target,
+                                wheel_path=project_archive.path,
+                                fingerprint=fingerprint,
+                            )
+                        )
+                    else:
+                        build_requests.append(
+                            BuildRequest(
+                                download_target=target,
+                                source_path=project_archive.path,
+                                fingerprint=fingerprint,
+                                subdirectory=project_archive.subdirectory,
+                            )
+                        )
+
+            build_and_install_request = BuildAndInstallRequest(
+                build_requests=build_requests,
+                install_requests=install_requests,
+                direct_requirements=direct_requirements,
+                package_index_configuration=PackageIndexConfiguration.create(
+                    pip_version=pip_configuration.version,
+                    resolver_version=pip_configuration.resolver_version,
+                    repos_configuration=pip_configuration.repos_configuration,
+                    network_configuration=pip_configuration.network_configuration,
+                    use_pip_config=pip_configuration.use_pip_config,
+                    extra_pip_requirements=pip_configuration.extra_requirements,
+                    keyring_provider=pip_configuration.keyring_provider,
+                ),
+                compile=compile_pyc,
+                build_configuration=pip_configuration.build_configuration,
+                pip_version=pip_configuration.version,
+                resolver=ConfiguredResolver(pip_configuration=pip_configuration),
+                dependency_configuration=dependency_config,
             )

+            # This checks the resolve, but we're not doing a full resolve here - we're installing
+            # projects to gather their requirements and _then_ perform a resolve of those
+            # requirements.
+            ignore_errors = True
+
+            if result_type is InstallableType.INSTALLED_WHEEL_CHROOT:
+                resolved_distributions = build_and_install_request.install_distributions(
+                    max_parallel_jobs=pip_configuration.max_jobs, ignore_errors=ignore_errors
+                )
+            else:
+                resolved_distributions = build_and_install_request.build_distributions(
+                    max_parallel_jobs=pip_configuration.max_jobs,
+                    ignore_errors=ignore_errors,
+                )
+
+            for resolved_distribution in resolved_distributions:
+                yield BuiltProject(
+                    target=resolved_distribution.target,
+                    fingerprinted_distribution=resolved_distribution.fingerprinted_distribution,
+                    satisfied_direct_requirements=resolved_distribution.direct_requirements,
+                )
+
     def collect_requirements(
         self,
         resolver,  # type: Resolver
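The build() additions above split --project archives by type: wheels become InstallRequests directly, while anything else is treated as a source archive and routed through a BuildRequest first. A small standalone sketch of that partitioning (the suffix check is an assumed approximation of pex.dist_metadata.is_wheel):

    def partition_project_archives(archive_paths):
        # Wheels can be installed as-is; other archives (sdists) must be built first.
        install, build = [], []
        for path in archive_paths:
            (install if path.endswith(".whl") else build).append(path)
        return install, build


    wheels, sdists = partition_project_archives(
        ["dist/app-1.0-py3-none-any.whl", "dist/app-1.0.tar.gz"]
    )
    assert wheels == ["dist/app-1.0-py3-none-any.whl"]
    assert sdists == ["dist/app-1.0.tar.gz"]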
@@ -123,33 +226,105 @@ class Projects(object):
         # type: (...) -> Iterator[Requirement]

         target = LocalInterpreter.create(interpreter)
+        seen = set()  # type: Set[Requirement]
+
+        source_projects = list(
+            self.project_directories
+        )  # type: List[Union[ProjectDirectory, ProjectArchive]]
+        for project_archive in self.project_archives:
+            if project_archive.is_wheel:
+                for req in DistMetadata.load(project_archive.path).requires_dists:
+                    if req not in seen:
+                        seen.add(req)
+                        yield req
+            else:
+                source_projects.append(project_archive)
+
+        wheels_to_build = []  # type: List[str]
+        prepare_metadata_errors = {}  # type: Dict[str, str]
+
+        def spawn_prepare_metadata_func(project):
+            # type: (Union[ProjectDirectory, ProjectArchive]) -> SpawnedJob[DistMetadata]
+
+            if isinstance(project, ProjectDirectory):
+                project_dir = project.path
+            else:
+                project_dir = sdist.extract_tarball(
+                    tarball_path=project.path, dest_dir=safe_mkdtemp()
+                )

-        def spawn_func(project):
-            # type: (Project) -> SpawnedJob[DistMetadata]
             return pep_517.spawn_prepare_metadata(
-
+                project_directory=project_dir,
+                target=target,
+                resolver=resolver,
+                pip_version=pip_version,
             )

-
-
-            self.projects,
+        for project_directory, dist_metadata_result in zip(
+            source_projects,
             execute_parallel(
-
-
-
+                source_projects,
+                # MyPy just can't figure out the next two args types; they're OK.
+                spawn_func=spawn_prepare_metadata_func,  # type: ignore[arg-type]
+                error_handler=Retain["Union[ProjectDirectory, ProjectArchive]"](),  # type: ignore[arg-type]
                 max_jobs=max_jobs,
             ),
         ):
-
-
-
-
-
-
+            if isinstance(dist_metadata_result, DistMetadata):
+                for req in _iter_requirements(
+                    target=target,
+                    dist_metadata=dist_metadata_result,
+                    extras=project_directory.requirement.extras,
+                ):
+                    if req not in seen:
+                        seen.add(req)
+                        yield req
+            else:
+                _item, error = dist_metadata_result
+                if isinstance(error, Job.Error) and pep_517.is_hook_unavailable_error(error):
+                    TRACER.log(
+                        "Failed to prepare metadata for {project}, trying to build a wheel "
+                        "instead: {err}".format(
+                            project=project_directory.path, err=dist_metadata_result
+                        ),
+                        V=3,
+                    )
+                    wheels_to_build.append(project_directory.path)
+                else:
+                    prepare_metadata_errors[project_directory.path] = str(error)
+
+        if wheels_to_build:
+            resolve_result = resolver.resolve_requirements(
+                requirements=wheels_to_build,
+                targets=Targets.from_target(target),
+                pip_version=pip_version,
+            )
+            for resolved_distribution in resolve_result.distributions:
+                for req in resolved_distribution.distribution.requires():
+                    if req not in seen:
+                        seen.add(req)
+                        yield req
+
+        if prepare_metadata_errors:
+            raise ResultError(
+                Error(
+                    "Encountered {count} {errors} collecting project requirements:\n"
+                    "{error_items}".format(
+                        count=len(prepare_metadata_errors),
+                        errors=pluralize(prepare_metadata_errors, "error"),
+                        error_items="\n".join(
+                            "{index}. {path}: {error}".format(index=index, path=path, error=error)
+                            for index, (path, error) in enumerate(
+                                prepare_metadata_errors.items(), start=1
+                            )
+                        ),
+                    )
+                )
+            )

     def __len__(self):
         # type: () -> int
-        return len(self.
+        return len(self.project_directories) + len(self.project_archives)


 @attr.s(frozen=True)
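In collect_requirements above, a source archive is first unpacked (via the new pex.sdist.extract_tarball) so a PEP 517 prepare-metadata hook can run against the unpacked project directory. A rough standalone approximation of that extraction step, under the usual assumption that an sdist contains a single "<name>-<version>/" top-level directory (this is not pex's implementation):

    import os
    import tarfile
    import tempfile


    def extract_sdist(tarball_path):
        # Unpack the sdist into a scratch directory and return the project directory
        # a PEP 517 frontend could point its prepare_metadata hook at.
        dest_dir = tempfile.mkdtemp()
        with tarfile.open(tarball_path) as tf:
            tf.extractall(dest_dir)
        entries = os.listdir(dest_dir)
        return os.path.join(dest_dir, entries[0]) if len(entries) == 1 else dest_dir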
@@ -330,7 +505,8 @@ def register_options(
 def get_projects(options):
     # type: (Namespace) -> Projects

-
+    project_directories = []  # type: List[ProjectDirectory]
+    project_archives = []  # type: List[ProjectArchive]
     errors = []  # type: List[str]
     for project in getattr(options, "projects", ()):
         try:
@@ -342,18 +518,26 @@ def get_projects(options):
                 )
             )
         else:
-            if isinstance(parsed, LocalProjectRequirement):
+            if isinstance(parsed, (LocalProjectRequirement, URLRequirement)):
                 if parsed.marker:
                     errors.append(
                         "The --project {project} has a marker, which is not supported. "
                         "Remove marker: ;{marker}".format(project=project, marker=parsed.marker)
                     )
+                elif isinstance(parsed, LocalProjectRequirement):
+                    project_directories.append(ProjectDirectory(requirement=parsed))
+                elif parsed.url.scheme != "file":
+                    errors.append(
+                        "The --project {project} URL must be a local file: URL.".format(
+                            project=project
+                        )
+                    )
                 else:
-
+                    project_archives.append(ProjectArchive(requirement=parsed))
             else:
                 errors.append(
                     "The --project {project} does not appear to point to a directory containing a "
-                    "Python project.".format(project=project)
+                    "Python project or a project archive (sdist or whl).".format(project=project)
                 )

     if errors:
@@ -368,7 +552,9 @@ def get_projects(options):
             )
         )

-    return Projects(
+    return Projects(
+        project_directories=tuple(project_directories), project_archives=tuple(project_archives)
+    )


 def get_group_requirements(options):
pex/resolve/resolver_configuration.py
CHANGED

@@ -251,7 +251,7 @@ class PreResolvedConfiguration(object):

 @attr.s(frozen=True)
 class VenvRepositoryConfiguration(object):
-
+    venvs = attr.ib()  # type: Tuple[Virtualenv, ...]
     pip_configuration = attr.ib()  # type: PipConfiguration

     @property
pex/resolve/resolver_options.py
CHANGED
@@ -56,9 +56,10 @@ class _HandleTransitiveAction(Action):
 class _ResolveVenvAction(Action):
     def __init__(self, *args, **kwargs):
         kwargs["nargs"] = "?"
-        super(_ResolveVenvAction, self).__init__(*args, **kwargs)
+        super(_ResolveVenvAction, self).__init__(*args, default=[], **kwargs)

     def __call__(self, parser, namespace, value, option_str=None):
+        venvs = getattr(namespace, self.dest)
         if value:
             if not os.path.exists(value):
                 raise ArgumentError(
@@ -86,7 +87,7 @@ class _ResolveVenvAction(Action):
                         "path.".format(option=option_str, value=value)
                     ),
                 )
-
+            venvs.append(venv)
         else:
             current_venv = Virtualenv.enclosing(python=sys.executable)
             if not current_venv:
@@ -105,7 +106,7 @@ class _ResolveVenvAction(Action):
                     )
                 ),
             )
-
+            venvs.append(current_venv)


 def register(
@@ -320,14 +321,18 @@
     if include_venv_repository:
         repository_choice.add_argument(
             "--venv-repository",
-            dest="
+            dest="venv_repositories",
             action=_ResolveVenvAction,
-            type=str,
             help=(
                 "Resolve requirements from the given virtual environment instead of from "
                 "--index servers, --find-links repos or a --lock file. The virtual environment to "
                 "resolve from can be specified as the path to the venv or the path of its"
-                "interpreter. If no value is specified, the current active venv is used."
+                "interpreter. If no value is specified, the current active venv is used. Multiple "
+                "virtual environments may be specified via multiple --venv-repository options and "
+                "the resolve will be the combined results. Each virtual environment will be "
+                "resolved from individually and must contain the full transitive closure of "
+                "requirements. This allows for creating a multi-platform PEX by specifying "
+                "multiple virtual environments; say one for Python 3.12 and one for Python 3.13."
             ),
         )

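--venv-repository now accumulates across repeated uses (note the default=[] added to _ResolveVenvAction.__init__ and the append calls in __call__). The repeat-to-append pattern can be reproduced with a plain argparse sketch; the action and option names below are illustrative and venv path validation is omitted:

    import argparse


    class AppendOptionalValueAction(argparse.Action):
        def __init__(self, *args, **kwargs):
            kwargs["nargs"] = "?"
            super(AppendOptionalValueAction, self).__init__(*args, default=[], **kwargs)

        def __call__(self, parser, namespace, value, option_string=None):
            # Every occurrence appends; a bare flag stands in for "the active venv".
            getattr(namespace, self.dest).append(value or "<active venv>")


    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--venv-repository", dest="venv_repositories", action=AppendOptionalValueAction
    )
    args = parser.parse_args(["--venv-repository", "py312-venv", "--venv-repository", "py313-venv"])
    print(args.venv_repositories)  # ['py312-venv', 'py313-venv']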
@@ -803,9 +808,9 @@
         sdists=tuple(sdists), wheels=tuple(wheels), pip_configuration=pip_configuration
     )

-
-    if
-    return VenvRepositoryConfiguration(
+    venvs = getattr(options, "venv_repositories", None)
+    if venvs:
+        return VenvRepositoryConfiguration(venvs=tuple(venvs), pip_configuration=pip_configuration)

     if pylock:
         return PylockRepositoryConfiguration(