pex 2.54.2 → 2.69.0 (py2.py3-none-any.whl)

This diff shows the changes between these publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

This version of pex has been flagged as potentially problematic.

Files changed (180)
  1. pex/auth.py +1 -1
  2. pex/bin/pex.py +15 -2
  3. pex/build_backend/configuration.py +5 -5
  4. pex/build_backend/wrap.py +27 -23
  5. pex/build_system/pep_517.py +4 -1
  6. pex/cache/dirs.py +17 -12
  7. pex/cli/commands/lock.py +302 -165
  8. pex/cli/commands/pip/core.py +4 -12
  9. pex/cli/commands/pip/wheel.py +1 -1
  10. pex/cli/commands/run.py +13 -20
  11. pex/cli/commands/venv.py +85 -16
  12. pex/cli/pex.py +11 -4
  13. pex/common.py +57 -7
  14. pex/compatibility.py +1 -1
  15. pex/dependency_configuration.py +87 -15
  16. pex/dist_metadata.py +143 -25
  17. pex/docs/html/_pagefind/fragment/en_4250138.pf_fragment +0 -0
  18. pex/docs/html/_pagefind/fragment/en_7125dad.pf_fragment +0 -0
  19. pex/docs/html/_pagefind/fragment/en_785d562.pf_fragment +0 -0
  20. pex/docs/html/_pagefind/fragment/en_8e94bb8.pf_fragment +0 -0
  21. pex/docs/html/_pagefind/fragment/en_a0396bb.pf_fragment +0 -0
  22. pex/docs/html/_pagefind/fragment/en_a8a3588.pf_fragment +0 -0
  23. pex/docs/html/_pagefind/fragment/en_c07d988.pf_fragment +0 -0
  24. pex/docs/html/_pagefind/fragment/en_d718411.pf_fragment +0 -0
  25. pex/docs/html/_pagefind/index/en_a2e3c5e.pf_index +0 -0
  26. pex/docs/html/_pagefind/pagefind-entry.json +1 -1
  27. pex/docs/html/_pagefind/pagefind.en_4ce1afa9e3.pf_meta +0 -0
  28. pex/docs/html/_static/documentation_options.js +1 -1
  29. pex/docs/html/_static/pygments.css +164 -146
  30. pex/docs/html/_static/styles/furo.css +1 -1
  31. pex/docs/html/_static/styles/furo.css.map +1 -1
  32. pex/docs/html/api/vars.html +25 -34
  33. pex/docs/html/buildingpex.html +25 -34
  34. pex/docs/html/genindex.html +24 -33
  35. pex/docs/html/index.html +25 -34
  36. pex/docs/html/recipes.html +25 -34
  37. pex/docs/html/scie.html +25 -34
  38. pex/docs/html/search.html +24 -33
  39. pex/docs/html/whatispex.html +25 -34
  40. pex/entry_points_txt.py +98 -0
  41. pex/environment.py +54 -33
  42. pex/finders.py +1 -1
  43. pex/hashing.py +71 -9
  44. pex/installed_wheel.py +141 -0
  45. pex/interpreter.py +41 -38
  46. pex/interpreter_constraints.py +25 -25
  47. pex/interpreter_implementation.py +40 -0
  48. pex/jobs.py +13 -6
  49. pex/pep_376.py +68 -384
  50. pex/pep_425.py +11 -2
  51. pex/pep_427.py +937 -205
  52. pex/pep_508.py +4 -5
  53. pex/pex_builder.py +5 -8
  54. pex/pex_info.py +14 -9
  55. pex/pip/dependencies/__init__.py +85 -13
  56. pex/pip/dependencies/requires.py +38 -3
  57. pex/pip/foreign_platform/__init__.py +4 -3
  58. pex/pip/installation.py +2 -2
  59. pex/pip/local_project.py +6 -14
  60. pex/pip/package_repositories/__init__.py +78 -0
  61. pex/pip/package_repositories/link_collector.py +96 -0
  62. pex/pip/tool.py +139 -33
  63. pex/pip/vcs.py +109 -43
  64. pex/pip/version.py +8 -1
  65. pex/requirements.py +121 -16
  66. pex/resolve/config.py +5 -1
  67. pex/resolve/configured_resolve.py +32 -10
  68. pex/resolve/configured_resolver.py +10 -39
  69. pex/resolve/downloads.py +4 -3
  70. pex/resolve/lock_downloader.py +16 -23
  71. pex/resolve/lock_resolver.py +41 -51
  72. pex/resolve/locked_resolve.py +89 -32
  73. pex/resolve/locker.py +145 -101
  74. pex/resolve/locker_patches.py +123 -197
  75. pex/resolve/lockfile/create.py +232 -87
  76. pex/resolve/lockfile/download_manager.py +5 -1
  77. pex/resolve/lockfile/json_codec.py +103 -28
  78. pex/resolve/lockfile/model.py +13 -35
  79. pex/resolve/lockfile/pep_751.py +117 -98
  80. pex/resolve/lockfile/requires_dist.py +17 -262
  81. pex/resolve/lockfile/subset.py +11 -0
  82. pex/resolve/lockfile/targets.py +445 -0
  83. pex/resolve/lockfile/updater.py +22 -10
  84. pex/resolve/package_repository.py +406 -0
  85. pex/resolve/pex_repository_resolver.py +1 -1
  86. pex/resolve/pre_resolved_resolver.py +19 -16
  87. pex/resolve/project.py +233 -47
  88. pex/resolve/requirement_configuration.py +28 -10
  89. pex/resolve/resolver_configuration.py +18 -32
  90. pex/resolve/resolver_options.py +234 -28
  91. pex/resolve/resolvers.py +3 -12
  92. pex/resolve/target_options.py +18 -2
  93. pex/resolve/target_system.py +908 -0
  94. pex/resolve/venv_resolver.py +670 -0
  95. pex/resolver.py +673 -209
  96. pex/scie/__init__.py +40 -1
  97. pex/scie/model.py +2 -0
  98. pex/scie/science.py +25 -3
  99. pex/sdist.py +219 -0
  100. pex/sh_boot.py +24 -21
  101. pex/sysconfig.py +5 -3
  102. pex/targets.py +31 -10
  103. pex/third_party/__init__.py +1 -1
  104. pex/tools/commands/repository.py +48 -25
  105. pex/vendor/__init__.py +4 -9
  106. pex/vendor/__main__.py +65 -41
  107. pex/vendor/_vendored/ansicolors/.layout.json +1 -1
  108. pex/vendor/_vendored/ansicolors/ansicolors-1.1.8.dist-info/RECORD +11 -0
  109. pex/vendor/_vendored/ansicolors/ansicolors-1.1.8.pex-info/original-whl-info.json +1 -0
  110. pex/vendor/_vendored/appdirs/.layout.json +1 -1
  111. pex/vendor/_vendored/appdirs/appdirs-1.4.4.dist-info/RECORD +7 -0
  112. pex/vendor/_vendored/appdirs/appdirs-1.4.4.pex-info/original-whl-info.json +1 -0
  113. pex/vendor/_vendored/attrs/.layout.json +1 -1
  114. pex/vendor/_vendored/attrs/attrs-21.5.0.dev0.dist-info/RECORD +37 -0
  115. pex/vendor/_vendored/attrs/attrs-21.5.0.dev0.pex-info/original-whl-info.json +1 -0
  116. pex/vendor/_vendored/packaging_20_9/.layout.json +1 -1
  117. pex/vendor/_vendored/packaging_20_9/packaging-20.9.dist-info/RECORD +20 -0
  118. pex/vendor/_vendored/packaging_20_9/packaging-20.9.pex-info/original-whl-info.json +1 -0
  119. pex/vendor/_vendored/packaging_20_9/pyparsing-2.4.7.dist-info/RECORD +7 -0
  120. pex/vendor/_vendored/packaging_20_9/pyparsing-2.4.7.pex-info/original-whl-info.json +1 -0
  121. pex/vendor/_vendored/packaging_21_3/.layout.json +1 -1
  122. pex/vendor/_vendored/packaging_21_3/packaging-21.3.dist-info/RECORD +20 -0
  123. pex/vendor/_vendored/packaging_21_3/packaging-21.3.pex-info/original-whl-info.json +1 -0
  124. pex/vendor/_vendored/packaging_21_3/pyparsing-3.0.7.dist-info/RECORD +18 -0
  125. pex/vendor/_vendored/packaging_21_3/pyparsing-3.0.7.pex-info/original-whl-info.json +1 -0
  126. pex/vendor/_vendored/packaging_24_0/.layout.json +1 -1
  127. pex/vendor/_vendored/packaging_24_0/packaging-24.0.dist-info/RECORD +22 -0
  128. pex/vendor/_vendored/packaging_24_0/packaging-24.0.pex-info/original-whl-info.json +1 -0
  129. pex/vendor/_vendored/packaging_25_0/.layout.json +1 -1
  130. pex/vendor/_vendored/packaging_25_0/packaging-25.0.dist-info/RECORD +24 -0
  131. pex/vendor/_vendored/packaging_25_0/packaging-25.0.pex-info/original-whl-info.json +1 -0
  132. pex/vendor/_vendored/pip/.layout.json +1 -1
  133. pex/vendor/_vendored/pip/pip/_vendor/certifi/cacert.pem +63 -1
  134. pex/vendor/_vendored/pip/pip-20.3.4.dist-info/RECORD +388 -0
  135. pex/vendor/_vendored/pip/pip-20.3.4.pex-info/original-whl-info.json +1 -0
  136. pex/vendor/_vendored/setuptools/.layout.json +1 -1
  137. pex/vendor/_vendored/setuptools/setuptools-44.0.0+3acb925dd708430aeaf197ea53ac8a752f7c1863.dist-info/RECORD +107 -0
  138. pex/vendor/_vendored/setuptools/setuptools-44.0.0+3acb925dd708430aeaf197ea53ac8a752f7c1863.pex-info/original-whl-info.json +1 -0
  139. pex/vendor/_vendored/toml/.layout.json +1 -1
  140. pex/vendor/_vendored/toml/toml-0.10.2.dist-info/RECORD +11 -0
  141. pex/vendor/_vendored/toml/toml-0.10.2.pex-info/original-whl-info.json +1 -0
  142. pex/vendor/_vendored/tomli/.layout.json +1 -1
  143. pex/vendor/_vendored/tomli/tomli-2.0.1.dist-info/RECORD +10 -0
  144. pex/vendor/_vendored/tomli/tomli-2.0.1.pex-info/original-whl-info.json +1 -0
  145. pex/venv/installer.py +46 -19
  146. pex/venv/venv_pex.py +6 -3
  147. pex/version.py +1 -1
  148. pex/wheel.py +188 -40
  149. pex/whl.py +67 -0
  150. pex/windows/__init__.py +14 -11
  151. {pex-2.54.2.dist-info → pex-2.69.0.dist-info}/METADATA +6 -5
  152. {pex-2.54.2.dist-info → pex-2.69.0.dist-info}/RECORD +157 -133
  153. {pex-2.54.2.dist-info → pex-2.69.0.dist-info}/entry_points.txt +1 -0
  154. {pex-2.54.2.dist-info → pex-2.69.0.dist-info}/pylock/pylock.toml +1 -1
  155. pex/docs/html/_pagefind/fragment/en_42c9d8c.pf_fragment +0 -0
  156. pex/docs/html/_pagefind/fragment/en_45dd5a2.pf_fragment +0 -0
  157. pex/docs/html/_pagefind/fragment/en_4ca74d2.pf_fragment +0 -0
  158. pex/docs/html/_pagefind/fragment/en_77273d5.pf_fragment +0 -0
  159. pex/docs/html/_pagefind/fragment/en_87a59c5.pf_fragment +0 -0
  160. pex/docs/html/_pagefind/fragment/en_8dc89b5.pf_fragment +0 -0
  161. pex/docs/html/_pagefind/fragment/en_9d1319b.pf_fragment +0 -0
  162. pex/docs/html/_pagefind/fragment/en_e55df9d.pf_fragment +0 -0
  163. pex/docs/html/_pagefind/index/en_1e98c6f.pf_index +0 -0
  164. pex/docs/html/_pagefind/pagefind.en_d1c488ecae.pf_meta +0 -0
  165. pex/vendor/_vendored/ansicolors/ansicolors-1.1.8.dist-info/INSTALLER +0 -1
  166. pex/vendor/_vendored/appdirs/appdirs-1.4.4.dist-info/INSTALLER +0 -1
  167. pex/vendor/_vendored/attrs/attrs-21.5.0.dev0.dist-info/INSTALLER +0 -1
  168. pex/vendor/_vendored/packaging_20_9/packaging-20.9.dist-info/INSTALLER +0 -1
  169. pex/vendor/_vendored/packaging_20_9/pyparsing-2.4.7.dist-info/INSTALLER +0 -1
  170. pex/vendor/_vendored/packaging_21_3/packaging-21.3.dist-info/INSTALLER +0 -1
  171. pex/vendor/_vendored/packaging_21_3/pyparsing-3.0.7.dist-info/INSTALLER +0 -1
  172. pex/vendor/_vendored/packaging_24_0/packaging-24.0.dist-info/INSTALLER +0 -1
  173. pex/vendor/_vendored/packaging_25_0/packaging-25.0.dist-info/INSTALLER +0 -1
  174. pex/vendor/_vendored/pip/pip-20.3.4.dist-info/INSTALLER +0 -1
  175. pex/vendor/_vendored/setuptools/setuptools-44.0.0+3acb925dd708430aeaf197ea53ac8a752f7c1863.dist-info/INSTALLER +0 -1
  176. pex/vendor/_vendored/toml/toml-0.10.2.dist-info/INSTALLER +0 -1
  177. pex/vendor/_vendored/tomli/tomli-2.0.1.dist-info/INSTALLER +0 -1
  178. {pex-2.54.2.dist-info → pex-2.69.0.dist-info}/WHEEL +0 -0
  179. {pex-2.54.2.dist-info → pex-2.69.0.dist-info}/licenses/LICENSE +0 -0
  180. {pex-2.54.2.dist-info → pex-2.69.0.dist-info}/top_level.txt +0 -0
pex/resolve/lockfile/targets.py (new file)
@@ -0,0 +1,445 @@
+ from __future__ import absolute_import
+
+ import itertools
+ import os.path
+ import tempfile
+ from collections import OrderedDict, defaultdict
+
+ from pex import atexit
+ from pex.common import pluralize, safe_delete
+ from pex.interpreter_constraints import iter_compatible_versions
+ from pex.interpreter_implementation import InterpreterImplementation
+ from pex.network_configuration import NetworkConfiguration
+ from pex.orderedset import OrderedSet
+ from pex.pep_503 import ProjectName
+ from pex.resolve.package_repository import ReposConfiguration
+ from pex.resolve.requirement_configuration import RequirementConfiguration
+ from pex.resolve.target_system import (
+     ExtraMarkers,
+     MarkerEnv,
+     TargetSystem,
+     UniversalTarget,
+     has_marker,
+ )
+ from pex.resolver import DownloadRequest
+ from pex.targets import LocalInterpreter, Targets
+ from pex.third_party.packaging.markers import Marker
+ from pex.third_party.packaging.specifiers import SpecifierSet
+ from pex.typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from typing import (
+         DefaultDict,
+         Dict,
+         FrozenSet,
+         Iterable,
+         Iterator,
+         List,
+         Mapping,
+         Optional,
+         Text,
+         Tuple,
+     )
+
+     import attr  # vendor:skip
+ else:
+     from pex.third_party import attr
+
+
+ def _calculate_split_markers(
+     requirement_configuration,  # type: RequirementConfiguration
+     network_configuration,  # type: NetworkConfiguration
+     repos_configuration,  # type: ReposConfiguration
+ ):
+     # type: (...) -> Mapping[str, Marker]
+
+     split_markers = {
+         str(scope.marker): scope.marker
+         for repo in itertools.chain(
+             repos_configuration.index_repos, repos_configuration.find_links_repos
+         )
+         for scope in repo.scopes
+         if scope.marker
+     }
+
+     projects_with_markers = defaultdict(dict)  # type: DefaultDict[ProjectName, Dict[str, Marker]]
+     for requirement in requirement_configuration.parse_requirements(network_configuration):
+         if requirement.project_name and requirement.marker:
+             projects_with_markers[requirement.project_name][
+                 str(requirement.marker)
+             ] = requirement.marker
+     for constraint in requirement_configuration.parse_constraints(network_configuration):
+         if constraint.marker:
+             projects_with_markers[constraint.project_name][
+                 str(constraint.marker)
+             ] = constraint.marker
+     split_markers.update(
+         (marker_str, marker)
+         for markers in projects_with_markers.values()
+         for marker_str, marker in markers.items()
+         # N.B.: We only split the universal resolve for root requirements that have two or
+         # more marker variants. If there is just one, that represents a conditional
+         # dependency which can be included in a single resolve without splitting.
+         if len(markers) > 1
+     )
+
+     return split_markers
+
+
+ def _iter_universal_targets(
+     universal_target,  # type: UniversalTarget
+     split_markers,  # type: Mapping[str, Marker]
+ ):
+     # type: (...) -> Iterator[UniversalTarget]
+
+     target_systems = universal_target.systems or TargetSystem.values()
+
+     interpreter_implementations = (
+         (universal_target.implementation,)
+         if universal_target.implementation
+         else InterpreterImplementation.values()
+     )
+
+     requires_pythons = OrderedSet()  # type: OrderedSet[SpecifierSet]
+     has_python_version = any(
+         has_marker(marker, "python_version") for marker in split_markers.values()
+     )
+     has_python_full_version = any(
+         has_marker(marker, "python_full_version") for marker in split_markers.values()
+     )
+     if has_python_full_version:
+         requires_pythons.update(
+             SpecifierSet(
+                 "=={major}.{minor}.{patch}".format(
+                     major=version[0], minor=version[1], patch=version[2]
+                 )
+             )
+             for version in iter_compatible_versions(universal_target.requires_python)
+         )
+     elif has_python_version:
+         requires_pythons.update(
+             SpecifierSet("=={major}.{minor}.*".format(major=version[0], minor=version[1]))
+             for version in OrderedSet(
+                 version[:2]
+                 for version in iter_compatible_versions(universal_target.requires_python)
+             )
+         )
+     else:
+         requires_pythons.update(universal_target.requires_python)
+         if not requires_pythons:
+             requires_pythons.add(SpecifierSet())
+
+     systems_by_markers = defaultdict(
+         list
+     )  # type: DefaultDict[FrozenSet[str], List[Tuple[TargetSystem.Value, InterpreterImplementation.Value, SpecifierSet]]]
+     for system in target_systems:
+         for implementation in interpreter_implementations:
+             for python_specifier in requires_pythons:
+                 marker_env = MarkerEnv.create(
+                     extras=(),
+                     universal_target=UniversalTarget(
+                         implementation=implementation,
+                         systems=(system,),
+                         requires_python=(python_specifier,),
+                     ),
+                 )
+                 system_repo_markers = frozenset(
+                     marker_str
+                     for marker_str, marker in split_markers.items()
+                     if marker_env.evaluate(marker)
+                 )
+                 systems_by_markers[system_repo_markers].append(
+                     (system, implementation, python_specifier)
+                 )
+
+     for markers, value in systems_by_markers.items():
+         systems = OrderedSet()  # type: OrderedSet[TargetSystem.Value]
+         implementations = OrderedSet()  # type: OrderedSet[InterpreterImplementation.Value]
+         requires_python = OrderedSet()  # type: OrderedSet[SpecifierSet]
+         for system, implementation, python_specifier in value:
+             systems.add(system)
+             implementations.add(implementation)
+             requires_python.add(python_specifier)
+         impl = implementations.pop() if len(implementations) == 1 else None
+         yield UniversalTarget(
+             implementation=impl,
+             systems=tuple(systems),
+             requires_python=tuple(requires_python),
+         )
+
+
+ if TYPE_CHECKING:
+     from pex.requirements import ParsedRequirement
+
+
+ @attr.s(frozen=True)
+ class DownloadInput(object):
+     download_requests = attr.ib()  # type: Tuple[DownloadRequest, ...]
+     direct_requirements = attr.ib()  # type: Tuple[ParsedRequirement, ...]
+
+
+ def _comment_out_requirements(
+     requirements_file,  # type: Text
+     requirements,  # type: Iterable[ParsedRequirement]
+ ):
+     # type: (...) -> Text
+
+     lines_to_comment_out = set(
+         itertools.chain.from_iterable(
+             range(requirement.line.start_line, requirement.line.end_line + 1)
+             for requirement in requirements
+         )
+     )
+
+     # N.B.: We create the edited requirements file in the same directory as the original so that any
+     # relative path references in the requirements file are still valid.
+     out_fd, edited_requirements_file = tempfile.mkstemp(
+         dir=os.path.dirname(requirements_file),
+         prefix="pex_lock_split.",
+         suffix=".{file_name}".format(file_name=os.path.basename(requirements_file)),
+     )
+     atexit.register(safe_delete, edited_requirements_file)
+     try:
+         with open(requirements_file, "rb") as in_fp:
+             for line_no, text in enumerate(in_fp, start=1):
+                 if line_no in lines_to_comment_out:
+                     os.write(out_fd, b"# ")
+                 os.write(out_fd, text)
+     finally:
+         os.close(out_fd)
+     return edited_requirements_file
+
+
+ @attr.s
+ class Split(object):
+     requirements_by_project_name = attr.ib(
+         factory=OrderedDict
+     )  # type: OrderedDict[ProjectName, ParsedRequirement]
+     provenance = attr.ib(factory=OrderedSet)  # type: OrderedSet[ParsedRequirement]
+
+     def applies(
+         self,
+         universal_target,  # type: UniversalTarget
+         requirement,  # type: ParsedRequirement
+     ):
+         # type: (...) -> bool
+
+         if not requirement.marker:
+             return True
+
+         requirements = [(req.marker, str(req)) for req in self.provenance if req.marker]
+         if not requirements:
+             return True
+
+         marker_env = attr.evolve(
+             universal_target, extra_markers=ExtraMarkers.extract(requirements)
+         ).marker_env()
+         return marker_env.evaluate(requirement.marker)
+
+     def add(
+         self,
+         universal_target,  # type: UniversalTarget
+         project_name,  # type: ProjectName
+         requirement,  # type: ParsedRequirement
+     ):
+         # type: (...) -> Optional[Split]
+
+         if not self.applies(universal_target, requirement):
+             return None
+
+         existing_requirement = self.requirements_by_project_name.setdefault(
+             project_name, requirement
+         )
+         if existing_requirement == requirement:
+             return None
+
+         self.provenance.add(existing_requirement)
+
+         provenance = OrderedSet(req for req in self.provenance if req != existing_requirement)
+         provenance.add(requirement)
+
+         requirements_by_project_name = self.requirements_by_project_name.copy()
+         requirements_by_project_name[project_name] = requirement
+
+         return Split(
+             requirements_by_project_name=requirements_by_project_name, provenance=provenance
+         )
+
+     def requirement_configuration(
+         self,
+         unnamed_requirements,  # type: Iterable[ParsedRequirement]
+         requirement_configuration,  # type: RequirementConfiguration
+         network_configuration=None,  # type: Optional[NetworkConfiguration]
+     ):
+         # type: (...) -> RequirementConfiguration
+
+         if not self.provenance:
+             return requirement_configuration
+
+         requirements = list(str(req) for req in unnamed_requirements)
+
+         requirement_files = OrderedSet(
+             os.path.realpath(requirement_file)
+             for requirement_file in requirement_configuration.requirement_files
+         )  # type: OrderedSet[Text]
+         if requirement_files:
+             provenance_by_project_name = {
+                 parsed_requirement.project_name: parsed_requirement
+                 for parsed_requirement in self.provenance
+                 if parsed_requirement.project_name
+             }
+
+             requirements_to_comment_out_by_source = defaultdict(
+                 list
+             )  # type: DefaultDict[Text, List[ParsedRequirement]]
+             for parsed_requirement in requirement_configuration.parse_requirements(
+                 network_configuration=network_configuration
+             ):
+                 if not parsed_requirement.project_name:
+                     continue
+
+                 provenance = provenance_by_project_name.get(parsed_requirement.project_name)
+                 if provenance and (parsed_requirement != provenance):
+                     if parsed_requirement.line.source in requirement_files:
+                         # We comment out the requirement
+                         requirements_to_comment_out_by_source[
+                             parsed_requirement.line.source
+                         ].append(parsed_requirement)
+                     else:
+                         # We drop the requirement
+                         pass
+                 elif parsed_requirement.line.source not in requirement_files:
+                     requirements.append(str(parsed_requirement))
+             if requirements_to_comment_out_by_source:
+                 new_requirement_files = OrderedSet()  # type: OrderedSet[Text]
+                 for source, parsed_requirements in requirements_to_comment_out_by_source.items():
+                     if source in requirement_files:
+                         new_requirement_files.add(
+                             _comment_out_requirements(source, parsed_requirements)
+                         )
+                     else:
+                         new_requirement_files.add(source)
+                 requirement_files = new_requirement_files
+         else:
+             requirements.extend(str(req) for req in self.requirements_by_project_name.values())
+
+         return RequirementConfiguration(
+             requirements=tuple(requirements),
+             requirement_files=tuple(requirement_files),
+             constraint_files=requirement_configuration.constraint_files,
+         )
+
+
+ def calculate_download_input(
+     targets,  # type: Targets
+     requirement_configuration,  # type: RequirementConfiguration
+     network_configuration,  # type: NetworkConfiguration
+     repos_configuration,  # type: ReposConfiguration
+     universal_target=None,  # type: Optional[UniversalTarget]
+ ):
+     # type: (...) -> DownloadInput
+
+     direct_requirements = requirement_configuration.parse_requirements(network_configuration)
+     if not universal_target:
+         return DownloadInput(
+             download_requests=tuple(
+                 DownloadRequest.create(
+                     target=target, requirement_configuration=requirement_configuration
+                 )
+                 for target in targets.unique_targets()
+             ),
+             direct_requirements=direct_requirements,
+         )
+
+     target = LocalInterpreter.create(targets.interpreter)
+     split_markers = _calculate_split_markers(
+         requirement_configuration, network_configuration, repos_configuration
+     )
+     if not split_markers:
+         return DownloadInput(
+             download_requests=tuple(
+                 [
+                     DownloadRequest.create(
+                         target=target,
+                         universal_target=universal_target,
+                         requirement_configuration=requirement_configuration,
+                     )
+                 ]
+             ),
+             direct_requirements=direct_requirements,
+         )
+
+     named_requirements = (
+         OrderedDict()
+     )  # type: OrderedDict[ProjectName, OrderedSet[ParsedRequirement]]
+     unnamed_requirements = OrderedSet()  # type: OrderedSet[ParsedRequirement]
+     for direct_requirement in direct_requirements:
+         if direct_requirement.project_name:
+             named_requirements.setdefault(direct_requirement.project_name, OrderedSet()).add(
+                 direct_requirement
+             )
+         else:
+             unnamed_requirements.add(direct_requirement)
+
+     requirement_splits_by_universal_target = defaultdict(
+         lambda: [Split()]
+     )  # type: DefaultDict[UniversalTarget, List[Split]]
+     for universal_target in _iter_universal_targets(universal_target, split_markers):
+         marker_env = universal_target.marker_env()
+         requirement_splits = requirement_splits_by_universal_target[universal_target]
+         for project_name, remote_requirements in named_requirements.items():
+             for requirement_split in list(requirement_splits):
+                 for remote_requirement in remote_requirements:
+                     if remote_requirement.marker and not marker_env.evaluate(
+                         remote_requirement.marker
+                     ):
+                         continue
+                     new_split = requirement_split.add(
+                         universal_target, project_name, remote_requirement
+                     )
+                     if new_split:
+                         requirement_splits.append(new_split)
+
+     download_requests = []
+     for universal_target, splits in requirement_splits_by_universal_target.items():
+         if len(splits) == 1:
+             download_requests.append(
+                 DownloadRequest.create(
+                     target=target,
+                     universal_target=universal_target,
+                     requirement_configuration=splits[0].requirement_configuration(
+                         unnamed_requirements,
+                         requirement_configuration,
+                         network_configuration=network_configuration,
+                     ),
+                 )
+             )
+             continue
+
+         for split in splits:
+             download_requests.append(
+                 DownloadRequest.create(
+                     target=target,
+                     universal_target=attr.evolve(
+                         universal_target,
+                         extra_markers=ExtraMarkers.extract(
+                             (requirement.marker, str(requirement))
+                             for requirement in split.provenance
+                             if requirement.marker
+                         ),
+                     ),
+                     requirement_configuration=split.requirement_configuration(
+                         unnamed_requirements,
+                         requirement_configuration,
+                         network_configuration=network_configuration,
+                     ),
+                     provenance="split by {requirements} {reqs}".format(
+                         requirements=pluralize(split.provenance, "requirement"),
+                         reqs=", ".join("'{req}'".format(req=req) for req in split.provenance),
+                     ),
+                 )
+             )
+
+     return DownloadInput(
+         download_requests=tuple(download_requests), direct_requirements=direct_requirements
+     )
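
For orientation, the splitting implemented above is driven by PEP 508 environment markers: each universal-target slice gets its own marker environment, and a root requirement lands in a slice only if its marker evaluates true there. The following is a minimal sketch of that idea using the standalone packaging library directly, not pex code; the requirement strings and environment dicts are illustrative assumptions.

```python
# Minimal sketch (not pex code) of the idea behind _calculate_split_markers /
# calculate_download_input above: PEP 508 markers on root requirements decide
# which requirements belong in which resolve "slice". Uses the standalone
# `packaging` library; the requirement strings and environments are
# illustrative assumptions, not anything shipped by pex.
from packaging.requirements import Requirement

requirements = [
    Requirement('pywin32>=306; sys_platform == "win32"'),
    Requirement('uvloop>=0.19; sys_platform != "win32"'),
    Requirement("requests>=2.31"),
]

# Two hypothetical universal-target slices, one per operating system family.
target_envs = {
    "windows": {"sys_platform": "win32", "python_version": "3.11"},
    "linux": {"sys_platform": "linux", "python_version": "3.11"},
}

for name, env in target_envs.items():
    kept = [
        req.name
        for req in requirements
        # A requirement without a marker applies everywhere; otherwise its
        # marker is evaluated against the slice's environment.
        if req.marker is None or req.marker.evaluate(environment=env)
    ]
    print(name, kept)
# windows ['pywin32', 'requests']
# linux ['uvloop', 'requests']
```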
pex/resolve/lockfile/updater.py
@@ -26,8 +26,9 @@ from pex.resolve.locked_resolve import (
  )
  from pex.resolve.lockfile.create import create
  from pex.resolve.lockfile.model import Lockfile
+ from pex.resolve.package_repository import ReposConfiguration
  from pex.resolve.requirement_configuration import RequirementConfiguration
- from pex.resolve.resolver_configuration import PipConfiguration, PipLog, ReposConfiguration
+ from pex.resolve.resolver_configuration import PipConfiguration, PipLog
  from pex.result import Error, ResultError, catch, try_
  from pex.sorted_tuple import SortedTuple
  from pex.targets import Target, Targets
@@ -251,6 +252,7 @@ class ResolveUpdater(object):
          lock_configuration,  # type: LockConfiguration
          pip_configuration,  # type: PipConfiguration
          dependency_configuration,  # type: DependencyConfiguration
+         avoid_downloads,  # type: bool
      ):
          # type: (...) -> Union[ResolveUpdater, Error]

@@ -309,6 +311,7 @@ class ResolveUpdater(object):
              lock_configuration=lock_configuration,
              pip_configuration=pip_configuration,
              dependency_configuration=dependency_configuration,
+             avoid_downloads=avoid_downloads,
          )

      @classmethod
@@ -321,6 +324,7 @@ class ResolveUpdater(object):
          lock_configuration,  # type: LockConfiguration
          pip_configuration,  # type: PipConfiguration
          dependency_configuration,  # type: DependencyConfiguration
+         avoid_downloads,  # type: bool
      ):
          # type: (...) -> ResolveUpdater

@@ -361,6 +365,7 @@ class ResolveUpdater(object):
              lock_configuration=lock_configuration,
              pip_configuration=pip_configuration,
              dependency_configuration=dependency_configuration,
+             avoid_downloads=avoid_downloads,
          )

      requirement_configuration = attr.ib()  # type: RequirementConfiguration
@@ -376,6 +381,7 @@ class ResolveUpdater(object):
      update_requirements_by_project_name = attr.ib(
          factory=dict
      )  # type: Mapping[ProjectName, Requirement]
+     avoid_downloads = attr.ib(default=False)

      def iter_updated_requirements(self):
          # type: () -> Iterator[Requirement]
@@ -501,6 +507,7 @@ class ResolveUpdater(object):
                  targets=Targets.from_target(target),
                  pip_configuration=self.pip_configuration,
                  dependency_configuration=self.dependency_configuration,
+                 avoid_downloads=self.avoid_downloads,
              )
          )
          assert 1 == len(updated_lock_file.locked_resolves)
@@ -690,6 +697,7 @@ class LockUpdater(object):
          use_pip_config,  # type: bool
          dependency_configuration,  # type: DependencyConfiguration
          pip_log,  # type: Optional[PipLog]
+         avoid_downloads,  # type: bool
      ):
          # type: (...) -> LockUpdater

@@ -707,15 +715,17 @@ class LockUpdater(object):
          )
          return cls(
              lock_file=lock_file,
-             lock_configuration=lock_file.lock_configuration(),
+             lock_configuration=lock_file.configuration,
              pip_configuration=pip_configuration,
              dependency_configuration=dependency_configuration,
+             avoid_downloads=avoid_downloads,
          )

      lock_file = attr.ib()  # type: Lockfile
      lock_configuration = attr.ib()  # type: LockConfiguration
      pip_configuration = attr.ib()  # type: PipConfiguration
      dependency_configuration = attr.ib()  # type: DependencyConfiguration
+     avoid_downloads = attr.ib()  # type: bool

      def sync(
          self,
@@ -737,6 +747,7 @@ class LockUpdater(object):
                  lock_configuration=self.lock_configuration,
                  pip_configuration=self.pip_configuration,
                  dependency_configuration=self.dependency_configuration,
+                 avoid_downloads=self.avoid_downloads,
              )
          )
          return self._perform_update(
@@ -772,6 +783,7 @@ class LockUpdater(object):
              lock_configuration=self.lock_configuration,
              pip_configuration=self.pip_configuration,
              dependency_configuration=self.dependency_configuration,
+             avoid_downloads=self.avoid_downloads,
          )
          return self._perform_update(
              update_requests=update_requests,
@@ -788,11 +800,11 @@ class LockUpdater(object):
          # type: (...) -> Union[LockUpdate, Error]

          error_by_target = OrderedDict()  # type: OrderedDict[Target, Error]
-         locked_resolve_by_platform_tag = OrderedDict(
-             (locked_resolve.platform_tag, locked_resolve)
+         locked_resolve_by_target_platform = OrderedDict(
+             (locked_resolve.target_platform, locked_resolve)
              for locked_resolve in self.lock_file.locked_resolves
          )  # type: OrderedDict[Optional[tags.Tag], LockedResolve]
-         resolve_updates_by_platform_tag = (
+         resolve_updates_by_target_platform = (
              {}
          )  # type: Dict[Optional[tags.Tag], Mapping[ProjectName, Optional[Update]]]

@@ -808,9 +820,9 @@ class LockUpdater(object):
              if isinstance(result, Error):
                  error_by_target[update_request.target] = result
              else:
-                 platform_tag = update_request.locked_resolve.platform_tag
-                 locked_resolve_by_platform_tag[platform_tag] = result.updated_resolve
-                 resolve_updates_by_platform_tag[platform_tag] = result.updates
+                 target_platform = update_request.locked_resolve.target_platform
+                 locked_resolve_by_target_platform[target_platform] = result.updated_resolve
+                 resolve_updates_by_target_platform[target_platform] = result.updates

          if error_by_target:
              return Error(
@@ -833,8 +845,8 @@ class LockUpdater(object):
              resolves=tuple(
                  ResolveUpdate(
                      updated_resolve=updated_resolve,
-                     updates=resolve_updates_by_platform_tag.get(platform_tag, {}),
+                     updates=resolve_updates_by_target_platform.get(target_platform, {}),
                  )
-                 for platform_tag, updated_resolve in locked_resolve_by_platform_tag.items()
+                 for target_platform, updated_resolve in locked_resolve_by_target_platform.items()
              ),
          )
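
The updater changes above mostly thread a new avoid_downloads field through attrs-based configuration classes. As a minimal sketch of the attrs idiom involved (attr.ib with a default plus attr.evolve producing modified copies of frozen instances), using illustrative class and field names rather than pex's actual ones:

```python
# Minimal sketch (not pex code) of the attrs idiom the updater diff leans on:
# a new field declared with a default via attr.ib(default=...), and
# attr.evolve() producing a modified copy of a frozen instance. The class and
# field names here are illustrative assumptions.
import attr


@attr.s(frozen=True)
class UpdaterSettings(object):
    pip_version = attr.ib()  # type: str
    avoid_downloads = attr.ib(default=False)  # type: bool


base = UpdaterSettings(pip_version="24.0")
offline = attr.evolve(base, avoid_downloads=True)

print(base)     # UpdaterSettings(pip_version='24.0', avoid_downloads=False)
print(offline)  # UpdaterSettings(pip_version='24.0', avoid_downloads=True)
```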