pex 2.61.1-py2.py3-none-any.whl → 2.62.1-py2.py3-none-any.whl

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release: this version of pex might be problematic.
Files changed (44)
  1. pex/docs/html/_pagefind/fragment/en_1bbeb07.pf_fragment +0 -0
  2. pex/docs/html/_pagefind/fragment/{en_bf32fcd.pf_fragment → en_1befd43.pf_fragment} +0 -0
  3. pex/docs/html/_pagefind/fragment/en_45eea4b.pf_fragment +0 -0
  4. pex/docs/html/_pagefind/fragment/{en_c9714ee.pf_fragment → en_7822de6.pf_fragment} +0 -0
  5. pex/docs/html/_pagefind/fragment/en_87f76ba.pf_fragment +0 -0
  6. pex/docs/html/_pagefind/fragment/{en_4695b51.pf_fragment → en_a89f2ec.pf_fragment} +0 -0
  7. pex/docs/html/_pagefind/fragment/{en_57f2ab1.pf_fragment → en_c2a647e.pf_fragment} +0 -0
  8. pex/docs/html/_pagefind/fragment/en_d2f2c1b.pf_fragment +0 -0
  9. pex/docs/html/_pagefind/index/en_31a0754.pf_index +0 -0
  10. pex/docs/html/_pagefind/pagefind-entry.json +1 -1
  11. pex/docs/html/_pagefind/pagefind.en_32e8257caf.pf_meta +0 -0
  12. pex/docs/html/_static/documentation_options.js +1 -1
  13. pex/docs/html/api/vars.html +5 -5
  14. pex/docs/html/buildingpex.html +5 -5
  15. pex/docs/html/genindex.html +5 -5
  16. pex/docs/html/index.html +5 -5
  17. pex/docs/html/recipes.html +5 -5
  18. pex/docs/html/scie.html +5 -5
  19. pex/docs/html/search.html +5 -5
  20. pex/docs/html/whatispex.html +5 -5
  21. pex/pep_427.py +23 -2
  22. pex/resolve/locker.py +6 -51
  23. pex/resolve/locker_patches.py +123 -209
  24. pex/resolve/lockfile/create.py +9 -10
  25. pex/resolve/lockfile/targets.py +292 -26
  26. pex/resolve/requirement_configuration.py +15 -8
  27. pex/resolve/target_system.py +512 -119
  28. pex/resolver.py +181 -90
  29. pex/vendor/__main__.py +2 -0
  30. pex/venv/venv_pex.py +1 -1
  31. pex/version.py +1 -1
  32. {pex-2.61.1.dist-info → pex-2.62.1.dist-info}/METADATA +4 -4
  33. {pex-2.61.1.dist-info → pex-2.62.1.dist-info}/RECORD +38 -38
  34. pex/docs/html/_pagefind/fragment/en_245699a.pf_fragment +0 -0
  35. pex/docs/html/_pagefind/fragment/en_aefc110.pf_fragment +0 -0
  36. pex/docs/html/_pagefind/fragment/en_b7fad62.pf_fragment +0 -0
  37. pex/docs/html/_pagefind/fragment/en_e6c0aae.pf_fragment +0 -0
  38. pex/docs/html/_pagefind/index/en_7e57d09.pf_index +0 -0
  39. pex/docs/html/_pagefind/pagefind.en_c578c4b677.pf_meta +0 -0
  40. {pex-2.61.1.dist-info → pex-2.62.1.dist-info}/WHEEL +0 -0
  41. {pex-2.61.1.dist-info → pex-2.62.1.dist-info}/entry_points.txt +0 -0
  42. {pex-2.61.1.dist-info → pex-2.62.1.dist-info}/licenses/LICENSE +0 -0
  43. {pex-2.61.1.dist-info → pex-2.62.1.dist-info}/pylock/pylock.toml +0 -0
  44. {pex-2.61.1.dist-info → pex-2.62.1.dist-info}/top_level.txt +0 -0
pex/resolve/lockfile/targets.py

@@ -1,24 +1,45 @@
 from __future__ import absolute_import
 
 import itertools
-from collections import defaultdict
+import os.path
+import tempfile
+from collections import OrderedDict, defaultdict
 
+from pex import atexit
+from pex.common import pluralize, safe_delete
 from pex.interpreter_constraints import iter_compatible_versions
 from pex.interpreter_implementation import InterpreterImplementation
 from pex.network_configuration import NetworkConfiguration
 from pex.orderedset import OrderedSet
 from pex.pep_503 import ProjectName
-from pex.requirements import LocalProjectRequirement
 from pex.resolve.package_repository import ReposConfiguration
 from pex.resolve.requirement_configuration import RequirementConfiguration
-from pex.resolve.target_system import MarkerEnv, TargetSystem, UniversalTarget, has_marker
+from pex.resolve.target_system import (
+    ExtraMarkers,
+    MarkerEnv,
+    TargetSystem,
+    UniversalTarget,
+    has_marker,
+)
+from pex.resolver import DownloadRequest
 from pex.targets import LocalInterpreter, Targets
 from pex.third_party.packaging.markers import Marker
 from pex.third_party.packaging.specifiers import SpecifierSet
 from pex.typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import DefaultDict, Dict, FrozenSet, Iterator, List, Mapping, Optional, Tuple
+    from typing import (
+        DefaultDict,
+        Dict,
+        FrozenSet,
+        Iterable,
+        Iterator,
+        List,
+        Mapping,
+        Optional,
+        Text,
+        Tuple,
+    )
 
     import attr  # vendor:skip
 else:
@@ -43,7 +64,7 @@ def _calculate_split_markers(
 
     projects_with_markers = defaultdict(dict)  # type: DefaultDict[ProjectName, Dict[str, Marker]]
     for requirement in requirement_configuration.parse_requirements(network_configuration):
-        if not isinstance(requirement, LocalProjectRequirement) and requirement.marker:
+        if requirement.project_name and requirement.marker:
             projects_with_markers[requirement.project_name][
                 str(requirement.marker)
             ] = requirement.marker
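For context, _calculate_split_markers groups each project's distinct environment markers by their string form; the change above considers any named requirement that carries a marker rather than filtering by requirement type. A minimal sketch of the same grouping using the standalone packaging library (pex uses its vendored copy and its own ProjectName/ParsedRequirement types, so the names below are illustrative):

    # Group each project's distinct markers, keyed by marker string so duplicates collapse.
    from collections import defaultdict

    from packaging.requirements import Requirement

    reqs = [
        Requirement('pywin32>=226; sys_platform == "win32"'),
        Requirement('uvloop; sys_platform != "win32"'),
        Requirement('uvloop; sys_platform != "win32"'),  # duplicate marker collapses
    ]
    projects_with_markers = defaultdict(dict)
    for req in reqs:
        if req.name and req.marker:
            projects_with_markers[req.name][str(req.marker)] = req.marker

    assert sorted(projects_with_markers) == ["pywin32", "uvloop"]
    assert len(projects_with_markers["uvloop"]) == 1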
@@ -147,33 +168,278 @@ def _iter_universal_targets(
         )
 
 
+if TYPE_CHECKING:
+    from pex.requirements import ParsedRequirement
+
+
 @attr.s(frozen=True)
-class LockTargets(object):
-    @classmethod
-    def calculate(
-        cls,
-        targets,  # type: Targets
+class DownloadInput(object):
+    download_requests = attr.ib()  # type: Tuple[DownloadRequest, ...]
+    direct_requirements = attr.ib()  # type: Tuple[ParsedRequirement, ...]
+
+
+def _comment_out_requirements(
+    requirements_file,  # type: Text
+    requirements,  # type: Iterable[ParsedRequirement]
+):
+    # type: (...) -> Text
+
+    lines_to_comment_out = set(
+        itertools.chain.from_iterable(
+            range(requirement.line.start_line, requirement.line.end_line + 1)
+            for requirement in requirements
+        )
+    )
+
+    # N.B.: We create the edited requirements file in the same directory as the original so that any
+    # relative path references in the requirements file are still valid.
+    out_fd, edited_requirements_file = tempfile.mkstemp(
+        dir=os.path.dirname(requirements_file),
+        prefix="pex_lock_split.",
+        suffix=".{file_name}".format(file_name=os.path.basename(requirements_file)),
+    )
+    atexit.register(safe_delete, edited_requirements_file)
+    try:
+        with open(requirements_file, "rb") as in_fp:
+            for line_no, text in enumerate(in_fp, start=1):
+                if line_no in lines_to_comment_out:
+                    os.write(out_fd, b"# ")
+                os.write(out_fd, text)
+    finally:
+        os.close(out_fd)
+    return edited_requirements_file
+
+
+@attr.s
+class Split(object):
+    requirements_by_project_name = attr.ib(
+        factory=OrderedDict
+    )  # type: OrderedDict[ProjectName, ParsedRequirement]
+    provenance = attr.ib(factory=OrderedSet)  # type: OrderedSet[ParsedRequirement]
+
+    def applies(
+        self,
+        universal_target,  # type: UniversalTarget
+        requirement,  # type: ParsedRequirement
+    ):
+        # type: (...) -> bool
+
+        if not requirement.marker:
+            return True
+
+        requirements = [(req.marker, str(req)) for req in self.provenance if req.marker]
+        if not requirements:
+            return True
+
+        marker_env = attr.evolve(
+            universal_target, extra_markers=ExtraMarkers.extract(requirements)
+        ).marker_env()
+        return marker_env.evaluate(requirement.marker)
+
+    def add(
+        self,
+        universal_target,  # type: UniversalTarget
+        project_name,  # type: ProjectName
+        requirement,  # type: ParsedRequirement
+    ):
+        # type: (...) -> Optional[Split]
+
+        if not self.applies(universal_target, requirement):
+            return None
+
+        existing_requirement = self.requirements_by_project_name.setdefault(
+            project_name, requirement
+        )
+        if existing_requirement == requirement:
+            return None
+
+        self.provenance.add(existing_requirement)
+
+        provenance = OrderedSet(req for req in self.provenance if req != existing_requirement)
+        provenance.add(requirement)
+
+        requirements_by_project_name = self.requirements_by_project_name.copy()
+        requirements_by_project_name[project_name] = requirement
+
+        return Split(
+            requirements_by_project_name=requirements_by_project_name, provenance=provenance
+        )
+
+    def requirement_configuration(
+        self,
+        unnamed_requirements,  # type: Iterable[ParsedRequirement]
         requirement_configuration,  # type: RequirementConfiguration
-        network_configuration,  # type: NetworkConfiguration
-        repos_configuration,  # type: ReposConfiguration
-        universal_target=None,  # type: Optional[UniversalTarget]
+        network_configuration=None,  # type: Optional[NetworkConfiguration]
     ):
-        # type: (...) -> LockTargets
+        # type: (...) -> RequirementConfiguration
+
+        if not self.provenance:
+            return requirement_configuration
+
+        requirements = list(str(req) for req in unnamed_requirements)
+
+        requirement_files = OrderedSet(
+            os.path.realpath(requirement_file)
+            for requirement_file in requirement_configuration.requirement_files
+        )  # type: OrderedSet[Text]
+        if requirement_files:
+            provenance_by_project_name = {
+                parsed_requirement.project_name: parsed_requirement
+                for parsed_requirement in self.provenance
+                if parsed_requirement.project_name
+            }
 
-        if not universal_target:
-            return cls(targets=targets)
+            requirements_to_comment_out_by_source = defaultdict(
+                list
+            )  # type: DefaultDict[Text, List[ParsedRequirement]]
+            for parsed_requirement in requirement_configuration.parse_requirements(
+                network_configuration=network_configuration
+            ):
+                if not parsed_requirement.project_name:
+                    continue
 
-        targets = Targets.from_target(LocalInterpreter.create(targets.interpreter))
-        split_markers = _calculate_split_markers(
-            requirement_configuration, network_configuration, repos_configuration
+                provenance = provenance_by_project_name.get(parsed_requirement.project_name)
+                if provenance and (parsed_requirement != provenance):
+                    if parsed_requirement.line.source in requirement_files:
+                        # We comment out the requirement
+                        requirements_to_comment_out_by_source[
+                            parsed_requirement.line.source
+                        ].append(parsed_requirement)
+                    else:
+                        # We drop the requirement
+                        pass
+                elif parsed_requirement.line.source not in requirement_files:
+                    requirements.append(str(parsed_requirement))
+            if requirements_to_comment_out_by_source:
+                new_requirement_files = OrderedSet()  # type: OrderedSet[Text]
+                for source, parsed_requirements in requirements_to_comment_out_by_source.items():
+                    if source in requirement_files:
+                        new_requirement_files.add(
+                            _comment_out_requirements(source, parsed_requirements)
+                        )
+                    else:
+                        new_requirement_files.add(source)
+                requirement_files = new_requirement_files
+        else:
+            requirements.extend(str(req) for req in self.requirements_by_project_name.values())
+
+        return RequirementConfiguration(
+            requirements=tuple(requirements),
+            requirement_files=tuple(requirement_files),
+            constraint_files=requirement_configuration.constraint_files,
         )
-        if not split_markers:
-            return cls(targets=targets, universal_targets=(universal_target,))
 
-        return cls(
-            targets=targets,
-            universal_targets=tuple(_iter_universal_targets(universal_target, split_markers)),
+
+def calculate_download_input(
+    targets,  # type: Targets
+    requirement_configuration,  # type: RequirementConfiguration
+    network_configuration,  # type: NetworkConfiguration
+    repos_configuration,  # type: ReposConfiguration
+    universal_target=None,  # type: Optional[UniversalTarget]
+):
+    # type: (...) -> DownloadInput
+
+    direct_requirements = requirement_configuration.parse_requirements(network_configuration)
+    if not universal_target:
+        return DownloadInput(
+            download_requests=tuple(
+                DownloadRequest.create(
+                    target=target, requirement_configuration=requirement_configuration
+                )
+                for target in targets.unique_targets()
+            ),
+            direct_requirements=direct_requirements,
+        )
+
+    target = LocalInterpreter.create(targets.interpreter)
+    split_markers = _calculate_split_markers(
+        requirement_configuration, network_configuration, repos_configuration
+    )
+    if not split_markers:
+        return DownloadInput(
+            download_requests=tuple(
+                [
+                    DownloadRequest.create(
+                        target=target,
+                        universal_target=universal_target,
+                        requirement_configuration=requirement_configuration,
+                    )
+                ]
+            ),
+            direct_requirements=direct_requirements,
         )
 
-    targets = attr.ib()  # type: Targets
-    universal_targets = attr.ib(default=())  # type: Tuple[UniversalTarget, ...]
+    named_requirements = (
+        OrderedDict()
+    )  # type: OrderedDict[ProjectName, OrderedSet[ParsedRequirement]]
+    unnamed_requirements = OrderedSet()  # type: OrderedSet[ParsedRequirement]
+    for direct_requirement in direct_requirements:
+        if direct_requirement.project_name:
+            named_requirements.setdefault(direct_requirement.project_name, OrderedSet()).add(
+                direct_requirement
+            )
+        else:
+            unnamed_requirements.add(direct_requirement)
+
+    requirement_splits_by_universal_target = defaultdict(
+        lambda: [Split()]
+    )  # type: DefaultDict[UniversalTarget, List[Split]]
+    for universal_target in _iter_universal_targets(universal_target, split_markers):
+        marker_env = universal_target.marker_env()
+        requirement_splits = requirement_splits_by_universal_target[universal_target]
+        for project_name, remote_requirements in named_requirements.items():
+            for requirement_split in list(requirement_splits):
+                for remote_requirement in remote_requirements:
+                    if remote_requirement.marker and not marker_env.evaluate(
+                        remote_requirement.marker
+                    ):
+                        continue
+                    new_split = requirement_split.add(
+                        universal_target, project_name, remote_requirement
+                    )
+                    if new_split:
+                        requirement_splits.append(new_split)
+
+    download_requests = []
+    for universal_target, splits in requirement_splits_by_universal_target.items():
+        if len(splits) == 1:
+            download_requests.append(
+                DownloadRequest.create(
+                    target=target,
+                    universal_target=universal_target,
+                    requirement_configuration=splits[0].requirement_configuration(
+                        unnamed_requirements,
+                        requirement_configuration,
+                        network_configuration=network_configuration,
+                    ),
+                )
+            )
+            continue
+
+        for split in splits:
+            download_requests.append(
+                DownloadRequest.create(
+                    target=target,
+                    universal_target=attr.evolve(
+                        universal_target,
+                        extra_markers=ExtraMarkers.extract(
+                            (requirement.marker, str(requirement))
+                            for requirement in split.provenance
+                            if requirement.marker
+                        ),
+                    ),
+                    requirement_configuration=split.requirement_configuration(
+                        unnamed_requirements,
+                        requirement_configuration,
+                        network_configuration=network_configuration,
+                    ),
+                    provenance="split by {requirements} {reqs}".format(
+                        requirements=pluralize(split.provenance, "requirement"),
+                        reqs=", ".join("'{req}'".format(req=req) for req in split.provenance),
+                    ),
+                )
+            )
+
+    return DownloadInput(
+        download_requests=tuple(download_requests), direct_requirements=direct_requirements
+    )
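The _comment_out_requirements helper added above writes a temporary sibling copy of a requirements file with the lines of selected requirements prefixed by "# ", so relative references inside the file keep resolving and the original file is left untouched. A minimal standalone sketch of that strategy, using plain (start, end) line-number pairs in place of pex's ParsedRequirement.line objects (names here are illustrative, not the pex API):

    import os
    import tempfile

    def comment_out_lines(requirements_file, line_ranges):
        # line_ranges: iterable of (start_line, end_line) pairs, 1-based and inclusive.
        lines_to_comment = {
            line_no
            for start, end in line_ranges
            for line_no in range(start, end + 1)
        }
        # Create the edited copy next to the original so relative paths inside it still work.
        out_fd, edited = tempfile.mkstemp(
            dir=os.path.dirname(os.path.abspath(requirements_file)),
            prefix="pex_lock_split.",
            suffix="." + os.path.basename(requirements_file),
        )
        try:
            with open(requirements_file, "rb") as in_fp:
                for line_no, text in enumerate(in_fp, start=1):
                    if line_no in lines_to_comment:
                        os.write(out_fd, b"# ")
                    os.write(out_fd, text)
        finally:
            os.close(out_fd)
        return edited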
pex/resolve/requirement_configuration.py

@@ -17,7 +17,7 @@ from pex.requirements import (
 from pex.typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Iterable, List, Optional
+    from typing import Iterable, List, Optional, Tuple
 
     import attr  # vendor:skip
 
@@ -26,14 +26,21 @@ else:
     from pex.third_party import attr
 
 
+def _as_str_tuple(items):
+    # type: (Optional[Iterable[str]]) -> Tuple[str, ...]
+    if not items:
+        return ()
+    return items if isinstance(items, tuple) else tuple(items)
+
+
 @attr.s(frozen=True)
 class RequirementConfiguration(object):
-    requirements = attr.ib(default=None)  # type: Optional[Iterable[str]]
-    requirement_files = attr.ib(default=None)  # type: Optional[Iterable[str]]
-    constraint_files = attr.ib(default=None)  # type: Optional[Iterable[str]]
+    requirements = attr.ib(default=(), converter=_as_str_tuple)  # type: Tuple[str, ...]
+    requirement_files = attr.ib(default=(), converter=_as_str_tuple)  # type: Tuple[str, ...]
+    constraint_files = attr.ib(default=(), converter=_as_str_tuple)  # type: Tuple[str, ...]
 
     def parse_requirements(self, network_configuration=None):
-        # type: (Optional[NetworkConfiguration]) -> Iterable[ParsedRequirement]
+        # type: (Optional[NetworkConfiguration]) -> Tuple[ParsedRequirement, ...]
         parsed_requirements = []  # type: List[ParsedRequirement]
         if self.requirements:
             parsed_requirements.extend(parse_requirement_strings(self.requirements))
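The new _as_str_tuple converter means RequirementConfiguration fields are always tuples: None and empty inputs normalize to (), other iterables are copied into tuples, and existing tuples pass through unchanged. A minimal sketch of those semantics using the standalone attrs package (the real class uses pex's vendored attr and type comments):

    import attr

    def _as_str_tuple(items):
        # Normalize None / any iterable of strings into a tuple of strings.
        if not items:
            return ()
        return items if isinstance(items, tuple) else tuple(items)

    @attr.s(frozen=True)
    class Config(object):
        requirements = attr.ib(default=(), converter=_as_str_tuple)
        requirement_files = attr.ib(default=(), converter=_as_str_tuple)

    assert Config().requirements == ()
    assert Config(requirements=None).requirements == ()
    assert Config(requirements=["ansicolors==1.1.8"]).requirements == ("ansicolors==1.1.8",)
    assert Config(requirement_files=("reqs.txt",)).requirement_files == ("reqs.txt",)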
@@ -50,10 +57,10 @@ class RequirementConfiguration(object):
                         (PyPIRequirement, URLRequirement, VCSRequirement, LocalProjectRequirement),
                     )
                 )
-        return parsed_requirements
+        return tuple(parsed_requirements)
 
     def parse_constraints(self, network_configuration=None):
-        # type: (Optional[NetworkConfiguration]) -> Iterable[Constraint]
+        # type: (Optional[NetworkConfiguration]) -> Tuple[Constraint, ...]
         parsed_constraints = []  # type: List[Constraint]
         if self.constraint_files:
             fetcher = URLFetcher(network_configuration=network_configuration)
@@ -65,7 +72,7 @@ class RequirementConfiguration(object):
                     )
                     if isinstance(requirement_or_constraint, Constraint)
                 )
-        return parsed_constraints
+        return tuple(parsed_constraints)
 
     @property
     def has_requirements(self):