omdev 0.0.0.dev12__py3-none-any.whl → 0.0.0.dev14__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of omdev might be problematic.

@@ -1,5 +1,7 @@
1
1
  #!/usr/bin/env python3
2
2
  # noinspection DuplicatedCode
3
+ # @omlish-lite
4
+ # @omlish-script
3
5
  # @omdev-amalg-output ../pyproject/cli.py
4
6
  # ruff: noqa: UP006 UP007
5
7
  """
@@ -27,6 +29,7 @@ import base64
27
29
  import collections
28
30
  import collections.abc
29
31
  import concurrent.futures as cf
32
+ import csv
30
33
  import dataclasses as dc
31
34
  import datetime
32
35
  import decimal
@@ -34,8 +37,10 @@ import enum
34
37
  import fractions
35
38
  import functools
36
39
  import glob
40
+ import hashlib
37
41
  import importlib
38
42
  import inspect
43
+ import io
39
44
  import itertools
40
45
  import json
41
46
  import logging
@@ -45,26 +50,37 @@ import os.path
45
50
  import re
46
51
  import shlex
47
52
  import shutil
53
+ import stat
48
54
  import string
49
55
  import subprocess
50
56
  import sys
57
+ import tarfile
51
58
  import threading
59
+ import time
52
60
  import types
53
61
  import typing as ta
54
62
  import uuid
55
63
  import weakref # noqa
64
+ import zipfile
56
65
 
57
66
 
67
+ # ../../toml/parser.py
58
68
  TomlParseFloat = ta.Callable[[str], ta.Any]
59
69
  TomlKey = ta.Tuple[str, ...]
60
70
  TomlPos = int # ta.TypeAlias
71
+
72
+ # ../../versioning/versions.py
61
73
  VersionLocalType = ta.Tuple[ta.Union[int, str], ...]
62
74
  VersionCmpPrePostDevType = ta.Union['InfinityVersionType', 'NegativeInfinityVersionType', ta.Tuple[str, int]]
63
75
  _VersionCmpLocalType0 = ta.Tuple[ta.Union[ta.Tuple[int, str], ta.Tuple['NegativeInfinityVersionType', ta.Union[int, str]]], ...] # noqa
64
76
  VersionCmpLocalType = ta.Union['NegativeInfinityVersionType', _VersionCmpLocalType0]
65
77
  VersionCmpKey = ta.Tuple[int, ta.Tuple[int, ...], VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpLocalType] # noqa
66
78
  VersionComparisonMethod = ta.Callable[[VersionCmpKey, VersionCmpKey], bool]
79
+
80
+ # ../../../omlish/lite/check.py
67
81
  T = ta.TypeVar('T')
82
+
83
+ # ../../versioning/specifiers.py
68
84
  UnparsedVersion = ta.Union['Version', str]
69
85
  UnparsedVersionVar = ta.TypeVar('UnparsedVersionVar', bound=UnparsedVersion)
70
86
  CallableVersionOperator = ta.Callable[['Version', str], bool]
@@ -1400,6 +1416,245 @@ def canonicalize_version(
1400
1416
  return ''.join(parts)
1401
1417
 
1402
1418
 
1419
+ ########################################
1420
+ # ../../wheelfile.py
1421
+ # ruff: noqa: UP006 UP007
1422
+ # https://github.com/pypa/wheel/blob/7bb46d7727e6e89fe56b3c78297b3af2672bbbe2/src/wheel/wheelfile.py
1423
+ # MIT License
1424
+ #
1425
+ # Copyright (c) 2012 Daniel Holth <dholth@fastmail.fm> and contributors
1426
+ #
1427
+ # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
1428
+ # documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
1429
+ # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
1430
+ # persons to whom the Software is furnished to do so, subject to the following conditions:
1431
+ #
1432
+ # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
1433
+ # Software.
1434
+ #
1435
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
1436
+ # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
1437
+ # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
1438
+ # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
1439
+
1440
+
1441
+ class WheelError(Exception):
1442
+ pass
1443
+
1444
+
1445
+ # Non-greedy matching of an optional build number may be too clever (more invalid wheel filenames will match). Separate
1446
+ # regex for .dist-info?
1447
+ WHEEL_INFO_RE = re.compile(
1448
+ r'^'
1449
+ r'(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))'
1450
+ r'(-(?P<build>\d[^\s-]*))?-'
1451
+ r'(?P<pyver>[^\s-]+?)-'
1452
+ r'(?P<abi>[^\s-]+?)-'
1453
+ r'(?P<plat>\S+)'
1454
+ r'\.whl$',
1455
+ re.VERBOSE,
1456
+ )
1457
+
1458
+
1459
+ class WheelFile(zipfile.ZipFile):
1460
+ """
1461
+ A ZipFile derivative class that also reads SHA-256 hashes from .dist-info/RECORD and checks any read files against
1462
+ those.
1463
+ """
1464
+
1465
+ _default_algorithm = hashlib.sha256
1466
+
1467
+ def __init__(
1468
+ self,
1469
+ file: str,
1470
+ mode: str = 'r', # ta.Literal["r", "w", "x", "a"]
1471
+ compression: int = zipfile.ZIP_DEFLATED,
1472
+ ) -> None:
1473
+ basename = os.path.basename(file)
1474
+ self.parsed_filename = WHEEL_INFO_RE.match(basename)
1475
+ if not basename.endswith('.whl') or self.parsed_filename is None:
1476
+ raise WheelError(f'Bad wheel filename {basename!r}')
1477
+
1478
+ super().__init__( # type: ignore
1479
+ file,
1480
+ mode,
1481
+ compression=compression,
1482
+ allowZip64=True,
1483
+ )
1484
+
1485
+ self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever'))
1486
+ self.record_path = self.dist_info_path + '/RECORD'
1487
+ self._file_hashes: ta.Dict[str, ta.Union[ta.Tuple[None, None], ta.Tuple[int, bytes]]] = {}
1488
+ self._file_sizes: ta.Dict[str, int] = {}
1489
+
1490
+ if mode == 'r':
1491
+ # Ignore RECORD and any embedded wheel signatures
1492
+ self._file_hashes[self.record_path] = None, None
1493
+ self._file_hashes[self.record_path + '.jws'] = None, None
1494
+ self._file_hashes[self.record_path + '.p7s'] = None, None
1495
+
1496
+ # Fill in the expected hashes by reading them from RECORD
1497
+ try:
1498
+ record = self.open(self.record_path)
1499
+ except KeyError:
1500
+ raise WheelError(f'Missing {self.record_path} file') from None
1501
+
1502
+ with record:
1503
+ for line in csv.reader(io.TextIOWrapper(record, newline='', encoding='utf-8')):
1504
+ path, hash_sum, size = line
1505
+ if not hash_sum:
1506
+ continue
1507
+
1508
+ algorithm, hash_sum = hash_sum.split('=')
1509
+ try:
1510
+ hashlib.new(algorithm)
1511
+ except ValueError:
1512
+ raise WheelError(f'Unsupported hash algorithm: {algorithm}') from None
1513
+
1514
+ if algorithm.lower() in {'md5', 'sha1'}:
1515
+ raise WheelError(f'Weak hash algorithm ({algorithm}) is not permitted by PEP 427')
1516
+
1517
+ self._file_hashes[path] = ( # type: ignore
1518
+ algorithm,
1519
+ self._urlsafe_b64decode(hash_sum.encode('ascii')),
1520
+ )
1521
+
1522
+ @staticmethod
1523
+ def _urlsafe_b64encode(data: bytes) -> bytes:
1524
+ """urlsafe_b64encode without padding"""
1525
+ return base64.urlsafe_b64encode(data).rstrip(b'=')
1526
+
1527
+ @staticmethod
1528
+ def _urlsafe_b64decode(data: bytes) -> bytes:
1529
+ """urlsafe_b64decode without padding"""
1530
+ pad = b'=' * (4 - (len(data) & 3))
1531
+ return base64.urlsafe_b64decode(data + pad)
1532
+
1533
+ def open( # type: ignore # noqa
1534
+ self,
1535
+ name_or_info: ta.Union[str, zipfile.ZipInfo],
1536
+ mode: str = 'r', # ta.Literal["r", "w"]
1537
+ pwd: ta.Optional[bytes] = None,
1538
+ ) -> ta.IO[bytes]:
1539
+ def _update_crc(newdata: bytes) -> None:
1540
+ eof = ef._eof # type: ignore # noqa
1541
+ update_crc_orig(newdata)
1542
+ running_hash.update(newdata)
1543
+ if eof and running_hash.digest() != expected_hash:
1544
+ raise WheelError(f"Hash mismatch for file '{ef_name}'")
1545
+
1546
+ ef_name = name_or_info.filename if isinstance(name_or_info, zipfile.ZipInfo) else name_or_info
1547
+ if (
1548
+ mode == 'r'
1549
+ and not ef_name.endswith('/')
1550
+ and ef_name not in self._file_hashes
1551
+ ):
1552
+ raise WheelError(f"No hash found for file '{ef_name}'")
1553
+
1554
+ ef = super().open(name_or_info, mode, pwd) # noqa
1555
+ if mode == 'r' and not ef_name.endswith('/'):
1556
+ algorithm, expected_hash = self._file_hashes[ef_name]
1557
+ if expected_hash is not None:
1558
+ # Monkey patch the _update_crc method to also check for the hash from RECORD
1559
+ running_hash = hashlib.new(algorithm) # type: ignore
1560
+ update_crc_orig, ef._update_crc = ef._update_crc, _update_crc # type: ignore # noqa
1561
+
1562
+ return ef
1563
+
1564
+ def write_files(self, base_dir: str) -> None:
1565
+ deferred: list[tuple[str, str]] = []
1566
+ for root, dirnames, filenames in os.walk(base_dir):
1567
+ # Sort the directory names so that `os.walk` will walk them in a defined order on the next iteration.
1568
+ dirnames.sort()
1569
+ for name in sorted(filenames):
1570
+ path = os.path.normpath(os.path.join(root, name))
1571
+ if os.path.isfile(path):
1572
+ arcname = os.path.relpath(path, base_dir).replace(os.path.sep, '/')
1573
+ if arcname == self.record_path:
1574
+ pass
1575
+ elif root.endswith('.dist-info'):
1576
+ deferred.append((path, arcname))
1577
+ else:
1578
+ self.write(path, arcname)
1579
+
1580
+ deferred.sort()
1581
+ for path, arcname in deferred:
1582
+ self.write(path, arcname)
1583
+
1584
+ def write( # type: ignore # noqa
1585
+ self,
1586
+ filename: str,
1587
+ arcname: ta.Optional[str] = None,
1588
+ compress_type: ta.Optional[int] = None,
1589
+ ) -> None:
1590
+ with open(filename, 'rb') as f:
1591
+ st = os.fstat(f.fileno())
1592
+ data = f.read()
1593
+
1594
+ zinfo = zipfile.ZipInfo(
1595
+ arcname or filename,
1596
+ date_time=self._get_zipinfo_datetime(st.st_mtime),
1597
+ )
1598
+ zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
1599
+ zinfo.compress_type = compress_type or self.compression
1600
+ self.writestr(zinfo, data, compress_type)
1601
+
1602
+ _MINIMUM_TIMESTAMP = 315532800 # 1980-01-01 00:00:00 UTC
1603
+
1604
+ @classmethod
1605
+ def _get_zipinfo_datetime(cls, timestamp: ta.Optional[float] = None) -> ta.Any:
1606
+ # Some applications need reproducible .whl files, but they can't do this without forcing the timestamp of the
1607
+ # individual ZipInfo objects. See issue #143.
1608
+ timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', timestamp or time.time()))
1609
+ timestamp = max(timestamp, cls._MINIMUM_TIMESTAMP)
1610
+ return time.gmtime(timestamp)[0:6]
1611
+
1612
+ def writestr( # type: ignore # noqa
1613
+ self,
1614
+ zinfo_or_arcname: ta.Union[str, zipfile.ZipInfo],
1615
+ data: ta.Any, # SizedBuffer | str,
1616
+ compress_type: ta.Optional[int] = None,
1617
+ ) -> None:
1618
+ if isinstance(zinfo_or_arcname, str):
1619
+ zinfo_or_arcname = zipfile.ZipInfo(
1620
+ zinfo_or_arcname,
1621
+ date_time=self._get_zipinfo_datetime(),
1622
+ )
1623
+ zinfo_or_arcname.compress_type = self.compression
1624
+ zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16
1625
+
1626
+ if isinstance(data, str):
1627
+ data = data.encode('utf-8')
1628
+
1629
+ super().writestr(zinfo_or_arcname, data, compress_type)
1630
+ fname = (
1631
+ zinfo_or_arcname.filename
1632
+ if isinstance(zinfo_or_arcname, zipfile.ZipInfo)
1633
+ else zinfo_or_arcname
1634
+ )
1635
+ if fname != self.record_path:
1636
+ hash_ = self._default_algorithm(data) # type: ignore
1637
+ self._file_hashes[fname] = ( # type: ignore
1638
+ hash_.name,
1639
+ self._urlsafe_b64encode(hash_.digest()).decode('ascii'),
1640
+ )
1641
+ self._file_sizes[fname] = len(data)
1642
+
1643
+ def close(self) -> None:
1644
+ # Write RECORD
1645
+ if self.fp is not None and self.mode == 'w' and self._file_hashes:
1646
+ data = io.StringIO()
1647
+ writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n')
1648
+ writer.writerows((
1649
+ (fname, algorithm + '=' + hash_, self._file_sizes[fname]) # type: ignore
1650
+ for fname, (algorithm, hash_) in self._file_hashes.items()
1651
+ ))
1652
+ writer.writerow((format(self.record_path), '', ''))
1653
+ self.writestr(self.record_path, data.getvalue())
1654
+
1655
+ super().close()
1656
+
1657
+
1403
1658
  ########################################
1404
1659
  # ../../../omlish/lite/cached.py
1405
1660
 
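The vendored wheelfile.py added above verifies every file read from a wheel against the SHA-256 digest recorded in the archive's .dist-info/RECORD. As a rough illustration of that check only (a minimal standalone sketch, not omdev's API; the wheel filename is hypothetical), the same verification can be reproduced with the standard library:

    import base64
    import csv
    import hashlib
    import io
    import zipfile

    def verify_wheel(path: str) -> None:
        with zipfile.ZipFile(path) as zf:
            record_name = next(n for n in zf.namelist() if n.endswith('.dist-info/RECORD'))
            record = csv.reader(io.TextIOWrapper(zf.open(record_name), newline='', encoding='utf-8'))
            for name, hash_sum, _size in record:
                if not hash_sum or name == record_name:
                    continue  # RECORD lists itself without a digest
                algorithm, _, expected = hash_sum.partition('=')
                digest = hashlib.new(algorithm, zf.read(name)).digest()
                # RECORD stores urlsafe base64 digests with the '=' padding stripped
                encoded = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
                if encoded != expected:
                    raise ValueError(f'hash mismatch for {name}')

    verify_wheel('example-0.1-py3-none-any.whl')  # hypothetical filename

WheelFile performs the same comparison incrementally by hooking the extracted file's _update_crc callback, and additionally rejects the weak md5/sha1 algorithms that PEP 427 disallows in RECORD.
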
@@ -2709,210 +2964,148 @@ class PyprojectConfigPreparer:
2709
2964
 
2710
2965
 
2711
2966
  ########################################
2712
- # ../pkg.py
2967
+ # ../../tools/revisions.py
2713
2968
  """
2714
2969
  TODO:
2715
- - ext scanning
2716
- - __revision__
2717
- - entry_points
2718
-
2719
- https://setuptools.pypa.io/en/latest/references/keywords.html
2720
- https://packaging.python.org/en/latest/specifications/pyproject-toml
2721
-
2722
- How to build a C extension in keeping with PEP 517, i.e. with pyproject.toml instead of setup.py?
2723
- https://stackoverflow.com/a/66479252
2724
-
2725
- https://github.com/pypa/sampleproject/blob/db5806e0a3204034c51b1c00dde7d5eb3fa2532e/setup.py
2726
-
2727
- https://pip.pypa.io/en/stable/cli/pip_install/#vcs-support
2728
- vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir
2729
- 'git+https://github.com/wrmsr/omlish@master#subdirectory=.pip/omlish'
2970
+ - omlish-lite, move to pyproject/
2971
+ - vendor-lite wheel.wheelfile
2730
2972
  """
2731
- # ruff: noqa: UP006 UP007
2732
-
2733
-
2734
- class PyprojectPackageGenerator:
2735
- def __init__(
2736
- self,
2737
- dir_name: str,
2738
- build_root: str,
2739
- ) -> None:
2740
- super().__init__()
2741
- self._dir_name = dir_name
2742
- self._build_root = build_root
2743
-
2744
- #
2745
-
2746
- @cached_nullary
2747
- def about(self) -> types.ModuleType:
2748
- return importlib.import_module(f'{self._dir_name}.__about__')
2749
-
2750
- @cached_nullary
2751
- def project_cls(self) -> type:
2752
- return self.about().Project
2753
-
2754
- @cached_nullary
2755
- def setuptools_cls(self) -> type:
2756
- return self.about().Setuptools
2757
-
2758
- #
2759
-
2760
- @cached_nullary
2761
- def _build_dir(self) -> str:
2762
- build_dir: str = os.path.join(self._build_root, self._dir_name)
2763
- if os.path.isdir(build_dir):
2764
- shutil.rmtree(build_dir)
2765
- os.makedirs(build_dir)
2766
- return build_dir
2767
-
2768
- #
2769
-
2770
- def _write_git_ignore(self) -> None:
2771
- git_ignore = [
2772
- '/*.egg-info/',
2773
- '/dist',
2774
- ]
2775
- with open(os.path.join(self._build_dir(), '.gitignore'), 'w') as f:
2776
- f.write('\n'.join(git_ignore))
2777
-
2778
- #
2779
-
2780
- def _symlink_source_dir(self) -> None:
2781
- os.symlink(
2782
- os.path.relpath(self._dir_name, self._build_dir()),
2783
- os.path.join(self._build_dir(), self._dir_name),
2784
- )
2785
-
2786
- #
2787
-
2788
- @dc.dataclass(frozen=True)
2789
- class FileContents:
2790
- pyproject_dct: ta.Mapping[str, ta.Any]
2791
- manifest_in: ta.Optional[ta.Sequence[str]]
2792
-
2793
- @staticmethod
2794
- def _build_cls_dct(cls: type) -> ta.Dict[str, ta.Any]: # noqa
2795
- dct = {}
2796
- for b in reversed(cls.__mro__):
2797
- for k, v in b.__dict__.items():
2798
- if k.startswith('_'):
2799
- continue
2800
- dct[k] = v
2801
- return dct
2802
-
2803
- @staticmethod
2804
- def _move_dict_key(
2805
- sd: ta.Dict[str, ta.Any],
2806
- sk: str,
2807
- dd: ta.Dict[str, ta.Any],
2808
- dk: str,
2809
- ) -> None:
2810
- if sk in sd:
2811
- dd[dk] = sd.pop(sk)
2812
-
2813
- @cached_nullary
2814
- def file_contents(self) -> FileContents:
2815
- pyp_dct = {}
2816
-
2817
- #
2818
-
2819
- pyp_dct['build-system'] = {
2820
- 'requires': ['setuptools'],
2821
- 'build-backend': 'setuptools.build_meta',
2822
- }
2823
-
2824
- prj = self._build_cls_dct(self.project_cls())
2825
- pyp_dct['project'] = prj
2973
+ # ruff: noqa: TCH003 UP006 UP007
2826
2974
 
2827
- self._move_dict_key(prj, 'optional_dependencies', pyp_dct, extrask := 'project.optional-dependencies')
2828
- if (extras := pyp_dct.get(extrask)):
2829
- pyp_dct[extrask] = {
2830
- 'all': [
2831
- e
2832
- for lst in extras.values()
2833
- for e in lst
2834
- ],
2835
- **extras,
2836
- }
2837
-
2838
- #
2839
-
2840
- st = self._build_cls_dct(self.setuptools_cls())
2841
- pyp_dct['tool.setuptools'] = st
2842
2975
 
2843
- self._move_dict_key(st, 'find_packages', pyp_dct, 'tool.setuptools.packages.find')
2844
-
2845
- mani_in = st.pop('manifest_in', None)
2846
-
2847
- #
2848
-
2849
- return self.FileContents(
2850
- pyp_dct,
2851
- mani_in,
2852
- )
2976
+ ##
2853
2977
 
2854
- def _write_file_contents(self) -> None:
2855
- fc = self.file_contents()
2856
2978
 
2857
- with open(os.path.join(self._build_dir(), 'pyproject.toml'), 'w') as f:
2858
- TomlWriter(f).write_root(fc.pyproject_dct)
2979
+ def get_git_revision() -> str:
2980
+ has_untracked = bool(subprocess.check_output([
2981
+ 'git',
2982
+ 'ls-files',
2983
+ '.',
2984
+ '--exclude-standard',
2985
+ '--others',
2986
+ ]).decode().strip())
2859
2987
 
2860
- if fc.manifest_in:
2861
- with open(os.path.join(self._build_dir(), 'MANIFEST.in'), 'w') as f:
2862
- f.write('\n'.join(fc.manifest_in)) # noqa
2988
+ dirty_rev = subprocess.check_output([
2989
+ 'git',
2990
+ 'describe',
2991
+ '--match=NeVeRmAtCh',
2992
+ '--always',
2993
+ '--abbrev=40',
2994
+ '--dirty',
2995
+ ]).decode().strip()
2863
2996
 
2864
- #
2997
+ return dirty_rev + ('-untracked' if has_untracked else '')
2865
2998
 
2866
- _STANDARD_FILES: ta.Sequence[str] = [
2867
- 'LICENSE',
2868
- 'README.rst',
2869
- ]
2870
2999
 
2871
- def _symlink_standard_files(self) -> None:
2872
- for fn in self._STANDARD_FILES:
2873
- if os.path.exists(fn):
2874
- os.symlink(os.path.relpath(fn, self._build_dir()), os.path.join(self._build_dir(), fn))
3000
+ ##
2875
3001
 
2876
- #
2877
3002
 
2878
- def _run_build(
3003
+ class GitRevisionAdder:
3004
+ def __init__(
2879
3005
  self,
2880
- build_output_dir: ta.Optional[str] = None,
3006
+ revision: ta.Optional[str] = None,
3007
+ output_suffix: ta.Optional[str] = None,
2881
3008
  ) -> None:
2882
- subprocess.check_call(
2883
- [
2884
- sys.executable,
2885
- '-m',
2886
- 'build',
2887
- ],
2888
- cwd=self._build_dir(),
2889
- )
2890
-
2891
- if build_output_dir is not None:
2892
- dist_dir = os.path.join(self._build_dir(), 'dist')
2893
- for fn in os.listdir(dist_dir):
2894
- shutil.copyfile(os.path.join(dist_dir, fn), os.path.join(build_output_dir, fn))
3009
+ super().__init__()
3010
+ self._given_revision = revision
3011
+ self._output_suffix = output_suffix
2895
3012
 
2896
- #
3013
+ @cached_nullary
3014
+ def revision(self) -> str:
3015
+ if self._given_revision is not None:
3016
+ return self._given_revision
3017
+ return get_git_revision()
2897
3018
 
2898
- def gen(
2899
- self,
2900
- *,
2901
- run_build: bool = False,
2902
- build_output_dir: ta.Optional[str] = None,
2903
- ) -> str:
2904
- log.info('Generating pyproject package: %s -> %s', self._dir_name, self._build_root)
3019
+ REVISION_ATTR = '__revision__'
2905
3020
 
2906
- self._build_dir()
2907
- self._write_git_ignore()
2908
- self._symlink_source_dir()
2909
- self._write_file_contents()
2910
- self._symlink_standard_files()
3021
+ def add_to_contents(self, dct: ta.Dict[str, bytes]) -> bool:
3022
+ changed = False
3023
+ for n in dct:
3024
+ if not n.endswith('__about__.py'):
3025
+ continue
3026
+ src = dct[n].decode('utf-8')
3027
+ lines = src.splitlines(keepends=True)
3028
+ for i, l in enumerate(lines):
3029
+ if l != f'{self.REVISION_ATTR} = None\n':
3030
+ continue
3031
+ lines[i] = f"{self.REVISION_ATTR} = '{self.revision()}'\n"
3032
+ changed = True
3033
+ dct[n] = ''.join(lines).encode('utf-8')
3034
+ return changed
3035
+
3036
+ def add_to_wheel(self, f: str) -> None:
3037
+ if not f.endswith('.whl'):
3038
+ raise Exception(f)
3039
+ log.info('Scanning wheel %s', f)
3040
+
3041
+ zis: ta.Dict[str, zipfile.ZipInfo] = {}
3042
+ dct: ta.Dict[str, bytes] = {}
3043
+ with WheelFile(f) as wf:
3044
+ for zi in wf.filelist:
3045
+ if zi.filename == wf.record_path:
3046
+ continue
3047
+ zis[zi.filename] = zi
3048
+ dct[zi.filename] = wf.read(zi.filename)
3049
+
3050
+ if self.add_to_contents(dct):
3051
+ of = f[:-4] + (self._output_suffix or '') + '.whl'
3052
+ log.info('Repacking wheel %s', of)
3053
+ with WheelFile(of, 'w') as wf:
3054
+ for n, d in dct.items():
3055
+ log.info('Adding zipinfo %s', n)
3056
+ wf.writestr(zis[n], d)
3057
+
3058
+ def add_to_tgz(self, f: str) -> None:
3059
+ if not f.endswith('.tar.gz'):
3060
+ raise Exception(f)
3061
+ log.info('Scanning tgz %s', f)
3062
+
3063
+ tis: ta.Dict[str, tarfile.TarInfo] = {}
3064
+ dct: ta.Dict[str, bytes] = {}
3065
+ with tarfile.open(f, 'r:gz') as tf:
3066
+ for ti in tf:
3067
+ tis[ti.name] = ti
3068
+ if ti.type == tarfile.REGTYPE:
3069
+ with tf.extractfile(ti.name) as tif: # type: ignore
3070
+ dct[ti.name] = tif.read()
3071
+
3072
+ if self.add_to_contents(dct):
3073
+ of = f[:-7] + (self._output_suffix or '') + '.tar.gz'
3074
+ log.info('Repacking tgz %s', of)
3075
+ with tarfile.open(of, 'w:gz') as tf:
3076
+ for n, ti in tis.items():
3077
+ log.info('Adding tarinfo %s', n)
3078
+ if n in dct:
3079
+ data = dct[n]
3080
+ ti.size = len(data)
3081
+ fo = io.BytesIO(data)
3082
+ else:
3083
+ fo = None
3084
+ tf.addfile(ti, fileobj=fo)
3085
+
3086
+ EXTS = ('.tar.gz', '.whl')
3087
+
3088
+ def add_to_file(self, f: str) -> None:
3089
+ if f.endswith('.whl'):
3090
+ self.add_to_wheel(f)
3091
+
3092
+ elif f.endswith('.tar.gz'):
3093
+ self.add_to_tgz(f)
3094
+
3095
+ def add_to(self, tgt: str) -> None:
3096
+ log.info('Using revision %s', self.revision())
3097
+
3098
+ if os.path.isfile(tgt):
3099
+ self.add_to_file(tgt)
3100
+
3101
+ elif os.path.isdir(tgt):
3102
+ for dp, dns, fns in os.walk(tgt): # noqa
3103
+ for f in fns:
3104
+ if any(f.endswith(ext) for ext in self.EXTS):
3105
+ self.add_to_file(os.path.join(dp, f))
2911
3106
 
2912
- if run_build:
2913
- self._run_build(build_output_dir)
2914
3107
 
2915
- return self._build_dir()
3108
+ #
2916
3109
 
2917
3110
 
2918
3111
  ########################################
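
For reference, get_git_revision() above combines `git describe --match=NeVeRmAtCh --always --abbrev=40 --dirty` with a `git ls-files --others --exclude-standard` check, so a checkout with local edits and untracked files yields a string of the form '<40-hex-digit sha>-dirty-untracked' (illustrative format, not a value taken from the package). GitRevisionAdder then substitutes that string for the literal `__revision__ = None` line in any bundled __about__.py before repacking the .whl or .tar.gz.
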
@@ -3119,6 +3312,223 @@ class InterpInspector:
3119
3312
  INTERP_INSPECTOR = InterpInspector()
3120
3313
 
3121
3314
 
3315
+ ########################################
3316
+ # ../pkg.py
3317
+ """
3318
+ TODO:
3319
+ - ext scanning
3320
+ - __revision__
3321
+ - entry_points
3322
+
3323
+ https://setuptools.pypa.io/en/latest/references/keywords.html
3324
+ https://packaging.python.org/en/latest/specifications/pyproject-toml
3325
+
3326
+ How to build a C extension in keeping with PEP 517, i.e. with pyproject.toml instead of setup.py?
3327
+ https://stackoverflow.com/a/66479252
3328
+
3329
+ https://github.com/pypa/sampleproject/blob/db5806e0a3204034c51b1c00dde7d5eb3fa2532e/setup.py
3330
+
3331
+ https://pip.pypa.io/en/stable/cli/pip_install/#vcs-support
3332
+ vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir
3333
+ 'git+https://github.com/wrmsr/omlish@master#subdirectory=.pip/omlish'
3334
+ """
3335
+ # ruff: noqa: UP006 UP007
3336
+
3337
+
3338
+ class PyprojectPackageGenerator:
3339
+ def __init__(
3340
+ self,
3341
+ dir_name: str,
3342
+ build_root: str,
3343
+ ) -> None:
3344
+ super().__init__()
3345
+ self._dir_name = dir_name
3346
+ self._build_root = build_root
3347
+
3348
+ #
3349
+
3350
+ @cached_nullary
3351
+ def about(self) -> types.ModuleType:
3352
+ return importlib.import_module(f'{self._dir_name}.__about__')
3353
+
3354
+ @cached_nullary
3355
+ def project_cls(self) -> type:
3356
+ return self.about().Project
3357
+
3358
+ @cached_nullary
3359
+ def setuptools_cls(self) -> type:
3360
+ return self.about().Setuptools
3361
+
3362
+ #
3363
+
3364
+ @cached_nullary
3365
+ def _build_dir(self) -> str:
3366
+ build_dir: str = os.path.join(self._build_root, self._dir_name)
3367
+ if os.path.isdir(build_dir):
3368
+ shutil.rmtree(build_dir)
3369
+ os.makedirs(build_dir)
3370
+ return build_dir
3371
+
3372
+ #
3373
+
3374
+ def _write_git_ignore(self) -> None:
3375
+ git_ignore = [
3376
+ '/*.egg-info/',
3377
+ '/dist',
3378
+ ]
3379
+ with open(os.path.join(self._build_dir(), '.gitignore'), 'w') as f:
3380
+ f.write('\n'.join(git_ignore))
3381
+
3382
+ #
3383
+
3384
+ def _symlink_source_dir(self) -> None:
3385
+ os.symlink(
3386
+ os.path.relpath(self._dir_name, self._build_dir()),
3387
+ os.path.join(self._build_dir(), self._dir_name),
3388
+ )
3389
+
3390
+ #
3391
+
3392
+ @dc.dataclass(frozen=True)
3393
+ class FileContents:
3394
+ pyproject_dct: ta.Mapping[str, ta.Any]
3395
+ manifest_in: ta.Optional[ta.Sequence[str]]
3396
+
3397
+ @staticmethod
3398
+ def _build_cls_dct(cls: type) -> ta.Dict[str, ta.Any]: # noqa
3399
+ dct = {}
3400
+ for b in reversed(cls.__mro__):
3401
+ for k, v in b.__dict__.items():
3402
+ if k.startswith('_'):
3403
+ continue
3404
+ dct[k] = v
3405
+ return dct
3406
+
3407
+ @staticmethod
3408
+ def _move_dict_key(
3409
+ sd: ta.Dict[str, ta.Any],
3410
+ sk: str,
3411
+ dd: ta.Dict[str, ta.Any],
3412
+ dk: str,
3413
+ ) -> None:
3414
+ if sk in sd:
3415
+ dd[dk] = sd.pop(sk)
3416
+
3417
+ @cached_nullary
3418
+ def file_contents(self) -> FileContents:
3419
+ pyp_dct = {}
3420
+
3421
+ #
3422
+
3423
+ pyp_dct['build-system'] = {
3424
+ 'requires': ['setuptools'],
3425
+ 'build-backend': 'setuptools.build_meta',
3426
+ }
3427
+
3428
+ prj = self._build_cls_dct(self.project_cls())
3429
+ pyp_dct['project'] = prj
3430
+
3431
+ self._move_dict_key(prj, 'optional_dependencies', pyp_dct, extrask := 'project.optional-dependencies')
3432
+ if (extras := pyp_dct.get(extrask)):
3433
+ pyp_dct[extrask] = {
3434
+ 'all': [
3435
+ e
3436
+ for lst in extras.values()
3437
+ for e in lst
3438
+ ],
3439
+ **extras,
3440
+ }
3441
+
3442
+ #
3443
+
3444
+ st = self._build_cls_dct(self.setuptools_cls())
3445
+ pyp_dct['tool.setuptools'] = st
3446
+
3447
+ self._move_dict_key(st, 'find_packages', pyp_dct, 'tool.setuptools.packages.find')
3448
+
3449
+ mani_in = st.pop('manifest_in', None)
3450
+
3451
+ #
3452
+
3453
+ return self.FileContents(
3454
+ pyp_dct,
3455
+ mani_in,
3456
+ )
3457
+
3458
+ def _write_file_contents(self) -> None:
3459
+ fc = self.file_contents()
3460
+
3461
+ with open(os.path.join(self._build_dir(), 'pyproject.toml'), 'w') as f:
3462
+ TomlWriter(f).write_root(fc.pyproject_dct)
3463
+
3464
+ if fc.manifest_in:
3465
+ with open(os.path.join(self._build_dir(), 'MANIFEST.in'), 'w') as f:
3466
+ f.write('\n'.join(fc.manifest_in)) # noqa
3467
+
3468
+ #
3469
+
3470
+ _STANDARD_FILES: ta.Sequence[str] = [
3471
+ 'LICENSE',
3472
+ 'README.rst',
3473
+ ]
3474
+
3475
+ def _symlink_standard_files(self) -> None:
3476
+ for fn in self._STANDARD_FILES:
3477
+ if os.path.exists(fn):
3478
+ os.symlink(os.path.relpath(fn, self._build_dir()), os.path.join(self._build_dir(), fn))
3479
+
3480
+ #
3481
+
3482
+ def _run_build(
3483
+ self,
3484
+ build_output_dir: ta.Optional[str] = None,
3485
+ *,
3486
+ add_revision: bool = False,
3487
+ ) -> None:
3488
+ subprocess.check_call(
3489
+ [
3490
+ sys.executable,
3491
+ '-m',
3492
+ 'build',
3493
+ ],
3494
+ cwd=self._build_dir(),
3495
+ )
3496
+
3497
+ dist_dir = os.path.join(self._build_dir(), 'dist')
3498
+
3499
+ if add_revision:
3500
+ GitRevisionAdder().add_to(dist_dir)
3501
+
3502
+ if build_output_dir is not None:
3503
+ for fn in os.listdir(dist_dir):
3504
+ shutil.copyfile(os.path.join(dist_dir, fn), os.path.join(build_output_dir, fn))
3505
+
3506
+ #
3507
+
3508
+ def gen(
3509
+ self,
3510
+ *,
3511
+ run_build: bool = False,
3512
+ build_output_dir: ta.Optional[str] = None,
3513
+ add_revision: bool = False,
3514
+ ) -> str:
3515
+ log.info('Generating pyproject package: %s -> %s', self._dir_name, self._build_root)
3516
+
3517
+ self._build_dir()
3518
+ self._write_git_ignore()
3519
+ self._symlink_source_dir()
3520
+ self._write_file_contents()
3521
+ self._symlink_standard_files()
3522
+
3523
+ if run_build:
3524
+ self._run_build(
3525
+ build_output_dir,
3526
+ add_revision=add_revision,
3527
+ )
3528
+
3529
+ return self._build_dir()
3530
+
3531
+
3122
3532
  ########################################
3123
3533
  # ../../interp/providers.py
3124
3534
  """
@@ -3960,6 +4370,7 @@ def _pkg_cmd(args) -> None:
3960
4370
 
3961
4371
  build_output_dir = 'dist'
3962
4372
  run_build = bool(args.build)
4373
+ add_revision = bool(args.revision)
3963
4374
 
3964
4375
  if run_build:
3965
4376
  os.makedirs(build_output_dir, exist_ok=True)
@@ -3974,6 +4385,7 @@ def _pkg_cmd(args) -> None:
3974
4385
  ).gen,
3975
4386
  run_build=run_build,
3976
4387
  build_output_dir=build_output_dir,
4388
+ add_revision=add_revision,
3977
4389
  ))
3978
4390
  for dir_name in run.cfg().pkgs
3979
4391
  ]
@@ -4002,6 +4414,7 @@ def _build_parser() -> argparse.ArgumentParser:
4002
4414
 
4003
4415
  parser_resolve = subparsers.add_parser('pkg')
4004
4416
  parser_resolve.add_argument('-b', '--build', action='store_true')
4417
+ parser_resolve.add_argument('-r', '--revision', action='store_true')
4005
4418
  parser_resolve.add_argument('cmd', nargs='?')
4006
4419
  parser_resolve.add_argument('args', nargs=argparse.REMAINDER)
4007
4420
  parser_resolve.set_defaults(func=_pkg_cmd)
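
Taken together, the new plumbing lets a package build opt into revision stamping end to end: _pkg_cmd maps the new '-r/--revision' flag onto add_revision, and PyprojectPackageGenerator._run_build calls GitRevisionAdder().add_to() on the build directory's dist/ output after `python -m build` finishes, before the artifacts are copied to the output directory. A minimal sketch of the programmatic path (class and argument names are from the code above; the directory values are hypothetical):

    gen = PyprojectPackageGenerator('omdev', '.pkg-build')  # hypothetical dir_name / build_root
    gen.gen(
        run_build=True,
        build_output_dir='dist',
        add_revision=True,  # stamp __revision__ into the built wheel/sdist before copying
    )

On the command line this corresponds to passing '-r' alongside '-b' to the 'pkg' subcommand.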