omdev 0.0.0.dev440__py3-none-any.whl → 0.0.0.dev486__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of omdev might be problematic.

Files changed (132)
  1. omdev/.omlish-manifests.json +17 -29
  2. omdev/__about__.py +9 -7
  3. omdev/amalg/gen/gen.py +49 -6
  4. omdev/amalg/gen/imports.py +1 -1
  5. omdev/amalg/gen/manifests.py +1 -1
  6. omdev/amalg/gen/resources.py +1 -1
  7. omdev/amalg/gen/srcfiles.py +13 -3
  8. omdev/amalg/gen/strip.py +1 -1
  9. omdev/amalg/gen/types.py +1 -1
  10. omdev/amalg/gen/typing.py +1 -1
  11. omdev/amalg/info.py +32 -0
  12. omdev/cache/data/actions.py +1 -1
  13. omdev/cache/data/specs.py +1 -1
  14. omdev/cexts/_boilerplate.cc +2 -3
  15. omdev/cexts/cmake.py +4 -1
  16. omdev/ci/cli.py +1 -2
  17. omdev/cmdlog/cli.py +1 -2
  18. omdev/dataclasses/_dumping.py +1960 -0
  19. omdev/dataclasses/_template.py +22 -0
  20. omdev/dataclasses/cli.py +6 -1
  21. omdev/dataclasses/codegen.py +340 -60
  22. omdev/dataclasses/dumping.py +200 -0
  23. omdev/interp/uv/provider.py +1 -0
  24. omdev/interp/venvs.py +1 -0
  25. omdev/irc/messages/base.py +50 -0
  26. omdev/irc/messages/formats.py +92 -0
  27. omdev/irc/messages/messages.py +775 -0
  28. omdev/irc/messages/parsing.py +99 -0
  29. omdev/irc/numerics/__init__.py +0 -0
  30. omdev/irc/numerics/formats.py +97 -0
  31. omdev/irc/numerics/numerics.py +865 -0
  32. omdev/irc/numerics/types.py +59 -0
  33. omdev/irc/protocol/LICENSE +11 -0
  34. omdev/irc/protocol/__init__.py +61 -0
  35. omdev/irc/protocol/consts.py +6 -0
  36. omdev/irc/protocol/errors.py +30 -0
  37. omdev/irc/protocol/message.py +21 -0
  38. omdev/irc/protocol/nuh.py +55 -0
  39. omdev/irc/protocol/parsing.py +158 -0
  40. omdev/irc/protocol/rendering.py +153 -0
  41. omdev/irc/protocol/tags.py +102 -0
  42. omdev/irc/protocol/utils.py +30 -0
  43. omdev/manifests/_dumping.py +125 -25
  44. omdev/markdown/__init__.py +0 -0
  45. omdev/markdown/incparse.py +116 -0
  46. omdev/markdown/tokens.py +51 -0
  47. omdev/packaging/marshal.py +8 -8
  48. omdev/packaging/requires.py +6 -6
  49. omdev/packaging/specifiers.py +2 -1
  50. omdev/packaging/versions.py +4 -4
  51. omdev/packaging/wheelfile.py +2 -0
  52. omdev/precheck/blanklines.py +66 -0
  53. omdev/precheck/caches.py +1 -1
  54. omdev/precheck/imports.py +14 -1
  55. omdev/precheck/main.py +4 -3
  56. omdev/precheck/unicode.py +39 -15
  57. omdev/py/asts/__init__.py +0 -0
  58. omdev/py/asts/parents.py +28 -0
  59. omdev/py/asts/toplevel.py +123 -0
  60. omdev/py/asts/visitors.py +18 -0
  61. omdev/py/attrdocs.py +1 -1
  62. omdev/py/bracepy.py +12 -4
  63. omdev/py/reprs.py +32 -0
  64. omdev/py/srcheaders.py +1 -1
  65. omdev/py/tokens/__init__.py +0 -0
  66. omdev/py/tools/mkrelimp.py +1 -1
  67. omdev/py/tools/pipdepup.py +629 -0
  68. omdev/pyproject/pkg.py +190 -45
  69. omdev/pyproject/reqs.py +31 -9
  70. omdev/pyproject/tools/__init__.py +0 -0
  71. omdev/pyproject/tools/aboutdeps.py +55 -0
  72. omdev/pyproject/venvs.py +8 -1
  73. omdev/rs/__init__.py +0 -0
  74. omdev/scripts/ci.py +398 -80
  75. omdev/scripts/interp.py +193 -35
  76. omdev/scripts/lib/inject.py +74 -27
  77. omdev/scripts/lib/logs.py +75 -27
  78. omdev/scripts/lib/marshal.py +67 -25
  79. omdev/scripts/pyproject.py +941 -90
  80. omdev/tools/git/cli.py +10 -0
  81. omdev/tools/json/processing.py +5 -2
  82. omdev/tools/jsonview/cli.py +31 -5
  83. omdev/tools/pawk/pawk.py +2 -2
  84. omdev/tools/pip.py +8 -0
  85. omdev/tui/__init__.py +0 -0
  86. omdev/tui/apps/__init__.py +0 -0
  87. omdev/tui/apps/edit/__init__.py +0 -0
  88. omdev/tui/apps/edit/main.py +163 -0
  89. omdev/tui/apps/irc/__init__.py +0 -0
  90. omdev/tui/apps/irc/__main__.py +4 -0
  91. omdev/tui/apps/irc/app.py +278 -0
  92. omdev/tui/apps/irc/client.py +187 -0
  93. omdev/tui/apps/irc/commands.py +175 -0
  94. omdev/tui/apps/irc/main.py +26 -0
  95. omdev/tui/apps/markdown/__init__.py +0 -0
  96. omdev/tui/apps/markdown/__main__.py +11 -0
  97. omdev/{ptk → tui/apps}/markdown/cli.py +5 -7
  98. omdev/tui/rich/__init__.py +34 -0
  99. omdev/tui/rich/console2.py +20 -0
  100. omdev/tui/rich/markdown2.py +186 -0
  101. omdev/tui/textual/__init__.py +226 -0
  102. omdev/tui/textual/app2.py +11 -0
  103. omdev/tui/textual/autocomplete/LICENSE +21 -0
  104. omdev/tui/textual/autocomplete/__init__.py +33 -0
  105. omdev/tui/textual/autocomplete/matching.py +226 -0
  106. omdev/tui/textual/autocomplete/paths.py +202 -0
  107. omdev/tui/textual/autocomplete/widget.py +612 -0
  108. omdev/tui/textual/drivers2.py +55 -0
  109. {omdev-0.0.0.dev440.dist-info → omdev-0.0.0.dev486.dist-info}/METADATA +11 -9
  110. {omdev-0.0.0.dev440.dist-info → omdev-0.0.0.dev486.dist-info}/RECORD +119 -73
  111. omdev/ptk/__init__.py +0 -103
  112. omdev/ptk/apps/ncdu.py +0 -167
  113. omdev/ptk/confirm.py +0 -60
  114. omdev/ptk/markdown/LICENSE +0 -22
  115. omdev/ptk/markdown/__init__.py +0 -10
  116. omdev/ptk/markdown/__main__.py +0 -11
  117. omdev/ptk/markdown/border.py +0 -94
  118. omdev/ptk/markdown/markdown.py +0 -390
  119. omdev/ptk/markdown/parser.py +0 -42
  120. omdev/ptk/markdown/styles.py +0 -29
  121. omdev/ptk/markdown/tags.py +0 -299
  122. omdev/ptk/markdown/utils.py +0 -366
  123. omdev/pyproject/cexts.py +0 -110
  124. /omdev/{ptk/apps → irc}/__init__.py +0 -0
  125. /omdev/{tokens → irc/messages}/__init__.py +0 -0
  126. /omdev/{tokens → py/tokens}/all.py +0 -0
  127. /omdev/{tokens → py/tokens}/tokenizert.py +0 -0
  128. /omdev/{tokens → py/tokens}/utils.py +0 -0
  129. {omdev-0.0.0.dev440.dist-info → omdev-0.0.0.dev486.dist-info}/WHEEL +0 -0
  130. {omdev-0.0.0.dev440.dist-info → omdev-0.0.0.dev486.dist-info}/entry_points.txt +0 -0
  131. {omdev-0.0.0.dev440.dist-info → omdev-0.0.0.dev486.dist-info}/licenses/LICENSE +0 -0
  132. {omdev-0.0.0.dev440.dist-info → omdev-0.0.0.dev486.dist-info}/top_level.txt +0 -0
@@ -27,6 +27,7 @@ See:
  """
  import abc
  import argparse
+ import ast
  import asyncio
  import asyncio.base_subprocess
  import asyncio.subprocess
@@ -52,6 +53,7 @@ import itertools
  import json
  import logging
  import multiprocessing as mp
+ import operator
  import os
  import os.path
  import re
@@ -63,6 +65,7 @@ import subprocess
  import sys
  import tarfile
  import tempfile
+ import textwrap
  import threading
  import time
  import traceback
@@ -80,6 +83,82 @@ if sys.version_info < (3, 8):
80
83
  raise OSError(f'Requires python (3, 8), got {sys.version_info} from {sys.executable}') # noqa
81
84
 
82
85
 
86
+ def __omlish_amalg__(): # noqa
87
+ return dict(
88
+ src_files=[
89
+ dict(path='../magic/magic.py', sha1='16a7598eac927e7994d78b9f851dd6cd1fce34c9'),
90
+ dict(path='../magic/prepare.py', sha1='a9b6bd7408d86a52fab7aae2c522032fb251cb8e'),
91
+ dict(path='../magic/styles.py', sha1='60fc56d089669eaad72c5c134b91bc69d4bc107e'),
92
+ dict(path='../packaging/versions.py', sha1='71627ad600b3529b829b0e227b0952f2c63c7271'),
93
+ dict(path='../packaging/wheelfile.py', sha1='2e1113819aa85ed00e1fe25c71a2b3dbc0a383f5'),
94
+ dict(path='../../omlish/formats/toml/parser.py', sha1='73dac82289350ab951c4bcdbfe61167fa221f26f'),
95
+ dict(path='../../omlish/formats/toml/writer.py', sha1='6ea41d7e724bb1dcf6bd84b88993ff4e8798e021'),
96
+ dict(path='../../omlish/lite/abstract.py', sha1='a2fc3f3697fa8de5247761e9d554e70176f37aac'),
97
+ dict(path='../../omlish/lite/cached.py', sha1='0c33cf961ac8f0727284303c7a30c5ea98f714f2'),
98
+ dict(path='../../omlish/lite/check.py', sha1='bb6b6b63333699b84462951a854d99ae83195b94'),
99
+ dict(path='../../omlish/lite/json.py', sha1='57eeddc4d23a17931e00284ffa5cb6e3ce089486'),
100
+ dict(path='../../omlish/lite/objects.py', sha1='9566bbf3530fd71fcc56321485216b592fae21e9'),
101
+ dict(path='../../omlish/lite/reflect.py', sha1='c4fec44bf144e9d93293c996af06f6c65fc5e63d'),
102
+ dict(path='../../omlish/lite/strings.py', sha1='89831ecbc34ad80e118a865eceb390ed399dc4d6'),
103
+ dict(path='../../omlish/lite/typing.py', sha1='deaaa560b63d9a0e40991ec0006451f5f0df04c1'),
104
+ dict(path='../../omlish/logs/levels.py', sha1='91405563d082a5eba874da82aac89d83ce7b6152'),
105
+ dict(path='../../omlish/logs/std/filters.py', sha1='f36aab646d84d31e295b33aaaaa6f8b67ff38b3d'),
106
+ dict(path='../../omlish/logs/std/proxy.py', sha1='3e7301a2aa351127f9c85f61b2f85dcc3f15aafb'),
107
+ dict(path='../../omlish/logs/warnings.py', sha1='c4eb694b24773351107fcc058f3620f1dbfb6799'),
108
+ dict(path='../cexts/magic.py', sha1='4e5ce6732454f75c9dd27352959708d8fa7b1666'),
109
+ dict(path='../magic/find.py', sha1='436228a9cf1d8bab6b9234d09f72913b0960382f'),
110
+ dict(path='../packaging/specifiers.py', sha1='a56ab4e8c9b174adb523921f6280ac41e0fce749'),
111
+ dict(path='../../omlish/argparse/cli.py', sha1='f4dc3cd353d14386b5da0306768700e396afd2b3'),
112
+ dict(path='../../omlish/lite/marshal.py', sha1='96348f5f2a26dc27d842d33cc3927e9da163436b'),
113
+ dict(path='../../omlish/lite/maybes.py', sha1='bdf5136654ccd14b6a072588cad228925bdfbabd'),
114
+ dict(path='../../omlish/lite/runtime.py', sha1='2e752a27ae2bf89b1bb79b4a2da522a3ec360c70'),
115
+ dict(path='../../omlish/lite/timeouts.py', sha1='a0f673033a6943f242e35848d78a41892b9c62a1'),
116
+ dict(path='../../omlish/logs/infos.py', sha1='4dd104bd468a8c438601dd0bbda619b47d2f1620'),
117
+ dict(path='../../omlish/logs/protocols.py', sha1='05ca4d1d7feb50c4e3b9f22ee371aa7bf4b3dbd1'),
118
+ dict(path='../../omlish/logs/std/json.py', sha1='2a75553131e4d5331bb0cedde42aa183f403fc3b'),
119
+ dict(path='../interp/types.py', sha1='cfc14929777fb19f723c875bcafc8f7c66593d6d'),
120
+ dict(path='../packaging/requires.py', sha1='5818353abd45135e0e638e28fa6247b24122231b'),
121
+ dict(path='../../omlish/asyncs/asyncio/timeouts.py', sha1='4d31b02b3c39b8f2fa7e94db36552fde6942e36a'),
122
+ dict(path='../../omlish/lite/inject.py', sha1='6f097e3170019a34ff6834d36fcc9cbeed3a7ab4'),
123
+ dict(path='../../omlish/logs/contexts.py', sha1='7456964ade9ac66460e9ade4e242dbdc24b39501'),
124
+ dict(path='../../omlish/logs/standard.py', sha1='818b674f7d15012f25b79f52f6e8e7368b633038'),
125
+ dict(path='../../omlish/subprocesses/run.py', sha1='8200e48f0c49d164df3503cd0143038d0c4d30aa'),
126
+ dict(path='../../omlish/subprocesses/wrap.py', sha1='8a9b7d2255481fae15c05f5624b0cdc0766f4b3f'),
127
+ dict(path='../interp/providers/base.py', sha1='f5d068c21f230d742e9015b033cd6320f4c68898'),
128
+ dict(path='../../omlish/logs/base.py', sha1='a376460b11b9dc0555fd4ead5437af62c2109a4b'),
129
+ dict(path='../../omlish/logs/std/records.py', sha1='8bbf6ef9eccb3a012c6ca416ddf3969450fd8fc9'),
130
+ dict(path='../../omlish/subprocesses/base.py', sha1='cb9f668be5422fecb27222caabb67daac6c1bab9'),
131
+ dict(path='../interp/resolvers.py', sha1='817b8e76401cd7a19eb43ca54d65272e4c8a4b0e'),
132
+ dict(path='../../omlish/logs/std/loggers.py', sha1='daa35bdc4adea5006e442688017f0de3392579b7'),
133
+ dict(path='../../omlish/subprocesses/asyncs.py', sha1='bba44d524c24c6ac73168aee6343488414e5bf48'),
134
+ dict(path='../../omlish/subprocesses/sync.py', sha1='8434919eba4da67825773d56918fdc0cb2f1883b'),
135
+ dict(path='../git/revisions.py', sha1='a26b5afa568313e034b6b2d3a5d2dd0b065979d4'),
136
+ dict(path='../../omlish/asyncs/asyncio/subprocesses.py', sha1='b6b5f9ae3fd0b9c83593bad2e04a08f726e5904d'),
137
+ dict(path='../../omlish/logs/modules.py', sha1='99e73cde6872fd5eda6af3dbf0fc9322bdeb641a'),
138
+ dict(path='../interp/inspect.py', sha1='736287b4ec8d14a8c30afa0ba23996fdc0662caa'),
139
+ dict(path='../interp/pyenv/pyenv.py', sha1='d1f6e657c671c1b1a5b0e627284df656fe2d10d3'),
140
+ dict(path='../interp/uv/uv.py', sha1='8c6515cd6755efab3972da92a285e94ccb255515'),
141
+ dict(path='../packaging/revisions.py', sha1='4ea4ac3006ae5b0bdc0c5a6c587cfed8fbad87b3'),
142
+ dict(path='reqs.py', sha1='822e265b0d2e6d9548ee24d3ac60c81066e40ee8'),
143
+ dict(path='../interp/providers/running.py', sha1='85c9cc69ff6fbd6c8cf78ed6262619a30856c2f1'),
144
+ dict(path='../interp/providers/system.py', sha1='9638a154475ca98775159d27739563ac7fb2eb16'),
145
+ dict(path='../interp/pyenv/install.py', sha1='4a10a19717364b4ba9f3b8bf1d12621cf21ba8b8'),
146
+ dict(path='../interp/uv/provider.py', sha1='997dc9453589a4cee0658d2fa0893c4ec60b5a0d'),
147
+ dict(path='pkg.py', sha1='a7b64fcf267ba385442393b90c9711af08ba9ac3'),
148
+ dict(path='../interp/providers/inject.py', sha1='7cc9ebf58cf2ec09545321456bd9da9f9a3a79fb'),
149
+ dict(path='../interp/pyenv/provider.py', sha1='377542ce01a35849e2a5b4a4dbafedc26882f983'),
150
+ dict(path='../interp/uv/inject.py', sha1='e95d058c2340baa5a3155ec3440f311d1daa10a8'),
151
+ dict(path='../interp/pyenv/inject.py', sha1='b8fb68f5a7cae86c70fe1bad6c29a8b2dfc985c3'),
152
+ dict(path='../interp/inject.py', sha1='b039abbadf0b096d2724182af2e0ebda2a230852'),
153
+ dict(path='../interp/default.py', sha1='a799969a0d3f4b57538587b13ceb08f6334ebc16'),
154
+ dict(path='../interp/venvs.py', sha1='9ba8f2c3131d7d519d5cf36ca69b75f9c6fe2b27'),
155
+ dict(path='configs.py', sha1='7b1c1ed034ecb728d67ff15e3bb2b21a218773c9'),
156
+ dict(path='venvs.py', sha1='9f1935171017aeb802da56e14d7f41d632a7aa25'),
157
+ dict(path='cli.py', sha1='e2f06505bb59793af3a2779bfa2c15aefb308539'),
158
+ ],
159
+ )
160
+
161
+
83
162
  ########################################
84
163
 
85
164
 
@@ -96,8 +175,10 @@ TomlParseFloat = ta.Callable[[str], ta.Any] # ta.TypeAlias
  TomlKey = ta.Tuple[str, ...] # ta.TypeAlias
  TomlPos = int # ta.TypeAlias

- # ../../omlish/lite/cached.py
+ # ../../omlish/lite/abstract.py
  T = ta.TypeVar('T')
+
+ # ../../omlish/lite/cached.py
  CallableT = ta.TypeVar('CallableT', bound=ta.Callable)

  # ../../omlish/lite/check.py
@@ -137,6 +218,11 @@ LoggingExcInfo = ta.Union[BaseException, LoggingExcInfoTuple] # ta.TypeAlias
  LoggingExcInfoArg = ta.Union[LoggingExcInfo, bool, None] # ta.TypeAlias
  LoggingContextInfo = ta.Any # ta.TypeAlias

+ # ../packaging/requires.py
+ RequiresMarkerVar = ta.Union['RequiresVariable', 'RequiresValue'] # ta.TypeAlias
+ RequiresMarkerAtom = ta.Union['RequiresMarkerItem', ta.Sequence['RequiresMarkerAtom']] # ta.TypeAlias
+ RequiresMarkerList = ta.Sequence[ta.Union['RequiresMarkerList', 'RequiresMarkerAtom', str]] # ta.TypeAlias
+
  # ../../omlish/asyncs/asyncio/timeouts.py
  AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)

@@ -361,12 +447,12 @@ class _BaseVersion:

  def __lt__(self, other: '_BaseVersion') -> bool:
  if not isinstance(other, _BaseVersion):
- return NotImplemented # type: ignore
+ return NotImplemented
  return self._key < other._key

  def __le__(self, other: '_BaseVersion') -> bool:
  if not isinstance(other, _BaseVersion):
- return NotImplemented # type: ignore
+ return NotImplemented
  return self._key <= other._key

  def __eq__(self, other: object) -> bool:
@@ -376,12 +462,12 @@ class _BaseVersion:

  def __ge__(self, other: '_BaseVersion') -> bool:
  if not isinstance(other, _BaseVersion):
- return NotImplemented # type: ignore
+ return NotImplemented
  return self._key >= other._key

  def __gt__(self, other: '_BaseVersion') -> bool:
  if not isinstance(other, _BaseVersion):
- return NotImplemented # type: ignore
+ return NotImplemented
  return self._key > other._key

  def __ne__(self, other: object) -> bool:
@@ -765,11 +851,13 @@ class WheelFile(zipfile.ZipFile):
  @staticmethod
  def _urlsafe_b64encode(data: bytes) -> bytes:
  """urlsafe_b64encode without padding"""
+
  return base64.urlsafe_b64encode(data).rstrip(b'=')

  @staticmethod
  def _urlsafe_b64decode(data: bytes) -> bytes:
  """urlsafe_b64decode without padding"""
+
  pad = b'=' * (4 - (len(data) & 3))
  return base64.urlsafe_b64decode(data + pad)

@@ -1959,25 +2047,49 @@ def is_abstract_method(obj: ta.Any) -> bool:
  return bool(getattr(obj, _IS_ABSTRACT_METHOD_ATTR, False))


- def update_abstracts(cls, *, force=False):
+ def compute_abstract_methods(cls: type) -> ta.FrozenSet[str]:
+ # ~> https://github.com/python/cpython/blob/f3476c6507381ca860eec0989f53647b13517423/Modules/_abc.c#L358
+
+ # Stage 1: direct abstract methods
+
+ abstracts = {
+ a
+ # Get items as a list to avoid mutation issues during iteration
+ for a, v in list(cls.__dict__.items())
+ if is_abstract_method(v)
+ }
+
+ # Stage 2: inherited abstract methods
+
+ for base in cls.__bases__:
+ # Get __abstractmethods__ from base if it exists
+ if (base_abstracts := getattr(base, _ABSTRACT_METHODS_ATTR, None)) is None:
+ continue
+
+ # Iterate over abstract methods in base
+ for key in base_abstracts:
+ # Check if this class has an attribute with this name
+ try:
+ value = getattr(cls, key)
+ except AttributeError:
+ # Attribute not found in this class, skip
+ continue
+
+ # Check if it's still abstract
+ if is_abstract_method(value):
+ abstracts.add(key)
+
+ return frozenset(abstracts)
+
+
+ def update_abstracts(cls: ta.Type[T], *, force: bool = False) -> ta.Type[T]:
  if not force and not hasattr(cls, _ABSTRACT_METHODS_ATTR):
  # Per stdlib: We check for __abstractmethods__ here because cls might by a C implementation or a python
  # implementation (especially during testing), and we want to handle both cases.
  return cls

- abstracts: ta.Set[str] = set()
-
- for scls in cls.__bases__:
- for name in getattr(scls, _ABSTRACT_METHODS_ATTR, ()):
- value = getattr(cls, name, None)
- if getattr(value, _IS_ABSTRACT_METHOD_ATTR, False):
- abstracts.add(name)
-
- for name, value in cls.__dict__.items():
- if getattr(value, _IS_ABSTRACT_METHOD_ATTR, False):
- abstracts.add(name)
-
- setattr(cls, _ABSTRACT_METHODS_ATTR, frozenset(abstracts))
+ abstracts = compute_abstract_methods(cls)
+ setattr(cls, _ABSTRACT_METHODS_ATTR, abstracts)
  return cls

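The new `compute_abstract_methods` mirrors CPython's `_abc` recomputation: it collects abstract members declared directly on the class, then adds any inherited `__abstractmethods__` names that are still abstract on the subclass. A minimal usage sketch, assuming the helpers above are in scope (class names here are illustrative only):

```python
import abc

# Plain classes (no ABCMeta); abc.abstractmethod just sets __isabstractmethod__,
# which is what is_abstract_method() inspects in the amalgamated code.

class Base:
    @abc.abstractmethod
    def run(self) -> None: ...

class Partial(Base):
    pass  # 'run' is not overridden, so it stays abstract

class Complete(Partial):
    def run(self) -> None:
        print('ok')

# update_abstracts() recomputes and stores __abstractmethods__:
update_abstracts(Base, force=True)      # -> frozenset({'run'})
update_abstracts(Partial, force=True)   # -> frozenset({'run'}) via Stage 2 (inherited, still abstract)
update_abstracts(Complete, force=True)  # -> frozenset() because the override is concrete
```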
@@ -2031,23 +2143,26 @@ class Abstract:
  super().__init_subclass__(**kwargs)

  if not (Abstract in cls.__bases__ or abc.ABC in cls.__bases__):
- ams = {a: cls for a, o in cls.__dict__.items() if is_abstract_method(o)}
+ if ams := compute_abstract_methods(cls):
+ amd = {
+ a: mcls
+ for mcls in cls.__mro__[::-1]
+ for a in ams
+ if a in mcls.__dict__
+ }

- seen = set(cls.__dict__)
- for b in cls.__bases__:
- ams.update({a: b for a in set(getattr(b, _ABSTRACT_METHODS_ATTR, [])) - seen}) # noqa
- seen.update(dir(b))
-
- if ams:
  raise AbstractTypeError(
  f'Cannot subclass abstract class {cls.__name__} with abstract methods: ' +
  ', '.join(sorted([
  '.'.join([
- *([m] if (m := getattr(c, '__module__')) else []),
- getattr(c, '__qualname__', getattr(c, '__name__')),
+ *([
+ *([m] if (m := getattr(c, '__module__')) else []),
+ getattr(c, '__qualname__', getattr(c, '__name__')),
+ ] if c is not None else '?'),
  a,
  ])
- for a, c in ams.items()
+ for a in ams
+ for c in [amd.get(a)]
  ])),
  )

@@ -2121,6 +2236,62 @@ def async_cached_nullary(fn): # ta.Callable[..., T]) -> ta.Callable[..., T]:
2121
2236
  return _AsyncCachedNullary(fn)
2122
2237
 
2123
2238
 
2239
+ ##
2240
+
2241
+
2242
+ cached_property = functools.cached_property
2243
+
2244
+
2245
+ class _cached_property: # noqa
2246
+ """Backported to pick up https://github.com/python/cpython/commit/056dfc71dce15f81887f0bd6da09d6099d71f979 ."""
2247
+
2248
+ def __init__(self, func):
2249
+ self.func = func
2250
+ self.attrname = None # noqa
2251
+ self.__doc__ = func.__doc__
2252
+ self.__module__ = func.__module__
2253
+
2254
+ _NOT_FOUND = object()
2255
+
2256
+ def __set_name__(self, owner, name):
2257
+ if self.attrname is None:
2258
+ self.attrname = name # noqa
2259
+ elif name != self.attrname:
2260
+ raise TypeError(
2261
+ f'Cannot assign the same cached_property to two different names ({self.attrname!r} and {name!r}).',
2262
+ )
2263
+
2264
+ def __get__(self, instance, owner=None):
2265
+ if instance is None:
2266
+ return self
2267
+ if self.attrname is None:
2268
+ raise TypeError('Cannot use cached_property instance without calling __set_name__ on it.')
2269
+
2270
+ try:
2271
+ cache = instance.__dict__
2272
+ except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
2273
+ raise TypeError(
2274
+ f"No '__dict__' attribute on {type(instance).__name__!r} instance to cache {self.attrname!r} property.",
2275
+ ) from None
2276
+
2277
+ val = cache.get(self.attrname, self._NOT_FOUND)
2278
+
2279
+ if val is self._NOT_FOUND:
2280
+ val = self.func(instance)
2281
+ try:
2282
+ cache[self.attrname] = val
2283
+ except TypeError:
2284
+ raise TypeError(
2285
+ f"The '__dict__' attribute on {type(instance).__name__!r} instance does not support item "
2286
+ f"assignment for caching {self.attrname!r} property.",
2287
+ ) from None
2288
+
2289
+ return val
2290
+
2291
+
2292
+ globals()['cached_property'] = _cached_property
2293
+
2294
+
2124
2295
  ########################################
2125
2296
  # ../../../omlish/lite/check.py
2126
2297
  """
@@ -2909,6 +3080,12 @@ def format_num_bytes(num_bytes: int) -> str:

  ##
  # A workaround for typing deficiencies (like `Argument 2 to NewType(...) must be subclassable`).
+ #
+ # Note that this problem doesn't happen at runtime - it happens in mypy:
+ #
+ # mypy <(echo "import typing as ta; MyCallback = ta.NewType('MyCallback', ta.Callable[[], None])")
+ # /dev/fd/11:1:22: error: Argument 2 to NewType(...) must be subclassable (got "Callable[[], None]") [valid-newtype]
+ #


  @dc.dataclass(frozen=True)
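The expanded comment documents the mypy limitation; the frozen dataclass that follows it in the source is the kind of wrapper used in place of `ta.NewType` for callables. An illustrative (not verbatim) sketch of the pattern, with hypothetical names:

```python
import dataclasses as dc
import typing as ta

# ta.NewType('MyCallback', ta.Callable[[], None]) is rejected by mypy
# ("Argument 2 to NewType(...) must be subclassable"), so wrap the callable
# in a small frozen dataclass instead.

@dc.dataclass(frozen=True)
class MyCallback:
    fn: ta.Callable[[], None]

    def __call__(self) -> None:
        self.fn()


cb = MyCallback(lambda: print('hi'))
cb()
```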
@@ -3868,7 +4045,7 @@ class SpecifierSet(BaseSpecifier):
  if isinstance(other, str):
  other = SpecifierSet(other)
  elif not isinstance(other, SpecifierSet):
- return NotImplemented # type: ignore
+ return NotImplemented

  specifier = SpecifierSet()
  specifier._specs = frozenset(self._specs | other._specs)
@@ -3888,6 +4065,7 @@ class SpecifierSet(BaseSpecifier):
  if isinstance(other, (str, Specifier)):
  other = SpecifierSet(str(other))
  elif not isinstance(other, SpecifierSet):
+
  return NotImplemented

  return self._specs == other._specs
@@ -3959,6 +4137,7 @@ TODO:
  - pre-run, post-run hooks
  - exitstack?
  - suggestion - difflib.get_close_matches
+ - add_argument_group - group kw on ArgparseKwarg?
  """


@@ -3969,6 +4148,7 @@ TODO:
  class ArgparseArg:
  args: ta.Sequence[ta.Any]
  kwargs: ta.Mapping[str, ta.Any]
+ group: ta.Optional[str] = None
  dest: ta.Optional[str] = None

  def __get__(self, instance, owner=None):
@@ -3978,7 +4158,11 @@ class ArgparseArg:


  def argparse_arg(*args, **kwargs) -> ArgparseArg:
- return ArgparseArg(args, kwargs)
+ return ArgparseArg(
+ args=args,
+ group=kwargs.pop('group', None),
+ kwargs=kwargs,
+ )


  def argparse_arg_(*args, **kwargs) -> ta.Any:
@@ -4148,6 +4332,10 @@ class ArgparseCli:
  subparser.set_defaults(_cmd=obj)

  elif isinstance(obj, ArgparseArg):
+ if obj.group is not None:
+ # FIXME: add_argument_group
+ raise NotImplementedError
+
  if att in anns:
  ann_kwargs = _get_argparse_arg_ann_kwargs(anns[att])
  obj.kwargs = {**ann_kwargs, **obj.kwargs}
@@ -4193,7 +4381,7 @@ class ArgparseCli:

  if self._unknown_args and not (cmd is not None and cmd.accepts_unknown):
  msg = f'unrecognized arguments: {" ".join(self._unknown_args)}'
- if (parser := self.get_parser()).exit_on_error:
+ if (parser := self.get_parser()).exit_on_error: # noqa
  parser.error(msg)
  else:
  raise argparse.ArgumentError(None, msg)
@@ -4213,7 +4401,10 @@ class ArgparseCli:
  return fn()

  def cli_run_and_exit(self) -> ta.NoReturn:
- sys.exit(rc if isinstance(rc := self.cli_run(), int) else 0)
+ rc = self.cli_run()
+ if not isinstance(rc, int):
+ rc = 0
+ raise SystemExit(rc)

  def __call__(self, *, exit: bool = False) -> ta.Optional[int]: # noqa
  if exit:
@@ -5165,8 +5356,6 @@ class _JustMaybe(_Maybe[T]):
  __slots__ = ('_v', '_hash')

  def __init__(self, v: T) -> None:
- super().__init__()
-
  self._v = v

  @property
@@ -5224,6 +5413,13 @@ class _EmptyMaybe(_Maybe[T]):
  Maybe._empty = _EmptyMaybe() # noqa


+ ##
+
+
+ setattr(Maybe, 'just', _JustMaybe) # noqa
+ setattr(Maybe, 'empty', functools.partial(operator.attrgetter('_empty'), Maybe))
+
+
  ########################################
  # ../../../omlish/lite/runtime.py

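The two `setattr` calls expose the concrete classes as constructors on the `Maybe` base: `Maybe.just(...)` builds a `_JustMaybe`, and `Maybe.empty()` returns the shared `_EmptyMaybe` singleton via the `attrgetter` partial. A rough usage sketch under those assumptions (only behavior visible in this diff is used):

```python
m = Maybe.just(3)   # constructs a _JustMaybe(3)
e = Maybe.empty()   # functools.partial(operator.attrgetter('_empty'), Maybe)() -> Maybe._empty

assert isinstance(m, Maybe) and isinstance(e, Maybe)
assert e is Maybe.empty()  # the empty value is a shared singleton
```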
@@ -5994,6 +6190,488 @@ class Interp:
5994
6190
  version: InterpVersion
5995
6191
 
5996
6192
 
6193
+ ########################################
6194
+ # ../../packaging/requires.py
6195
+ # Copyright (c) Donald Stufft and individual contributors.
6196
+ # All rights reserved.
6197
+ #
6198
+ # Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
6199
+ # following conditions are met:
6200
+ #
6201
+ # 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
6202
+ # following disclaimer.
6203
+ #
6204
+ # 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
6205
+ # following disclaimer in the documentation and/or other materials provided with the distribution.
6206
+ #
6207
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
6208
+ # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
6209
+ # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
6210
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
6211
+ # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
6212
+ # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
6213
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. This file is dual licensed under the terms of the
6214
+ # Apache License, Version 2.0, and the BSD License. See the LICENSE file in the root of this repository for complete
6215
+ # details.
6216
+ # https://github.com/pypa/packaging/blob/cf2cbe2aec28f87c6228a6fb136c27931c9af407/src/packaging/_parser.py#L65
6217
+
6218
+
6219
+ ##
6220
+
6221
+
6222
+ @dc.dataclass()
6223
+ class RequiresToken:
6224
+ name: str
6225
+ text: str
6226
+ position: int
6227
+
6228
+
6229
+ class RequiresParserSyntaxError(Exception):
6230
+ def __init__(
6231
+ self,
6232
+ message: str,
6233
+ *,
6234
+ source: str,
6235
+ span: ta.Tuple[int, int],
6236
+ ) -> None:
6237
+ self.span = span
6238
+ self.message = message
6239
+ self.source = source
6240
+
6241
+ super().__init__()
6242
+
6243
+ def __str__(self) -> str:
6244
+ marker = ' ' * self.span[0] + '~' * (self.span[1] - self.span[0]) + '^'
6245
+ return '\n '.join([self.message, self.source, marker])
6246
+
6247
+
6248
+ REQUIRES_DEFAULT_RULES: ta.Dict[str, ta.Union[str, ta.Pattern[str]]] = {
6249
+ 'LEFT_PARENTHESIS': r'\(',
6250
+ 'RIGHT_PARENTHESIS': r'\)',
6251
+ 'LEFT_BRACKET': r'\[',
6252
+ 'RIGHT_BRACKET': r'\]',
6253
+ 'SEMICOLON': r';',
6254
+ 'COMMA': r',',
6255
+ 'QUOTED_STRING': re.compile(
6256
+ r"""
6257
+ (
6258
+ ('[^']*')
6259
+ |
6260
+ ("[^"]*")
6261
+ )
6262
+ """,
6263
+ re.VERBOSE,
6264
+ ),
6265
+ 'OP': r'(===|==|~=|!=|<=|>=|<|>)',
6266
+ 'BOOLOP': r'\b(or|and)\b',
6267
+ 'IN': r'\bin\b',
6268
+ 'NOT': r'\bnot\b',
6269
+ 'VARIABLE': re.compile(
6270
+ r"""
6271
+ \b(
6272
+ python_version
6273
+ |python_full_version
6274
+ |os[._]name
6275
+ |sys[._]platform
6276
+ |platform_(release|system)
6277
+ |platform[._](version|machine|python_implementation)
6278
+ |python_implementation
6279
+ |implementation_(name|version)
6280
+ |extra
6281
+ )\b
6282
+ """,
6283
+ re.VERBOSE,
6284
+ ),
6285
+ 'SPECIFIER': re.compile(
6286
+ Specifier._operator_regex_str + Specifier._version_regex_str, # noqa
6287
+ re.VERBOSE | re.IGNORECASE,
6288
+ ),
6289
+ 'AT': r'\@',
6290
+ 'URL': r'[^ \t]+',
6291
+ 'IDENTIFIER': r'\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b',
6292
+ 'VERSION_PREFIX_TRAIL': r'\.\*',
6293
+ 'VERSION_LOCAL_LABEL_TRAIL': r'\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*',
6294
+ 'WS': r'[ \t]+',
6295
+ 'END': r'$',
6296
+ }
6297
+
6298
+
6299
+ class RequiresTokenizer:
6300
+ def __init__(
6301
+ self,
6302
+ source: str,
6303
+ *,
6304
+ rules: ta.Dict[str, ta.Union[str, ta.Pattern[str]]],
6305
+ ) -> None:
6306
+ super().__init__()
6307
+
6308
+ self.source = source
6309
+ self.rules: ta.Dict[str, ta.Pattern[str]] = {name: re.compile(pattern) for name, pattern in rules.items()}
6310
+ self.next_token: ta.Optional[RequiresToken] = None
6311
+ self.position = 0
6312
+
6313
+ def consume(self, name: str) -> None:
6314
+ if self.check(name):
6315
+ self.read()
6316
+
6317
+ def check(self, name: str, *, peek: bool = False) -> bool:
6318
+ check.state(self.next_token is None, f'Cannot check for {name!r}, already have {self.next_token!r}')
6319
+ check.state(name in self.rules, f'Unknown token name: {name!r}')
6320
+
6321
+ expression = self.rules[name]
6322
+
6323
+ match = expression.match(self.source, self.position)
6324
+ if match is None:
6325
+ return False
6326
+ if not peek:
6327
+ self.next_token = RequiresToken(name, match[0], self.position)
6328
+ return True
6329
+
6330
+ def expect(self, name: str, *, expected: str) -> RequiresToken:
6331
+ if not self.check(name):
6332
+ raise self.raise_syntax_error(f'Expected {expected}')
6333
+ return self.read()
6334
+
6335
+ def read(self) -> RequiresToken:
6336
+ token = self.next_token
6337
+ check.state(token is not None)
6338
+
6339
+ self.position += len(check.not_none(token).text)
6340
+ self.next_token = None
6341
+
6342
+ return check.not_none(token)
6343
+
6344
+ def raise_syntax_error(
6345
+ self,
6346
+ message: str,
6347
+ *,
6348
+ span_start: ta.Optional[int] = None,
6349
+ span_end: ta.Optional[int] = None,
6350
+ ) -> ta.NoReturn:
6351
+ span = (
6352
+ self.position if span_start is None else span_start,
6353
+ self.position if span_end is None else span_end,
6354
+ )
6355
+ raise RequiresParserSyntaxError(
6356
+ message,
6357
+ source=self.source,
6358
+ span=span,
6359
+ )
6360
+
6361
+ @contextlib.contextmanager
6362
+ def enclosing_tokens(self, open_token: str, close_token: str, *, around: str) -> ta.Iterator[None]:
6363
+ if self.check(open_token):
6364
+ open_position = self.position
6365
+ self.read()
6366
+ else:
6367
+ open_position = None
6368
+
6369
+ yield
6370
+
6371
+ if open_position is None:
6372
+ return
6373
+
6374
+ if not self.check(close_token):
6375
+ self.raise_syntax_error(
6376
+ f'Expected matching {close_token} for {open_token}, after {around}',
6377
+ span_start=open_position,
6378
+ )
6379
+
6380
+ self.read()
6381
+
6382
+
6383
+ @dc.dataclass(frozen=True)
6384
+ class RequiresNode:
6385
+ value: str
6386
+
6387
+ def __str__(self) -> str:
6388
+ return self.value
6389
+
6390
+ def __repr__(self) -> str:
6391
+ return f"<{self.__class__.__name__}('{self}')>"
6392
+
6393
+ def serialize(self) -> str:
6394
+ raise NotImplementedError
6395
+
6396
+
6397
+ @dc.dataclass(frozen=True)
6398
+ class RequiresVariable(RequiresNode):
6399
+ def serialize(self) -> str:
6400
+ return str(self)
6401
+
6402
+
6403
+ @dc.dataclass(frozen=True)
6404
+ class RequiresValue(RequiresNode):
6405
+ def serialize(self) -> str:
6406
+ return f'"{self}"'
6407
+
6408
+
6409
+ @dc.dataclass(frozen=True)
6410
+ class RequiresOp(RequiresNode):
6411
+ def serialize(self) -> str:
6412
+ return str(self)
6413
+
6414
+
6415
+ class RequiresMarkerItem(ta.NamedTuple):
6416
+ l: ta.Union[RequiresVariable, RequiresValue]
6417
+ op: RequiresOp
6418
+ r: ta.Union[RequiresVariable, RequiresValue]
6419
+
6420
+
6421
+ class ParsedRequirement(ta.NamedTuple):
6422
+ name: str
6423
+ url: str
6424
+ extras: ta.List[str]
6425
+ specifier: str
6426
+ marker: ta.Optional[RequiresMarkerList]
6427
+
6428
+
6429
+ def parse_requirement(source: str) -> ParsedRequirement:
6430
+ return _parse_requirement(RequiresTokenizer(source, rules=REQUIRES_DEFAULT_RULES))
6431
+
6432
+
6433
+ def _parse_requirement(tokenizer: RequiresTokenizer) -> ParsedRequirement:
6434
+ tokenizer.consume('WS')
6435
+
6436
+ name_token = tokenizer.expect('IDENTIFIER', expected='package name at the start of dependency specifier')
6437
+ name = name_token.text
6438
+ tokenizer.consume('WS')
6439
+
6440
+ extras = _parse_requires_extras(tokenizer)
6441
+ tokenizer.consume('WS')
6442
+
6443
+ url, specifier, marker = _parse_requirement_details(tokenizer)
6444
+ tokenizer.expect('END', expected='end of dependency specifier')
6445
+
6446
+ return ParsedRequirement(name, url, extras, specifier, marker)
6447
+
6448
+
6449
+ def _parse_requirement_details(tokenizer: RequiresTokenizer) -> ta.Tuple[str, str, ta.Optional[RequiresMarkerList]]:
6450
+ specifier = ''
6451
+ url = ''
6452
+ marker = None
6453
+
6454
+ if tokenizer.check('AT'):
6455
+ tokenizer.read()
6456
+ tokenizer.consume('WS')
6457
+
6458
+ url_start = tokenizer.position
6459
+ url = tokenizer.expect('URL', expected='URL after @').text
6460
+ if tokenizer.check('END', peek=True):
6461
+ return (url, specifier, marker)
6462
+
6463
+ tokenizer.expect('WS', expected='whitespace after URL')
6464
+
6465
+ # The input might end after whitespace.
6466
+ if tokenizer.check('END', peek=True):
6467
+ return (url, specifier, marker)
6468
+
6469
+ marker = _parse_requirement_marker(
6470
+ tokenizer, span_start=url_start, after='URL and whitespace',
6471
+ )
6472
+ else:
6473
+ specifier_start = tokenizer.position
6474
+ specifier = _parse_requires_specifier(tokenizer)
6475
+ tokenizer.consume('WS')
6476
+
6477
+ if tokenizer.check('END', peek=True):
6478
+ return (url, specifier, marker)
6479
+
6480
+ marker = _parse_requirement_marker(
6481
+ tokenizer,
6482
+ span_start=specifier_start,
6483
+ after=(
6484
+ 'version specifier'
6485
+ if specifier
6486
+ else 'name and no valid version specifier'
6487
+ ),
6488
+ )
6489
+
6490
+ return (url, specifier, marker)
6491
+
6492
+
6493
+ def _parse_requirement_marker(
6494
+ tokenizer: RequiresTokenizer, *, span_start: int, after: str,
6495
+ ) -> RequiresMarkerList:
6496
+ if not tokenizer.check('SEMICOLON'):
6497
+ tokenizer.raise_syntax_error(
6498
+ f'Expected end or semicolon (after {after})',
6499
+ span_start=span_start,
6500
+ )
6501
+ tokenizer.read()
6502
+
6503
+ marker = _parse_requires_marker(tokenizer)
6504
+ tokenizer.consume('WS')
6505
+
6506
+ return marker
6507
+
6508
+
6509
+ def _parse_requires_extras(tokenizer: RequiresTokenizer) -> ta.List[str]:
6510
+ if not tokenizer.check('LEFT_BRACKET', peek=True):
6511
+ return []
6512
+
6513
+ with tokenizer.enclosing_tokens(
6514
+ 'LEFT_BRACKET',
6515
+ 'RIGHT_BRACKET',
6516
+ around='extras',
6517
+ ):
6518
+ tokenizer.consume('WS')
6519
+ extras = _parse_requires_extras_list(tokenizer)
6520
+ tokenizer.consume('WS')
6521
+
6522
+ return extras
6523
+
6524
+
6525
+ def _parse_requires_extras_list(tokenizer: RequiresTokenizer) -> ta.List[str]:
6526
+ extras: ta.List[str] = []
6527
+
6528
+ if not tokenizer.check('IDENTIFIER'):
6529
+ return extras
6530
+
6531
+ extras.append(tokenizer.read().text)
6532
+
6533
+ while True:
6534
+ tokenizer.consume('WS')
6535
+ if tokenizer.check('IDENTIFIER', peek=True):
6536
+ tokenizer.raise_syntax_error('Expected comma between extra names')
6537
+ elif not tokenizer.check('COMMA'):
6538
+ break
6539
+
6540
+ tokenizer.read()
6541
+ tokenizer.consume('WS')
6542
+
6543
+ extra_token = tokenizer.expect('IDENTIFIER', expected='extra name after comma')
6544
+ extras.append(extra_token.text)
6545
+
6546
+ return extras
6547
+
6548
+
6549
+ def _parse_requires_specifier(tokenizer: RequiresTokenizer) -> str:
6550
+ with tokenizer.enclosing_tokens(
6551
+ 'LEFT_PARENTHESIS',
6552
+ 'RIGHT_PARENTHESIS',
6553
+ around='version specifier',
6554
+ ):
6555
+ tokenizer.consume('WS')
6556
+ parsed_specifiers = _parse_requires_version_many(tokenizer)
6557
+ tokenizer.consume('WS')
6558
+
6559
+ return parsed_specifiers
6560
+
6561
+
6562
+ def _parse_requires_version_many(tokenizer: RequiresTokenizer) -> str:
6563
+ parsed_specifiers = ''
6564
+ while tokenizer.check('SPECIFIER'):
6565
+ span_start = tokenizer.position
6566
+ parsed_specifiers += tokenizer.read().text
6567
+ if tokenizer.check('VERSION_PREFIX_TRAIL', peek=True):
6568
+ tokenizer.raise_syntax_error(
6569
+ '.* suffix can only be used with `==` or `!=` operators',
6570
+ span_start=span_start,
6571
+ span_end=tokenizer.position + 1,
6572
+ )
6573
+ if tokenizer.check('VERSION_LOCAL_LABEL_TRAIL', peek=True):
6574
+ tokenizer.raise_syntax_error(
6575
+ 'Local version label can only be used with `==` or `!=` operators',
6576
+ span_start=span_start,
6577
+ span_end=tokenizer.position,
6578
+ )
6579
+ tokenizer.consume('WS')
6580
+ if not tokenizer.check('COMMA'):
6581
+ break
6582
+ parsed_specifiers += tokenizer.read().text
6583
+ tokenizer.consume('WS')
6584
+
6585
+ return parsed_specifiers
6586
+
6587
+
6588
+ def parse_requires_marker(source: str) -> RequiresMarkerList:
6589
+ return _parse_requires_full_marker(RequiresTokenizer(source, rules=REQUIRES_DEFAULT_RULES))
6590
+
6591
+
6592
+ def _parse_requires_full_marker(tokenizer: RequiresTokenizer) -> RequiresMarkerList:
6593
+ retval = _parse_requires_marker(tokenizer)
6594
+ tokenizer.expect('END', expected='end of marker expression')
6595
+ return retval
6596
+
6597
+
6598
+ def _parse_requires_marker(tokenizer: RequiresTokenizer) -> RequiresMarkerList:
6599
+ expression = [_parse_requires_marker_atom(tokenizer)]
6600
+ while tokenizer.check('BOOLOP'):
6601
+ token = tokenizer.read()
6602
+ expr_right = _parse_requires_marker_atom(tokenizer)
6603
+ expression.extend((token.text, expr_right))
6604
+ return expression
6605
+
6606
+
6607
+ def _parse_requires_marker_atom(tokenizer: RequiresTokenizer) -> RequiresMarkerAtom:
6608
+ tokenizer.consume('WS')
6609
+ if tokenizer.check('LEFT_PARENTHESIS', peek=True):
6610
+ with tokenizer.enclosing_tokens(
6611
+ 'LEFT_PARENTHESIS',
6612
+ 'RIGHT_PARENTHESIS',
6613
+ around='marker expression',
6614
+ ):
6615
+ tokenizer.consume('WS')
6616
+ marker: RequiresMarkerAtom = _parse_requires_marker(tokenizer)
6617
+ tokenizer.consume('WS')
6618
+ else:
6619
+ marker = _parse_requires_marker_item(tokenizer)
6620
+ tokenizer.consume('WS')
6621
+ return marker
6622
+
6623
+
6624
+ def _parse_requires_marker_item(tokenizer: RequiresTokenizer) -> RequiresMarkerItem:
6625
+ tokenizer.consume('WS')
6626
+ marker_var_left = _parse_requires_marker_var(tokenizer)
6627
+ tokenizer.consume('WS')
6628
+ marker_op = _parse_requires_marker_op(tokenizer)
6629
+ tokenizer.consume('WS')
6630
+ marker_var_right = _parse_requires_marker_var(tokenizer)
6631
+ tokenizer.consume('WS')
6632
+ return RequiresMarkerItem(marker_var_left, marker_op, marker_var_right)
6633
+
6634
+
6635
+ def _parse_requires_marker_var(tokenizer: RequiresTokenizer) -> RequiresMarkerVar:
6636
+ if tokenizer.check('VARIABLE'):
6637
+ return process_requires_env_var(tokenizer.read().text.replace('.', '_'))
6638
+ elif tokenizer.check('QUOTED_STRING'):
6639
+ return process_requires_python_str(tokenizer.read().text)
6640
+ else:
6641
+ tokenizer.raise_syntax_error(message='Expected a marker variable or quoted string')
6642
+ raise RuntimeError # noqa
6643
+
6644
+
6645
+ def process_requires_env_var(env_var: str) -> RequiresVariable:
6646
+ if env_var in ('platform_python_implementation', 'python_implementation'):
6647
+ return RequiresVariable('platform_python_implementation')
6648
+ else:
6649
+ return RequiresVariable(env_var)
6650
+
6651
+
6652
+ def process_requires_python_str(python_str: str) -> RequiresValue:
6653
+ value = ast.literal_eval(python_str)
6654
+ return RequiresValue(str(value))
6655
+
6656
+
6657
+ def _parse_requires_marker_op(tokenizer: RequiresTokenizer) -> RequiresOp:
6658
+ if tokenizer.check('IN'):
6659
+ tokenizer.read()
6660
+ return RequiresOp('in')
6661
+ elif tokenizer.check('NOT'):
6662
+ tokenizer.read()
6663
+ tokenizer.expect('WS', expected="whitespace after 'not'")
6664
+ tokenizer.expect('IN', expected="'in' after 'not'")
6665
+ return RequiresOp('not in')
6666
+ elif tokenizer.check('OP'):
6667
+ return RequiresOp(tokenizer.read().text)
6668
+ else:
6669
+ return tokenizer.raise_syntax_error(
6670
+ 'Expected marker operator, one of '
6671
+ '<=, <, !=, ==, >=, >, ~=, ===, in, not in',
6672
+ )
6673
+
6674
+
5997
6675
  ########################################
5998
6676
  # ../../../omlish/asyncs/asyncio/timeouts.py
5999
6677
 
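The vendored `requires.py` above is the pypa/packaging dependency-specifier parser with `Requires`-prefixed names. Roughly, `parse_requirement` tokenizes a PEP 508 string and returns a `ParsedRequirement` named tuple, raising `RequiresParserSyntaxError` with a span marker on bad input. A hedged sketch of the expected shapes:

```python
# Sketch only - result shape inferred from the vendored parser above.

pr = parse_requirement('omdev[tui] >=0.0.0.dev486 ; python_version >= "3.12"')

# ParsedRequirement(name='omdev', url='', extras=['tui'], specifier='>=0.0.0.dev486',
#                   marker=[RequiresMarkerItem(RequiresVariable('python_version'),
#                                              RequiresOp('>='),
#                                              RequiresValue('3.12'))])
print(pr.name, pr.extras, pr.specifier)

try:
    parse_requirement('not a valid requirement !!')
except RequiresParserSyntaxError as ex:
    print(ex)  # message, then the source line, then a '~~~^' span marker
```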
@@ -8495,6 +9173,10 @@ class VerboseCalledProcessError(subprocess.CalledProcessError):
  class BaseSubprocesses(Abstract):
  DEFAULT_LOGGER: ta.ClassVar[ta.Optional[LoggerLike]] = None

+ PIPE: ta.ClassVar[int] = subprocess.PIPE
+ STDOUT: ta.ClassVar[int] = subprocess.STDOUT
+ DEVNULL: ta.ClassVar[int] = subprocess.DEVNULL
+
  def __init__(
  self,
  *,
@@ -9684,11 +10366,14 @@ log = get_module_logger(globals()) # noqa
  class RequirementsRewriter:
  def __init__(
  self,
+ *,
  venv: ta.Optional[str] = None,
+ only_pats: ta.Optional[ta.Sequence[re.Pattern]] = None,
  ) -> None:
  super().__init__()

  self._venv = venv
+ self._only_pats = only_pats

  @cached_nullary
  def _tmp_dir(self) -> str:
@@ -9704,17 +10389,32 @@ class RequirementsRewriter:
  out_lines = []

  for l in in_lines:
- if self.VENV_MAGIC in l:
- lp, _, rp = l.partition(self.VENV_MAGIC)
- rp = rp.partition('#')[0]
+ if l.split('#')[0].strip():
  omit = False
- for v in rp.split():
- if v[0] == '!':
- if self._venv is not None and self._venv == v[1:]:
- omit = True
- break
+
+ if self.VENV_MAGIC in l:
+ lp, _, rp = l.partition(self.VENV_MAGIC)
+ rp = rp.partition('#')[0]
+ for v in rp.split():
+ if v[0] == '!':
+ if self._venv is not None and self._venv == v[1:]:
+ omit = True
+ break
+ else:
+ raise NotImplementedError
+
+ if (
+ not omit and
+ (ops := self._only_pats) is not None and
+ not l.strip().startswith('-')
+ ):
+ try:
+ pr = parse_requirement(l.split('#')[0].strip())
+ except RequiresParserSyntaxError:
+ pass
  else:
- raise NotImplementedError
+ if not any(op.fullmatch(pr.name) for op in ops):
+ omit = True

  if omit:
  out_lines.append('# OMITTED: ' + l)
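With `only_pats`, the rewriter now parses each non-comment, non-flag requirement line and omits it unless its project name matches one of the supplied patterns. A standalone sketch of that filtering step (helper name `_keep_line` is illustrative; the real logic lives in the rewrite loop above):

```python
import re
import typing as ta

def _keep_line(line: str, only_pats: ta.Optional[ta.Sequence[re.Pattern]]) -> bool:
    # Comments, blanks, and '-r'/'-e' style flag lines always pass through.
    body = line.split('#')[0].strip()
    if not body or body.startswith('-') or only_pats is None:
        return True
    try:
        pr = parse_requirement(body)  # vendored parser from requires.py above
    except RequiresParserSyntaxError:
        return True
    return any(p.fullmatch(pr.name) for p in only_pats)

pats = [re.compile(r'omlish.*')]
assert _keep_line('omlish == 0.0.0.dev486', pats)
assert not _keep_line('requests >= 2.0', pats)  # would be written out as '# OMITTED: ...'
assert _keep_line('-r base.txt', pats)
```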
@@ -10171,6 +10871,7 @@ uv run pip
  uv run --python 3.11.6 pip
  uv venv --python 3.11.6 --seed barf
  python3 -m venv barf && barf/bin/pip install uv && barf/bin/uv venv --python 3.11.6 --seed barf2
+ uv python find '3.13.10'
  """


@@ -10273,7 +10974,7 @@ class BasePyprojectPackageGenerator(Abstract):

  def _write_git_ignore(self) -> None:
  with open(os.path.join(self._pkg_dir(), '.gitignore'), 'w') as f:
- f.write('\n'.join(self._GIT_IGNORE))
+ f.write('\n'.join([*self._GIT_IGNORE, '']))

  #

@@ -10442,6 +11143,7 @@ class BasePyprojectPackageGenerator(Abstract):
  )

  if output_dir is not None:
+ log.info(lambda: f'Copying {dist_dir} to {output_dir}')
  for fn in os.listdir(dist_dir):
  shutil.copyfile(os.path.join(dist_dir, fn), os.path.join(output_dir, fn))

@@ -10497,7 +11199,11 @@ class PyprojectPackageGenerator(BasePyprojectPackageGenerator):
  st = dict(specs.setuptools)
  pyp_dct['tool.setuptools'] = st

- st.pop('cexts', None)
+ for k in [
+ 'cext',
+ 'rs',
+ ]:
+ st.pop(k, None)

  #

@@ -10570,13 +11276,20 @@ class PyprojectPackageGenerator(BasePyprojectPackageGenerator):
  def children(self) -> ta.Sequence[BasePyprojectPackageGenerator]:
  out: ta.List[BasePyprojectPackageGenerator] = []

- if self.build_specs().setuptools.get('cexts'):
+ if self.build_specs().setuptools.get('cext'):
  out.append(_PyprojectCextPackageGenerator(
  self._dir_name,
  self._pkgs_root,
  pkg_suffix='-cext',
  ))

+ if self.build_specs().setuptools.get('rs'):
+ out.append(_PyprojectRsPackageGenerator(
+ self._dir_name,
+ self._pkgs_root,
+ pkg_suffix='-rs',
+ ))
+
  if self.build_specs().pyproject.get('cli_scripts'):
  out.append(_PyprojectCliPackageGenerator(
  self._dir_name,
@@ -10587,10 +11300,67 @@ class PyprojectPackageGenerator(BasePyprojectPackageGenerator):
10587
11300
  return out
10588
11301
 
10589
11302
 
10590
- #
11303
+ ##
11304
+
11305
+
11306
+ class _PyprojectExtensionPackageGenerator(BasePyprojectPackageGenerator, Abstract):
11307
+ #
11308
+
11309
+ def _build_project_dict(self) -> ta.Dict[str, ta.Any]:
11310
+ prj = dict(self.build_specs().pyproject)
11311
+
11312
+ prj['dependencies'] = [f'{prj["name"]} == {prj["version"]}']
11313
+ prj['name'] += self._pkg_suffix
11314
+
11315
+ for k in [
11316
+ 'optional_dependencies',
11317
+ 'entry_points',
11318
+ 'scripts',
11319
+ 'cli_scripts',
11320
+ ]:
11321
+ prj.pop(k, None)
11322
+
11323
+ return prj
11324
+
11325
+ def _build_setuptools_dict(self) -> ta.Dict[str, ta.Any]:
11326
+ st = dict(self.build_specs().setuptools)
11327
+
11328
+ for k in [
11329
+ 'cext',
11330
+ 'rs',
11331
+
11332
+ 'find_packages',
11333
+ 'package_data',
11334
+ 'manifest_in',
11335
+ ]:
11336
+ st.pop(k, None)
11337
+
11338
+ return st
11339
+
11340
+ #
11341
+
11342
+ @dc.dataclass(frozen=True)
11343
+ class FileContents:
11344
+ pyproject_dct: ta.Mapping[str, ta.Any]
11345
+ setup_py: str
11346
+
11347
+ @abc.abstractmethod
11348
+ def file_contents(self) -> FileContents:
11349
+ raise NotImplementedError
10591
11350
 
11351
+ #
11352
+
11353
+ def _write_file_contents(self) -> None:
11354
+ fc = self.file_contents()
11355
+
11356
+ with open(os.path.join(self._pkg_dir(), 'pyproject.toml'), 'w') as f:
11357
+ TomlWriter(f).write_root(fc.pyproject_dct)
10592
11358
 
10593
- class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
11359
+ with open(os.path.join(self._pkg_dir(), 'setup.py'), 'w') as f:
11360
+ f.write(fc.setup_py)
11361
+
11362
+
11363
+ class _PyprojectCextPackageGenerator(_PyprojectExtensionPackageGenerator):
10594
11364
  @cached_nullary
10595
11365
  def find_cext_srcs(self) -> ta.Sequence[str]:
10596
11366
  return sorted(find_magic_files(
@@ -10601,14 +11371,10 @@ class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
10601
11371
 
10602
11372
  #
10603
11373
 
10604
- @dc.dataclass(frozen=True)
10605
- class FileContents:
10606
- pyproject_dct: ta.Mapping[str, ta.Any]
10607
- setup_py: str
10608
-
10609
11374
  @cached_nullary
10610
- def file_contents(self) -> FileContents:
10611
- specs = self.build_specs()
11375
+ def file_contents(self) -> _PyprojectExtensionPackageGenerator.FileContents:
11376
+ prj = self._build_project_dict()
11377
+ st = self._build_setuptools_dict()
10612
11378
 
10613
11379
  #
10614
11380
 
@@ -10619,33 +11385,9 @@ class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
10619
11385
  'build-backend': 'setuptools.build_meta',
10620
11386
  }
10621
11387
 
10622
- prj = specs.pyproject
10623
- prj['dependencies'] = [f'{prj["name"]} == {prj["version"]}']
10624
- prj['name'] += self._pkg_suffix
10625
- for k in [
10626
- 'optional_dependencies',
10627
- 'entry_points',
10628
- 'scripts',
10629
- 'cli_scripts',
10630
- ]:
10631
- prj.pop(k, None)
10632
-
10633
11388
  pyp_dct['project'] = prj
10634
-
10635
- #
10636
-
10637
- st = dict(specs.setuptools)
10638
11389
  pyp_dct['tool.setuptools'] = st
10639
11390
 
10640
- for k in [
10641
- 'cexts',
10642
-
10643
- 'find_packages',
10644
- 'package_data',
10645
- 'manifest_in',
10646
- ]:
10647
- st.pop(k, None)
10648
-
10649
11391
  pyp_dct['tool.setuptools.packages.find'] = {
10650
11392
  'include': [],
10651
11393
  }
@@ -10655,12 +11397,17 @@ class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
10655
11397
  ext_lines = []
10656
11398
 
10657
11399
  for ext_src in self.find_cext_srcs():
11400
+ ext_lang = ext_src.rpartition('.')[2]
11401
+ compile_args = {
11402
+ 'c': ['-std=c11'],
11403
+ 'cc': ['-std=c++20'],
11404
+ }.get(ext_lang, [])
10658
11405
  ext_name = ext_src.rpartition('.')[0].replace(os.sep, '.')
10659
11406
  ext_lines.extend([
10660
11407
  'st.Extension(',
10661
11408
  f" name='{ext_name}',",
10662
11409
  f" sources=['{ext_src}'],",
10663
- " extra_compile_args=['-std=c++20'],",
11410
+ f' extra_compile_args={compile_args!r},',
10664
11411
  '),',
10665
11412
  ])
10666
11413
 
@@ -10671,7 +11418,7 @@ class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
10671
11418
  'st.setup(',
10672
11419
  ' ext_modules=[',
10673
11420
  *[' ' + l for l in ext_lines],
10674
- ' ]',
11421
+ ' ],',
10675
11422
  ')',
10676
11423
  '',
10677
11424
  ])
@@ -10683,14 +11430,110 @@ class _PyprojectCextPackageGenerator(BasePyprojectPackageGenerator):
10683
11430
  src,
10684
11431
  )
10685
11432
 
10686
- def _write_file_contents(self) -> None:
10687
- fc = self.file_contents()
10688
11433
 
10689
- with open(os.path.join(self._pkg_dir(), 'pyproject.toml'), 'w') as f:
10690
- TomlWriter(f).write_root(fc.pyproject_dct)
11434
+ class _PyprojectRsPackageGenerator(_PyprojectExtensionPackageGenerator):
11435
+ @cached_nullary
11436
+ def find_rs_dirs(self) -> ta.Sequence[str]:
11437
+ return sorted(
11438
+ dp
11439
+ for dp, dns, fns in os.walk(self._dir_name)
11440
+ for fn in fns
11441
+ if fn == '.omlish-rs-ext'
11442
+ )
10691
11443
 
10692
- with open(os.path.join(self._pkg_dir(), 'setup.py'), 'w') as f:
10693
- f.write(fc.setup_py)
11444
+ #
11445
+
11446
+ @staticmethod
11447
+ def _sdist_patch_body() -> None:
11448
+ def _patch_sdist():
11449
+ def _sdist_add_defaults(old, self):
11450
+ import os.path
11451
+
11452
+ old(self)
11453
+
11454
+ if self.distribution.rust_extensions and len(self.distribution.rust_extensions) > 0:
11455
+ build_rust = self.get_finalized_command('build_rust') # noqa
11456
+ for ext in build_rust.extensions:
11457
+ ext_dir = os.path.dirname(ext.path)
11458
+ for n in os.listdir(ext_dir):
11459
+ if n.startswith('.') or n == 'target':
11460
+ continue
11461
+ p = os.path.join(ext_dir, n)
11462
+ if os.path.isfile(p):
11463
+ self.filelist.append(p)
11464
+ elif os.path.isdir(p):
11465
+ self.filelist.extend(os.path.join(dp, f) for dp, dn, fn in os.walk(p) for f in fn)
11466
+
11467
+ # Sadly, we can't just subclass sdist and override it via cmdclass because manifest_maker calls
11468
+ # `sdist.add_defaults` as an unbound function, not a bound method:
11469
+ # https://github.com/pypa/setuptools/blob/9c4d383631d3951fcae0afd73b5d08ff5a262976/setuptools/command/egg_info.py#L581
11470
+ from setuptools.command.sdist import sdist # noqa
11471
+ sdist.add_defaults = (lambda old: lambda sdist: _sdist_add_defaults(old, sdist))(sdist.add_defaults) # noqa
11472
+
11473
+ _patch_sdist()
11474
+
11475
+ @cached_nullary
11476
+ def sdist_patch_code(self) -> str:
11477
+ return textwrap.dedent(''.join(inspect.getsource(self._sdist_patch_body).splitlines(keepends=True)[2:])).strip()
11478
+
11479
+ #
11480
+
11481
+ @cached_nullary
11482
+ def file_contents(self) -> _PyprojectExtensionPackageGenerator.FileContents:
11483
+ prj = self._build_project_dict()
11484
+ st = self._build_setuptools_dict()
11485
+
11486
+ #
11487
+
11488
+ pyp_dct = {}
11489
+
11490
+ pyp_dct['build-system'] = {
11491
+ 'requires': ['setuptools', 'setuptools-rust'],
11492
+ 'build-backend': 'setuptools.build_meta',
11493
+ }
11494
+
11495
+ pyp_dct['project'] = prj
11496
+ pyp_dct['tool.setuptools'] = st
11497
+
11498
+ pyp_dct['tool.setuptools.packages.find'] = {
11499
+ 'include': [],
11500
+ }
11501
+
11502
+ #
11503
+
11504
+ ext_lines: list = []
11505
+
11506
+ for ext_dir in self.find_rs_dirs(): # noqa
11507
+ ext_name = ext_dir.replace(os.sep, '.')
11508
+ ext_lines.extend([
11509
+ 'st_rs.RustExtension(',
11510
+ f" '{ext_name}',",
11511
+ f" path='{ext_dir}/Cargo.toml',",
11512
+ '),',
11513
+ ])
11514
+
11515
+ src = '\n'.join([
11516
+ 'import setuptools as st',
11517
+ 'import setuptools_rust as st_rs',
11518
+ '',
11519
+ '',
11520
+ self.sdist_patch_code(),
11521
+ '',
11522
+ '',
11523
+ 'st.setup(',
11524
+ ' rust_extensions=[',
11525
+ *[' ' + l for l in ext_lines],
11526
+ ' ],',
11527
+ ')',
11528
+ '',
11529
+ ])
11530
+
11531
+ #
11532
+
11533
+ return self.FileContents(
11534
+ pyp_dct,
11535
+ src,
11536
+ )
10694
11537
 
10695
11538
 
10696
11539
  ##
@@ -10735,7 +11578,8 @@ class _PyprojectCliPackageGenerator(BasePyprojectPackageGenerator):
10735
11578
  pyp_dct['tool.setuptools'] = st
10736
11579
 
10737
11580
  for k in [
10738
- 'cexts',
11581
+ 'cext',
11582
+ 'rs',
10739
11583
 
10740
11584
  'find_packages',
10741
11585
  'package_data',
@@ -11020,6 +11864,7 @@ def get_default_interp_resolver() -> InterpResolver:
  class InterpVenvConfig:
  interp: ta.Optional[str] = None
  requires: ta.Optional[ta.Sequence[str]] = None
+ requires_pats: ta.Optional[ta.Sequence[str]] = None
  use_uv: ta.Optional[bool] = None


@@ -11244,7 +12089,13 @@ class Venv:

  @cached_nullary
  def _iv(self) -> InterpVenv:
- rr = RequirementsRewriter(self._name)
+ rr = RequirementsRewriter(
+ venv=self._name,
+ only_pats=(
+ [re.compile(p) for p in self._cfg.requires_pats]
+ if self._cfg.requires_pats is not None else None
+ ),
+ )

  return InterpVenv(
  self.dir_name,
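Tying it together: the new `requires_pats` field on `InterpVenvConfig` is compiled into `re.Pattern`s by `Venv._iv` and handed to `RequirementsRewriter`, so a venv can restrict installation to requirements whose names match. A hedged sketch of that wiring, using only the constructor arguments visible in this diff (field values are made up):

```python
import re

cfg = InterpVenvConfig(
    interp='3.12',
    requires=['requirements-dev.txt'],
    requires_pats=[r'omlish.*', r'pytest(-.*)?'],
    use_uv=True,
)

rr = RequirementsRewriter(
    venv='docker',
    only_pats=[re.compile(p) for p in cfg.requires_pats or []],
)
```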