omdev 0.0.0.dev439__py3-none-any.whl → 0.0.0.dev486__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/.omlish-manifests.json +18 -30
- omdev/__about__.py +9 -7
- omdev/amalg/gen/gen.py +49 -6
- omdev/amalg/gen/imports.py +1 -1
- omdev/amalg/gen/manifests.py +1 -1
- omdev/amalg/gen/resources.py +1 -1
- omdev/amalg/gen/srcfiles.py +13 -3
- omdev/amalg/gen/strip.py +1 -1
- omdev/amalg/gen/types.py +1 -1
- omdev/amalg/gen/typing.py +1 -1
- omdev/amalg/info.py +32 -0
- omdev/cache/data/actions.py +1 -1
- omdev/cache/data/specs.py +1 -1
- omdev/cexts/_boilerplate.cc +2 -3
- omdev/cexts/cmake.py +4 -1
- omdev/ci/cli.py +1 -2
- omdev/ci/github/api/v2/api.py +2 -0
- omdev/cmdlog/cli.py +1 -2
- omdev/dataclasses/_dumping.py +1960 -0
- omdev/dataclasses/_template.py +22 -0
- omdev/dataclasses/cli.py +6 -1
- omdev/dataclasses/codegen.py +340 -60
- omdev/dataclasses/dumping.py +200 -0
- omdev/interp/uv/provider.py +1 -0
- omdev/interp/venvs.py +1 -0
- omdev/irc/messages/base.py +50 -0
- omdev/irc/messages/formats.py +92 -0
- omdev/irc/messages/messages.py +775 -0
- omdev/irc/messages/parsing.py +99 -0
- omdev/irc/numerics/__init__.py +0 -0
- omdev/irc/numerics/formats.py +97 -0
- omdev/irc/numerics/numerics.py +865 -0
- omdev/irc/numerics/types.py +59 -0
- omdev/irc/protocol/LICENSE +11 -0
- omdev/irc/protocol/__init__.py +61 -0
- omdev/irc/protocol/consts.py +6 -0
- omdev/irc/protocol/errors.py +30 -0
- omdev/irc/protocol/message.py +21 -0
- omdev/irc/protocol/nuh.py +55 -0
- omdev/irc/protocol/parsing.py +158 -0
- omdev/irc/protocol/rendering.py +153 -0
- omdev/irc/protocol/tags.py +102 -0
- omdev/irc/protocol/utils.py +30 -0
- omdev/manifests/_dumping.py +125 -25
- omdev/markdown/__init__.py +0 -0
- omdev/markdown/incparse.py +116 -0
- omdev/markdown/tokens.py +51 -0
- omdev/packaging/marshal.py +8 -8
- omdev/packaging/requires.py +6 -6
- omdev/packaging/specifiers.py +2 -1
- omdev/packaging/versions.py +4 -4
- omdev/packaging/wheelfile.py +2 -0
- omdev/precheck/blanklines.py +66 -0
- omdev/precheck/caches.py +1 -1
- omdev/precheck/imports.py +14 -1
- omdev/precheck/main.py +4 -3
- omdev/precheck/unicode.py +39 -15
- omdev/py/asts/__init__.py +0 -0
- omdev/py/asts/parents.py +28 -0
- omdev/py/asts/toplevel.py +123 -0
- omdev/py/asts/visitors.py +18 -0
- omdev/py/attrdocs.py +6 -7
- omdev/py/bracepy.py +12 -4
- omdev/py/reprs.py +32 -0
- omdev/py/srcheaders.py +1 -1
- omdev/py/tokens/__init__.py +0 -0
- omdev/py/tools/mkrelimp.py +1 -1
- omdev/py/tools/pipdepup.py +629 -0
- omdev/pyproject/pkg.py +190 -45
- omdev/pyproject/reqs.py +31 -9
- omdev/pyproject/tools/__init__.py +0 -0
- omdev/pyproject/tools/aboutdeps.py +55 -0
- omdev/pyproject/venvs.py +8 -1
- omdev/rs/__init__.py +0 -0
- omdev/scripts/ci.py +400 -80
- omdev/scripts/interp.py +193 -35
- omdev/scripts/lib/__init__.py +0 -0
- omdev/scripts/{inject.py → lib/inject.py} +75 -28
- omdev/scripts/lib/logs.py +2079 -0
- omdev/scripts/{marshal.py → lib/marshal.py} +68 -26
- omdev/scripts/pyproject.py +941 -90
- omdev/tools/git/cli.py +12 -1
- omdev/tools/json/processing.py +5 -2
- omdev/tools/jsonview/cli.py +31 -5
- omdev/tools/pawk/pawk.py +2 -2
- omdev/tools/pip.py +8 -0
- omdev/tui/__init__.py +0 -0
- omdev/tui/apps/__init__.py +0 -0
- omdev/tui/apps/edit/__init__.py +0 -0
- omdev/tui/apps/edit/main.py +163 -0
- omdev/tui/apps/irc/__init__.py +0 -0
- omdev/tui/apps/irc/__main__.py +4 -0
- omdev/tui/apps/irc/app.py +278 -0
- omdev/tui/apps/irc/client.py +187 -0
- omdev/tui/apps/irc/commands.py +175 -0
- omdev/tui/apps/irc/main.py +26 -0
- omdev/tui/apps/markdown/__init__.py +0 -0
- omdev/tui/apps/markdown/__main__.py +11 -0
- omdev/{ptk → tui/apps}/markdown/cli.py +5 -7
- omdev/tui/rich/__init__.py +34 -0
- omdev/tui/rich/console2.py +20 -0
- omdev/tui/rich/markdown2.py +186 -0
- omdev/tui/textual/__init__.py +226 -0
- omdev/tui/textual/app2.py +11 -0
- omdev/tui/textual/autocomplete/LICENSE +21 -0
- omdev/tui/textual/autocomplete/__init__.py +33 -0
- omdev/tui/textual/autocomplete/matching.py +226 -0
- omdev/tui/textual/autocomplete/paths.py +202 -0
- omdev/tui/textual/autocomplete/widget.py +612 -0
- omdev/tui/textual/drivers2.py +55 -0
- {omdev-0.0.0.dev439.dist-info → omdev-0.0.0.dev486.dist-info}/METADATA +11 -9
- {omdev-0.0.0.dev439.dist-info → omdev-0.0.0.dev486.dist-info}/RECORD +121 -73
- omdev/ptk/__init__.py +0 -103
- omdev/ptk/apps/ncdu.py +0 -167
- omdev/ptk/confirm.py +0 -60
- omdev/ptk/markdown/LICENSE +0 -22
- omdev/ptk/markdown/__init__.py +0 -10
- omdev/ptk/markdown/__main__.py +0 -11
- omdev/ptk/markdown/border.py +0 -94
- omdev/ptk/markdown/markdown.py +0 -390
- omdev/ptk/markdown/parser.py +0 -42
- omdev/ptk/markdown/styles.py +0 -29
- omdev/ptk/markdown/tags.py +0 -299
- omdev/ptk/markdown/utils.py +0 -366
- omdev/pyproject/cexts.py +0 -110
- /omdev/{ptk/apps → irc}/__init__.py +0 -0
- /omdev/{tokens → irc/messages}/__init__.py +0 -0
- /omdev/{tokens → py/tokens}/all.py +0 -0
- /omdev/{tokens → py/tokens}/tokenizert.py +0 -0
- /omdev/{tokens → py/tokens}/utils.py +0 -0
- {omdev-0.0.0.dev439.dist-info → omdev-0.0.0.dev486.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev439.dist-info → omdev-0.0.0.dev486.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev439.dist-info → omdev-0.0.0.dev486.dist-info}/licenses/LICENSE +0 -0
- {omdev-0.0.0.dev439.dist-info → omdev-0.0.0.dev486.dist-info}/top_level.txt +0 -0
omdev/manifests/_dumping.py
CHANGED
@@ -34,11 +34,28 @@ if sys.version_info < (3, 8):
     raise OSError(f'Requires python (3, 8), got {sys.version_info} from {sys.executable}')  # noqa


+def __omlish_amalg__():  # noqa
+    return dict(
+        src_files=[
+            dict(path='../../omlish/lite/abstract.py', sha1='a2fc3f3697fa8de5247761e9d554e70176f37aac'),
+            dict(path='../../omlish/lite/cached.py', sha1='0c33cf961ac8f0727284303c7a30c5ea98f714f2'),
+            dict(path='../../omlish/lite/check.py', sha1='bb6b6b63333699b84462951a854d99ae83195b94'),
+            dict(path='../../omlish/lite/objects.py', sha1='9566bbf3530fd71fcc56321485216b592fae21e9'),
+            dict(path='../../omlish/lite/reflect.py', sha1='c4fec44bf144e9d93293c996af06f6c65fc5e63d'),
+            dict(path='../../omlish/lite/strings.py', sha1='89831ecbc34ad80e118a865eceb390ed399dc4d6'),
+            dict(path='../../omlish/lite/marshal.py', sha1='96348f5f2a26dc27d842d33cc3927e9da163436b'),
+            dict(path='dumping.py', sha1='49acd06fdcc3427f4a255fea295d7042bb655a13'),
+        ],
+    )
+
+
 ########################################


-# ../../omlish/lite/
+# ../../omlish/lite/abstract.py
 T = ta.TypeVar('T')
+
+# ../../omlish/lite/cached.py
 CallableT = ta.TypeVar('CallableT', bound=ta.Callable)

 # ../../omlish/lite/check.py
@@ -65,25 +82,49 @@ def is_abstract_method(obj: ta.Any) -> bool:
     return bool(getattr(obj, _IS_ABSTRACT_METHOD_ATTR, False))


-def
+def compute_abstract_methods(cls: type) -> ta.FrozenSet[str]:
+    # ~> https://github.com/python/cpython/blob/f3476c6507381ca860eec0989f53647b13517423/Modules/_abc.c#L358
+
+    # Stage 1: direct abstract methods
+
+    abstracts = {
+        a
+        # Get items as a list to avoid mutation issues during iteration
+        for a, v in list(cls.__dict__.items())
+        if is_abstract_method(v)
+    }
+
+    # Stage 2: inherited abstract methods
+
+    for base in cls.__bases__:
+        # Get __abstractmethods__ from base if it exists
+        if (base_abstracts := getattr(base, _ABSTRACT_METHODS_ATTR, None)) is None:
+            continue
+
+        # Iterate over abstract methods in base
+        for key in base_abstracts:
+            # Check if this class has an attribute with this name
+            try:
+                value = getattr(cls, key)
+            except AttributeError:
+                # Attribute not found in this class, skip
+                continue
+
+            # Check if it's still abstract
+            if is_abstract_method(value):
+                abstracts.add(key)
+
+    return frozenset(abstracts)
+
+
+def update_abstracts(cls: ta.Type[T], *, force: bool = False) -> ta.Type[T]:
     if not force and not hasattr(cls, _ABSTRACT_METHODS_ATTR):
         # Per stdlib: We check for __abstractmethods__ here because cls might by a C implementation or a python
         # implementation (especially during testing), and we want to handle both cases.
         return cls

-    abstracts
-
-    for scls in cls.__bases__:
-        for name in getattr(scls, _ABSTRACT_METHODS_ATTR, ()):
-            value = getattr(cls, name, None)
-            if getattr(value, _IS_ABSTRACT_METHOD_ATTR, False):
-                abstracts.add(name)
-
-    for name, value in cls.__dict__.items():
-        if getattr(value, _IS_ABSTRACT_METHOD_ATTR, False):
-            abstracts.add(name)
-
-    setattr(cls, _ABSTRACT_METHODS_ATTR, frozenset(abstracts))
+    abstracts = compute_abstract_methods(cls)
+    setattr(cls, _ABSTRACT_METHODS_ATTR, abstracts)
     return cls


@@ -137,23 +178,26 @@ class Abstract:
         super().__init_subclass__(**kwargs)

         if not (Abstract in cls.__bases__ or abc.ABC in cls.__bases__):
-            ams
-
-
-
-
-
+            if ams := compute_abstract_methods(cls):
+                amd = {
+                    a: mcls
+                    for mcls in cls.__mro__[::-1]
+                    for a in ams
+                    if a in mcls.__dict__
+                }

-            if ams:
                 raise AbstractTypeError(
                     f'Cannot subclass abstract class {cls.__name__} with abstract methods: ' +
                     ', '.join(sorted([
                         '.'.join([
-                            *([
-
+                            *([
+                                *([m] if (m := getattr(c, '__module__')) else []),
+                                getattr(c, '__qualname__', getattr(c, '__name__')),
+                            ] if c is not None else '?'),
                             a,
                         ])
-                        for a
+                        for a in ams
+                        for c in [amd.get(a)]
                     ])),
                 )

@@ -227,6 +271,62 @@ def async_cached_nullary(fn):  # ta.Callable[..., T]) -> ta.Callable[..., T]:
     return _AsyncCachedNullary(fn)


+##
+
+
+cached_property = functools.cached_property
+
+
+class _cached_property:  # noqa
+    """Backported to pick up https://github.com/python/cpython/commit/056dfc71dce15f81887f0bd6da09d6099d71f979 ."""
+
+    def __init__(self, func):
+        self.func = func
+        self.attrname = None  # noqa
+        self.__doc__ = func.__doc__
+        self.__module__ = func.__module__
+
+    _NOT_FOUND = object()
+
+    def __set_name__(self, owner, name):
+        if self.attrname is None:
+            self.attrname = name  # noqa
+        elif name != self.attrname:
+            raise TypeError(
+                f'Cannot assign the same cached_property to two different names ({self.attrname!r} and {name!r}).',
+            )
+
+    def __get__(self, instance, owner=None):
+        if instance is None:
+            return self
+        if self.attrname is None:
+            raise TypeError('Cannot use cached_property instance without calling __set_name__ on it.')
+
+        try:
+            cache = instance.__dict__
+        except AttributeError:  # not all objects have __dict__ (e.g. class defines slots)
+            raise TypeError(
+                f"No '__dict__' attribute on {type(instance).__name__!r} instance to cache {self.attrname!r} property.",
+            ) from None
+
+        val = cache.get(self.attrname, self._NOT_FOUND)
+
+        if val is self._NOT_FOUND:
+            val = self.func(instance)
+            try:
+                cache[self.attrname] = val
+            except TypeError:
+                raise TypeError(
+                    f"The '__dict__' attribute on {type(instance).__name__!r} instance does not support item "
+                    f"assignment for caching {self.attrname!r} property.",
+                ) from None
+
+        return val
+
+
+globals()['cached_property'] = _cached_property
+
+
 ########################################
 # ../../../omlish/lite/check.py
 """
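The hunk above factors the abstract-method recomputation out of update_abstracts() into compute_abstract_methods(), which works in two stages: names defined directly on the class that are still marked abstract, plus names the bases still report as abstract that the class has not overridden concretely. A minimal standalone sketch of that two-stage logic (not part of the diff), assuming the module's _IS_ABSTRACT_METHOD_ATTR / _ABSTRACT_METHODS_ATTR constants correspond to the stdlib '__isabstractmethod__' / '__abstractmethods__' attribute names, which this diff does not show:

import abc


class Base(abc.ABC):
    @abc.abstractmethod
    def f(self): ...

    @abc.abstractmethod
    def g(self): ...


class Child(Base):
    def f(self):  # overridden concretely; only g() should remain abstract
        return 1


def sketch_compute_abstract_methods(cls: type) -> frozenset:
    # Stage 1: attributes defined directly on cls that are still marked abstract
    abstracts = {
        a
        for a, v in list(cls.__dict__.items())
        if getattr(v, '__isabstractmethod__', False)
    }

    # Stage 2: names the bases report as abstract that cls has not overridden concretely
    for base in cls.__bases__:
        for key in getattr(base, '__abstractmethods__', ()):
            try:
                value = getattr(cls, key)
            except AttributeError:
                continue
            if getattr(value, '__isabstractmethod__', False):
                abstracts.add(key)

    return frozenset(abstracts)


assert sketch_compute_abstract_methods(Child) == frozenset({'g'})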
omdev/markdown/__init__.py
File without changes
omdev/markdown/incparse.py
ADDED
@@ -0,0 +1,116 @@
+import dataclasses as dc
+import typing as ta
+
+from omlish import lang
+
+
+with lang.auto_proxy_import(globals()):
+    import markdown_it as md
+    import markdown_it.token  # noqa
+
+
+##
+
+
+class IncrementalMarkdownParser:
+    def __init__(
+        self,
+        *,
+        parser: ta.Optional['md.MarkdownIt'] = None,
+    ) -> None:
+        super().__init__()
+
+        if parser is None:
+            parser = md.MarkdownIt()
+        self._parser = parser
+
+        self._stable_tokens: list[md.token.Token] = []
+        self._buffer = ''
+        self._num_stable_lines = 0  # Number of lines in stable tokens
+
+    class FeedOutput(ta.NamedTuple):
+        stable: ta.Sequence['md.token.Token']
+        new_stable: ta.Sequence['md.token.Token']
+        unstable: ta.Sequence['md.token.Token']
+
+    def feed2(self, chunk: str) -> FeedOutput:
+        self._buffer += chunk
+
+        # Parse the current buffer
+        new_tokens = self._parser.parse(self._buffer)
+
+        # Adjust ALL tokens to account for stable lines from previous parses (new_tokens have line numbers relative to
+        # current buffer)
+        adjusted_tokens = self._adjust_token_line_numbers(new_tokens, self._num_stable_lines)
+
+        # Find stable tokens (all but the last parent and its children)
+        stable_count = self._find_stable_token_count(adjusted_tokens)
+
+        newly_stable: ta.Sequence[md.token.Token]
+        if stable_count > 0:
+            # Extract newly stable tokens (already have adjusted line numbers)
+            newly_stable = adjusted_tokens[:stable_count]
+
+            # Calculate how many lines these stable tokens cover
+            max_line = 0
+            for token in newly_stable:
+                if token.map:
+                    max_line = max(max_line, token.map[1])
+
+            # Update buffer to only contain unstable content
+            if max_line > self._num_stable_lines:
+                # max_line is absolute, num_stable_lines is the current buffer offset
+                lines_to_remove = max_line - self._num_stable_lines
+                lines = self._buffer.split('\n')
+                self._buffer = '\n'.join(lines[lines_to_remove:])
+
+            # Store newly stable tokens (with adjusted line numbers)
+            self._stable_tokens.extend(newly_stable)
+            self._num_stable_lines = max_line
+
+        else:
+            newly_stable = ()
+
+        return IncrementalMarkdownParser.FeedOutput(
+            stable=self._stable_tokens,
+            new_stable=newly_stable,
+            unstable=adjusted_tokens[stable_count:],
+        )
+
+    def feed(self, chunk: str) -> list['md.token.Token']:
+        out = self.feed2(chunk)
+        return [*out.stable, *out.unstable]
+
+    def _find_stable_token_count(self, tokens: list['md.token.Token']) -> int:
+        if not tokens:
+            return 0
+
+        # Find indices of all parent-level tokens (nesting = 0)
+        parent_indices = []
+        for i, token in enumerate(tokens):
+            if token.nesting in (1, 0) and token.level == 0:
+                parent_indices.append(i)
+
+        if len(parent_indices) < 2:
+            # Need at least 2 parent tokens to have stable content
+            return 0
+
+        # The last parent and everything after it is unstable. Everything before the second-to-last parent is stable.
+        return parent_indices[-2]
+
+    def _adjust_token_line_numbers(
+        self,
+        tokens: list['md.token.Token'],
+        line_offset: int,
+    ) -> list['md.token.Token']:
+        adjusted = []
+        for token in tokens:
+            if token.map:
+                token = dc.replace(
+                    token,
+                    map=[token.map[0] + line_offset, token.map[1] + line_offset],
+                )
+
+            adjusted.append(token)
+
+        return adjusted
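A usage sketch for the new IncrementalMarkdownParser (not part of the diff; assumes markdown-it-py is installed): feed2() reports which leading top-level blocks have become stable as markdown streams in, while the trailing block(s) remain unstable and may be re-parsed on the next chunk.

from omdev.markdown.incparse import IncrementalMarkdownParser  # new module from this release

p = IncrementalMarkdownParser()

out = p.feed2('# Title\n\nFirst paragraph.\n\nSecond par')
print(len(out.stable), len(out.unstable))  # the heading block has stabilized; the trailing paragraphs have not

p.feed2('agraph, now finished.\n\n- a list item\n')
print([t.type for t in p.feed('')])  # feed() returns stable + unstable tokens as one flat list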
omdev/markdown/tokens.py
ADDED
@@ -0,0 +1,51 @@
+# ruff: noqa: TC002
+import typing as ta
+
+from omlish import lang
+
+
+with lang.auto_proxy_import(globals()):
+    import markdown_it as md
+    import markdown_it.token  # noqa
+
+
+##
+
+
+def token_repr(t: 'md.token.Token') -> str:
+    return ''.join([
+        'Token(',
+        f'type={t.type!r}',
+        *([f', tag={t.tag!r}'] if t.tag else []),
+        *([f', nesting={t.nesting!r}'] if t.nesting else []),
+        *([f', attrs={t.attrs!r}'] if t.attrs else []),
+        *([f', map={t.map!r}'] if t.map else []),
+        *([f', level={t.level!r}'] if t.level else []),
+        *([f', children={t.children!r}'] if t.children else []),
+        *([f', content={t.content!r}'] if t.content else []),
+        *([f', markup={t.markup!r}'] if t.markup else []),
+        *([f', info={t.info!r}'] if t.info else []),
+        *([f', meta={t.meta!r}'] if t.meta else []),
+        *([f', block={t.block!r}'] if t.block else []),
+        *([f', hidden={t.hidden!r}'] if t.hidden else []),
+        ')',
+    ])
+
+
+##
+
+
+def flatten_tokens(
+    tokens: ta.Iterable['md.token.Token'],
+    *,
+    filter: ta.Callable[['md.token.Token'], bool] | None = None,  # noqa
+) -> ta.Iterable['md.token.Token']:
+    def rec(tks: ta.Iterable['md.token.Token']) -> ta.Iterator['md.token.Token']:
+        for tk in tks:
+            if tk.children and not (filter is not None and not filter(tk)):
+                yield from rec(tk.children)
+
+            else:
+                yield tk
+
+    return rec(tokens)
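A small sketch combining the two helpers added here (not part of the diff; assumes markdown-it-py is installed): flatten_tokens() walks into token children and token_repr() prints only the fields a token actually sets.

import markdown_it

from omdev.markdown.tokens import flatten_tokens  # new module from this release
from omdev.markdown.tokens import token_repr

tokens = markdown_it.MarkdownIt().parse('# Hi\n\nSome *emphasized* text.\n')
for tk in flatten_tokens(tokens):
    print(token_repr(tk))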
omdev/packaging/marshal.py
CHANGED
@@ -8,7 +8,6 @@ import typing as ta
 from omlish import lang
 from omlish import marshal as msh
 from omlish import reflect as rfl
-from omlish.funcs import match as mfs

 from .requires import RequiresMarkerItem
 from .requires import RequiresMarkerList
@@ -43,12 +42,13 @@ class RequiresMarkerListMarshaler(msh.Marshaler):
         return [inner(e) for e in o]


-class RequiresMarkerListMarshalerFactory(msh.
-
-
-
-
-            ctx.
+class RequiresMarkerListMarshalerFactory(msh.MarshalerFactory):
+    def make_marshaler(self, ctx: msh.MarshalFactoryContext, rty: rfl.Type) -> ta.Callable[[], msh.Marshaler] | None:
+        if rty is not MarshalRequiresMarkerList:
+            return None
+        return lambda: RequiresMarkerListMarshaler(
+            ctx.make_marshaler(RequiresMarkerItem),
+            ctx.make_marshaler(RequiresNode),
         )


@@ -56,7 +56,7 @@ class RequiresMarkerListMarshalerFactory(msh.MarshalerFactoryMatchClass):


 @lang.static_init
-def
+def _install_standard_marshaling() -> None:
     requires_node_poly = msh.Polymorphism(
         RequiresNode,
         [
omdev/packaging/requires.py
CHANGED
@@ -33,6 +33,12 @@ from omlish.lite.check import check
 from .specifiers import Specifier


+RequiresMarkerVar = ta.Union['RequiresVariable', 'RequiresValue']  # ta.TypeAlias
+
+RequiresMarkerAtom = ta.Union['RequiresMarkerItem', ta.Sequence['RequiresMarkerAtom']]  # ta.TypeAlias
+RequiresMarkerList = ta.Sequence[ta.Union['RequiresMarkerList', 'RequiresMarkerAtom', str]]  # ta.TypeAlias
+
+
 ##


@@ -229,12 +235,6 @@ class RequiresOp(RequiresNode):
         return str(self)


-RequiresMarkerVar = ta.Union['RequiresVariable', 'RequiresValue']
-
-RequiresMarkerAtom = ta.Union['RequiresMarkerItem', ta.Sequence['RequiresMarkerAtom']]
-RequiresMarkerList = ta.Sequence[ta.Union['RequiresMarkerList', 'RequiresMarkerAtom', str]]
-
-
 class RequiresMarkerItem(ta.NamedTuple):
     l: ta.Union[RequiresVariable, RequiresValue]
     op: RequiresOp
omdev/packaging/specifiers.py
CHANGED
@@ -451,7 +451,7 @@ class SpecifierSet(BaseSpecifier):
         if isinstance(other, str):
             other = SpecifierSet(other)
         elif not isinstance(other, SpecifierSet):
-            return NotImplemented
+            return NotImplemented

         specifier = SpecifierSet()
         specifier._specs = frozenset(self._specs | other._specs)
@@ -471,6 +471,7 @@ class SpecifierSet(BaseSpecifier):
         if isinstance(other, (str, Specifier)):
             other = SpecifierSet(str(other))
         elif not isinstance(other, SpecifierSet):
+
             return NotImplemented

         return self._specs == other._specs
omdev/packaging/versions.py
CHANGED
@@ -121,12 +121,12 @@ class _BaseVersion:

     def __lt__(self, other: '_BaseVersion') -> bool:
         if not isinstance(other, _BaseVersion):
-            return NotImplemented
+            return NotImplemented
         return self._key < other._key

     def __le__(self, other: '_BaseVersion') -> bool:
         if not isinstance(other, _BaseVersion):
-            return NotImplemented
+            return NotImplemented
         return self._key <= other._key

     def __eq__(self, other: object) -> bool:
@@ -136,12 +136,12 @@ class _BaseVersion:

     def __ge__(self, other: '_BaseVersion') -> bool:
         if not isinstance(other, _BaseVersion):
-            return NotImplemented
+            return NotImplemented
         return self._key >= other._key

     def __gt__(self, other: '_BaseVersion') -> bool:
         if not isinstance(other, _BaseVersion):
-            return NotImplemented
+            return NotImplemented
         return self._key > other._key

     def __ne__(self, other: object) -> bool:
omdev/packaging/wheelfile.py
CHANGED
@@ -116,11 +116,13 @@ class WheelFile(zipfile.ZipFile):
     @staticmethod
     def _urlsafe_b64encode(data: bytes) -> bytes:
         """urlsafe_b64encode without padding"""
+
         return base64.urlsafe_b64encode(data).rstrip(b'=')

     @staticmethod
     def _urlsafe_b64decode(data: bytes) -> bytes:
         """urlsafe_b64decode without padding"""
+
         pad = b'=' * (4 - (len(data) & 3))
         return base64.urlsafe_b64decode(data + pad)

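A quick standalone illustration of the padding-free urlsafe base64 pair shown above (not part of the diff): the encoder strips trailing '=' and the decoder re-appends enough '=' before decoding.

import base64

data = b'\x00\xffwheel-record'
enc = base64.urlsafe_b64encode(data).rstrip(b'=')  # what _urlsafe_b64encode() produces
pad = b'=' * (4 - (len(enc) & 3))                  # what _urlsafe_b64decode() re-adds
assert base64.urlsafe_b64decode(enc + pad) == data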
omdev/precheck/blanklines.py
ADDED
@@ -0,0 +1,66 @@
+import dataclasses as dc
+import os
+import typing as ta
+
+from omlish.text.filecache import TextFileCache
+
+from .base import Precheck
+from .base import PrecheckContext
+from .caches import DirWalkCache
+from .caches import HeadersCache
+
+
+##
+
+
+class BlankLinesPrecheck(Precheck['BlankLinesPrecheck.Config']):
+    @dc.dataclass(frozen=True)
+    class Config(Precheck.Config):
+        DEFAULT_FILE_EXTENSIONS: ta.ClassVar[ta.AbstractSet[str]] = frozenset([
+            'py',
+
+            'c',
+            'cc',
+            'cu',
+            'h',
+            'hh',
+        ])
+
+        file_extensions: ta.AbstractSet[str] = DEFAULT_FILE_EXTENSIONS
+
+    def __init__(
+        self,
+        context: PrecheckContext,
+        config: Config = Config(),
+        *,
+        dir_walk_cache: DirWalkCache,
+        text_file_cache: TextFileCache,
+        headers_cache: HeadersCache,
+    ) -> None:
+        super().__init__(config)
+
+        self._context = context
+
+        self._dir_walk_cache = dir_walk_cache
+        self._text_file_cache = text_file_cache
+        self._headers_cache = headers_cache
+
+    async def _run_file(self, file: str) -> ta.AsyncGenerator[Precheck.Violation]:
+        src = self._text_file_cache.get_entry(file).text()
+
+        if src and not src.splitlines()[0]:
+            yield Precheck.Violation(self, f'source file {file} starts with blank line')
+
+    async def run(self) -> ta.AsyncGenerator[Precheck.Violation]:
+        files = [
+            os.path.join(e.root, f)
+            for src_root in self._context.src_roots
+            for e in self._dir_walk_cache.list_dir(src_root)
+            for f in e.files
+            if '.' in f
+            and any(f.endswith('.' + ext) for ext in self._config.file_extensions)
+        ]
+
+        for file in sorted(files):
+            async for v in self._run_file(file):
+                yield v
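The heart of the new BlankLinesPrecheck is a single predicate on the file's text; a standalone sketch of that check (not part of the diff):

def starts_with_blank_line(src: str) -> bool:
    # Mirrors BlankLinesPrecheck._run_file(): flag files whose first line is empty
    return bool(src) and not src.splitlines()[0]


assert starts_with_blank_line('\nimport os\n')
assert not starts_with_blank_line('import os\n')
assert not starts_with_blank_line('')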
omdev/precheck/caches.py
CHANGED
omdev/precheck/imports.py
CHANGED
@@ -3,6 +3,8 @@ import dataclasses as dc
 import os.path
 import typing as ta

+from omlish.text.filecache import TextFileCache
+
 from .base import Precheck
 from .base import PrecheckContext
 from .caches import AstCache
@@ -26,6 +28,7 @@ class RootRelativeImportPrecheck(Precheck['RootRelativeImportPrecheck.Config']):
         dir_walk_cache: DirWalkCache,
         headers_cache: HeadersCache,
         ast_cache: AstCache,
+        text_file_cache: TextFileCache,
     ) -> None:
         super().__init__(config)

@@ -34,6 +37,7 @@ class RootRelativeImportPrecheck(Precheck['RootRelativeImportPrecheck.Config']):
         self._dir_walk_cache = dir_walk_cache
         self._headers_cache = headers_cache
         self._ast_cache = ast_cache
+        self._text_file_cache = text_file_cache

     async def _run_py_file(self, py_file: str, src_root: str) -> ta.AsyncGenerator[Precheck.Violation]:
         if isinstance(header_lines := self._headers_cache.get_file_headers(py_file), Exception):
@@ -41,11 +45,20 @@ class RootRelativeImportPrecheck(Precheck['RootRelativeImportPrecheck.Config']):
         if any(hl.src.strip() == '# ruff: noqa' for hl in header_lines):
             return

+        py_file_lines = self._text_file_cache.get_entry(py_file).lines()
+
         py_file_ast = self._ast_cache.get_file_ast(py_file)
         if not isinstance(py_file_ast, ast.Module):
             return

-        for cur_node in py_file_ast
+        for cur_node in ast.walk(py_file_ast):
+            if not isinstance(cur_node, (ast.Import, ast.ImportFrom)):
+                continue
+
+            # FIXME: lame lol
+            if py_file_lines[cur_node.lineno - 1].strip().endswith('# noqa'):
+                continue
+
             if isinstance(cur_node, ast.Import):
                 imp_alias: ast.alias
                 for imp_alias in cur_node.names:
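A standalone sketch of just the suppression logic added above, not the root-relative-import rule itself (and not part of the diff): walk the module AST and skip any import whose source line ends with '# noqa'.

import ast

src = 'import os\nfrom foo import bar  # noqa\n'
lines = src.splitlines()

flagged = []
for node in ast.walk(ast.parse(src)):
    if not isinstance(node, (ast.Import, ast.ImportFrom)):
        continue
    if lines[node.lineno - 1].strip().endswith('# noqa'):
        continue  # suppressed, as the precheck now allows
    flagged.append(lines[node.lineno - 1])

print(flagged)  # ['import os'] -- the '# noqa' import was skipped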
omdev/precheck/main.py
CHANGED
@@ -21,7 +21,6 @@ TODO:
 import argparse
 import asyncio
 import os.path
-import sys
 import typing as ta

 from omlish import inject as inj
@@ -29,6 +28,7 @@ from omlish.logs import all as logs

 from .base import Precheck
 from .base import PrecheckContext
+from .blanklines import BlankLinesPrecheck
 from .caches import AstCache
 from .caches import DirWalkCache
 from .caches import HeadersCache
@@ -87,6 +87,7 @@ def _check_cmd(args) -> None:
     )

     pc_cfgs: list[Precheck.Config] = [
+        BlankLinesPrecheck.Config(),
         GitBlacklistPrecheck.Config(),
         LitePython8Precheck.Config(),
         ManifestsPrecheck.Config(),
@@ -124,7 +125,7 @@ def _check_cmd(args) -> None:

     if vs:
         print(f'{len(vs)} violations found')
-
+        raise SystemExit(1)


 ##
@@ -132,12 +133,12 @@ def _check_cmd(args) -> None:

 def _build_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser()
+    parser.add_argument('-v', '--verbose', action='store_true')

     subparsers = parser.add_subparsers()

     parser_check = subparsers.add_parser('check')
     parser_check.add_argument('roots', nargs='+')
-    parser_check.add_argument('-v', '--verbose', action='store_true')
     parser_check.set_defaults(func=_check_cmd)

     return parser