omdev 0.0.0.dev7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of omdev might be problematic. Click here for more details.
- omdev-0.0.0.dev7/LICENSE +21 -0
- omdev-0.0.0.dev7/MANIFEST.in +1 -0
- omdev-0.0.0.dev7/PKG-INFO +23 -0
- omdev-0.0.0.dev7/README.rst +1 -0
- omdev-0.0.0.dev7/omdev/__about__.py +35 -0
- omdev-0.0.0.dev7/omdev/__init__.py +0 -0
- omdev-0.0.0.dev7/omdev/amalg/__init__.py +0 -0
- omdev-0.0.0.dev7/omdev/amalg/__main__.py +4 -0
- omdev-0.0.0.dev7/omdev/amalg/amalg.py +513 -0
- omdev-0.0.0.dev7/omdev/classdot.py +61 -0
- omdev-0.0.0.dev7/omdev/cmake.py +164 -0
- omdev-0.0.0.dev7/omdev/exts/__init__.py +0 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/__init__.py +10 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/build_ext.py +367 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/compilers/__init__.py +3 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/compilers/ccompiler.py +1032 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/compilers/options.py +80 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/compilers/unixccompiler.py +385 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/dir_util.py +76 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/errors.py +62 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/extension.py +107 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/file_util.py +216 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/modified.py +47 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/spawn.py +103 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/sysconfig.py +349 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/util.py +201 -0
- omdev-0.0.0.dev7/omdev/exts/_distutils/version.py +308 -0
- omdev-0.0.0.dev7/omdev/exts/build.py +43 -0
- omdev-0.0.0.dev7/omdev/exts/cmake.py +195 -0
- omdev-0.0.0.dev7/omdev/exts/importhook.py +88 -0
- omdev-0.0.0.dev7/omdev/exts/scan.py +74 -0
- omdev-0.0.0.dev7/omdev/interp/__init__.py +1 -0
- omdev-0.0.0.dev7/omdev/interp/__main__.py +4 -0
- omdev-0.0.0.dev7/omdev/interp/cli.py +63 -0
- omdev-0.0.0.dev7/omdev/interp/inspect.py +105 -0
- omdev-0.0.0.dev7/omdev/interp/providers.py +67 -0
- omdev-0.0.0.dev7/omdev/interp/pyenv.py +353 -0
- omdev-0.0.0.dev7/omdev/interp/resolvers.py +76 -0
- omdev-0.0.0.dev7/omdev/interp/standalone.py +187 -0
- omdev-0.0.0.dev7/omdev/interp/system.py +125 -0
- omdev-0.0.0.dev7/omdev/interp/types.py +92 -0
- omdev-0.0.0.dev7/omdev/mypy/__init__.py +0 -0
- omdev-0.0.0.dev7/omdev/mypy/debug.py +86 -0
- omdev-0.0.0.dev7/omdev/pyproject/__init__.py +1 -0
- omdev-0.0.0.dev7/omdev/pyproject/__main__.py +4 -0
- omdev-0.0.0.dev7/omdev/pyproject/cli.py +319 -0
- omdev-0.0.0.dev7/omdev/pyproject/configs.py +97 -0
- omdev-0.0.0.dev7/omdev/pyproject/ext.py +107 -0
- omdev-0.0.0.dev7/omdev/pyproject/pkg.py +196 -0
- omdev-0.0.0.dev7/omdev/scripts/__init__.py +0 -0
- omdev-0.0.0.dev7/omdev/scripts/execrss.py +19 -0
- omdev-0.0.0.dev7/omdev/scripts/findimports.py +62 -0
- omdev-0.0.0.dev7/omdev/scripts/findmagic.py +70 -0
- omdev-0.0.0.dev7/omdev/scripts/interp.py +2118 -0
- omdev-0.0.0.dev7/omdev/scripts/pyproject.py +3584 -0
- omdev-0.0.0.dev7/omdev/scripts/traceimport.py +502 -0
- omdev-0.0.0.dev7/omdev/tokens.py +42 -0
- omdev-0.0.0.dev7/omdev/toml/__init__.py +1 -0
- omdev-0.0.0.dev7/omdev/toml/parser.py +823 -0
- omdev-0.0.0.dev7/omdev/toml/writer.py +104 -0
- omdev-0.0.0.dev7/omdev/tools/__init__.py +0 -0
- omdev-0.0.0.dev7/omdev/tools/dockertools.py +81 -0
- omdev-0.0.0.dev7/omdev/tools/sqlrepl.py +193 -0
- omdev-0.0.0.dev7/omdev/versioning/__init__.py +1 -0
- omdev-0.0.0.dev7/omdev/versioning/specifiers.py +531 -0
- omdev-0.0.0.dev7/omdev/versioning/versions.py +416 -0
- omdev-0.0.0.dev7/omdev.egg-info/PKG-INFO +23 -0
- omdev-0.0.0.dev7/omdev.egg-info/SOURCES.txt +71 -0
- omdev-0.0.0.dev7/omdev.egg-info/dependency_links.txt +1 -0
- omdev-0.0.0.dev7/omdev.egg-info/requires.txt +12 -0
- omdev-0.0.0.dev7/omdev.egg-info/top_level.txt +1 -0
- omdev-0.0.0.dev7/pyproject.toml +52 -0
- omdev-0.0.0.dev7/setup.cfg +4 -0
omdev-0.0.0.dev7/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
Copyright 2023- wrmsr
|
|
2
|
+
|
|
3
|
+
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
|
|
4
|
+
following conditions are met:
|
|
5
|
+
|
|
6
|
+
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
|
|
7
|
+
disclaimer.
|
|
8
|
+
|
|
9
|
+
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
|
|
10
|
+
disclaimer in the documentation and/or other materials provided with the distribution.
|
|
11
|
+
|
|
12
|
+
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
|
|
13
|
+
derived from this software without specific prior written permission.
|
|
14
|
+
|
|
15
|
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
|
16
|
+
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
17
|
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
18
|
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
19
|
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
|
20
|
+
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
|
21
|
+
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
global-exclude **/conftest.py
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: omdev
|
|
3
|
+
Version: 0.0.0.dev7
|
|
4
|
+
Summary: omdev
|
|
5
|
+
Author: wrmsr
|
|
6
|
+
License: BSD-3-Clause
|
|
7
|
+
Project-URL: source, https://github.com/wrmsr/omlish
|
|
8
|
+
Classifier: License :: OSI Approved :: BSD License
|
|
9
|
+
Classifier: Development Status :: 2 - Pre-Alpha
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: Operating System :: OS Independent
|
|
12
|
+
Classifier: Operating System :: POSIX
|
|
13
|
+
Requires-Python: >=3.12
|
|
14
|
+
License-File: LICENSE
|
|
15
|
+
Requires-Dist: omlish==0.0.0.dev7
|
|
16
|
+
Provides-Extra: c
|
|
17
|
+
Requires-Dist: pycparser>=2.22; extra == "c"
|
|
18
|
+
Requires-Dist: cffi>=1.17; extra == "c"
|
|
19
|
+
Requires-Dist: pcpp>=1.30; extra == "c"
|
|
20
|
+
Provides-Extra: mypy
|
|
21
|
+
Requires-Dist: mypy>=1.11; extra == "mypy"
|
|
22
|
+
Provides-Extra: tokens
|
|
23
|
+
Requires-Dist: tokenize_rt>=6; extra == "tokens"
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
*omlish*
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from omlish.__about__ import ProjectBase
|
|
2
|
+
from omlish.__about__ import SetuptoolsBase
|
|
3
|
+
from omlish.__about__ import __version__
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class Project(ProjectBase):
|
|
7
|
+
name = 'omdev'
|
|
8
|
+
description = 'omdev'
|
|
9
|
+
|
|
10
|
+
dependencies = [
|
|
11
|
+
f'omlish == {__version__}',
|
|
12
|
+
]
|
|
13
|
+
|
|
14
|
+
optional_dependencies = {
|
|
15
|
+
'c': [
|
|
16
|
+
'pycparser >= 2.22',
|
|
17
|
+
'cffi >= 1.17',
|
|
18
|
+
'pcpp >= 1.30',
|
|
19
|
+
],
|
|
20
|
+
|
|
21
|
+
'mypy': [
|
|
22
|
+
'mypy >= 1.11',
|
|
23
|
+
],
|
|
24
|
+
|
|
25
|
+
'tokens': [
|
|
26
|
+
'tokenize_rt >= 6',
|
|
27
|
+
],
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Setuptools(SetuptoolsBase):
|
|
32
|
+
find_packages = {
|
|
33
|
+
'include': ['omdev', 'omdev.*'],
|
|
34
|
+
'exclude': [*SetuptoolsBase.find_packages['exclude']],
|
|
35
|
+
}
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,513 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Conventions:
|
|
3
|
+
- must import whole global modules, if aliased must all match
|
|
4
|
+
- must import 'from' items for local modules
|
|
5
|
+
|
|
6
|
+
TODO:
|
|
7
|
+
- check 3.8 compat
|
|
8
|
+
- more sanity checks lol
|
|
9
|
+
- flake8 / ruff mgmt
|
|
10
|
+
- typealias - support # noqa, other comments, and lamely support multiline by just stealing lines till it parses
|
|
11
|
+
|
|
12
|
+
See:
|
|
13
|
+
- https://github.com/xonsh/amalgamate - mine is for portability not speed, and doesn't try to work on unmodified code
|
|
14
|
+
|
|
15
|
+
Targets:
|
|
16
|
+
- interp
|
|
17
|
+
- pyproject
|
|
18
|
+
- precheck
|
|
19
|
+
- build
|
|
20
|
+
- pyremote
|
|
21
|
+
- bootstrap
|
|
22
|
+
- deploy
|
|
23
|
+
- supervisor?
|
|
24
|
+
"""
|
|
25
|
+
import argparse
|
|
26
|
+
import dataclasses as dc
|
|
27
|
+
import io
|
|
28
|
+
import logging
|
|
29
|
+
import os.path
|
|
30
|
+
import typing as ta
|
|
31
|
+
|
|
32
|
+
import tokenize_rt as trt
|
|
33
|
+
|
|
34
|
+
from omlish import check
|
|
35
|
+
from omlish import collections as col
|
|
36
|
+
from omlish import logs
|
|
37
|
+
|
|
38
|
+
from .. import tokens as tks
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
Tokens: ta.TypeAlias = tks.Tokens
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
log = logging.getLogger(__name__)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
##
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# Token names that may begin a "header" line: whitespace, comments, and
# (docstring-style) strings, per the token names produced by tokenize-rt.
HEADER_NAMES = (*tks.WS_NAMES, 'COMMENT', 'STRING')


def split_header_lines(lines: ta.Iterable[Tokens]) -> tuple[list[Tokens], list[Tokens]]:
    """Split token-lines into (header, body).

    The header is the maximal leading run of lines whose first token's name is
    in ``HEADER_NAMES``; the body is everything from the first non-header line
    onward, in original order.
    """
    header: list[Tokens] = []
    body: list[Tokens] = []
    it = iter(lines)
    for cur in it:
        if cur[0].name not in HEADER_NAMES:
            # First real code line: it and everything after it form the body.
            body.append(cur)
            body.extend(it)
            break
        header.append(cur)
    return header, body
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
##
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@dc.dataclass(frozen=True, kw_only=True)
class Import:
    """A single import statement parsed out of a source file."""

    mod: str          # dotted module name (or resolved path for local relative imports)
    item: str | None  # the imported item for `from mod import item`, else None
    as_: str | None   # alias from an `as` clause, if present

    src_path: str  # file the import statement was found in
    line: int      # line number of the statement within src_path

    mod_path: str | None  # resolved filesystem path for local modules, None for global ones

    toks: Tokens = dc.field(repr=False)  # raw tokens of the original import line
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def make_import(
        lts: Tokens,
        *,
        src_path: str,
        mounts: ta.Mapping[str, str],
) -> Import | None:
    """Parse one token-line into an ``Import``, or return None if it is not an import.

    Local modules (relative imports, or modules whose root package appears in
    *mounts*) get a resolved ``mod_path``; global modules get ``mod_path=None``.
    """
    if not lts:
        return None
    first = lts[0]

    # Only lines beginning with `import` or `from` are import statements.
    if first.name != 'NAME' or first.src not in ('import', 'from'):
        return None

    mod_parts: list[str] = []
    item_parts: list[str] | None = None
    as_: str | None = None
    it = iter(tks.ignore_ws(lts[1:]))
    for tok in it:
        if tok.name not in ('NAME', 'OP'):
            raise Exception(tok)
        if tok.src == 'as':
            check.none(as_)  # at most one alias per statement
            nxt = next(it)
            check.equal(nxt.name, 'NAME')
            as_ = nxt.src
        elif tok.src == 'import':
            # Only legal mid-statement in a `from ... import ...` form.
            check.equal(first.src, 'from')
            item_parts = []
        elif item_parts is not None:
            item_parts.append(tok.src)
        else:
            mod_parts.append(tok.src)

    mod = ''.join(mod_parts)
    item = ''.join(item_parts) if item_parts is not None else None

    mod_path: str | None
    if (mnt := mounts.get(mod.partition('.')[0])) is not None:
        # Root package is mounted: resolve against the mount directory.
        segs = mod.split('.')
        mod_path = os.path.abspath(os.path.join(
            mnt,
            *segs[1:-1],
            segs[-1] + '.py',
        ))
    elif not mod.startswith('.'):
        # Global (non-local) module.
        mod_path = None
    else:
        # Relative import: each leading dot yields one empty split element;
        # walk up one directory per extra dot.
        segs = mod.split('.')
        depth = len(segs) - segs[::-1].index('')
        mod_path = os.path.abspath(os.path.join(
            os.path.dirname(src_path),
            '../' * (depth - 1),
            *segs[depth:-1],
            segs[-1] + '.py',
        ))
        # NOTE(review): original formatting was lost; this assignment is placed
        # in the relative branch since mod_path can be None in the global case,
        # which would make the isinstance check fail for every global import.
        mod = check.isinstance(mod_path, str)

    return Import(
        mod=mod,
        item=item,
        as_=as_,

        src_path=src_path,
        line=first.line,

        mod_path=mod_path,

        toks=lts,
    )
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
##
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
# Trailing comment that explicitly marks a line as a type-alias definition.
TYPE_ALIAS_COMMENT = '# ta.TypeAlias'


@dc.dataclass(frozen=True, kw_only=True)
class Typing:
    """A typing/type-alias statement lifted out of a source file."""

    src: str  # exact source text of the statement

    src_path: str  # file the statement was found in
    line: int      # line number of the statement within src_path

    toks: Tokens = dc.field(repr=False)  # raw tokens of the original line
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _is_typing(lts: Tokens) -> bool:
    """Return True if the token-line looks like a typing/type-alias statement.

    Detected either by an explicit trailing ``TYPE_ALIAS_COMMENT`` marker, or
    by the shape ``<NAME> = ta.<...>``.
    """
    if tks.join_toks(lts).strip().endswith(TYPE_ALIAS_COMMENT):
        return True

    wts = list(tks.ignore_ws(lts))
    return (
        len(wts) >= 5 and
        wts[0].name == 'NAME' and
        wts[1].name == 'OP' and wts[1].src == '=' and
        wts[2].name == 'NAME' and wts[2].src == 'ta' and
        wts[3].name == 'OP' and wts[3].src == '.'
    )
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def make_typing(
        lts: Tokens,
        *,
        src_path: str,
) -> Typing | None:
    """Build a ``Typing`` from a token-line, or return None if it isn't one.

    Indented lines are rejected outright: only top-level statements count.
    """
    # Indented or empty lines are never top-level typing statements.
    if not lts or lts[0].name == 'UNIMPORTANT_WS':
        return None
    if not _is_typing(lts):
        return None

    first = next(iter(tks.ignore_ws(lts)))
    return Typing(
        src=tks.join_toks(lts),

        src_path=src_path,
        line=first.line,

        toks=lts,
    )
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
##
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
@dc.dataclass(frozen=True, kw_only=True)
class SrcFile:
    """A tokenized source file decomposed into header, imports, typings and content."""

    path: str  # filesystem path of the source file

    src: str = dc.field(repr=False)                       # raw (stripped) source text
    tokens: Tokens = dc.field(repr=False)                 # full token stream
    lines: ta.Sequence[Tokens] = dc.field(repr=False)     # tokens grouped per line

    header_lines: ta.Sequence[Tokens] = dc.field(repr=False)   # leading comments/docstring lines
    imports: ta.Sequence[Import] = dc.field(repr=False)        # parsed import statements
    typings: ta.Sequence[Typing] = dc.field(repr=False)        # parsed typing statements
    content_lines: ta.Sequence[Tokens] = dc.field(repr=False)  # everything else
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def make_src_file(
        path: str,
        *,
        mounts: ta.Mapping[str, str],
) -> SrcFile:
    """Read, tokenize, and classify the source file at *path* into a ``SrcFile``."""
    with open(path) as f:
        src = f.read().strip()

    tokens = trt.src_to_tokens(src)
    lines = tks.split_lines(tokens)

    header_lines, body_lines = split_header_lines(lines)

    imports: list[Import] = []
    typings: list[Typing] = []
    content_lines: list[Tokens] = []

    # Every non-header line is exactly one of: import, typing, or plain content.
    for line in body_lines:
        imp = make_import(
            line,
            src_path=path,
            mounts=mounts,
        )
        if imp is not None:
            imports.append(imp)
            continue

        ty = make_typing(
            line,
            src_path=path,
        )
        if ty is not None:
            typings.append(ty)
            continue

        content_lines.append(line)

    return SrcFile(
        path=path,

        src=src,
        tokens=tokens,
        lines=lines,

        header_lines=header_lines,
        imports=imports,
        typings=typings,
        content_lines=content_lines,
    )
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
##
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
# Separator written between concatenated per-file sections of the output.
SECTION_SEP = '#' * 40 + '\n'

# Ruff rules to disable in generated output (currently none active).
RUFF_DISABLES: ta.Sequence[str] = [
    # 'UP006',  # non-pep585-annotation
    # 'UP007',  # non-pep604-annotation
]

# Magic comment written into generated files to mark their origin.
OUTPUT_COMMENT = '# @omdev-amalg-output '
# Magic comment scanned for in inputs to trigger generation.
SCAN_COMMENT = '# @omdev-amalg '
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
def gen_amalg(
        main_path: str,
        *,
        mounts: ta.Mapping[str, str],
        output_dir: str | None = None,
) -> str:
    """Amalgamate *main_path* and its transitive local imports into one source string.

    Local imports are resolved via *mounts* / relative paths, deduplicated
    global imports are hoisted to the top, typing statements are emitted once,
    and file bodies are concatenated in topological dependency order.

    Returns the generated source text; *output_dir* (if given) is only used to
    compute the relative path recorded in the output marker comment.
    """
    # Transitively collect all local source files reachable from main_path.
    src_files: dict[str, SrcFile] = {}
    todo = [main_path]
    while todo:
        src_path = todo.pop()
        if src_path in src_files:
            continue

        f = make_src_file(
            src_path,
            mounts=mounts,
        )
        src_files[src_path] = f

        for imp in f.imports:
            if (mp := imp.mod_path) is not None:
                todo.append(mp)

    ##

    out = io.StringIO()

    ##

    # Emit the main file's header (minus scan-comment lines), injecting the
    # output marker comment after any shebang.
    mf = src_files[main_path]
    if mf.header_lines:
        hls = [
            hl
            for hlts in mf.header_lines
            if not (hl := tks.join_toks(hlts)).startswith(SCAN_COMMENT)
        ]
        if output_dir is not None:
            ogf = os.path.relpath(main_path, output_dir)
        else:
            ogf = os.path.basename(main_path)
        nhls = []
        # FIX: hls may be empty if every header line was a scan comment;
        # guard before indexing to avoid an IndexError.
        if hls and hls[0].startswith('#!'):
            nhls.append(hls.pop(0))
        nhls.extend([
            '# noinspection DuplicatedCode\n',
            f'{OUTPUT_COMMENT.strip()} {ogf}\n',
        ])
        hls = [*nhls, *hls]
        out.write(''.join(hls))

    if RUFF_DISABLES:
        out.write(f'# ruff: noqa: {" ".join(RUFF_DISABLES)}\n')

    ##

    # Deduplicate global imports by (mod, item, as_) and emit them sorted.
    all_imps = [i for f in src_files.values() for i in f.imports]
    gl_imps = [i for i in all_imps if i.mod_path is None]

    dct: dict = {}
    for imp in gl_imps:
        dct.setdefault((imp.mod, imp.item, imp.as_), []).append(imp)
    # FIX: key tuples mix None and str (e.g. `import os` vs `from os import
    # path`), and comparing None with str raises TypeError under plain
    # sorted() — map None to '' for ordering purposes.
    for _, l in sorted(dct.items(), key=lambda kv: tuple(s or '' for s in kv[0])):
        out.write(tks.join_toks(l[0].toks))
    if dct:
        out.write('\n\n')

    ##

    # Topologically sort files so dependencies precede their dependents.
    ts = list(col.toposort({  # noqa
        f.path: {mp for i in f.imports if (mp := i.mod_path) is not None}
        for f in src_files.values()
    }))
    sfs = [sf for ss in ts for sf in sorted(ss)]

    ##

    # Emit each distinct typing statement exactly once.
    tys = set()
    for sf in sfs:
        f = src_files[sf]
        for ty in f.typings:
            if ty.src not in tys:
                out.write(ty.src)
                tys.add(ty.src)
    if tys:
        out.write('\n\n')

    ##

    # Emit each file's content as its own section.
    for i, sf in enumerate(sfs):
        f = src_files[sf]
        out.write(SECTION_SEP)
        if f is not mf:
            rp = os.path.relpath(f.path, mf.path)
        else:
            rp = os.path.basename(f.path)
        out.write(f'# {rp}\n')
        if f is not mf and f.header_lines:
            out.write(tks.join_lines(f.header_lines))
        out.write('\n\n')
        sf_src = tks.join_lines(f.content_lines)
        out.write(sf_src.strip())
        if i < len(sfs) - 1:
            out.write('\n\n\n')
        else:
            out.write('\n')

    ##

    return out.getvalue()
|
|
399
|
+
|
|
400
|
+
|
|
401
|
+
##
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def _gen_one(
        input_path: str,
        output_path: str | None,
        *,
        mounts: ta.Mapping[str, str],
) -> None:
    """Amalgamate *input_path*, writing to *output_path* or stdout if None."""
    log.info('Generating: %s -> %s', input_path, output_path)

    # Relative paths in the output header are computed against the output's
    # directory (falling back to the input's when printing to stdout).
    out_dir = os.path.dirname(output_path if output_path is not None else input_path)
    src = gen_amalg(
        input_path,
        mounts=mounts,
        output_dir=out_dir,
    )

    if output_path is None:
        print(src)
        return

    with open(output_path, 'w') as f:
        f.write(src)
    # Preserve the input's mode bits (e.g. the executable bit) on the output.
    os.chmod(output_path, os.stat(input_path).st_mode)
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
def _scan_one(
        input_path: str,
        **kwargs: ta.Any,
) -> None:
    """Scan one file for amalg magic comments and generate each requested output.

    Non-Python files are ignored. Each ``SCAN_COMMENT`` line must carry exactly
    one argument: the output path, relative to the input file's directory.
    """
    if not input_path.endswith('.py'):
        return

    with open(input_path) as f:
        src = f.read()

    for scan_line in (l for l in src.splitlines() if l.startswith(SCAN_COMMENT)):
        scan_args = scan_line[len(SCAN_COMMENT):].split()
        if len(scan_args) != 1:
            raise Exception(f'Invalid scan args: {input_path=} {scan_args=}')

        out_path = os.path.abspath(os.path.join(os.path.dirname(input_path), scan_args[0]))
        _gen_one(
            input_path,
            out_path,
            **kwargs,
        )
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
def _gen_cmd(args) -> None:
    """Handle the ``gen`` subcommand: generate amalgamations for the given inputs.

    Directory inputs are recursively scanned for files bearing magic scan
    comments; file inputs are generated directly to ``--output`` (or stdout).

    Raises:
        Exception: if not run from a project root (no pyproject.toml).
    """
    if not os.path.isfile('pyproject.toml'):
        raise Exception('Not in project root')

    # Mount specs are either a bare path (keyed by itself) or `key:path`.
    mounts = {}
    for m in args.mounts or ():
        if ':' not in m:
            mounts[m] = os.path.abspath(m)
        else:
            # FIX: split only on the first ':' so paths containing colons
            # don't raise "too many values to unpack".
            k, v = m.split(':', 1)
            mounts[k] = os.path.abspath(v)

    for i in args.inputs:
        if os.path.isdir(i):
            log.info('Scanning %s', i)
            for we_dirpath, we_dirnames, we_filenames in os.walk(i):  # noqa
                for fname in we_filenames:
                    _scan_one(
                        os.path.abspath(os.path.join(we_dirpath, fname)),
                        mounts=mounts,
                    )

        else:
            output_dir = args.output
            if output_dir is not None:
                output_path = check.isinstance(os.path.join(output_dir, os.path.basename(i)), str)
            else:
                output_path = None

            _gen_one(
                os.path.abspath(i),
                output_path,
                mounts=mounts,
            )
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
def _build_parser() -> argparse.ArgumentParser:
    """Build the CLI argument parser with its ``gen`` subcommand."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()

    gen = subparsers.add_parser('gen')
    gen.add_argument('--mount', '-m', dest='mounts', action='append')
    gen.add_argument('--output', '-o')
    gen.add_argument('inputs', nargs='+')
    gen.set_defaults(func=_gen_cmd)

    return parser
|
|
499
|
+
|
|
500
|
+
|
|
501
|
+
def _main() -> None:
    """CLI entry point: configure logging, parse args, and dispatch."""
    logs.configure_standard_logging('INFO')

    parser = _build_parser()
    args = parser.parse_args()
    func = getattr(args, 'func', None)
    if func:
        func(args)
    else:
        # No subcommand given.
        parser.print_help()


if __name__ == '__main__':
    _main()
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TODO:
|
|
3
|
+
- https://stackoverflow.com/questions/19308847/graphviz-vertical-ordering
|
|
4
|
+
- (same expected mro past top ~= same expected ver pos, order by name)
|
|
5
|
+
"""
|
|
6
|
+
import typing as ta
|
|
7
|
+
|
|
8
|
+
from omlish.graphs import dot
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def gen_class_dot(roots: ta.Iterable[type]) -> dot.Graph:
    """Build a dot graph of the subclass hierarchy reachable from *roots*.

    Walks ``__subclasses__`` downward from the roots; nodes are labeled with
    fully-qualified class names and edges run from base to subclass, restricted
    to classes that subclass at least one root.
    """
    roots = set(roots)
    root_tup = tuple(roots)

    stmts: list[dot.Stmt] = [dot.RawStmt('rankdir=LR;')]

    todo = set(roots)
    seen = set()
    while todo:
        cur = todo.pop()
        seen.add(cur)
        label = f'{cur.__module__}.{cur.__qualname__}'
        stmts.append(dot.Node(str(id(cur)), {'label': label}))
        # Edge from each in-hierarchy base down to this class.
        for base in cur.__bases__:
            if issubclass(base, root_tup):
                stmts.append(dot.Edge(str(id(base)), str(id(cur))))
        # Queue unvisited in-hierarchy subclasses.
        for sub in cur.__subclasses__():
            if issubclass(sub, root_tup) and sub not in seen:
                todo.add(sub)

    return dot.Graph(stmts)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _main() -> None:
    """CLI entry point: import requested modules, resolve root classes, render graph."""
    import argparse

    from omlish import lang

    parser = argparse.ArgumentParser()
    parser.add_argument('--import', '-i', action='append', dest='imports')
    parser.add_argument('roots', nargs='+')
    args = parser.parse_args()

    # Pre-import any modules needed for the root specs to resolve.
    for imp in (args.imports or ()):
        lang.import_module(imp)

    roots = [lang.import_module_attr(spec) for spec in args.roots]
    if not roots:
        return

    graph = gen_class_dot(roots)
    dot.open_dot(dot.render(graph), sleep_s=1.)


if __name__ == '__main__':
    _main()
|