omdev 0.0.0.dev163__py3-none-any.whl → 0.0.0.dev164__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of omdev might be problematic. Click here for more details.

omdev/amalg/__main__.py CHANGED
@@ -6,6 +6,6 @@ _CLI_MODULE = CliModule('amalg', __name__)
6
6
 
7
7
 
8
8
if __name__ == '__main__':
    # Import moved from .amalg to .main in this release (module was renamed).
    from .main import _main

    _main()
omdev/amalg/gen.py ADDED
@@ -0,0 +1,212 @@
1
+ import io
2
+ import os.path
3
+ import textwrap
4
+ import typing as ta
5
+
6
+ from omlish import cached
7
+ from omlish import collections as col
8
+ from omlish import lang
9
+ from omlish.lite.runtime import LITE_REQUIRED_PYTHON_VERSION
10
+
11
+ from .. import tokens as tks
12
+ from .srcfiles import SrcFile
13
+ from .srcfiles import make_src_file
14
+ from .strip import strip_main_lines
15
+ from .types import Tokens
16
+ from .typing import Typing
17
+
18
+
19
+ ##
20
+
21
+
22
# Banner line written between major sections of the generated amalgamated output.
SECTION_SEP = '#' * 40 + '\n'

# Ruff rules always disabled in generated output - the amalgamation deliberately targets older python syntax
# (typing.X generics, Union syntax, version blocks), which these rules would flag.
RUFF_DISABLES: ta.AbstractSet[str] = {
    'UP006',  # non-pep585-annotation
    'UP007',  # non-pep604-annotation
    'UP036',  # outdated-version-block
}

# Marker comment written into generated files, recording the source entrypoint they came from.
OUTPUT_COMMENT = '# @omlish-amalg-output '
# Marker comment scanned for in source files; its argument is the output path to generate.
SCAN_COMMENT = '# @omlish-amalg '
32
+
33
+
34
class AmalgGenerator:
    """
    Generates a single-file 'amalgamation' from a main python source file plus the transitive closure of its
    locally-resolvable imports.
    """

    def __init__(
            self,
            main_path: str,
            *,
            mounts: ta.Mapping[str, str],
            output_dir: str | None = None,
    ) -> None:
        super().__init__()

        self._main_path = main_path  # path to the entrypoint source file
        self._mounts = mounts  # top-level package name -> filesystem root, used to resolve imports to files
        self._output_dir = output_dir  # where output will be written; affects the header's relative source path

    @cached.function
    def _src_files(self) -> dict[str, SrcFile]:
        """Parse the main file and, transitively, every import that resolves to a local file, keyed by path."""

        src_files: dict[str, SrcFile] = {}
        todo = [self._main_path]
        while todo:
            src_path = todo.pop()
            if src_path in src_files:
                continue

            f = make_src_file(
                src_path,
                mounts=self._mounts,
            )
            src_files[src_path] = f

            # Only imports resolved to a local file path (mod_path is not None) are followed.
            for imp in f.imports:
                if (mp := imp.mod_path) is not None:
                    todo.append(mp)

        return src_files

    @cached.function
    def _main_file(self) -> SrcFile:
        # The entrypoint's parsed form - always present in _src_files().
        return self._src_files()[self._main_path]

    @cached.function
    def _header_lines(self) -> list[Tokens]:
        """
        Build the output header: shebang and marker comments, a ruff-noqa line, then the main file's own header
        lines (minus the scan directive).

        NOTE(review): the annotated return type is list[Tokens], but the values built here are joined strings -
        the annotation looks stale; confirm against callers.
        """

        header_lines = []

        if self._main_file().header_lines:
            # Drop the '# @omlish-amalg ...' scan directive - meaningless in generated output.
            header_lines.extend([
                hl
                for hlts in self._main_file().header_lines
                if not (hl := tks.join_toks(hlts)).startswith(SCAN_COMMENT)
            ])

        # Source path recorded in the output comment, relative to the output dir when one is known.
        if self._output_dir is not None:
            ogf = os.path.relpath(self._main_path, self._output_dir)
        else:
            ogf = os.path.basename(self._main_path)

        additional_header_lines = [
            '#!/usr/bin/env python3\n',
            '# noinspection DuplicatedCode\n',
            '# @omlish-lite\n',
            '# @omlish-script\n',
            f'{OUTPUT_COMMENT.strip()} {ogf}\n',
        ]

        # Union of every source file's own noqa set plus the always-on disables.
        ruff_disables = sorted({
            *lang.flatten(f.ruff_noqa for f in self._src_files().values()),
            *RUFF_DISABLES,
        })
        if ruff_disables:
            # (the inner sorted() is redundant - ruff_disables is already sorted - but harmless)
            additional_header_lines.append(f'# ruff: noqa: {" ".join(sorted(ruff_disables))}\n')

        return [*additional_header_lines, *header_lines]

    @cached.function
    def gen_amalg(self) -> str:
        """Render the full amalgamated source: header, merged imports, version check, typings, then file bodies."""

        src_files = self._src_files()

        ##

        out = io.StringIO()

        ##

        out.write(''.join(self._header_lines()))

        ##

        # Merge and deduplicate all non-local ('global') imports across every source file.
        all_imps = [i for f in src_files.values() for i in f.imports]
        gl_imps = [i for i in all_imps if i.mod_path is None]

        dct: dict = {
            ('sys', None, None): ['import sys\n'],  # always needed for the version check emitted below
        }
        if any(sf.has_binary_resources for sf in src_files.values()):
            dct[('base64', None, None)] = ['import base64\n']  # needed to decode inlined binary resources
        for imp in gl_imps:
            dct.setdefault((imp.mod, imp.item, imp.as_), []).append(imp)
        # NOTE(review): sorting keys whose item/as_ slots mix None and str could raise TypeError if the same module
        # appears in both plain and 'from' forms - confirm inputs preclude that.
        for _, l in sorted(dct.items()):
            il = l[0]
            # Entries are either a literal line (the seeds above) or an Import whose tokens are re-joined.
            out.write(il if isinstance(il, str) else tks.join_toks(il.toks))
        if dct:
            out.write('\n\n')

        ##

        out.write(SECTION_SEP)
        out.write('\n\n')

        # Emit a runtime interpreter-version check into the generated script.
        version_check_fail_msg = (
            f'Requires python {LITE_REQUIRED_PYTHON_VERSION!r}, '
            f'got {{sys.version_info}} from {{sys.executable}}'
        )
        out.write(textwrap.dedent(f"""
        if sys.version_info < {LITE_REQUIRED_PYTHON_VERSION!r}:
            raise OSError(f{version_check_fail_msg!r})  # noqa
        """).lstrip())
        out.write('\n\n')

        ##

        # Topologically sort files by local-import dependencies, flattening each level sorted by path.
        ts = list(col.toposort({  # noqa
            f.path: {mp for i in f.imports if (mp := i.mod_path) is not None}
            for f in src_files.values()
        }))
        sfs = [sf for ss in ts for sf in sorted(ss)]

        ##

        # Hoist deduplicated typing aliases into one section, attributed to the first file declaring each.
        tyd: dict[str, list[Typing]] = {}
        tys = set()
        for sf in sfs:
            f = src_files[sf]
            for ty in f.typings:
                if ty.src not in tys:
                    tyd.setdefault(f.path, []).append(ty)
                    tys.add(ty.src)
        if tys:
            out.write(SECTION_SEP)
            out.write('\n\n')
        for i, (sf, ftys) in enumerate(tyd.items()):
            f = src_files[sf]
            if i:
                out.write('\n')
            # Label the group with the declaring file, relative to the main file's directory.
            if f is not self._main_file():
                rp = os.path.relpath(f.path, os.path.dirname(self._main_file().path))
            else:
                rp = os.path.basename(f.path)
            out.write(f'# {rp}\n')
            for ty in ftys:
                out.write(ty.src)
        if tys:
            out.write('\n\n')

        ##

        # Emit each file's body in dependency order, separated by section banners.
        main_file = self._main_file()
        for i, sf in enumerate(sfs):
            f = src_files[sf]
            out.write(SECTION_SEP)
            if f is not main_file:
                # NOTE(review): the typings section above computes relpath against dirname(main path), while this
                # uses main_file.path itself as the base - confirm the asymmetry is intentional.
                rp = os.path.relpath(f.path, main_file.path)
            else:
                rp = os.path.basename(f.path)
            out.write(f'# {rp}\n')
            if f is not main_file and f.header_lines:
                out.write(tks.join_lines(f.header_lines))
            out.write(f'\n\n')  # (f-string with no placeholders - kept as-is)
            cls = f.content_lines
            if f is not main_file:
                # Strip entrypoint ('if __name__ ...' style) lines from non-main files.
                cls = strip_main_lines(cls)
            sf_src = tks.join_lines(cls)
            out.write(sf_src.strip())
            if i < len(sfs) - 1:
                out.write('\n\n\n')
            else:
                out.write('\n')

        ##

        return out.getvalue()
omdev/amalg/imports.py ADDED
@@ -0,0 +1,98 @@
1
+ import dataclasses as dc
2
+ import os.path
3
+ import typing as ta
4
+
5
+ from omlish import check
6
+
7
+ from .. import tokens as tks
8
+ from .types import Tokens
9
+
10
+
11
+ ##
12
+
13
+
14
@dc.dataclass(frozen=True, kw_only=True)
class Import:
    """A single parsed import statement from one source file."""

    mod: str  # dotted module name; NOTE(review): for relative imports make_import appears to rewrite this to a path
    item: str | None  # the 'from X import item' target, None for plain 'import X'
    as_: str | None  # alias from an 'as' clause, if any

    src_path: str  # path of the file containing the import
    line: int  # line of the statement's first token

    mod_path: str | None  # resolved filesystem path for mounted/relative modules, None for global imports

    toks: Tokens = dc.field(repr=False)  # raw token run of the statement (excluded from repr for brevity)
26
+
27
+
28
def make_import(
        lts: Tokens,
        *,
        src_path: str,
        mounts: ta.Mapping[str, str],
) -> Import | None:
    """
    Parse one logical line of tokens into an Import, or return None if the line is not an import statement.

    Resolution: mounted top-level packages map to files under the mount root; relative imports resolve against
    src_path; anything else is a global import (mod_path None).
    """

    if not lts:
        return None
    ft = lts[0]

    # Only lines starting with 'import' or 'from' are import statements.
    if ft.name != 'NAME' or ft.src not in ('import', 'from'):
        return None

    ml = []  # tokens of the module part
    il: list[str] | None = None  # tokens after 'import' in a 'from' form; None for plain 'import'
    as_ = None
    for tok in (it := iter(tks.ignore_ws(lts[1:]))):
        if tok.name in ('NAME', 'OP'):
            if tok.src == 'as':
                check.none(as_)  # at most one alias per line
                nt = next(it)
                check.equal(nt.name, 'NAME')
                as_ = nt.src
            elif tok.src == 'import':
                check.equal(ft.src, 'from')  # bare 'import' keyword mid-line only valid in 'from X import Y'
                il = []
            elif il is not None:
                il.append(tok.src)
            else:
                ml.append(tok.src)
        else:
            # Anything but NAME/OP (strings, numbers, ...) cannot appear in a supported import line.
            raise Exception(tok)

    mod = ''.join(ml)
    item = ''.join(il) if il is not None else None

    if (mnt := mounts.get(mod.partition('.')[0])) is not None:
        # Mounted package: map dotted path to a .py file under the mount root.
        ps = mod.split('.')
        mod_path = os.path.abspath(os.path.join(
            mnt,
            *ps[1:-1],
            ps[-1] + '.py',
        ))

    elif not mod.startswith('.'):
        # Global (non-local) import - not followed by the amalgamator.
        mod_path = None

    else:
        # Relative import: each leading dot produces an empty split part; nd counts them to walk up directories.
        parts = mod.split('.')
        nd = len(parts) - parts[::-1].index('')
        mod_path = os.path.abspath(os.path.join(
            os.path.dirname(src_path),
            '../' * (nd - 1),
            *parts[nd:-1],
            parts[-1] + '.py',
        ))

        # NOTE(review): rewrites mod to the resolved file path for relative imports (mod_path is a str here);
        # placement inside this branch inferred from the diff rendering - confirm against the original file.
        mod = check.isinstance(mod_path, str)

    return Import(
        mod=mod,
        item=item,
        as_=as_,

        src_path=src_path,
        line=ft.line,

        mod_path=mod_path,

        toks=lts,
    )
omdev/amalg/main.py ADDED
@@ -0,0 +1,154 @@
1
+ """
2
+ Conventions:
3
+ - must import whole global modules, if aliased must all match
4
+ - must import 'from' items for local modules
5
+
6
+ TODO:
7
+ - !! check only importing lite code
8
+ - !! strip manifests? or relegate them to a separate tiny module ala __main__.py?
9
+ - # @omlish-no-amalg ? in cli.types? will strip stmt (more than 1 line) following @manifest, so shouldn't import
10
+ - more sanity checks lol
11
+ - typealias - support # noqa, other comments, and lamely support multiline by just stealing lines till it parses
12
+ - remove `if __name__ == '__main__':` blocks - thus, convention: no def _main() for these
13
+
14
+ See:
15
+ - https://github.com/xonsh/amalgamate - mine is for portability not speed, and doesn't try to work on unmodified code
16
+
17
+ Targets:
18
+ - interp
19
+ - pyproject
20
+ - precheck
21
+ - build
22
+ - pyremote
23
+ - bootstrap
24
+ - deploy
25
+ - supervisor?
26
+ """
27
+ import argparse
28
+ import logging
29
+ import os.path
30
+ import typing as ta
31
+
32
+ from omlish import check
33
+ from omlish.logs import all as logs
34
+
35
+ from .gen import SCAN_COMMENT
36
+ from .gen import AmalgGenerator
37
+
38
+
39
log = logging.getLogger(__name__)  # module-level logger; handlers are configured in _main()
40
+
41
+
42
+ ##
43
+
44
+
45
def _gen_one(
        input_path: str,
        output_path: str | None,
        *,
        mounts: ta.Mapping[str, str],
) -> None:
    """
    Amalgamate a single entrypoint file.

    Writes the result to output_path (preserving the input file's mode bits), or prints it to stdout when
    output_path is None.
    """

    log.info('Generating: %s -> %s', input_path, output_path)

    base_dir = os.path.dirname(input_path if output_path is None else output_path)
    src = AmalgGenerator(
        input_path,
        mounts=mounts,
        output_dir=base_dir,
    ).gen_amalg()

    if output_path is None:
        print(src)
    else:
        with open(output_path, 'w') as f:
            f.write(src)
        # Mirror the source file's permissions (e.g. the executable bit) onto the output.
        os.chmod(output_path, os.stat(input_path).st_mode)
66
+
67
+
68
def _scan_one(
        input_path: str,
        **kwargs: ta.Any,
) -> None:
    """
    Scan one file for '# @omlish-amalg <output>' directives and generate an amalgamation for each.

    Non-python files are ignored. Each directive takes exactly one argument: the output path, relative to the
    scanned file's directory.
    """

    if not input_path.endswith('.py'):
        return

    with open(input_path) as f:
        src = f.read()

    for sl in src.splitlines():
        if not sl.startswith(SCAN_COMMENT):
            continue

        sas = sl[len(SCAN_COMMENT):].split()
        if len(sas) != 1:
            raise Exception(f'Invalid scan args: {input_path=} {sas=}')

        output_path = os.path.abspath(os.path.join(os.path.dirname(input_path), sas[0]))
        _gen_one(
            input_path,
            output_path,
            **kwargs,
        )
90
+
91
+
92
def _gen_cmd(args) -> None:
    """
    Handle the 'gen' subcommand.

    Directories among args.inputs are recursively scanned for scan-comment directives; plain files are
    amalgamated directly, into --output (if given) or stdout. Must be run from the project root.
    """

    if not os.path.isfile('pyproject.toml'):
        raise Exception('Not in project root')

    # Parse mount specs of the form 'name' or 'name:path'. partition() is used instead of split(':') so a value
    # containing further colons no longer raises a tuple-unpacking ValueError - everything after the first ':'
    # is treated as the path. A bare 'name' mounts the package at ./name.
    mounts = {}
    for m in args.mounts or ():
        k, sep, v = m.partition(':')
        mounts[k] = os.path.abspath(v if sep else k)

    for i in args.inputs:
        if os.path.isdir(i):
            # Directory: walk it and process any file carrying a SCAN_COMMENT directive.
            log.info('Scanning %s', i)
            for we_dirpath, we_dirnames, we_filenames in os.walk(i):  # noqa
                for fname in we_filenames:
                    _scan_one(
                        os.path.abspath(os.path.join(we_dirpath, fname)),
                        mounts=mounts,
                    )

        else:
            # Single file: write into --output dir under the same basename, or stdout when no dir is given.
            output_dir = args.output
            if output_dir is not None:
                output_path = check.isinstance(os.path.join(output_dir, os.path.basename(i)), str)
            else:
                output_path = None

            _gen_one(
                os.path.abspath(i),
                output_path,
                mounts=mounts,
            )
126
+
127
+
128
def _build_parser() -> argparse.ArgumentParser:
    """Construct the CLI argument parser: a single 'gen' subcommand dispatching to _gen_cmd."""

    root = argparse.ArgumentParser()

    subs = root.add_subparsers()

    gen = subs.add_parser('gen')
    gen.add_argument('--mount', '-m', dest='mounts', action='append')
    gen.add_argument('--output', '-o')
    gen.add_argument('inputs', nargs='+')
    gen.set_defaults(func=_gen_cmd)

    return root
140
+
141
+
142
def _main() -> None:
    """CLI entry point: configure logging, parse args, and dispatch to the selected subcommand."""

    logs.configure_standard_logging('INFO')

    parser = _build_parser()
    ns = parser.parse_args()

    # Subparsers set 'func'; with no subcommand chosen it is absent, so show usage instead.
    if (fn := getattr(ns, 'func', None)):
        fn(ns)
    else:
        parser.print_help()
151
+
152
+
153
# Script entry point.
if __name__ == '__main__':
    _main()
@@ -0,0 +1,49 @@
1
+ import ast
2
+
3
+ import tokenize_rt as trt
4
+
5
+ from omlish import check
6
+
7
+ from .. import tokens as tks
8
+ from .types import Tokens
9
+
10
+
11
+ ##
12
+
13
+
14
def is_manifest_comment(line: Tokens) -> bool:
    """Report whether a token line begins with a '# @omlish-manifest' marker comment."""

    if not line:
        return False

    first = line[0]
    return first.name == 'COMMENT' and first.src.startswith('# @omlish-manifest')
22
+
23
+
24
def comment_out_manifest_comment(
        line: Tokens,
        cls: list[Tokens],
        i: int,
) -> tuple[list[Tokens], int]:
    """
    Comment out the manifest-marked assignment starting at *line*.

    Consumes lines from cls starting at index i until the accumulated text parses as a single assignment
    (manifest targets may span multiple lines), then returns the commented-out replacement lines and the index
    just past the consumed ones.
    """

    mls = [line]
    while True:
        # NOTE(review): if the remaining lines never parse as an assignment, this runs off the end of cls and
        # raises IndexError rather than a descriptive error - confirm inputs preclude that.
        mls.append(cls[i])
        i += 1

        msrc = tks.join_lines(mls).strip()
        try:
            node = ast.parse(msrc)
        except SyntaxError:
            # Statement still incomplete - keep pulling lines.
            continue

        # Must be exactly one top-level assignment.
        mmod = check.isinstance(node, ast.Module)
        check.isinstance(check.single(mmod.body), ast.Assign)
        break

    # Re-emit every consumed line as a single COMMENT token prefixed with '# '.
    out: list[Tokens] = [
        [trt.Token('COMMENT', '# ' + tks.join_toks(ml))]
        for ml in mls
    ]

    return out, i
@@ -0,0 +1,99 @@
1
+ import ast
2
+ import base64
3
+ import itertools
4
+ import os.path
5
+ import typing as ta
6
+
7
+ import tokenize_rt as trt
8
+
9
+ from .. import tokens as tks
10
+ from .types import Tokens
11
+
12
+
13
+ ##
14
+
15
+
16
class RootLevelResourcesRead(ta.NamedTuple):
    """A recognized root-level 'read_package_resource_*' assignment, extracted from a token line."""

    variable: str  # name the resource content is assigned to
    kind: ta.Literal['binary', 'text']  # which reader was called
    resource: str  # resource filename passed to the reader
20
+
21
+
22
def is_root_level_resources_read(lts: Tokens) -> RootLevelResourcesRead | None:
    """
    Recognize a root-level `<var> = read_package_resource_binary/text(__package__, '<name>')` line,
    returning its parsed form, or None if the line does not match.
    """

    toks = list(tks.ignore_ws(lts, keep=['INDENT']))

    pattern = [
        ('NAME', None),
        ('OP', '='),
        ('NAME', ('read_package_resource_binary', 'read_package_resource_text')),
        ('OP', '('),
        ('NAME', '__package__'),
        ('OP', ','),
        ('STRING', None),
        ('OP', ')'),
    ]
    if not tks.match_toks(toks, pattern):
        return None

    if toks[2].src == 'read_package_resource_binary':
        kind: ta.Literal['binary', 'text'] = 'binary'
    else:
        kind = 'text'

    return RootLevelResourcesRead(
        toks[0].src,
        kind,
        ast.literal_eval(toks[6].src),
    )
42
+
43
+
44
+ ##
45
+
46
+
47
def build_resource_lines(
        rsrc: RootLevelResourcesRead,
        path: str,
) -> list[Tokens]:
    """
    Build replacement token lines inlining the contents of the resource file referenced by *rsrc*.

    Binary resources become a base64.b64decode(...) call over chunked string literals; text resources become a
    triple-quoted string literal. *path* is the source file the resource is resolved relative to. Raises
    ValueError for an unrecognized kind.
    """

    rf = os.path.join(os.path.dirname(path), rsrc.resource)

    if rsrc.kind == 'binary':
        with open(rf, 'rb') as bf:
            rb = bf.read()  # noqa

        # '<var> = base64.b64decode(' opening line.
        out: list[Tokens] = [[
            trt.Token(name='NAME', src=rsrc.variable),
            trt.Token(name='UNIMPORTANT_WS', src=' '),
            trt.Token(name='OP', src='='),
            trt.Token(name='UNIMPORTANT_WS', src=' '),
            trt.Token(name='NAME', src='base64'),
            trt.Token(name='OP', src='.'),
            trt.Token(name='NAME', src='b64decode'),
            trt.Token(name='OP', src='('),
            trt.Token(name='NL', src='\n'),
        ]]

        # One implicitly-concatenated string literal per line, 96 base64 chars each, to keep lines short.
        rb64 = base64.b64encode(rb).decode('ascii')
        for chunk in itertools.batched(rb64, 96):
            out.append([
                trt.Token(name='UNIMPORTANT_WS', src=' '),
                trt.Token(name='STRING', src=f"'{''.join(chunk)}'"),
                trt.Token(name='NL', src='\n'),
            ])

        # Closing ')' of the b64decode call.
        out.append([
            trt.Token(name='OP', src=')'),
            trt.Token(name='NEWLINE', src='\n'),
        ])

        return out

    elif rsrc.kind == 'text':
        with open(rf) as tf:
            rt = tf.read()  # noqa
        rt = rt.replace('\\', '\\\\')  # Escape backslashes
        rt = rt.replace('"""', r'\"\"\"')
        # NOTE(review): only backslashes and '"""' are escaped; a resource ending in a lone '"' would still break
        # the generated literal - confirm inputs preclude that.
        return [[
            trt.Token(name='NAME', src=rsrc.variable),
            trt.Token(name='UNIMPORTANT_WS', src=' '),
            trt.Token(name='OP', src='='),
            trt.Token(name='UNIMPORTANT_WS', src=' '),
            trt.Token(name='STRING', src=f'"""\\\n{rt}"""\n'),
            trt.Token(name='NEWLINE', src=''),
        ]]

    else:
        raise ValueError(rsrc.kind)