omdev 0.0.0.dev7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. omdev/__about__.py +35 -0
  2. omdev/__init__.py +0 -0
  3. omdev/amalg/__init__.py +0 -0
  4. omdev/amalg/__main__.py +4 -0
  5. omdev/amalg/amalg.py +513 -0
  6. omdev/classdot.py +61 -0
  7. omdev/cmake.py +164 -0
  8. omdev/exts/__init__.py +0 -0
  9. omdev/exts/_distutils/__init__.py +10 -0
  10. omdev/exts/_distutils/build_ext.py +367 -0
  11. omdev/exts/_distutils/compilers/__init__.py +3 -0
  12. omdev/exts/_distutils/compilers/ccompiler.py +1032 -0
  13. omdev/exts/_distutils/compilers/options.py +80 -0
  14. omdev/exts/_distutils/compilers/unixccompiler.py +385 -0
  15. omdev/exts/_distutils/dir_util.py +76 -0
  16. omdev/exts/_distutils/errors.py +62 -0
  17. omdev/exts/_distutils/extension.py +107 -0
  18. omdev/exts/_distutils/file_util.py +216 -0
  19. omdev/exts/_distutils/modified.py +47 -0
  20. omdev/exts/_distutils/spawn.py +103 -0
  21. omdev/exts/_distutils/sysconfig.py +349 -0
  22. omdev/exts/_distutils/util.py +201 -0
  23. omdev/exts/_distutils/version.py +308 -0
  24. omdev/exts/build.py +43 -0
  25. omdev/exts/cmake.py +195 -0
  26. omdev/exts/importhook.py +88 -0
  27. omdev/exts/scan.py +74 -0
  28. omdev/interp/__init__.py +1 -0
  29. omdev/interp/__main__.py +4 -0
  30. omdev/interp/cli.py +63 -0
  31. omdev/interp/inspect.py +105 -0
  32. omdev/interp/providers.py +67 -0
  33. omdev/interp/pyenv.py +353 -0
  34. omdev/interp/resolvers.py +76 -0
  35. omdev/interp/standalone.py +187 -0
  36. omdev/interp/system.py +125 -0
  37. omdev/interp/types.py +92 -0
  38. omdev/mypy/__init__.py +0 -0
  39. omdev/mypy/debug.py +86 -0
  40. omdev/pyproject/__init__.py +1 -0
  41. omdev/pyproject/__main__.py +4 -0
  42. omdev/pyproject/cli.py +319 -0
  43. omdev/pyproject/configs.py +97 -0
  44. omdev/pyproject/ext.py +107 -0
  45. omdev/pyproject/pkg.py +196 -0
  46. omdev/scripts/__init__.py +0 -0
  47. omdev/scripts/execrss.py +19 -0
  48. omdev/scripts/findimports.py +62 -0
  49. omdev/scripts/findmagic.py +70 -0
  50. omdev/scripts/interp.py +2118 -0
  51. omdev/scripts/pyproject.py +3584 -0
  52. omdev/scripts/traceimport.py +502 -0
  53. omdev/tokens.py +42 -0
  54. omdev/toml/__init__.py +1 -0
  55. omdev/toml/parser.py +823 -0
  56. omdev/toml/writer.py +104 -0
  57. omdev/tools/__init__.py +0 -0
  58. omdev/tools/dockertools.py +81 -0
  59. omdev/tools/sqlrepl.py +193 -0
  60. omdev/versioning/__init__.py +1 -0
  61. omdev/versioning/specifiers.py +531 -0
  62. omdev/versioning/versions.py +416 -0
  63. omdev-0.0.0.dev7.dist-info/LICENSE +21 -0
  64. omdev-0.0.0.dev7.dist-info/METADATA +24 -0
  65. omdev-0.0.0.dev7.dist-info/RECORD +67 -0
  66. omdev-0.0.0.dev7.dist-info/WHEEL +5 -0
  67. omdev-0.0.0.dev7.dist-info/top_level.txt +1 -0
@@ -0,0 +1,502 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ TODO:
4
+ - hoist first stacktrace file to a full field
5
+ - no psutil on lin / togglable on mac
6
+ - create table paths(path varchar(1024); - norm, dedupe, index, etc (bonus points for 32bit key)
7
+ - gviz
8
+
9
+ http://www.logilab.org/856
10
+ http://www.python.org/dev/peps/pep-0302/
11
+
12
+ start / end / cumulative / exclusive time / vm_rss / vm_vms
13
+
14
+ jq '..?|.loaded_name?|select(.!=null)'
15
+ """
16
+ # ruff: noqa: S608
17
+ import dataclasses as dc
18
+ import functools
19
+ import inspect
20
+ import logging
21
+ import operator
22
+ import os
23
+ import sys
24
+ import threading
25
+ import time
26
+ import types
27
+ import typing as ta
28
+
29
+
30
# psutil is an optional dependency: when it is missing, memory fields are
# simply left at their Stats defaults (see StatsFactory.__call__).
psutil: ta.Any
try:
    psutil = __import__('psutil')
except ImportError:
    psutil = None
35
+
36
+
37
+ def _late_import(m):
38
+ def get():
39
+ try:
40
+ return globals()[k]
41
+ except KeyError:
42
+ ret = globals()[k] = __import__(m)
43
+ return ret
44
+ k = '_' + m
45
+ return get
46
+
47
+
48
json = _late_import('json')  # call json() to obtain the lazily imported module


log = logging.getLogger(__name__)


REQUIRED_PYTHON_VERSION = (3, 8)  # minimum interpreter version, enforced in _main()
55
+
56
+
57
+ ##
58
+
59
+
60
def _json_dumps(o: ta.Any) -> str:
    """Serialize *o* as compact, single-line JSON (no extra whitespace)."""
    compact_separators = (',', ':')
    return json().dumps(o, indent=None, separators=compact_separators)
62
+
63
+
64
def _json_dumps_pretty(o: ta.Any) -> str:
    """Serialize *o* as human-readable JSON, indented two spaces."""
    pretty_separators = (', ', ': ')
    return json().dumps(o, indent=2, separators=pretty_separators)
66
+
67
+
68
+ ##
69
+
70
+
71
@dc.dataclass()
class Stats:
    """A point-in-time resource-usage sample supporting field-wise ``+``/``-``."""

    # Ordered field names; populated right after the class body is evaluated.
    ATTRS: ta.ClassVar[ta.Sequence[str]]

    time: float = 0.
    vm_rss: int = 0
    vm_vms: int = 0

    def _op(self, op, other):
        # Combine the two samples attribute-by-attribute with *op*.
        combined = [op(getattr(self, name), getattr(other, name)) for name in Stats.ATTRS]
        return Stats(*combined)

    def __add__(self, other):
        return self._op(operator.add, other)

    def __sub__(self, other):
        return self._op(operator.sub, other)
87
+
88
+
89
# Fill in ATTRS from the dataclass machinery (cannot be done inside the class body).
Stats.ATTRS = [f.name for f in dc.fields(Stats)]
90
+
91
+
92
class StatsFactory:
    """Produces Stats snapshots measured relative to a fixed start time."""

    def __init__(self, *, start_time: ta.Optional[float] = None) -> None:
        """
        Args:
            start_time: epoch seconds to measure elapsed time from; defaults to
                the moment the factory is constructed.
        """
        super().__init__()

        self._start_time = start_time if start_time is not None else time.time()

    def __call__(self) -> Stats:
        """Take a snapshot now; memory fields stay at 0 when psutil is absent."""
        kw: dict = {}

        if psutil is not None:
            mem = psutil.Process().memory_info()
            kw.update(
                vm_rss=mem.rss,
                vm_vms=mem.vms,
            )

        return Stats(
            time=time.time() - self._start_time,
            **kw,
        )

    # Maps Stats field name -> /proc/self/status field name.
    _PROC_MEM_KEYS_BY_FIELD: ta.ClassVar[ta.Mapping[str, str]] = {
        'vm_vms': 'VmSize',
        'vm_rss': 'VmRSS',
    }

    # Byte multipliers for the unit suffix reported by /proc/self/status.
    _PROC_MEM_SCALE: ta.ClassVar[ta.Mapping[str, float]] = {
        'kb': 1024.0,
        'mb': 1024.0 * 1024.0,
    }

    @classmethod
    def get_proc_status(cls) -> ta.Mapping[str, ta.Any]:
        """Read memory figures (in bytes) from /proc/self/status.

        Linux-only: raises FileNotFoundError elsewhere, and KeyError if an
        expected 'VmSize'/'VmRSS' field is missing from the status file.
        """
        with open('/proc/self/status', 'r') as status_file:
            status_block = status_file.read()

        # 'Field:\tvalue' lines -> {field: stripped value}. A line with no ':'
        # ends up keyed by the whole line with an empty value, which is harmless
        # since only the known fields below are looked up.
        status_fields = {
            field: value.strip()
            for line in status_block.split('\n')
            for field, _, value in [line.partition(':')]
        }

        status = {}

        for key, field in cls._PROC_MEM_KEYS_BY_FIELD.items():
            # Values look like '123456 kB': numeric part then unit.
            num, unit = status_fields[field].split()

            status[key] = int(float(num) * cls._PROC_MEM_SCALE[unit.lower()])

        return status
143
+
144
+
145
+ ##
146
+
147
+
148
@dc.dataclass()
class StackTraceEntry:
    """One caller frame recorded at import time (from inspect.stack() fields 1-3)."""

    file: str  # source filename of the frame
    line: int  # line number within the file
    func: str  # name of the function executing in the frame
153
+
154
+
155
@dc.dataclass()
class Node:
    """A single observed ``__import__`` call and the imports it triggered."""

    seq: ta.Optional[int] = None  # preorder sequence number, assigned by _fixup_node
    depth: int = 0  # distance from the traced root import

    children: ta.List['Node'] = dc.field(default_factory=list)  # imports made while loading this one

    # Arguments of the intercepted __import__ call.
    import_name: ta.Optional[str] = None
    import_fromlist: ta.Optional[ta.Iterable[str]] = None
    import_level: ta.Optional[int] = None

    pid: ta.Optional[int] = None  # os.getpid() at import time
    tid: ta.Optional[int] = None  # importing thread's ident

    stacktrace: ta.Optional[ta.Sequence[StackTraceEntry]] = None  # caller frames, excluding this file
    exception: ta.Optional[ta.Union[Exception, str]] = None  # error raised by the import, possibly repr()'d

    cached_id: ta.Optional[int] = None  # id() of a pre-existing sys.modules entry, if any

    # Results of a successful import.
    loaded_name: ta.Optional[str] = None
    loaded_file: ta.Optional[str] = None
    loaded_id: ta.Optional[int] = None

    start_stats: ta.Optional[Stats] = None  # snapshot just before the import
    end_stats: ta.Optional[Stats] = None  # snapshot just after (even on failure)

    stats: ta.Optional[Stats] = None  # end_stats - start_stats

    self_stats: ta.Optional[Stats] = None  # stats - child_stats
    child_stats: ta.Optional[Stats] = None  # sum of direct children's stats
185
+
186
+
187
class ImportTracer:
    """Traces a module import by temporarily hooking ``__builtins__.__import__``."""

    def __init__(self, *, stringify_fields: bool = False) -> None:
        """
        Args:
            stringify_fields: when True, non-serializable values (currently the
                captured exception) are stored as repr() strings.
        """
        super().__init__()

        self._stats_factory = StatsFactory()
        self._stringify_fields = stringify_fields

    def _stringify_value(self, o: ta.Any) -> ta.Any:
        if self._stringify_fields:
            return repr(o)
        else:
            return o

    def _fixup_node(
            self,
            node: Node,
            *,
            depth: int = 0,
            seq: int = 0,
    ) -> int:
        """Assign depth/seq in preorder and derive child/self stats; returns the last seq used."""
        node.depth = depth
        node.seq = seq
        node.child_stats = Stats()

        for child in node.children:
            seq = self._fixup_node(child, depth=depth + 1, seq=seq + 1)
            node.child_stats += child.stats  # type: ignore

        node.self_stats = node.stats - node.child_stats  # type: ignore
        return seq

    def trace(self, root_module: str) -> Node:
        """Import *root_module* under the hook and return its populated trace Node.

        The hook is always restored, even if the import raises; import errors
        are logged (and recorded on the node) rather than propagated.
        """
        # Sentinel root: real imports nest under node_stack[-1].
        node_stack = [Node()]

        def new_import(name, globals=None, locals=None, fromlist=(), level=0):  # noqa
            node = Node(
                import_name=name,
                import_fromlist=fromlist,
                import_level=level,

                pid=os.getpid(),
                tid=threading.current_thread().ident,

                # Drop frames from this tracer itself so only user code appears.
                stacktrace=[
                    StackTraceEntry(*s[1:4])
                    for s in inspect.stack()
                    if s[0].f_code.co_filename != __file__
                ],

                # Remember if the module was already loaded (cache hit).
                cached_id=id(sys.modules[name]) if name in sys.modules else None,

                start_stats=self._stats_factory(),
            )

            node_stack[-1].children.append(node)
            node_stack.append(node)

            try:
                loaded = old_import(name, globals, locals, fromlist, level)
                if not isinstance(loaded, types.ModuleType):
                    raise TypeError(loaded)  # noqa
                node.loaded_name = loaded.__name__
                node.loaded_id = id(loaded)
                node.loaded_file = getattr(loaded, '__file__', None)
                return loaded

            except Exception as ex:
                node.exception = self._stringify_value(ex)
                raise

            finally:
                # Stats and stack-pop happen whether the import succeeded or not.
                node.end_stats = self._stats_factory()
                node.stats = node.end_stats - node.start_stats
                if node_stack.pop() is not node:
                    raise RuntimeError(node_stack)

        # NOTE(review): __builtins__ is the builtins *module* only when this file
        # runs as __main__; in an imported module it is a dict and attribute
        # access here would fail — confirm this is only ever run as a script.
        old_import = __builtins__.__import__
        __builtins__.__import__ = new_import
        try:
            try:
                __import__(root_module, globals(), locals(), [], 0)
            except Exception:
                log.exception('root_module: %s', root_module)
        finally:
            __builtins__.__import__ = old_import

        # Exactly one top-level import must have been recorded under the sentinel.
        if len(node_stack) != 1 or len(node_stack[0].children) != 1:
            raise RuntimeError(node_stack)

        node = node_stack[0].children[0]
        self._fixup_node(node)
        return node
280
+
281
+
282
+ ##
283
+
284
+
285
sqlite3 = _late_import('sqlite3')  # call sqlite3() to obtain the lazily imported module
286
+
287
+
288
def sqlite_retrying(max_retries: int = 10):
    """Decorator factory retrying a callable on sqlite3.OperationalError.

    The wrapped function is attempted up to ``max_retries + 1`` times; each
    caught error is logged, and the final one is re-raised.
    """
    def outer(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            attempts = 0
            while True:
                try:
                    return fn(*args, **kwargs)
                except sqlite3().OperationalError:
                    if attempts >= max_retries:
                        raise
                    attempts += 1
                    log.exception('Sqlite error')
        return inner
    return outer
303
+
304
+
305
class SqliteWriter:
    """Persists an import-trace Node tree into a sqlite database.

    Use as a context manager: __enter__ opens the connection and creates the
    schema, write() inserts one trace tree, __exit__ releases the connection.
    """

    def __init__(self, db_path: str) -> None:
        """
        Args:
            db_path: filesystem path of the sqlite database (created if absent).
        """
        super().__init__()

        self._db_path = db_path

        self._table = self._DEFAULT_TABLE
        self._columns = self._build_columns(self._DEFAULT_COLUMNS)
        self._indexes = self._DEFAULT_INDEXES

    # Set in __enter__, cleared in __exit__ (deleted on failed __enter__).
    _conn: ta.Any

    _DEFAULT_TABLE = 'nodes'

    # Columns that exist in addition to those derived from Node's fields.
    _DEFAULT_COLUMNS: ta.ClassVar[ta.Sequence[ta.Tuple[str, str]]] = [
        ('root_id', 'int'),
        ('parent_id', 'int'),

        ('has_exception', 'int not null'),
    ]

    _DEFAULT_INDEXES: ta.ClassVar[ta.Sequence[str]] = [
        'root_id',
        'parent_id',

        'has_exception',

        'import_name',

        'loaded_name',
        'loaded_file',
    ]

    def _build_columns(
            self,
            base: ta.Iterable[ta.Tuple[str, str]],
    ) -> ta.Sequence[ta.Tuple[str, str]]:
        """Derive the table's (name, sql type) column list from Node's dataclass fields."""
        cols = list(base)
        for f in dc.fields(Node):
            if f.type in (str, ta.Optional[str]):
                cols.append((f.name, 'text'))
            elif f.type in (int, ta.Optional[int]):
                cols.append((f.name, 'int'))
            elif f.name == 'children':
                # Children become their own rows (see _insert_node), not a column.
                continue
            elif f.type in (Stats, ta.Optional[Stats]):
                # Flatten Stats into three columns; 'start_stats' -> 'start_*',
                # the plain 'stats' field gets unprefixed column names.
                pfx = f.name[:-5] if f.name != 'stats' else ''
                cols.extend([
                    (pfx + 'time', 'real'),
                    (pfx + 'vm_rss', 'int'),
                    (pfx + 'vm_vms', 'int'),
                ])
            else:
                cols.append((f.name, 'text'))  # json
        return cols

    @sqlite_retrying()
    def _init_db(self, cursor: ta.Any) -> None:
        """Create the nodes table and its indexes if they do not already exist."""
        cols = ', '.join(f'{n} {t}' for n, t in self._columns)
        stmt = f'create table if not exists {self._table} ({cols});'
        cursor.execute(stmt)

        for c in self._indexes:
            cursor.execute(f'create index if not exists {self._table}_by_{c} on {self._table} ({c});')

    def _insert_node(
            self,
            cursor: ta.Any,
            node: Node,
            *,
            root_id: ta.Optional[int] = None,
            parent_id: ta.Optional[int] = None,
    ) -> int:
        """Insert *node* and, recursively, its children; returns the number of rows inserted.

        root_id/parent_id are None only for the tree's root row, whose root_id
        is backfilled with its own rowid after insertion.
        """
        cols: ta.List[ta.Tuple[str, ta.Any]] = [
            ('root_id', root_id),
            ('parent_id', parent_id),

            ('has_exception', int(node.exception is not None)),
        ]

        # Mirror _build_columns' per-field handling when collecting values.
        for f in dc.fields(Node):
            v = getattr(node, f.name)
            if f.type in (str, ta.Optional[str], int, ta.Optional[int]):
                cols.append((f.name, v))
            elif f.name == 'children':
                continue
            elif f.type in (Stats, ta.Optional[Stats]):
                pfx = f.name[:-5] if f.name != 'stats' else ''
                cols.extend((pfx + a, getattr(v, a)) for a in Stats.ATTRS)
            elif f.name == 'stacktrace':
                cols.append((f.name, _json_dumps([dc.asdict(e) for e in v])))
            else:
                cols.append((f.name, _json_dumps(v)))

        # Column names come from Node's field names / class constants, not user
        # input; values go through parameter binding.
        stmt = f'insert into {self._table} ({", ".join(k for k, v in cols)}) values ({", ".join("?" for _ in cols)});'

        cursor.execute(stmt, [v for k, v in cols])
        row_id = cursor.lastrowid

        if root_id is None:
            # Root row: its root_id is its own rowid, so both placeholders bind it.
            root_id = row_id
            cursor.execute(f'update {self._table} set root_id = ? where rowid = ?;', (root_id, root_id))

        return 1 + sum(
            self._insert_node(
                cursor,
                child,
                root_id=root_id,
                parent_id=row_id,
            )
            for child in (node.children or [])
        )

    @sqlite_retrying()
    def _write_node(self, node: Node) -> None:
        """Insert a whole trace tree (rooted at *node*) in one committed transaction."""
        # Only a fixed-up tree root (seq 0 from _fixup_node) may be written.
        if node.seq != 0:
            raise ValueError(node.seq)

        cursor = self._conn.cursor()
        try:
            node_count = self._insert_node(cursor, node)

            cursor.execute(f'select count(*) from {self._table};')
            total_node_count, = cursor.fetchone()

            cursor.execute(f'select count(*) from {self._table} where parent_id is null;')
            total_root_count, = cursor.fetchone()

            self._conn.commit()

            log.info(
                '%s: %d import trace nodes (db: %d roots, %d total)',
                node.import_name,
                node_count,
                total_root_count,
                total_node_count,
            )

        finally:
            cursor.close()

    @sqlite_retrying()
    def __enter__(self: 'SqliteWriter') -> 'SqliteWriter':
        """Open the connection and initialize the schema; undone on failure."""
        log.info('initializing database %s', self._db_path)

        try:
            self._conn = sqlite3().connect(self._db_path, isolation_level='immediate', timeout=20)

            cursor = self._conn.cursor()
            try:
                self._init_db(cursor)
                self._conn.commit()
            finally:
                cursor.close()

            return self

        except Exception:
            # Leave the instance as if __enter__ had never run.
            del self._conn
            raise

    def __exit__(self, *exc_info) -> None:
        # NOTE(review): the connection is dropped without close(); closing
        # appears to be left to GC — confirm this is intentional.
        self._conn = None

        log.info('done with database %s', self._db_path)

    def write(self, node: Node) -> None:
        """Persist one fixed-up trace tree rooted at *node*."""
        return self._write_node(node)
474
+
475
+
476
+ ##
477
+
478
+
479
def _main() -> None:
    """CLI entry point: trace the imports of a module, emit JSON or write sqlite."""
    if sys.version_info < REQUIRED_PYTHON_VERSION:
        raise EnvironmentError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa

    import argparse

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--sqlite')
    arg_parser.add_argument('--pretty', action='store_true')
    arg_parser.add_argument('mod')
    args = arg_parser.parse_args()

    traced = ImportTracer(stringify_fields=True).trace(args.mod)

    if not args.sqlite:
        dump = _json_dumps_pretty if args.pretty else _json_dumps
        print(dump(dc.asdict(traced)))
    else:
        with SqliteWriter(args.sqlite) as writer:
            writer.write(traced)
499
+
500
+
501
# Script entry point.
if __name__ == '__main__':
    _main()
omdev/tokens.py ADDED
@@ -0,0 +1,42 @@
1
+ import itertools
2
+ import typing as ta
3
+
4
+ from omlish import lang
5
+
6
+
7
+ if ta.TYPE_CHECKING:
8
+ import tokenize_rt as trt
9
+ else:
10
+ trt = lang.proxy_import('tokenize_rt')
11
+
12
+
13
Tokens: ta.TypeAlias = ta.Sequence['trt.Token']  # a run of tokenize_rt tokens
14
+
15
+
16
+ ##
17
+
18
+
19
# tokenize_rt token names treated as layout-only (whitespace-ish) by is_ws().
WS_NAMES = ('UNIMPORTANT_WS', 'NEWLINE', 'COMMENT')
20
+
21
+
22
def is_ws(tok: 'trt.Token') -> bool:
    """Report whether *tok* is layout-only (see WS_NAMES)."""
    name = tok.name
    return name in WS_NAMES
24
+
25
+
26
def ignore_ws(toks: ta.Iterable['trt.Token']) -> ta.Iterable['trt.Token']:
    """Yield only the tokens of *toks* that are not whitespace-ish."""
    for tok in toks:
        if not is_ws(tok):
            yield tok
28
+
29
+
30
+ ##
31
+
32
+
33
def split_lines(ts: Tokens) -> list[Tokens]:
    """Partition *ts* into runs of consecutive tokens sharing the same ``line``.

    Tokens are assumed already ordered by source position, as groupby only
    merges adjacent equal keys.
    """
    # '_' for the unused group key (was a dead local named 'g').
    return [list(grp) for _, grp in itertools.groupby(ts, lambda t: t.line)]
35
+
36
+
37
def join_toks(ts: Tokens) -> str:
    """Concatenate the source text of every token in *ts*."""
    srcs = (tok.src for tok in ts)
    return ''.join(srcs)
39
+
40
+
41
def join_lines(ls: ta.Iterable[Tokens]) -> str:
    """Render each token line back to text and concatenate the results."""
    return ''.join(join_toks(line) for line in ls)
omdev/toml/__init__.py ADDED
@@ -0,0 +1 @@
1
+ # @omlish-lite