omlish 0.0.0.dev1__py3-none-any.whl → 0.0.0.dev2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of omlish might be problematic. Click here for more details.
- omlish/__about__.py +2 -2
- omlish/argparse.py +4 -4
- omlish/asyncs/anyio.py +62 -1
- omlish/asyncs/futures.py +6 -6
- omlish/c3.py +4 -4
- omlish/check.py +6 -6
- omlish/collections/__init__.py +98 -63
- omlish/collections/cache/descriptor.py +5 -5
- omlish/collections/cache/impl.py +4 -4
- omlish/collections/coerce.py +43 -43
- omlish/collections/frozen.py +3 -3
- omlish/collections/identity.py +1 -1
- omlish/collections/mappings.py +3 -3
- omlish/collections/ordered.py +1 -1
- omlish/collections/skiplist.py +6 -6
- omlish/collections/sorted.py +3 -3
- omlish/collections/treap.py +17 -17
- omlish/collections/treapmap.py +2 -2
- omlish/collections/unmodifiable.py +28 -27
- omlish/configs/flattening.py +1 -1
- omlish/configs/props.py +1 -1
- omlish/dataclasses/impl/__init__.py +2 -0
- omlish/dataclasses/impl/api.py +5 -13
- omlish/dataclasses/impl/fields.py +1 -1
- omlish/dataclasses/impl/init.py +1 -1
- omlish/dataclasses/impl/internals.py +15 -0
- omlish/dataclasses/impl/main.py +4 -4
- omlish/dataclasses/impl/metaclass.py +1 -1
- omlish/dataclasses/impl/metadata.py +1 -1
- omlish/dataclasses/impl/order.py +1 -1
- omlish/dataclasses/impl/params.py +4 -38
- omlish/dataclasses/impl/reflect.py +1 -7
- omlish/dataclasses/impl/repr.py +23 -5
- omlish/dataclasses/impl/simple.py +2 -2
- omlish/dataclasses/impl/slots.py +2 -2
- omlish/dataclasses/impl/utils.py +4 -4
- omlish/dispatch/dispatch.py +9 -8
- omlish/dispatch/methods.py +2 -2
- omlish/docker.py +8 -6
- omlish/dynamic.py +5 -5
- omlish/graphs/dot/items.py +1 -1
- omlish/graphs/trees.py +15 -21
- omlish/inject/elements.py +1 -1
- omlish/inject/exceptions.py +1 -1
- omlish/inject/impl/injector.py +1 -1
- omlish/inject/impl/inspect.py +1 -1
- omlish/inject/injector.py +1 -1
- omlish/inject/providers.py +2 -2
- omlish/iterators.py +43 -2
- omlish/lang/__init__.py +167 -112
- omlish/lang/cached.py +13 -5
- omlish/lang/classes/__init__.py +35 -24
- omlish/lang/classes/abstract.py +1 -1
- omlish/lang/classes/simple.py +1 -1
- omlish/lang/clsdct.py +1 -1
- omlish/lang/contextmanagers.py +23 -15
- omlish/lang/datetimes.py +1 -1
- omlish/lang/descriptors.py +35 -2
- omlish/lang/exceptions.py +2 -0
- omlish/lang/functions.py +43 -13
- omlish/lang/imports.py +8 -8
- omlish/lang/iterables.py +1 -1
- omlish/lang/maybes.py +1 -1
- omlish/lang/objects.py +2 -2
- omlish/lang/timeouts.py +53 -0
- omlish/lang/typing.py +2 -2
- omlish/libc.py +6 -6
- omlish/marshal/base.py +6 -6
- omlish/marshal/dataclasses.py +2 -2
- omlish/marshal/enums.py +2 -2
- omlish/marshal/factories.py +10 -10
- omlish/marshal/iterables.py +2 -2
- omlish/marshal/mappings.py +2 -2
- omlish/marshal/optionals.py +4 -4
- omlish/marshal/polymorphism.py +4 -4
- omlish/marshal/standard.py +6 -6
- omlish/marshal/utils.py +1 -1
- omlish/os.py +13 -4
- omlish/procfs.py +336 -0
- omlish/reflect.py +2 -12
- omlish/replserver/console.py +9 -9
- omlish/replserver/server.py +4 -4
- omlish/sql/__init__.py +0 -0
- omlish/sql/_abcs.py +65 -0
- omlish/sql/dbs.py +90 -0
- omlish/stats.py +3 -3
- omlish/testing/pydevd.py +4 -6
- omlish/testing/pytest/inject/__init__.py +7 -0
- omlish/testing/pytest/inject/harness.py +23 -1
- omlish/testing/pytest/plugins/__init__.py +1 -1
- omlish/testing/pytest/plugins/pydevd.py +12 -0
- omlish/testing/pytest/plugins/switches.py +2 -2
- omlish/testing/testing.py +5 -5
- omlish/text/parts.py +3 -3
- omlish-0.0.0.dev2.dist-info/METADATA +31 -0
- omlish-0.0.0.dev2.dist-info/RECORD +193 -0
- {omlish-0.0.0.dev1.dist-info → omlish-0.0.0.dev2.dist-info}/WHEEL +1 -1
- omlish/testing/pytest/plugins/pycharm.py +0 -54
- omlish-0.0.0.dev1.dist-info/METADATA +0 -17
- omlish-0.0.0.dev1.dist-info/RECORD +0 -187
- {omlish-0.0.0.dev1.dist-info → omlish-0.0.0.dev2.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev1.dist-info → omlish-0.0.0.dev2.dist-info}/top_level.txt +0 -0
omlish/procfs.py
ADDED
|
@@ -0,0 +1,336 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TODO:
|
|
3
|
+
- dataclasses
|
|
4
|
+
"""
|
|
5
|
+
import argparse
|
|
6
|
+
import logging
|
|
7
|
+
import os
|
|
8
|
+
import re
|
|
9
|
+
import resource
|
|
10
|
+
import struct
|
|
11
|
+
import sys
|
|
12
|
+
import typing as ta
|
|
13
|
+
|
|
14
|
+
from . import iterators as it
|
|
15
|
+
from . import json
|
|
16
|
+
from . import lang
|
|
17
|
+
from . import os as oos
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
log = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
PidLike = int | str
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
RLIMIT_RESOURCES = {
|
|
27
|
+
getattr(resource, k): k
|
|
28
|
+
for k in dir(resource)
|
|
29
|
+
if k.startswith('RLIMIT_')
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def parse_size(s: str) -> int:
    """Parse a /proc size string like '4 kB' (or a bare byte count) into bytes."""
    if ' ' not in s:
        # No unit suffix: the whole string is already a byte count.
        return int(s)
    multipliers = {'kB': 1024, 'mB': 1024 * 1024}
    value, unit = s.split()
    return int(value) * multipliers[unit]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class ProcStat(lang.Namespace):
    """Field indices into the list returned by get_process_stats().

    The ordering follows the fields of /proc/[pid]/stat as documented in
    proc(5). PID and COMM occupy slots 0 and 1 because get_process_stats()
    splits them out of the '(comm)'-delimited prefix before the remaining
    space-separated fields.
    """

    PID = 0
    COMM = 1
    STATE = 2
    PPID = 3
    PGRP = 4
    SESSION = 5
    TTY_NR = 6
    TPGID = 7
    FLAGS = 8
    MINFLT = 9
    CMINFLT = 10
    MAJFLT = 11
    CMAJFLT = 12
    UTIME = 13
    STIME = 14
    CUTIME = 15
    CSTIME = 16
    PRIORITY = 17
    NICE = 18
    NUM_THREADS = 19
    ITREALVALUE = 20
    STARTTIME = 21
    VSIZE = 22
    RSS = 23
    RSSLIM = 24
    STARTCODE = 25
    ENDCODE = 26
    STARTSTACK = 27
    KSTKESP = 28
    KSTKEIP = 29
    SIGNAL = 30
    BLOCKED = 31
    SIGIGNORE = 32
    SIGCATCH = 33
    WCHAN = 34
    NSWAP = 35
    CNSWAP = 36
    EXIT_SIGNAL = 37
    PROCESSOR = 38
    RT_PRIORITY = 39
    POLICY = 40
    DELAYACCT_BLKIO_TICKS = 41
    GUEST_TIME = 42
    CGUEST_TIME = 43
    START_DATA = 44
    END_DATA = 45
    START_BRK = 46
    ARG_START = 47
    ARG_END = 48
    ENV_START = 49
    ENV_END = 50
    EXIT_CODE = 51
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _check_linux() -> None:
|
|
97
|
+
if sys.platform != 'linux':
|
|
98
|
+
raise OSError
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def get_process_stats(pid: PidLike = 'self') -> ta.List[str]:
    """http://man7.org/linux/man-pages/man5/proc.5.html -> /proc/[pid]/stat"""

    _check_linux()
    with open('/proc/%s/stat' % (pid,)) as f:
        contents = f.read()
    # The comm field is wrapped in parens and may itself contain spaces or
    # parens, so split around the *last* ')' and then the first '('.
    head, _, tail = contents.rpartition(')')
    pid_part, _, comm = head.partition('(')
    return [pid_part.strip(), comm, *tail.strip().split(' ')]
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def get_process_chain(pid: PidLike = 'self') -> ta.List[ta.Tuple[int, str]]:
    """Return the (pid, comm) ancestry chain starting at *pid* and walking up parents."""

    _check_linux()
    chain: ta.List[ta.Tuple[int, str]] = []
    cur: PidLike = pid
    while cur:
        stats = get_process_stats(cur)
        chain.append((int(stats[ProcStat.PID]), stats[ProcStat.COMM]))
        # PPID of the root is 0, which is falsy and ends the walk.
        cur = int(stats[ProcStat.PPID])
    return chain
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def get_process_start_time(pid: PidLike = 'self') -> int:
    """https://stackoverflow.com/questions/2598145/how-to-retrieve-the-process-start-time-or-uptime-in-python"""

    _check_linux()
    ticks_per_sec = os.sysconf(os.sysconf_names['SC_CLK_TCK'])
    with open('/proc/stat') as f:
        system_lines = f.readlines()
    # 'btime' in the system-wide /proc/stat is the boot time as a unix timestamp.
    for ln in system_lines:
        if ln.startswith('btime'):
            boot_timestamp = int(ln.split()[1])
            break
    else:
        raise ValueError
    # The process's STARTTIME field is jiffies since boot; convert and offset.
    starttime_jiffies = int(get_process_stats(pid)[ProcStat.STARTTIME])
    return boot_timestamp + starttime_jiffies // ticks_per_sec
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def get_process_rss(pid: PidLike = 'self') -> int:
    """Return the raw RSS field from /proc/[pid]/stat (in pages, per proc(5))."""
    stats = get_process_stats(pid)
    return int(stats[ProcStat.RSS])
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def set_process_oom_score_adj(score: str, pid: PidLike = 'self') -> None:
    """Write *score* to /proc/[pid]/oom_score_adj (stringified as-is)."""
    _check_linux()
    path = '/proc/%s/oom_score_adj' % (pid,)
    with open(path, 'w') as f:
        f.write(str(score))
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
# Matches one header line of /proc/[pid]/maps (or smaps), e.g.:
#   7f2c1c000000-7f2c1c021000 rw-p 00000000 00:00 0    [heap]
MAP_LINE_RX = re.compile(
    r'^'
    r'(?P<address>[A-Fa-f0-9]+)-(?P<end_address>[A-Fa-f0-9]+)\s+'  # hex start-end range
    r'(?P<permissions>\S+)\s+'  # e.g. 'rw-p'
    r'(?P<offset>[A-Fa-f0-9]+)\s+'  # hex file offset
    r'(?P<device>\S+)\s+'
    r'(?P<inode>\d+)\s+'
    r'(?P<path>.*)'  # may be empty for anonymous mappings
    r'$'
)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def get_process_maps(pid: PidLike = 'self', sharing: bool = False) -> ta.Iterator[ta.Dict[str, ta.Any]]:
    """http://man7.org/linux/man-pages/man5/proc.5.html -> /proc/[pid]/maps

    Yield one dict per memory mapping. With sharing=True, /proc/[pid]/smaps is
    read instead and each dict gains a 'sharing' sub-dict of the per-mapping
    size fields (parsed to bytes), plus a 'vmflags' token list.
    """

    _check_linux()
    with open('/proc/%s/%s' % (pid, 'smaps' if sharing else 'maps'), 'r') as map_file:
        while True:
            line = map_file.readline()
            if not line:
                break  # EOF
            m = MAP_LINE_RX.match(line)
            if not m:
                raise ValueError(line)
            address = int(m.group('address'), 16)
            end_address = int(m.group('end_address'), 16)
            d = {
                'address': address,
                'end_address': end_address,
                'size': end_address - address,
                # Keep only the set permission chars, dropping '-' placeholders.
                'permissions': [x for x in m.group('permissions') if x != '-'],
                'offset': int(m.group('offset'), 16),
                'device': m.group('device'),
                'inode': int(m.group('inode')),
                'path': m.group('path'),
            }
            if sharing:
                # smaps follows each header with 'Key: <size>' lines; the
                # VmFlags line terminates the per-mapping detail section.
                s: dict[str, ta.Any] = {}
                while True:
                    line = map_file.readline()
                    k, v = line.split(':')
                    if k.lower() == 'vmflags':
                        break
                    s[k.lower()] = parse_size(v.strip())
                # 'line' still holds the VmFlags line here.
                _, v = line.split(':')
                s['vmflags'] = [p for p in [j.strip() for j in v.split(' ')] if p]
                d['sharing'] = s
            yield d
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
# Key order used when serializing pagemap entries as compact tuples
# (see _dump_cmd); must match the dicts yielded by get_process_range_pagemaps.
PAGEMAP_KEYS = (
    'address',
    'pfn',
    'swap_type',
    'swap_offset',
    'pte_soft_dirty',
    'file_page_or_shared_anon',
    'page_swapped',
    'page_present',
)
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def get_process_range_pagemaps(start: int, end: int, pid: PidLike = 'self') -> ta.Iterable[ta.Dict[str, int]]:
    """https://www.kernel.org/doc/Documentation/vm/pagemap.txt

    Yield one decoded 64-bit pagemap entry per page in the virtual address
    range [start, end). Yields nothing if the read comes back empty.
    """

    _check_linux()
    # /proc/[pid]/pagemap holds one 8-byte record per virtual page.
    offset = (start // oos.PAGE_SIZE) * 8
    npages = ((end - start) // oos.PAGE_SIZE)
    size = npages * 8
    with open('/proc/%s/pagemap' % (pid,), 'rb') as pagemap_file:
        pagemap_file.seek(offset)
        pagemap_buf = pagemap_file.read(size)
    if not pagemap_buf:
        return
    # Hoist the attribute lookup out of the per-page loop.
    _struct_unpack = struct.unpack
    for pagenum in range(npages):
        [packed] = _struct_unpack('Q', pagemap_buf[pagenum * 8:(pagenum + 1) * 8])
        yield {
            'address': start + (pagenum * oos.PAGE_SIZE),
            # Bits 0-54: page frame number (meaningful when present).
            'pfn': (packed & ((1 << (54 + 1)) - 1)),
            # Bits 0-4: swap type (meaningful when swapped).
            'swap_type': (packed & ((1 << (4 + 1)) - 1)),
            # Bits 5-54: swap offset (meaningful when swapped).
            'swap_offset': (packed & ((1 << (54 + 1)) - 1)) >> 5,
            'pte_soft_dirty': ((packed >> 55) & 1) > 0,
            'file_page_or_shared_anon': ((packed >> 61) & 1) > 0,
            'page_swapped': ((packed >> 62) & 1) > 0,
            'page_present': ((packed >> 63) & 1) > 0,
        }
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def get_process_pagemaps(pid: PidLike = 'self') -> ta.Iterable[ta.Dict[str, int]]:
    """Yield pagemap entries for every mapped region of *pid*."""
    _check_linux()
    for mapping in get_process_maps(pid):
        yield from get_process_range_pagemaps(mapping['address'], mapping['end_address'], pid)
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def _dump_cmd(args: argparse.Namespace) -> None:
    """CLI 'dump' subcommand: emit one JSON line per mapping and per page.

    Ends with a summary line containing the summed smaps 'rss' and the total
    bytes of soft-dirty pages.
    """
    total = 0
    dirty_total = 0
    for m in get_process_maps(args.pid, sharing=True):
        total += m['sharing']['rss']
        sys.stdout.write(json.dumps({'map': m}))
        sys.stdout.write('\n')
        for pm in get_process_range_pagemaps(m['address'], m['end_address'], args.pid):
            if pm['pte_soft_dirty']:
                # Count each soft-dirty page as one full page of dirty bytes.
                dirty_total += oos.PAGE_SIZE
            # Serialize pages as tuples in PAGEMAP_KEYS order to keep lines compact.
            sys.stdout.write(json.dumps({'page': tuple(pm[k] for k in PAGEMAP_KEYS)}))
            sys.stdout.write('\n')
    dct = {
        'total': total,
        'dirty_total': dirty_total,
    }
    sys.stdout.write(json.dumps(dct))
    sys.stdout.write('\n')
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
def _cmp_cmd(args: argparse.Namespace) -> None:
    """CLI 'cmp' subcommand: compare the page mappings of two processes.

    With a single pid, compares that process against its immediate parent;
    with two pids, compares them directly. Emits per-page JSON diffs (unless
    --quiet) followed by a summary of left-only / right-only / changed page
    counts and their byte sizes.
    """
    if len(args.pids) == 1:
        [rpid] = args.pids
        # [0] is the process itself, [1] its parent.
        lpid = get_process_chain(rpid)[1][0]
    elif len(args.pids) == 2:
        lpid, rpid = args.pids
    else:
        raise TypeError('Invalid arguments')

    def g(pid):
        # Flatten all pagemap entries across every mapping of *pid*.
        for m in get_process_maps(pid, sharing=True):
            for pm in get_process_range_pagemaps(m['address'], m['end_address'], pid):
                yield pm

    lpms, rpms = [g(pid) for pid in (lpid, rpid)]

    l_pages = 0
    r_pages = 0
    c_pages = 0
    # merge_on aligns the two streams by virtual address; expand_indexed_pairs
    # presumably pads to (left, right) with None for a side missing that
    # address -- TODO confirm against the iterators module.
    for _, ps in it.merge_on(lambda pm: pm['address'], lpms, rpms):
        l, r = it.expand_indexed_pairs(ps, None, width=2)
        if l is not None and r is None:
            l_pages += 1
        elif l is None and r is not None:
            r_pages += 1
        elif l['pfn'] != r['pfn']:
            # Same virtual address backed by a different physical frame.
            c_pages += 1
        else:
            # Identical on both sides: not a diff, nothing to report.
            continue
        if not args.quiet:
            sys.stdout.write(json.dumps([l, r]))
            sys.stdout.write('\n')
    # Changed pages count toward both sides' totals.
    l_pages += c_pages
    r_pages += c_pages
    dct = {
        'l_pages': l_pages,
        'l_bytes': l_pages * oos.PAGE_SIZE,
        'r_pages': r_pages,
        'r_bytes': r_pages * oos.PAGE_SIZE,
        'c_pages': c_pages,
        'c_bytes': c_pages * oos.PAGE_SIZE,
    }
    sys.stdout.write(json.dumps(dct))
    sys.stdout.write('\n')
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def _main() -> None:
    """Command-line entry point: 'dump <pid>' or 'cmp <pid> [<pid>]'."""
    _check_linux()

    parser = argparse.ArgumentParser()
    parser.add_argument('-q', '--quiet', action='store_true')
    subparsers = parser.add_subparsers()

    dump_parser = subparsers.add_parser('dump')
    dump_parser.add_argument('pid', type=int)
    dump_parser.set_defaults(func=_dump_cmd)

    cmp_parser = subparsers.add_parser('cmp')
    cmp_parser.add_argument('pids', type=int, nargs='*')
    cmp_parser.set_defaults(func=_cmp_cmd)

    args = parser.parse_args()
    # No subcommand selected: argparse leaves 'func' unset.
    if hasattr(args, 'func'):
        args.func(args)
    else:
        parser.print_help()


if __name__ == '__main__':
    _main()
|
omlish/reflect.py
CHANGED
|
@@ -63,16 +63,6 @@ _KNOWN_SPECIAL_TYPE_VARS = tuple(
|
|
|
63
63
|
##
|
|
64
64
|
|
|
65
65
|
|
|
66
|
-
try:
|
|
67
|
-
from types import get_original_bases # type: ignore
|
|
68
|
-
except ImportError:
|
|
69
|
-
def get_original_bases(cls, /):
|
|
70
|
-
try:
|
|
71
|
-
return cls.__dict__.get('__orig_bases__', cls.__bases__)
|
|
72
|
-
except AttributeError:
|
|
73
|
-
raise TypeError(f'Expected an instance of type, not {type(cls).__name__!r}') from None
|
|
74
|
-
|
|
75
|
-
|
|
76
66
|
def get_params(obj: ta.Any) -> tuple[ta.TypeVar, ...]:
|
|
77
67
|
if isinstance(obj, type):
|
|
78
68
|
if issubclass(obj, ta.Generic): # type: ignore
|
|
@@ -218,7 +208,7 @@ def get_underlying(nt: NewType) -> Type:
|
|
|
218
208
|
return type_(nt.obj.__supertype__) # noqa
|
|
219
209
|
|
|
220
210
|
|
|
221
|
-
def get_concrete_type(ty: Type) ->
|
|
211
|
+
def get_concrete_type(ty: Type) -> type | None:
|
|
222
212
|
if isinstance(ty, type):
|
|
223
213
|
return ty
|
|
224
214
|
if isinstance(ty, Generic):
|
|
@@ -299,7 +289,7 @@ class GenericSubstitution:
|
|
|
299
289
|
if (cty := get_concrete_type(ty)) is not None:
|
|
300
290
|
rpl = get_type_var_replacements(ty)
|
|
301
291
|
ret: list[Type] = []
|
|
302
|
-
for b in get_original_bases(cty):
|
|
292
|
+
for b in types.get_original_bases(cty):
|
|
303
293
|
bty = type_(b)
|
|
304
294
|
if isinstance(bty, Generic) and isinstance(b, type):
|
|
305
295
|
# FIXME: throws away relative types, but can't use original vars as they're class-contextual
|
omlish/replserver/console.py
CHANGED
|
@@ -43,8 +43,8 @@ class InteractiveSocketConsole:
|
|
|
43
43
|
def __init__(
|
|
44
44
|
self,
|
|
45
45
|
conn: sock.socket,
|
|
46
|
-
locals:
|
|
47
|
-
filename: str = '<console>'
|
|
46
|
+
locals: dict[str, ta.Any] | None = None,
|
|
47
|
+
filename: str = '<console>',
|
|
48
48
|
) -> None:
|
|
49
49
|
super().__init__()
|
|
50
50
|
|
|
@@ -73,7 +73,7 @@ class InteractiveSocketConsole:
|
|
|
73
73
|
|
|
74
74
|
CPRT = 'Type "help", "copyright", "credits" or "license" for more information.'
|
|
75
75
|
|
|
76
|
-
def interact(self, banner:
|
|
76
|
+
def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None:
|
|
77
77
|
log.info(f'Console {id(self)} on thread {threading.current_thread().ident} interacting')
|
|
78
78
|
|
|
79
79
|
try:
|
|
@@ -144,10 +144,10 @@ class InteractiveSocketConsole:
|
|
|
144
144
|
|
|
145
145
|
def compile(
|
|
146
146
|
self,
|
|
147
|
-
source:
|
|
147
|
+
source: str | ast.AST,
|
|
148
148
|
filename: str = '<input>',
|
|
149
|
-
symbol: str = 'single'
|
|
150
|
-
) ->
|
|
149
|
+
symbol: str = 'single',
|
|
150
|
+
) -> types.CodeType | None:
|
|
151
151
|
if isinstance(source, ast.AST):
|
|
152
152
|
return self._compiler.compiler(source, filename, symbol) # type: ignore
|
|
153
153
|
else:
|
|
@@ -155,7 +155,7 @@ class InteractiveSocketConsole:
|
|
|
155
155
|
|
|
156
156
|
def run_source(
|
|
157
157
|
self,
|
|
158
|
-
source:
|
|
158
|
+
source: str | ast.AST,
|
|
159
159
|
filename: str = '<input>',
|
|
160
160
|
symbol: str = 'single',
|
|
161
161
|
) -> bool:
|
|
@@ -195,7 +195,7 @@ class InteractiveSocketConsole:
|
|
|
195
195
|
expr.value,
|
|
196
196
|
lineno=expr.lineno,
|
|
197
197
|
col_offset=expr.col_offset,
|
|
198
|
-
)
|
|
198
|
+
),
|
|
199
199
|
],
|
|
200
200
|
)
|
|
201
201
|
ast.fix_missing_locations(source)
|
|
@@ -227,7 +227,7 @@ class InteractiveSocketConsole:
|
|
|
227
227
|
finally:
|
|
228
228
|
last_tb = ei = None # type: ignore # noqa
|
|
229
229
|
|
|
230
|
-
def show_syntax_error(self, filename:
|
|
230
|
+
def show_syntax_error(self, filename: str | None = None) -> None:
|
|
231
231
|
type, value, tb = sys.exc_info()
|
|
232
232
|
sys.last_type = type
|
|
233
233
|
sys.last_value = value
|
omlish/replserver/server.py
CHANGED
|
@@ -38,7 +38,7 @@ class ReplServer:
|
|
|
38
38
|
@dc.dataclass(frozen=True)
|
|
39
39
|
class Config:
|
|
40
40
|
path: str
|
|
41
|
-
file_mode:
|
|
41
|
+
file_mode: int | None = None
|
|
42
42
|
poll_interval: float = 0.5
|
|
43
43
|
exit_timeout: float = 10.0
|
|
44
44
|
|
|
@@ -51,7 +51,7 @@ class ReplServer:
|
|
|
51
51
|
check.not_empty(config.path)
|
|
52
52
|
self._config = check.isinstance(config, ReplServer.Config)
|
|
53
53
|
|
|
54
|
-
self._socket:
|
|
54
|
+
self._socket: sock.socket | None = None
|
|
55
55
|
self._is_running = False
|
|
56
56
|
self._consoles_by_threads: ta.MutableMapping[threading.Thread, InteractiveSocketConsole] = \
|
|
57
57
|
weakref.WeakKeyDictionary() # noqa
|
|
@@ -106,7 +106,7 @@ class ReplServer:
|
|
|
106
106
|
log.info(
|
|
107
107
|
f'Starting console {id(console)} repl server connection '
|
|
108
108
|
f'on file {self._config.path} '
|
|
109
|
-
f'on thread {threading.current_thread().ident}'
|
|
109
|
+
f'on thread {threading.current_thread().ident}',
|
|
110
110
|
)
|
|
111
111
|
self._consoles_by_threads[threading.current_thread()] = console
|
|
112
112
|
console.interact()
|
|
@@ -135,7 +135,7 @@ class ReplServer:
|
|
|
135
135
|
self._is_shutdown.set()
|
|
136
136
|
self._is_running = False
|
|
137
137
|
|
|
138
|
-
def shutdown(self, block: bool = False, timeout:
|
|
138
|
+
def shutdown(self, block: bool = False, timeout: float | None = None) -> None:
|
|
139
139
|
self._should_shutdown = True
|
|
140
140
|
if block:
|
|
141
141
|
self._is_shutdown.wait(timeout=timeout)
|
omlish/sql/__init__.py
ADDED
|
File without changes
|
omlish/sql/_abcs.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import typing as ta
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
# A column's type_code may be any driver-defined object, or None
# (per the DB-API 2.0 spec, PEP 249).
DBAPITypeCode: ta.TypeAlias = ta.Any | None

# The per-column 7-tuple exposed via Cursor.description; per PEP 249 the
# fields are (name, type_code, display_size, internal_size, precision,
# scale, null_ok), with everything past type_code optional.
DBAPIColumnDescription: ta.TypeAlias = tuple[
    str,
    DBAPITypeCode,
    int | None,
    int | None,
    int | None,
    int | None,
    bool | None,
]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class DBAPIConnection(ta.Protocol):
    """Structural (duck-typed) view of a DB-API 2.0 connection object."""

    def close(self) -> object: ...

    def commit(self) -> object: ...

    # optional:
    # def rollback(self) -> ta.Any: ...

    def cursor(self) -> 'DBAPICursor': ...
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class DBAPICursor(ta.Protocol):
    """Structural (duck-typed) view of a DB-API 2.0 cursor object."""

    @property
    def description(self) -> ta.Sequence[DBAPIColumnDescription] | None: ...

    @property
    def rowcount(self) -> int: ...

    # optional:
    # def callproc(self, procname: str, parameters: Sequence[ta.Any] = ...) -> Sequence[ta.Any]: ...

    def close(self) -> object: ...

    def execute(
        self,
        operation: str,
        parameters: ta.Sequence[ta.Any] | ta.Mapping[str, ta.Any] = ...,
    ) -> object: ...

    def executemany(
        self,
        operation: str,
        seq_of_parameters: ta.Sequence[ta.Sequence[ta.Any]],
    ) -> object: ...

    def fetchone(self) -> ta.Sequence[ta.Any] | None: ...

    def fetchmany(self, size: int = ...) -> ta.Sequence[ta.Sequence[ta.Any]]: ...

    def fetchall(self) -> ta.Sequence[ta.Sequence[ta.Any]]: ...

    # optional:
    # def nextset(self) -> None | Literal[True]: ...

    arraysize: int

    def setinputsizes(self, sizes: ta.Sequence[DBAPITypeCode | int | None]) -> object: ...

    def setoutputsize(self, size: int, column: int = ...) -> object: ...
|
omlish/sql/dbs.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import typing as ta
|
|
2
|
+
import urllib.parse
|
|
3
|
+
|
|
4
|
+
from .. import dataclasses as dc
|
|
5
|
+
from .. import lang
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
##
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dc.dataclass(frozen=True, kw_only=True)
class DbType:
    """Static description of a database engine family."""

    # Canonical short name (e.g. 'postgres').
    name: str
    # Name used by the SQL dialect layer (e.g. 'postgresql').
    dialect_name: str

    # None for engines with no network port (e.g. sqlite).
    default_port: int | None = None
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class DbTypes(lang.Namespace):
    """Well-known DbType instances for common engines."""

    MYSQL = DbType(
        name='mysql',
        dialect_name='mysql',
        default_port=3306,
    )

    POSTGRES = DbType(
        name='postgres',
        dialect_name='postgresql',
        default_port=5432,
    )

    # File-based: no default network port.
    SQLITE = DbType(
        name='sqlite',
        dialect_name='sqlite',
    )
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
##
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class DbLoc(lang.Abstract):
    """Abstract base for database locations (see UrlDbLoc and HostDbLoc)."""
    pass
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dc.dataclass(frozen=True)
class UrlDbLoc(DbLoc, lang.Final):
    """Database located by a full connection URL."""

    url: str
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dc.dataclass(frozen=True)
class HostDbLoc(DbLoc, lang.Final):
    """Database located by host/port, with optional credentials."""

    host: str
    port: int | None = None

    username: str | None = None
    # repr_fn redacts the password ('...' when set) so it never leaks into reprs/logs.
    password: str | None = dc.xfield(default=None, repr_fn=lambda pw: '...' if pw is not None else None)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
##
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
@dc.dataclass(frozen=True)
class DbSpec:
    """A named database: its engine type plus where to find it."""

    name: str
    type: DbType
    loc: DbLoc
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
##
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def rebuild_url(url: str, fn: ta.Callable[[urllib.parse.ParseResult], urllib.parse.ParseResult]) -> str:
    """Parse *url*, transform it with *fn*, and re-serialize it.

    The original scheme is stashed and swapped for a placeholder ('sql')
    around the urlparse round-trip — presumably to tolerate engine schemes
    urlparse handles poorly — and restored afterwards unless *fn* itself
    replaced the scheme.
    """
    engine: str | None = None
    if '://' in url:
        engine, _, rest = url.partition('://')
        url = 'sql://' + rest
    parsed = fn(urllib.parse.urlparse(url))
    if engine is not None and parsed.scheme == 'sql':
        # fn left the placeholder alone: put the real scheme back.
        parsed = parsed._replace(scheme=engine)
    return urllib.parse.urlunparse(parsed)  # noqa
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def set_url_engine(url: str, engine: str) -> str:
    """Return *url* with its scheme replaced by *engine*."""
    def swap(parsed: urllib.parse.ParseResult) -> urllib.parse.ParseResult:
        return parsed._replace(scheme=engine)  # noqa
    return rebuild_url(url, swap)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def set_url_database(url: str, database: str) -> str:
    """Return *url* with its path replaced by '/<database>'."""
    def swap(parsed: urllib.parse.ParseResult) -> urllib.parse.ParseResult:
        return parsed._replace(path='/' + database)  # noqa
    return rebuild_url(url, swap)
|
omlish/stats.py
CHANGED
|
@@ -272,10 +272,10 @@ class SamplingHistogram:
|
|
|
272
272
|
|
|
273
273
|
self._percentile_pos_list = [self._calc_percentile_pos(p, self._size) for p in self._percentiles]
|
|
274
274
|
|
|
275
|
-
self._ring: list[
|
|
275
|
+
self._ring: list[SamplingHistogram.Entry | None] = [None] * size
|
|
276
276
|
self._ring_pos = 0
|
|
277
277
|
|
|
278
|
-
self._sample: list[
|
|
278
|
+
self._sample: list[SamplingHistogram.Entry | None] = [None] * size
|
|
279
279
|
self._sample_pos_queue = list(reversed(range(size)))
|
|
280
280
|
|
|
281
281
|
def add(self, value: float) -> None:
|
|
@@ -303,7 +303,7 @@ class SamplingHistogram:
|
|
|
303
303
|
def _calc_percentile_pos(p: float, sz: int) -> int:
|
|
304
304
|
return int(round((p * sz) - 1))
|
|
305
305
|
|
|
306
|
-
def _calc_percentiles(self, entries: list[
|
|
306
|
+
def _calc_percentiles(self, entries: list[Entry | None]) -> list[Percentile]:
|
|
307
307
|
entries = list(filter(None, entries))
|
|
308
308
|
sz = len(entries)
|
|
309
309
|
if not sz:
|
omlish/testing/pydevd.py
CHANGED
|
@@ -5,8 +5,6 @@ an already-debugging PyCharm instance to debug PySpark jobs.
|
|
|
5
5
|
|
|
6
6
|
TODO:
|
|
7
7
|
- https://www.jetbrains.com/help/pycharm/remote-debugging-with-product.html#
|
|
8
|
-
- move to dev?
|
|
9
|
-
- cython help? or in cython.py
|
|
10
8
|
"""
|
|
11
9
|
import json
|
|
12
10
|
import os
|
|
@@ -87,7 +85,7 @@ def patch_for_trio_asyncio() -> None:
|
|
|
87
85
|
|
|
88
86
|
|
|
89
87
|
@lang.cached_function
|
|
90
|
-
def _pydevd() ->
|
|
88
|
+
def _pydevd() -> types.ModuleType | None:
|
|
91
89
|
try:
|
|
92
90
|
return __import__('pydevd')
|
|
93
91
|
except ImportError:
|
|
@@ -98,7 +96,7 @@ def is_present() -> bool:
|
|
|
98
96
|
return _pydevd() is not None
|
|
99
97
|
|
|
100
98
|
|
|
101
|
-
def get_setup() ->
|
|
99
|
+
def get_setup() -> dict | None:
|
|
102
100
|
if is_present():
|
|
103
101
|
return _pydevd().SetupHolder.setup
|
|
104
102
|
else:
|
|
@@ -154,8 +152,8 @@ def save_args() -> None:
|
|
|
154
152
|
|
|
155
153
|
def maybe_reexec(
|
|
156
154
|
*,
|
|
157
|
-
file:
|
|
158
|
-
module:
|
|
155
|
+
file: str | None = None,
|
|
156
|
+
module: str | None = None,
|
|
159
157
|
silence: bool = False,
|
|
160
158
|
) -> None:
|
|
161
159
|
if ARGS_ENV_VAR not in os.environ:
|