omlish 0.0.0.dev132__py3-none-any.whl → 0.0.0.dev177__py3-none-any.whl
- omlish/.manifests.json +265 -7
- omlish/__about__.py +7 -5
- omlish/antlr/_runtime/__init__.py +0 -22
- omlish/antlr/_runtime/_all.py +24 -0
- omlish/antlr/_runtime/atn/ParserATNSimulator.py +1 -1
- omlish/antlr/_runtime/dfa/DFASerializer.py +1 -1
- omlish/antlr/_runtime/error/DiagnosticErrorListener.py +2 -1
- omlish/antlr/_runtime/xpath/XPath.py +7 -1
- omlish/antlr/_runtime/xpath/XPathLexer.py +1 -1
- omlish/antlr/delimit.py +106 -0
- omlish/antlr/dot.py +31 -0
- omlish/antlr/errors.py +11 -0
- omlish/antlr/input.py +96 -0
- omlish/antlr/parsing.py +19 -0
- omlish/antlr/runtime.py +102 -0
- omlish/antlr/utils.py +38 -0
- omlish/argparse/all.py +45 -0
- omlish/{argparse.py → argparse/cli.py} +112 -107
- omlish/asyncs/__init__.py +0 -35
- omlish/asyncs/all.py +35 -0
- omlish/asyncs/asyncio/all.py +7 -0
- omlish/asyncs/asyncio/channels.py +40 -0
- omlish/asyncs/asyncio/streams.py +45 -0
- omlish/asyncs/asyncio/subprocesses.py +238 -0
- omlish/asyncs/asyncio/timeouts.py +16 -0
- omlish/asyncs/bluelet/LICENSE +6 -0
- omlish/asyncs/bluelet/all.py +67 -0
- omlish/asyncs/bluelet/api.py +23 -0
- omlish/asyncs/bluelet/core.py +178 -0
- omlish/asyncs/bluelet/events.py +78 -0
- omlish/asyncs/bluelet/files.py +80 -0
- omlish/asyncs/bluelet/runner.py +416 -0
- omlish/asyncs/bluelet/sockets.py +214 -0
- omlish/bootstrap/sys.py +3 -3
- omlish/cached.py +2 -2
- omlish/check.py +49 -460
- omlish/codecs/__init__.py +72 -0
- omlish/codecs/base.py +106 -0
- omlish/codecs/bytes.py +119 -0
- omlish/codecs/chain.py +23 -0
- omlish/codecs/funcs.py +39 -0
- omlish/codecs/registry.py +139 -0
- omlish/codecs/standard.py +4 -0
- omlish/codecs/text.py +217 -0
- omlish/collections/cache/impl.py +50 -57
- omlish/collections/coerce.py +1 -0
- omlish/collections/mappings.py +1 -1
- omlish/configs/flattening.py +1 -1
- omlish/defs.py +1 -1
- omlish/diag/_pycharm/runhack.py +8 -2
- omlish/diag/procfs.py +8 -8
- omlish/docker/__init__.py +0 -36
- omlish/docker/all.py +31 -0
- omlish/docker/consts.py +4 -0
- omlish/{lite/docker.py → docker/detect.py} +18 -0
- omlish/docker/{helpers.py → timebomb.py} +0 -21
- omlish/formats/cbor.py +31 -0
- omlish/formats/cloudpickle.py +31 -0
- omlish/formats/codecs.py +93 -0
- omlish/formats/json/codecs.py +29 -0
- omlish/formats/json/delimted.py +4 -0
- omlish/formats/json/stream/errors.py +2 -0
- omlish/formats/json/stream/lex.py +12 -6
- omlish/formats/json/stream/parse.py +38 -22
- omlish/formats/json5.py +31 -0
- omlish/formats/pickle.py +31 -0
- omlish/formats/repr.py +25 -0
- omlish/formats/toml.py +17 -0
- omlish/formats/yaml.py +25 -0
- omlish/funcs/__init__.py +0 -0
- omlish/{genmachine.py → funcs/genmachine.py} +5 -4
- omlish/{matchfns.py → funcs/match.py} +1 -1
- omlish/funcs/pairs.py +215 -0
- omlish/http/__init__.py +0 -48
- omlish/http/all.py +48 -0
- omlish/http/coro/__init__.py +0 -0
- omlish/{lite/fdio/corohttp.py → http/coro/fdio.py} +21 -19
- omlish/{lite/http/coroserver.py → http/coro/server.py} +20 -21
- omlish/{lite/http → http}/handlers.py +3 -2
- omlish/{lite/http → http}/parsing.py +1 -0
- omlish/http/sessions.py +1 -1
- omlish/{lite/http → http}/versions.py +1 -0
- omlish/inject/managed.py +2 -2
- omlish/io/__init__.py +0 -3
- omlish/{lite/io.py → io/buffers.py} +8 -9
- omlish/io/compress/__init__.py +9 -0
- omlish/io/compress/abc.py +104 -0
- omlish/io/compress/adapters.py +148 -0
- omlish/io/compress/base.py +24 -0
- omlish/io/compress/brotli.py +47 -0
- omlish/io/compress/bz2.py +61 -0
- omlish/io/compress/codecs.py +78 -0
- omlish/io/compress/gzip.py +350 -0
- omlish/io/compress/lz4.py +91 -0
- omlish/io/compress/lzma.py +81 -0
- omlish/io/compress/snappy.py +34 -0
- omlish/io/compress/zlib.py +74 -0
- omlish/io/compress/zstd.py +44 -0
- omlish/io/fdio/__init__.py +1 -0
- omlish/{lite → io}/fdio/handlers.py +5 -5
- omlish/{lite → io}/fdio/kqueue.py +8 -8
- omlish/{lite → io}/fdio/manager.py +7 -7
- omlish/{lite → io}/fdio/pollers.py +13 -13
- omlish/io/generators/__init__.py +56 -0
- omlish/io/generators/consts.py +1 -0
- omlish/io/generators/direct.py +13 -0
- omlish/io/generators/readers.py +189 -0
- omlish/io/generators/stepped.py +191 -0
- omlish/io/pyio.py +5 -2
- omlish/iterators/__init__.py +24 -0
- omlish/iterators/iterators.py +132 -0
- omlish/iterators/recipes.py +18 -0
- omlish/iterators/tools.py +96 -0
- omlish/iterators/unique.py +67 -0
- omlish/lang/__init__.py +13 -1
- omlish/lang/functions.py +11 -2
- omlish/lang/generators.py +243 -0
- omlish/lang/iterables.py +46 -49
- omlish/lang/maybes.py +4 -4
- omlish/lite/cached.py +39 -6
- omlish/lite/check.py +438 -75
- omlish/lite/contextmanagers.py +17 -4
- omlish/lite/dataclasses.py +42 -0
- omlish/lite/inject.py +28 -45
- omlish/lite/logs.py +0 -270
- omlish/lite/marshal.py +309 -144
- omlish/lite/pycharm.py +47 -0
- omlish/lite/reflect.py +33 -0
- omlish/lite/resources.py +8 -0
- omlish/lite/runtime.py +4 -4
- omlish/lite/shlex.py +12 -0
- omlish/lite/socketserver.py +2 -2
- omlish/lite/strings.py +31 -0
- omlish/logs/__init__.py +0 -32
- omlish/logs/{_abc.py → abc.py} +0 -1
- omlish/logs/all.py +37 -0
- omlish/logs/{formatters.py → color.py} +1 -2
- omlish/logs/configs.py +7 -38
- omlish/logs/filters.py +10 -0
- omlish/logs/handlers.py +4 -1
- omlish/logs/json.py +56 -0
- omlish/logs/proxy.py +99 -0
- omlish/logs/standard.py +128 -0
- omlish/logs/utils.py +2 -2
- omlish/manifests/__init__.py +2 -0
- omlish/manifests/load.py +209 -0
- omlish/manifests/types.py +17 -0
- omlish/marshal/base.py +1 -1
- omlish/marshal/factories.py +1 -1
- omlish/marshal/forbidden.py +1 -1
- omlish/marshal/iterables.py +1 -1
- omlish/marshal/literals.py +50 -0
- omlish/marshal/mappings.py +1 -1
- omlish/marshal/maybes.py +1 -1
- omlish/marshal/standard.py +5 -1
- omlish/marshal/unions.py +1 -1
- omlish/os/__init__.py +0 -0
- omlish/os/atomics.py +205 -0
- omlish/os/deathsig.py +23 -0
- omlish/{os.py → os/files.py} +0 -9
- omlish/{lite → os}/journald.py +2 -1
- omlish/os/linux.py +484 -0
- omlish/os/paths.py +36 -0
- omlish/{lite → os}/pidfile.py +1 -0
- omlish/os/sizes.py +9 -0
- omlish/reflect/__init__.py +3 -0
- omlish/reflect/subst.py +2 -1
- omlish/reflect/types.py +126 -44
- omlish/secrets/pwhash.py +1 -1
- omlish/secrets/subprocesses.py +3 -1
- omlish/specs/jsonrpc/marshal.py +1 -1
- omlish/specs/openapi/marshal.py +1 -1
- omlish/sql/alchemy/asyncs.py +1 -1
- omlish/sql/queries/__init__.py +9 -1
- omlish/sql/queries/building.py +3 -0
- omlish/sql/queries/exprs.py +10 -27
- omlish/sql/queries/idents.py +48 -10
- omlish/sql/queries/names.py +80 -13
- omlish/sql/queries/params.py +64 -0
- omlish/sql/queries/rendering.py +1 -1
- omlish/subprocesses.py +340 -0
- omlish/term.py +29 -14
- omlish/testing/pytest/marks.py +2 -2
- omlish/testing/pytest/plugins/asyncs.py +6 -1
- omlish/testing/pytest/plugins/logging.py +1 -1
- omlish/testing/pytest/plugins/switches.py +1 -1
- {omlish-0.0.0.dev132.dist-info → omlish-0.0.0.dev177.dist-info}/METADATA +13 -11
- {omlish-0.0.0.dev132.dist-info → omlish-0.0.0.dev177.dist-info}/RECORD +200 -117
- omlish/fnpairs.py +0 -496
- omlish/formats/json/cli/__main__.py +0 -11
- omlish/formats/json/cli/cli.py +0 -298
- omlish/formats/json/cli/formats.py +0 -71
- omlish/formats/json/cli/io.py +0 -74
- omlish/formats/json/cli/parsing.py +0 -82
- omlish/formats/json/cli/processing.py +0 -48
- omlish/formats/json/cli/rendering.py +0 -92
- omlish/iterators.py +0 -300
- omlish/lite/subprocesses.py +0 -130
- /omlish/{formats/json/cli → argparse}/__init__.py +0 -0
- /omlish/{lite/fdio → asyncs/asyncio}/__init__.py +0 -0
- /omlish/asyncs/{asyncio.py → asyncio/asyncio.py} +0 -0
- /omlish/{lite/http → asyncs/bluelet}/__init__.py +0 -0
- /omlish/collections/{_abc.py → abc.py} +0 -0
- /omlish/{fnpipes.py → funcs/pipes.py} +0 -0
- /omlish/io/{_abc.py → abc.py} +0 -0
- /omlish/sql/{_abc.py → abc.py} +0 -0
- {omlish-0.0.0.dev132.dist-info → omlish-0.0.0.dev177.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev132.dist-info → omlish-0.0.0.dev177.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev132.dist-info → omlish-0.0.0.dev177.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev132.dist-info → omlish-0.0.0.dev177.dist-info}/top_level.txt +0 -0
omlish/collections/cache/impl.py
CHANGED
```diff
@@ -51,63 +51,56 @@ class CacheImpl(Cache[K, V]):
     https://google.github.io/guava/releases/16.0/api/docs/com/google/common/cache/CacheBuilder.html
     """
 
-    [old lines 54-103 not captured in the source view]
-                f'value={self.value!r}, '
-                f'weight={self.weight}, '
-                f'written={self.written}, '
-                f'accessed={self.accessed}, '
-                f'hits={self.hits}, '
-                f'unlinked={self.unlinked})'
-            )
+    class Link:
+        __slots__ = [
+            'seq',
+            'ins_prev',
+            'ins_next',
+            'lru_prev',
+            'lru_next',
+            'lfu_prev',
+            'lfu_next',
+            'key',
+            'value',
+            'weight',
+            'written',
+            'accessed',
+            'hits',
+            'unlinked',
+        ]
+
+        seq: int
+        ins_prev: 'CacheImpl.Link'
+        ins_next: 'CacheImpl.Link'
+        lru_prev: 'CacheImpl.Link'
+        lru_next: 'CacheImpl.Link'
+        lfu_prev: 'CacheImpl.Link'
+        lfu_next: 'CacheImpl.Link'
+        key: ta.Any | weakref.ref
+        value: ta.Any | weakref.ref
+        weight: float
+        written: float
+        accessed: float
+        hits: int
+        unlinked: bool
+
+        def __repr__(self) -> str:
+            return (
+                f'Link@{self.seq!s}('
+                f'ins_prev={("@" + str(self.ins_prev.seq)) if self.ins_prev is not None else None}, '
+                f'ins_next={("@" + str(self.ins_next.seq)) if self.ins_next is not None else None}, '
+                f'lru_prev={("@" + str(self.lru_prev.seq)) if self.lru_prev is not None else None}, '
+                f'lru_next={("@" + str(self.lru_next.seq)) if self.lru_next is not None else None}, '
+                f'lfu_prev={("@" + str(self.lfu_prev.seq)) if self.lfu_prev is not None else None}, '
+                f'lfu_next={("@" + str(self.lfu_next.seq)) if self.lfu_next is not None else None}, '
+                f'key={self.key!r}, '
+                f'value={self.value!r}, '
+                f'weight={self.weight}, '
+                f'written={self.written}, '
+                f'accessed={self.accessed}, '
+                f'hits={self.hits}, '
+                f'unlinked={self.unlinked})'
+            )
 
     _cache: ta.MutableMapping[ta.Any, Link]
 
```
omlish/collections/coerce.py
CHANGED
omlish/collections/mappings.py
CHANGED
```diff
@@ -86,7 +86,7 @@ class DynamicTypeMap(ta.Generic[V]):
         self._items = list(items)
         self._weak = bool(weak)
 
-        self._cache: ta.MutableMapping[type, ta.Any] = weakref.WeakKeyDictionary()
+        self._cache: ta.MutableMapping[type, ta.Any] = weakref.WeakKeyDictionary() if weak else {}
 
     @property
     def items(self) -> ta.Sequence[V]:
```
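The one-line change above is easy to miss: the old code always backed the lookup cache with a `weakref.WeakKeyDictionary`, even when `weak=False`, so cached entries could silently disappear once a key type lost its other references. A minimal standalone sketch of the corrected pattern (the `TypeKeyedCache` name is illustrative, not part of omlish):

```python
import typing as ta
import weakref


class TypeKeyedCache:
    """Caches per-type lookups; only uses weak keys when explicitly asked to."""

    def __init__(self, *, weak: bool = False) -> None:
        self._weak = bool(weak)
        # weak=False must use a plain dict, or entries vanish along with their key types
        self._cache: ta.MutableMapping[type, ta.Any] = weakref.WeakKeyDictionary() if weak else {}

    def get(self, ty: type) -> ta.Any:
        return self._cache.get(ty)

    def put(self, ty: type, value: ta.Any) -> None:
        self._cache[ty] = value
```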
omlish/configs/flattening.py
CHANGED
omlish/defs.py
CHANGED
```diff
@@ -199,7 +199,7 @@ def abstract_method(cls_dct, *names):
 
 @lang.cls_dct_fn()
 def abstract_property(cls_dct, *names):
-    return not_implemented(cls_dct, *names, wrapper=abc.abstractmethod)
+    return not_implemented(cls_dct, *names, wrapper=lambda o: property(abc.abstractmethod(o)))
 
 
 @lang.cls_dct_fn()
```
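The fix above matters because wrapping with a bare `abc.abstractmethod` produces an abstract method rather than an abstract property. Wrapping the result in `property(...)` is equivalent to the usual decorator stacking, as this small sketch (independent of omlish's `defs` helpers) shows:

```python
import abc


class Base(abc.ABC):
    # Equivalent to stacking @property on top of @abc.abstractmethod.
    name = property(abc.abstractmethod(lambda self: None))


class Impl(Base):
    @property
    def name(self) -> str:
        return 'impl'


# Base() raises TypeError (abstract class with abstract attribute 'name').
print(Impl().name)  # -> 'impl'
```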
omlish/diag/_pycharm/runhack.py
CHANGED
```diff
@@ -3,7 +3,10 @@
 .venv/bin/python $(curl -LsSf https://raw.githubusercontent.com/wrmsr/omlish/master/omlish/diag/_pycharm/runhack.py -o $(mktemp) && echo "$_") install
 
 ==
+TODO:
+ - check for existing files - can't run regular dep entrypoints now
 
+==
 See:
  - https://github.com/JetBrains/intellij-community/blob/6400f70dde6f743e39a257a5a78cc51b644c835e/python/helpers/pycharm/_jb_pytest_runner.py
  - https://github.com/JetBrains/intellij-community/blob/5a4e584aa59767f2e7cf4bd377adfaaf7503984b/python/helpers/pycharm/_jb_runner_tools.py
@@ -1278,7 +1281,7 @@ class HackRunner:
     def run(self) -> None:
         # breakpoint()
 
-        env = self._env()
+        env = self._env()  # type: RunEnv
         self._debug(env.as_json())
 
         if not self._is_enabled:
@@ -1292,6 +1295,10 @@ class HackRunner:
             self._debug('not pycharm hosted')
             return
 
+        if len(env.orig_argv) < 2:
+            self._debug('no enough interpreter arguments')
+            return
+
         exe = self._exe()
         dec = self._decider().decide(exe.target)
         if dec is None:
@@ -1301,7 +1308,6 @@ class HackRunner:
         self._debug(dec.as_json())
         self._apply(dec)
 
-
 ##
 
 
```
omlish/diag/procfs.py
CHANGED
```diff
@@ -13,8 +13,8 @@ import typing as ta
 
 from .. import iterators as it
 from .. import lang
-from .. import os as oos
 from ..formats import json
+from ..os.sizes import PAGE_SIZE
 from .procstats import ProcStats
 
 
@@ -239,8 +239,8 @@ def get_process_range_pagemaps(start: int, end: int, pid: PidLike = 'self') -> t
     """https://www.kernel.org/doc/Documentation/vm/pagemap.txt"""
 
     _check_linux()
-    offset = (start //
-    npages = ((end - start) //
+    offset = (start // PAGE_SIZE) * 8
+    npages = ((end - start) // PAGE_SIZE)
     size = npages * 8
     with open(f'/proc/{pid}/pagemap', 'rb') as pagemap_file:
         pagemap_file.seek(offset)
@@ -251,7 +251,7 @@ def get_process_range_pagemaps(start: int, end: int, pid: PidLike = 'self') -> t
         for pagenum in range(npages):
             [packed] = _struct_unpack('Q', pagemap_buf[pagenum * 8:(pagenum + 1) * 8])
             yield {
-                'address': start + (pagenum *
+                'address': start + (pagenum * PAGE_SIZE),
                 'pfn': (packed & ((1 << (54 + 1)) - 1)),
                 'swap_type': (packed & ((1 << (4 + 1)) - 1)),
                 'swap_offset': (packed & ((1 << (54 + 1)) - 1)) >> 5,
@@ -280,7 +280,7 @@ def _dump_cmd(args: ta.Any) -> None:
         sys.stdout.write('\n')
         for pm in get_process_range_pagemaps(m['address'], m['end_address'], args.pid):
             if pm['pte_soft_dirty']:
-                dirty_total +=
+                dirty_total += PAGE_SIZE
             sys.stdout.write(json.dumps({'page': tuple(pm[k] for k in PAGEMAP_KEYS)}))
             sys.stdout.write('\n')
     dct = {
@@ -326,11 +326,11 @@ def _cmp_cmd(args: ta.Any) -> None:
     r_pages += c_pages
     dct = {
         'l_pages': l_pages,
-        'l_bytes': l_pages *
+        'l_bytes': l_pages * PAGE_SIZE,
         'r_pages': r_pages,
-        'r_bytes': r_pages *
+        'r_bytes': r_pages * PAGE_SIZE,
         'c_pages': c_pages,
-        'c_bytes': c_pages *
+        'c_bytes': c_pages * PAGE_SIZE,
     }
     sys.stdout.write(json.dumps(dct))
     sys.stdout.write('\n')
```
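For context on the arithmetic restored above: each `/proc/<pid>/pagemap` entry is a 64-bit (8-byte) record per virtual page, so an address range maps to a byte offset and read length as in this standalone sketch (using `mmap.PAGESIZE` here rather than `omlish.os.sizes.PAGE_SIZE`):

```python
import mmap

PAGE_SIZE = mmap.PAGESIZE  # typically 4096


def pagemap_read_params(start: int, end: int) -> tuple[int, int]:
    """Returns (byte offset to seek to, number of bytes to read) in /proc/<pid>/pagemap."""
    offset = (start // PAGE_SIZE) * 8      # 8 bytes per page entry
    npages = (end - start) // PAGE_SIZE    # one entry per virtual page
    return offset, npages * 8


print(pagemap_read_params(0x7f0000000000, 0x7f0000004000))  # a four-page range
```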
omlish/docker/__init__.py
CHANGED
```diff
@@ -1,36 +0,0 @@
-from .cli import (  # noqa
-    Inspect,
-    Port,
-    PsItem,
-    cli_inspect,
-    cli_ps,
-    has_cli,
-    parse_port,
-)
-
-from .compose import (  # noqa
-    ComposeConfig,
-    get_compose_port,
-)
-
-from .helpers import (  # noqa
-    DOCKER_FOR_MAC_HOSTNAME,
-    DOCKER_HOST_PLATFORM_KEY,
-    get_docker_host_platform,
-    timebomb_payload,
-)
-
-from .hub import (  # noqa
-    HubRepoInfo,
-    get_hub_repo_info,
-    select_latest_tag,
-    split_tag_suffix,
-)
-
-
-##
-
-
-from ..lite.docker import (  # noqa
-    is_likely_in_docker,
-)
```
omlish/docker/all.py
ADDED
```diff
@@ -0,0 +1,31 @@
+from .cli import (  # noqa
+    Inspect,
+    Port,
+    PsItem,
+    cli_inspect,
+    cli_ps,
+    has_cli,
+    parse_port,
+)
+
+from .compose import (  # noqa
+    ComposeConfig,
+    get_compose_port,
+)
+
+from .detect import (  # noqa
+    DOCKER_HOST_PLATFORM_KEY,
+    get_docker_host_platform,
+    is_likely_in_docker,
+)
+
+from .hub import (  # noqa
+    HubRepoInfo,
+    get_hub_repo_info,
+    select_latest_tag,
+    split_tag_suffix,
+)
+
+from .timebomb import (  # noqa
+    timebomb_payload,
+)
```
omlish/docker/consts.py
ADDED
omlish/{lite/docker.py → docker/detect.py}
RENAMED
```diff
@@ -1,5 +1,23 @@
+# ruff: noqa: UP006 UP007
+# @omlish-lite
+import os
 import re
 import sys
+import typing as ta
+
+
+##
+
+
+# Set by pyproject, docker-dev script
+DOCKER_HOST_PLATFORM_KEY = 'DOCKER_HOST_PLATFORM'
+
+
+def get_docker_host_platform() -> ta.Optional[str]:
+    return os.environ.get(DOCKER_HOST_PLATFORM_KEY)
+
+
+##
 
 
 _LIKELY_IN_DOCKER_PATTERN = re.compile(r'^overlay / .*/(docker|desktop-containerd)/')
```
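A brief usage sketch of the relocated detection helpers, assuming only the names visible in the diff above and that `is_likely_in_docker` is a no-argument boolean check (`omlish.docker.all` re-exports the same names):

```python
from omlish.docker.detect import get_docker_host_platform
from omlish.docker.detect import is_likely_in_docker

if is_likely_in_docker():
    print('probably running inside a Docker container')

# DOCKER_HOST_PLATFORM is set by the project's pyproject/docker-dev tooling, if at all.
print('host platform override:', get_docker_host_platform())
```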
omlish/docker/{helpers.py → timebomb.py}
RENAMED
```diff
@@ -1,10 +1,6 @@
-import os
 import shlex
 
 
-##
-
-
 _DEFAULT_TIMEBOMB_NAME = '-'.join([*__name__.split('.'), 'timebomb'])
 
 
@@ -16,20 +12,3 @@ def timebomb_payload(delay_s: float, name: str = _DEFAULT_TIMEBOMB_NAME) -> str:
         'sh -c \'killall5 -9 -o $PPID -o $$ ; kill 1\''
         ') &'
     )
-
-
-##
-
-
-DOCKER_FOR_MAC_HOSTNAME = 'docker.for.mac.localhost'
-
-
-##
-
-
-# Set by pyproject, docker-dev script
-DOCKER_HOST_PLATFORM_KEY = 'DOCKER_HOST_PLATFORM'
-
-
-def get_docker_host_platform() -> str | None:
-    return os.environ.get(DOCKER_HOST_PLATFORM_KEY)
```
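A small usage sketch of the relocated timebomb helper; the signature comes from the hunk header above, and the result is a backgrounded shell snippet meant to be appended to a container command:

```python
from omlish.docker.timebomb import timebomb_payload

# Shell snippet that force-kills the container's processes after one hour.
payload = timebomb_payload(3600.0)
print(payload)
```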
omlish/formats/cbor.py
ADDED
```diff
@@ -0,0 +1,31 @@
+import typing as ta
+
+from .. import lang
+from .codecs import make_bytes_object_codec
+from .codecs import make_object_lazy_loaded_codec
+
+
+if ta.TYPE_CHECKING:
+    import cbor2
+else:
+    cbor2 = lang.proxy_import('cbor2')
+
+
+##
+
+
+def dump(obj: ta.Any) -> bytes:
+    return cbor2.dumps(obj)
+
+
+def load(s: bytes) -> ta.Any:
+    return cbor2.loads(s)
+
+
+##
+
+
+CBOR_CODEC = make_bytes_object_codec('cbor', dump, load)
+
+# @omlish-manifest
+_CBOR_LAZY_CODEC = make_object_lazy_loaded_codec(__name__, 'CBOR_CODEC', CBOR_CODEC)
```
omlish/formats/cloudpickle.py
ADDED
```diff
@@ -0,0 +1,31 @@
+import typing as ta
+
+from .. import lang
+from .codecs import make_bytes_object_codec
+from .codecs import make_object_lazy_loaded_codec
+
+
+if ta.TYPE_CHECKING:
+    import cloudpickle
+else:
+    cloudpickle = lang.proxy_import('cloudpickle')
+
+
+##
+
+
+def dump(obj: ta.Any) -> bytes:
+    return cloudpickle.dumps(obj)
+
+
+def load(s: bytes) -> ta.Any:
+    return cloudpickle.loads(s)
+
+
+##
+
+
+CLOUDPICKLE_CODEC = make_bytes_object_codec('cloudpickle', dump, load)
+
+# @omlish-manifest
+_CLOUDPICKLE_LAZY_CODEC = make_object_lazy_loaded_codec(__name__, 'CLOUDPICKLE_CODEC', CLOUDPICKLE_CODEC)
```
omlish/formats/codecs.py
ADDED
```diff
@@ -0,0 +1,93 @@
+import typing as ta
+
+from .. import codecs
+from .. import reflect as rfl
+
+
+ObjectCodecT = ta.TypeVar('ObjectCodecT', bound='ObjectCodec')
+
+
+##
+
+
+class ObjectCodec(codecs.Codec):
+    pass
+
+
+def make_object_codec(
+        cls: type[ObjectCodecT],
+        name: str,
+        dumps: ta.Callable,
+        loads: ta.Callable,
+        *,
+        input: rfl.Type = rfl.type_(ta.Any),  # noqa
+        aliases: ta.Collection[str] | None = None,
+) -> ObjectCodecT:
+    return cls(
+        name=name,
+        aliases=aliases,
+
+        input=input,
+        output=bytes,
+
+        new=lambda: codecs.FnPairEagerCodec.of(dumps, loads),
+    )
+
+
+##
+
+
+class BytesObjectCodec(ObjectCodec):
+    pass
+
+
+def make_bytes_object_codec(
+        name: str,
+        dumps: ta.Callable[[ta.Any], bytes],
+        loads: ta.Callable[[bytes], ta.Any],
+        **kwargs: ta.Any,
+) -> BytesObjectCodec:
+    return make_object_codec(
+        BytesObjectCodec,
+        name,
+        dumps,
+        loads,
+        **kwargs,
+    )
+
+
+##
+
+
+class StrObjectCodec(ObjectCodec):
+    pass
+
+
+def make_str_object_codec(
+        name: str,
+        dumps: ta.Callable[[ta.Any], str],
+        loads: ta.Callable[[str], ta.Any],
+        **kwargs: ta.Any,
+) -> StrObjectCodec:
+    return make_object_codec(
+        StrObjectCodec,
+        name,
+        dumps,
+        loads,
+        **kwargs,
+    )
+
+
+##
+
+
+def make_object_lazy_loaded_codec(
+        mod_name: str,
+        attr_name: str,
+        codec: ObjectCodec,
+) -> codecs.LazyLoadedCodec:
+    return codecs.LazyLoadedCodec.new(
+        mod_name,
+        attr_name,
+        codec,
+    )
```
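The helpers added above are the common factory layer behind the new format modules in this release (`cbor.py`, `cloudpickle.py`, `json/codecs.py`, ...). A hedged sketch of defining a codec the same way, with a deliberately toy format (the `csv-ish` name and the dump/load functions are illustrative only):

```python
import typing as ta

from omlish.formats.codecs import make_str_object_codec


def _dumps(obj: ta.Any) -> str:
    return ','.join(str(x) for x in obj)


def _loads(s: str) -> ta.Any:
    return s.split(',')


# Mirrors e.g. JSON_CODEC = make_str_object_codec('json', dumps, loads) below.
CSVISH_CODEC = make_str_object_codec('csv-ish', _dumps, _loads)
```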
omlish/formats/json/codecs.py
ADDED
```diff
@@ -0,0 +1,29 @@
+from ..codecs import make_object_lazy_loaded_codec
+from ..codecs import make_str_object_codec
+from .json import dumps
+from .json import dumps_compact
+from .json import dumps_pretty
+from .json import loads
+
+
+##
+
+
+JSON_CODEC = make_str_object_codec('json', dumps, loads)
+
+# @omlish-manifest
+_JSON_LAZY_CODEC = make_object_lazy_loaded_codec(__name__, 'JSON_CODEC', JSON_CODEC)
+
+#
+
+JSON_COMPACT_CODEC = make_str_object_codec('json-compact', dumps_compact, loads)
+
+# @omlish-manifest
+_JSON_COMPACT_LAZY_CODEC = make_object_lazy_loaded_codec(__name__, 'JSON_COMPACT_CODEC', JSON_COMPACT_CODEC)
+
+#
+
+JSON_PRETTY_CODEC = make_str_object_codec('json-pretty', dumps_pretty, loads)
+
+# @omlish-manifest
+_JSON_PRETTY_LAZY_CODEC = make_object_lazy_loaded_codec(__name__, 'JSON_PRETTY_CODEC', JSON_PRETTY_CODEC)
```
omlish/formats/json/stream/lex.py
CHANGED
```diff
@@ -3,6 +3,7 @@ TODO:
  - max buf size
  - max recursion depth
  - mark start pos of tokens, currently returning end
+ - _do_string inner loop optimization somehow
 """
 import dataclasses as dc
 import io
@@ -11,7 +12,8 @@ import re
 import typing as ta
 
 from .... import check
-from ....genmachine import GenMachine
+from ....funcs.genmachine import GenMachine
+from .errors import JsonStreamError
 
 
 ##
@@ -95,7 +97,7 @@ CONST_TOKENS: ta.Mapping[str, tuple[TokenKind, str | float | None]] = {
 
 
 @dc.dataclass()
-class
+class JsonStreamLexError(JsonStreamError):
     message: str
 
     pos: Position
@@ -160,8 +162,8 @@ class JsonStreamLexer(GenMachine[str, Token]):
         self._buf.truncate()
         return raw
 
-    def _raise(self, msg: str) -> ta.NoReturn:
-        raise
+    def _raise(self, msg: str, src: Exception | None = None) -> ta.NoReturn:
+        raise JsonStreamLexError(msg, self.pos) from src
 
     def _do_main(self):
         while True:
@@ -202,7 +204,7 @@ class JsonStreamLexer(GenMachine[str, Token]):
                 self._raise('Unexpected end of input')
 
             if not c:
-
+                self._raise(f'Unterminated string literal: {self._buf.getvalue()}')
 
             self._buf.write(c)
             if c == '"' and last != '\\':
@@ -210,7 +212,11 @@ class JsonStreamLexer(GenMachine[str, Token]):
             last = c
 
         raw = self._flip_buf()
-
+        try:
+            sv = json.loads(raw)
+        except json.JSONDecodeError as e:
+            self._raise(f'Invalid string literal: {raw!r}', e)
+
         yield self._make_tok('STRING', sv, raw, pos)
 
         return self._do_main()
```
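The string handling added above leans on the standard library for the fiddly part: once a complete quote-delimited literal has been buffered, `json.loads` both validates and unescapes it, and a `json.JSONDecodeError` is rethrown as a lexer error. A standalone sketch of just that step (using a plain `ValueError` in place of `JsonStreamLexError`):

```python
import json


def decode_string_literal(raw: str) -> str:
    """Decodes a raw, quote-delimited JSON string literal (e.g. '"a\\nb"')."""
    try:
        return json.loads(raw)
    except json.JSONDecodeError as e:
        raise ValueError(f'Invalid string literal: {raw!r}') from e


print(decode_string_literal('"hello\\u0020world"'))  # -> 'hello world'
```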