omlish 0.0.0.dev4__py3-none-any.whl → 0.0.0.dev6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release has been flagged as potentially problematic; consult the registry's advisory page for this version of omlish for details.
- omlish/__about__.py +1 -1
- omlish/__init__.py +1 -1
- omlish/asyncs/__init__.py +10 -4
- omlish/asyncs/anyio.py +142 -12
- omlish/asyncs/asyncio.py +23 -0
- omlish/asyncs/asyncs.py +9 -6
- omlish/asyncs/bridge.py +316 -0
- omlish/asyncs/flavors.py +27 -1
- omlish/asyncs/trio_asyncio.py +28 -18
- omlish/c3.py +1 -1
- omlish/cached.py +1 -2
- omlish/collections/__init__.py +5 -1
- omlish/collections/cache/impl.py +1 -1
- omlish/collections/identity.py +7 -0
- omlish/collections/indexed.py +1 -1
- omlish/collections/utils.py +38 -6
- omlish/configs/__init__.py +5 -0
- omlish/configs/classes.py +53 -0
- omlish/configs/strings.py +94 -0
- omlish/dataclasses/__init__.py +9 -0
- omlish/dataclasses/impl/api.py +1 -1
- omlish/dataclasses/impl/as_.py +1 -1
- omlish/dataclasses/impl/copy.py +30 -0
- omlish/dataclasses/impl/exceptions.py +6 -0
- omlish/dataclasses/impl/fields.py +25 -25
- omlish/dataclasses/impl/init.py +5 -3
- omlish/dataclasses/impl/main.py +3 -0
- omlish/dataclasses/impl/metaclass.py +6 -1
- omlish/dataclasses/impl/order.py +1 -1
- omlish/dataclasses/impl/reflect.py +15 -2
- omlish/dataclasses/utils.py +44 -0
- omlish/defs.py +1 -1
- omlish/diag/__init__.py +4 -0
- omlish/diag/procfs.py +31 -3
- omlish/diag/procstats.py +32 -0
- omlish/{testing → diag}/pydevd.py +35 -0
- omlish/diag/replserver/console.py +3 -3
- omlish/diag/replserver/server.py +6 -5
- omlish/diag/threads.py +86 -0
- omlish/dispatch/_dispatch2.py +65 -0
- omlish/dispatch/_dispatch3.py +104 -0
- omlish/docker.py +20 -1
- omlish/fnpairs.py +37 -18
- omlish/graphs/dags.py +113 -0
- omlish/graphs/domination.py +268 -0
- omlish/graphs/trees.py +2 -2
- omlish/http/__init__.py +25 -0
- omlish/http/asgi.py +132 -0
- omlish/http/collections.py +15 -0
- omlish/http/consts.py +47 -5
- omlish/http/cookies.py +194 -0
- omlish/http/dates.py +70 -0
- omlish/http/encodings.py +6 -0
- omlish/http/json.py +273 -0
- omlish/http/sessions.py +204 -0
- omlish/inject/__init__.py +51 -17
- omlish/inject/binder.py +185 -5
- omlish/inject/bindings.py +3 -36
- omlish/inject/eagers.py +2 -8
- omlish/inject/elements.py +30 -9
- omlish/inject/exceptions.py +3 -3
- omlish/inject/impl/elements.py +65 -31
- omlish/inject/impl/injector.py +20 -2
- omlish/inject/impl/inspect.py +33 -5
- omlish/inject/impl/multis.py +74 -0
- omlish/inject/impl/origins.py +75 -0
- omlish/inject/impl/{private.py → privates.py} +2 -2
- omlish/inject/impl/providers.py +19 -39
- omlish/inject/{proxy.py → impl/proxy.py} +2 -2
- omlish/inject/impl/scopes.py +7 -2
- omlish/inject/injector.py +9 -4
- omlish/inject/inspect.py +18 -0
- omlish/inject/keys.py +11 -23
- omlish/inject/listeners.py +26 -0
- omlish/inject/managed.py +76 -10
- omlish/inject/multis.py +120 -0
- omlish/inject/origins.py +27 -0
- omlish/inject/overrides.py +5 -4
- omlish/inject/{private.py → privates.py} +6 -10
- omlish/inject/providers.py +12 -85
- omlish/inject/scopes.py +20 -9
- omlish/inject/types.py +2 -8
- omlish/iterators.py +13 -0
- omlish/lang/__init__.py +12 -2
- omlish/lang/cached.py +2 -2
- omlish/lang/classes/restrict.py +3 -2
- omlish/lang/classes/simple.py +18 -8
- omlish/lang/classes/virtual.py +2 -2
- omlish/lang/contextmanagers.py +75 -2
- omlish/lang/datetimes.py +6 -5
- omlish/lang/descriptors.py +131 -0
- omlish/lang/functions.py +18 -28
- omlish/lang/imports.py +11 -2
- omlish/lang/iterables.py +20 -1
- omlish/lang/typing.py +6 -0
- omlish/lifecycles/__init__.py +34 -0
- omlish/lifecycles/abstract.py +43 -0
- omlish/lifecycles/base.py +51 -0
- omlish/lifecycles/contextmanagers.py +74 -0
- omlish/lifecycles/controller.py +116 -0
- omlish/lifecycles/manager.py +161 -0
- omlish/lifecycles/states.py +43 -0
- omlish/lifecycles/transitions.py +64 -0
- omlish/logs/formatters.py +1 -1
- omlish/logs/utils.py +1 -1
- omlish/marshal/__init__.py +4 -0
- omlish/marshal/datetimes.py +1 -1
- omlish/marshal/naming.py +4 -0
- omlish/marshal/objects.py +1 -0
- omlish/marshal/polymorphism.py +4 -4
- omlish/reflect.py +139 -18
- omlish/secrets/__init__.py +7 -0
- omlish/secrets/marshal.py +41 -0
- omlish/secrets/passwords.py +120 -0
- omlish/secrets/secrets.py +47 -0
- omlish/serde/__init__.py +0 -0
- omlish/serde/dotenv.py +574 -0
- omlish/{json.py → serde/json.py} +4 -2
- omlish/serde/props.py +604 -0
- omlish/serde/yaml.py +223 -0
- omlish/sql/dbs.py +1 -1
- omlish/sql/duckdb.py +136 -0
- omlish/sql/sqlean.py +17 -0
- omlish/sync.py +70 -0
- omlish/term.py +7 -2
- omlish/testing/pytest/__init__.py +8 -2
- omlish/testing/pytest/helpers.py +0 -24
- omlish/testing/pytest/inject/harness.py +4 -4
- omlish/testing/pytest/marks.py +45 -0
- omlish/testing/pytest/plugins/__init__.py +3 -0
- omlish/testing/pytest/plugins/asyncs.py +136 -0
- omlish/testing/pytest/plugins/managermarks.py +60 -0
- omlish/testing/pytest/plugins/pydevd.py +1 -1
- omlish/testing/testing.py +10 -0
- omlish/text/delimit.py +4 -0
- omlish/text/glyphsplit.py +92 -0
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev6.dist-info}/METADATA +1 -1
- omlish-0.0.0.dev6.dist-info/RECORD +240 -0
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev6.dist-info}/WHEEL +1 -1
- omlish/configs/props.py +0 -64
- omlish-0.0.0.dev4.dist-info/RECORD +0 -195
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev6.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev6.dist-info}/top_level.txt +0 -0
omlish/fnpairs.py
CHANGED
|
@@ -18,31 +18,36 @@ import typing as ta
|
|
|
18
18
|
|
|
19
19
|
from . import lang
|
|
20
20
|
|
|
21
|
+
|
|
21
22
|
if ta.TYPE_CHECKING:
|
|
22
23
|
import bz2 as _bz2
|
|
23
|
-
import cloudpickle as _cloudpickle
|
|
24
24
|
import gzip as _gzip
|
|
25
25
|
import json as _json
|
|
26
|
-
import lz4.frame as _lz4_frame
|
|
27
26
|
import lzma as _lzma
|
|
28
27
|
import pickle as _pickle
|
|
29
|
-
import snappy as _snappy
|
|
30
28
|
import struct as _struct
|
|
31
29
|
import tomllib as _tomllib
|
|
30
|
+
|
|
31
|
+
import cloudpickle as _cloudpickle
|
|
32
|
+
import json5 as _json5
|
|
33
|
+
import lz4.frame as _lz4_frame
|
|
34
|
+
import snappy as _snappy
|
|
32
35
|
import yaml as _yaml
|
|
33
36
|
import zstd as _zstd
|
|
34
37
|
|
|
35
38
|
else:
|
|
36
39
|
_bz2 = lang.proxy_import('bz2')
|
|
37
|
-
_cloudpickle = lang.proxy_import('cloudpickle')
|
|
38
40
|
_gzip = lang.proxy_import('gzip')
|
|
39
41
|
_json = lang.proxy_import('json')
|
|
40
|
-
_lz4_frame = lang.proxy_import('lz4.frame')
|
|
41
42
|
_lzma = lang.proxy_import('lzma')
|
|
42
43
|
_pickle = lang.proxy_import('pickle')
|
|
43
|
-
_snappy = lang.proxy_import('snappy')
|
|
44
44
|
_struct = lang.proxy_import('struct')
|
|
45
45
|
_tomllib = lang.proxy_import('tomllib')
|
|
46
|
+
|
|
47
|
+
_cloudpickle = lang.proxy_import('cloudpickle')
|
|
48
|
+
_json5 = lang.proxy_import('json5')
|
|
49
|
+
_lz4_frame = lang.proxy_import('lz4.frame')
|
|
50
|
+
_snappy = lang.proxy_import('snappy')
|
|
46
51
|
_yaml = lang.proxy_import('yaml')
|
|
47
52
|
_zstd = lang.proxy_import('zstd')
|
|
48
53
|
|
|
@@ -177,13 +182,13 @@ UTF8 = text('utf-8')
|
|
|
177
182
|
|
|
178
183
|
|
|
179
184
|
@dc.dataclass(frozen=True)
|
|
180
|
-
class Optional(FnPair[
|
|
185
|
+
class Optional(FnPair[F | None, T | None]):
|
|
181
186
|
fp: FnPair[F, T]
|
|
182
187
|
|
|
183
|
-
def forward(self, f:
|
|
188
|
+
def forward(self, f: F | None) -> T | None:
|
|
184
189
|
return None if f is None else self.fp.forward(f)
|
|
185
190
|
|
|
186
|
-
def backward(self, t:
|
|
191
|
+
def backward(self, t: T | None) -> F | None:
|
|
187
192
|
return None if t is None else self.fp.backward(t)
|
|
188
193
|
|
|
189
194
|
|
|
@@ -305,15 +310,20 @@ class Struct(FnPair[tuple, bytes]):
|
|
|
305
310
|
##
|
|
306
311
|
|
|
307
312
|
|
|
308
|
-
|
|
313
|
+
Object: ta.TypeAlias = FnPair[ta.Any, T]
|
|
314
|
+
ObjectStr: ta.TypeAlias = Object[str]
|
|
315
|
+
ObjectBytes: ta.TypeAlias = Object[bytes]
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
class Object_(FnPair[ta.Any, T], lang.Abstract): # noqa
|
|
309
319
|
pass
|
|
310
320
|
|
|
311
321
|
|
|
312
|
-
class
|
|
322
|
+
class ObjectStr_(Object_[str], lang.Abstract): # noqa
|
|
313
323
|
pass
|
|
314
324
|
|
|
315
325
|
|
|
316
|
-
class
|
|
326
|
+
class ObjectBytes_(Object_[bytes], lang.Abstract): # noqa
|
|
317
327
|
pass
|
|
318
328
|
|
|
319
329
|
|
|
@@ -322,7 +332,7 @@ class ObjectBytes(Object[bytes], lang.Abstract): # noqa
|
|
|
322
332
|
|
|
323
333
|
@_register_extension('pkl')
|
|
324
334
|
@dc.dataclass(frozen=True)
|
|
325
|
-
class Pickle(
|
|
335
|
+
class Pickle(ObjectBytes_):
|
|
326
336
|
protocol: int | None = None
|
|
327
337
|
|
|
328
338
|
def forward(self, f: ta.Any) -> bytes:
|
|
@@ -334,7 +344,7 @@ class Pickle(ObjectBytes):
|
|
|
334
344
|
|
|
335
345
|
@_register_extension('json')
|
|
336
346
|
@dc.dataclass(frozen=True)
|
|
337
|
-
class Json(
|
|
347
|
+
class Json(ObjectStr_):
|
|
338
348
|
indent: int | str | None = dc.field(default=None, kw_only=True)
|
|
339
349
|
separators: tuple[str, str] | None = dc.field(default=None, kw_only=True)
|
|
340
350
|
|
|
@@ -360,7 +370,7 @@ class JsonLines(FnPair[ta.Sequence[ta.Any], str]):
|
|
|
360
370
|
|
|
361
371
|
|
|
362
372
|
@_register_extension('toml')
|
|
363
|
-
class Toml(
|
|
373
|
+
class Toml(ObjectStr_):
|
|
364
374
|
def forward(self, f: ta.Any) -> str:
|
|
365
375
|
raise NotImplementedError
|
|
366
376
|
|
|
@@ -373,7 +383,7 @@ class Toml(ObjectStr):
|
|
|
373
383
|
|
|
374
384
|
@_register_extension('cpkl')
|
|
375
385
|
@dc.dataclass(frozen=True)
|
|
376
|
-
class Cloudpickle(
|
|
386
|
+
class Cloudpickle(ObjectBytes_):
|
|
377
387
|
protocol: int | None = None
|
|
378
388
|
|
|
379
389
|
def forward(self, f: ta.Any) -> bytes:
|
|
@@ -383,8 +393,17 @@ class Cloudpickle(ObjectBytes):
|
|
|
383
393
|
return _cloudpickle.loads(t)
|
|
384
394
|
|
|
385
395
|
|
|
396
|
+
@_register_extension('json5')
|
|
397
|
+
class Json5(ObjectStr_):
|
|
398
|
+
def forward(self, f: ta.Any) -> str:
|
|
399
|
+
return _json5.dumps(f)
|
|
400
|
+
|
|
401
|
+
def backward(self, t: str) -> ta.Any:
|
|
402
|
+
return _json5.loads(t)
|
|
403
|
+
|
|
404
|
+
|
|
386
405
|
@_register_extension('yml', 'yaml')
|
|
387
|
-
class Yaml(
|
|
406
|
+
class Yaml(ObjectStr_):
|
|
388
407
|
def forward(self, f: ta.Any) -> str:
|
|
389
408
|
return _yaml.dump(f)
|
|
390
409
|
|
|
@@ -392,7 +411,7 @@ class Yaml(ObjectStr):
|
|
|
392
411
|
return _yaml.safe_load(t)
|
|
393
412
|
|
|
394
413
|
|
|
395
|
-
class YamlUnsafe(
|
|
414
|
+
class YamlUnsafe(ObjectStr_):
|
|
396
415
|
def forward(self, f: ta.Any) -> str:
|
|
397
416
|
return _yaml.dump(f)
|
|
398
417
|
|
omlish/graphs/dags.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TODO:
|
|
3
|
+
- parser?
|
|
4
|
+
- js? viz.js, d3, visjs
|
|
5
|
+
- cycle detection
|
|
6
|
+
- networkx adapter
|
|
7
|
+
- https://docs.python.org/3.9/library/graphlib.html#module-graphlib
|
|
8
|
+
"""
|
|
9
|
+
import typing as ta
|
|
10
|
+
|
|
11
|
+
from .. import check
|
|
12
|
+
from .. import lang
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
K = ta.TypeVar('K')
|
|
16
|
+
V = ta.TypeVar('V')
|
|
17
|
+
T = ta.TypeVar('T')
|
|
18
|
+
U = ta.TypeVar('U')
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def traverse_links(data: ta.Mapping[T, ta.Iterable[T]], keys: ta.Iterable[T]) -> set[T]:
    """Return every node transitively reachable from *keys* through *data*, excluding the starting keys.

    Nodes absent from *data* are treated as having no outgoing links.
    """
    roots = set(keys)
    visited: set[T] = set()
    pending = set(roots)
    while pending:
        node = pending.pop()
        visited.add(node)
        for linked in data.get(node, []):
            if linked not in visited:
                pending.add(linked)
    return visited - roots
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def invert_set_map(src: ta.Mapping[K, ta.Iterable[V]]) -> dict[V, set[K]]:
    """Invert a key -> values mapping into value -> {keys that mapped to it}.

    Only values that actually occur appear as keys in the result.
    """
    out: dict[V, set[K]] = {}
    for key, vals in src.items():
        for val in vals:
            out.setdefault(val, set()).add(key)
    return out
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def invert_symmetric_set_map(src: ta.Mapping[T, ta.Iterable[T]]) -> dict[T, set[T]]:
    """Invert a mapping whose values are drawn from its own key domain.

    Every source key appears in the result (possibly mapped to an empty set).
    Raises KeyError if a value is not also a key of *src*.
    """
    out: dict[T, set[T]] = {key: set() for key in src}
    for key, vals in src.items():
        for val in vals:
            out[val].add(key)
    return out
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Dag(ta.Generic[T]):
    """A directed acyclic graph expressed as a mapping from each output node to the inputs it depends on."""

    def __init__(self, input_its_by_outputs: ta.Mapping[T, ta.Iterable[T]]) -> None:
        super().__init__()

        self._input_sets_by_output = {out: set(ins) for out, ins in input_its_by_outputs.items()}

    @property
    def input_sets_by_output(self) -> ta.Mapping[T, ta.AbstractSet[T]]:
        return self._input_sets_by_output

    @lang.cached_property
    def output_sets_by_input(self) -> ta.Mapping[T, ta.AbstractSet[T]]:
        # Inverted view: for each input, the set of outputs that consume it.
        return invert_symmetric_set_map(self._input_sets_by_output)

    def subdag(self, *args, **kwargs) -> 'Subdag[T]':
        """Build a Subdag of this dag; arguments are forwarded to the Subdag constructor."""
        return Subdag(self, *args, **kwargs)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class Subdag(ta.Generic[U]):
    """A slice of a Dag focused on a set of target nodes.

    Derived node sets (inputs, outputs, etc.) are computed lazily and exclude
    any *ignored* nodes.
    """

    def __init__(
            self,
            dag: 'Dag[U]',
            targets: ta.Iterable[U],
            *,
            ignored: ta.Iterable[U] | None = None,
    ) -> None:
        super().__init__()

        self._dag: Dag[U] = check.isinstance(dag, Dag)  # type: ignore
        self._targets = set(targets)
        # A node cannot be both targeted and ignored; targets win.
        self._ignored = set(ignored or []) - self._targets

    @property
    def dag(self) -> 'Dag[U]':
        return self._dag

    @property
    def targets(self) -> ta.AbstractSet[U]:
        return self._targets

    @property
    def ignored(self) -> ta.AbstractSet[U]:
        return self._ignored

    @lang.cached_property
    def inputs(self) -> ta.AbstractSet[U]:
        # Everything the targets transitively depend on.
        return traverse_links(self.dag.input_sets_by_output, self.targets) - self.ignored

    @lang.cached_property
    def outputs(self) -> ta.AbstractSet[U]:
        # Everything that transitively depends on the targets.
        return traverse_links(self.dag.output_sets_by_input, self.targets) - self.ignored

    @lang.cached_property
    def output_inputs(self) -> ta.AbstractSet[U]:
        # The dependencies needed to rebuild the dependents.
        return traverse_links(self.dag.input_sets_by_output, self.outputs) - self.ignored

    @lang.cached_property
    def all(self) -> ta.AbstractSet[U]:
        """The union of targets, their inputs, their outputs, and the outputs' inputs."""
        return self.targets | self.inputs | self.outputs | self.output_inputs
|
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import contextlib
|
|
3
|
+
import typing as ta
|
|
4
|
+
|
|
5
|
+
from .. import check
|
|
6
|
+
from .. import lang
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
V = ta.TypeVar('V')
|
|
10
|
+
MK = ta.TypeVar('MK')
|
|
11
|
+
MV = ta.TypeVar('MV')
|
|
12
|
+
SetMap = ta.Mapping[MK, ta.AbstractSet[MV]]
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DirectedGraph(ta.Generic[V], lang.Abstract):
    """The minimal graph interface needed by the dominator computation."""

    @abc.abstractmethod
    def get_successors(self, vertex: V) -> ta.Collection[V]:
        """Return the vertices directly reachable from *vertex*."""
        raise NotImplementedError

    @abc.abstractmethod
    def yield_depth_first(self, root: V) -> ta.Iterator[V]:
        """Yield vertices in a depth-first order starting at *root*."""
        raise NotImplementedError
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class ListDictDirectedGraph(DirectedGraph[V]):
    """A DirectedGraph backed by a dict of insertion-ordered, de-duplicated adjacency lists.

    Every vertex mentioned as a child must also appear as a parent key;
    duplicate parents are rejected.
    """

    def __init__(self, items: ta.Iterable[tuple[V, ta.Iterable[V]]]) -> None:
        super().__init__()

        lst_dct: dict[V, list[V]] = {}
        all_children = set()
        for parent, children in items:
            check.not_in(parent, lst_dct)
            lst = []
            seen = set()
            for child in children:
                # Preserve first-occurrence order while dropping duplicates.
                if child not in seen:
                    seen.add(child)
                    lst.append(child)
                    all_children.add(child)
            lst_dct[parent] = lst
        # Every referenced child must itself be a declared vertex.
        check.empty(all_children - set(lst_dct))
        self._lst_dct = lst_dct

    def get_successors(self, vertex: V) -> ta.Collection[V]:
        return self._lst_dct[vertex]

    def yield_depth_first(self, root: V) -> ta.Iterator[V]:
        """Yield each reachable vertex exactly once, in iterative DFS order."""
        stack: list[V] = [root]
        # Fix: mark the root as seen up front -- previously a back-edge to the
        # root would re-push and re-yield it.
        seen: set[V] = {root}
        while stack:
            cur = stack.pop()
            yield cur
            for child in self._lst_dct[cur]:
                if child not in seen:
                    seen.add(child)
                    stack.append(child)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class DominatorTree(ta.Generic[V]):
    """Dominator analysis of a rooted directed graph.

    All derived structures are computed lazily and cached; the immediate
    dominators come from a Lengauer-Tarjan style computation over a DFS of the
    graph from *root*.
    """

    def __init__(self, graph: DirectedGraph[V], root: V) -> None:
        super().__init__()

        self._graph = check.not_none(graph)
        self._root = check.not_none(root)
        # The root must at least be a vertex of the graph.
        check.not_none(self._graph.get_successors(root))
        self._dfs = _Dfs(graph, root)

    @lang.cached_property
    def immediate_dominators(self) -> ta.Mapping[V, V]:
        """Each non-root vertex mapped to its immediate dominator."""
        return _ImmediateDominanceComputer(self._dfs).immediate_dominators

    @lang.cached_property
    def dominator_tree(self) -> SetMap[V, V]:
        """Inverse of immediate_dominators: dominator -> vertices it immediately dominates."""
        tree: dict[V, set[V]] = {}
        for node, dom in self.immediate_dominators.items():
            tree.setdefault(dom, set()).add(node)
        return tree

    @lang.cached_property
    def deep_dominated(self) -> SetMap[V, V]:
        """Each vertex mapped to everything it transitively dominates; vertices dominating nothing are omitted."""
        seen: set[V] = set()
        ret: dict[V, set[V]] = {}

        def rec(node: V) -> ta.Collection[V]:
            check.not_in(node, seen)
            seen.add(node)
            # FIXME: pyrsistent
            st = set()
            for child in self.dominator_tree.get(node, []):
                st.add(child)
                st.update(rec(child))
            if st:
                ret[node] = st
            return st

        rec(self._root)
        return ret

    @lang.cached_property
    def dominance_frontiers(self) -> SetMap[V, V]:
        """Dominance frontiers (Cytron et al.); vertices with empty frontiers are omitted."""
        frontiers: dict[V, set[V]] = {}

        for x in self.reverse_topological_traversal:
            dfx = frontiers.setdefault(x, set())

            # DF-local: successors not immediately dominated by x.
            for y in self._graph.get_successors(x):
                if self.immediate_dominators[y] != x:
                    dfx.add(y)

            # DF-up: frontier members of dominated children, filtered the same way.
            for z in self.dominator_tree.get(x, []):
                for y in frontiers.get(z, []):
                    if self.immediate_dominators[y] != x:
                        dfx.add(y)

        return {k: v for k, v in frontiers.items() if v}

    @lang.cached_property
    def topological_traversal(self) -> list[V]:
        """DFS order adjusted so each vertex immediately follows its immediate dominator when present."""
        # FIXME: LinkedList
        order: list[V] = []

        for node in self._dfs.vertex:
            try:
                idx = order.index(self.immediate_dominators[node])
            except (KeyError, ValueError):
                order.append(node)
            else:
                order.insert(idx + 1, node)

        return order

    @lang.cached_property
    def reverse_topological_traversal(self) -> list[V]:
        return list(reversed(self.topological_traversal))
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
class _Dfs(ta.Generic[V]):
    """One depth-first pass over the graph, recording the bookkeeping Lengauer-Tarjan needs.

    - semi: vertex -> DFS preorder number (its initial 'semidominator')
    - vertex: vertices in preorder (index == preorder number)
    - parent: vertex -> its DFS tree parent
    - pred: vertex -> all graph predecessors
    - label: vertex -> itself initially (mutated later during path compression)
    """

    def __init__(self, graph: DirectedGraph[V], root: V) -> None:
        super().__init__()

        semi: dict[V, int] = {}
        vertex: list[V] = []
        parent: dict[V, V] = {}
        pred: dict[V, set[V]] = {}
        label: dict[V, V] = {}

        for node in graph.yield_depth_first(root):
            # NOTE(review): guards against a traversal yielding duplicates --
            # reconstructed nesting, confirm against upstream source.
            if node not in semi:
                vertex.append(node)

                check.not_in(node, semi)
                semi[node] = len(semi)
                check.not_in(node, label)
                label[node] = node

                for child in graph.get_successors(node):
                    pred.setdefault(child, set()).add(node)
                    if child not in semi:
                        check.not_in(child, parent)
                        parent[child] = node

        self._semi = semi
        self._vertex = vertex
        self._parent = parent
        self._pred = pred
        self._label = label

    @property
    def semi(self) -> dict[V, int]:
        return self._semi

    @property
    def vertex(self) -> list[V]:
        return self._vertex

    @property
    def parent(self) -> dict[V, V]:
        return self._parent

    @property
    def pred(self) -> dict[V, set[V]]:
        return self._pred

    @property
    def label(self) -> dict[V, V]:
        return self._label
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
class _ImmediateDominanceComputer(ta.Generic[V]):
    """Computes immediate dominators from a _Dfs using the (simple) Lengauer-Tarjan algorithm."""

    def __init__(self, dfs: _Dfs[V]) -> None:
        super().__init__()

        self._dfs: _Dfs[V] = check.isinstance(dfs, _Dfs)  # type: ignore

        # Mutable working state: forest links and per-vertex semi/label copies.
        self._ancestor: dict[V, V] = {}
        self._semi = dict(self._dfs.semi)
        self._label = dict(self._dfs.label)

    @lang.cached_property
    def immediate_dominators(self) -> ta.Mapping[V, V]:
        """Map each non-root vertex to its immediate dominator."""
        idom: dict[V, V] = {}
        bucket: dict[V, set[V]] = {}

        last_semi_number = len(self._semi) - 1

        # Pass 1: walk vertices in reverse preorder computing semidominators.
        for i in range(last_semi_number, 0, -1):
            w = self._dfs.vertex[i]
            p = self._dfs.parent[w]

            semidominator = self._semi[w]
            for v in self._dfs.pred.get(w, []):
                semidominator = min(semidominator, self._semi[self._eval(v)])

            self._semi[w] = semidominator
            bucket.setdefault(self._dfs.vertex[semidominator], set()).add(w)

            self._ancestor[w] = p

            # Implicitly define idom for everything whose semidominator is p.
            for v in bucket.get(p, []):
                u = self._eval(v)

                if self._semi[u] < self._semi[v]:
                    idom[v] = u
                else:
                    idom[v] = p

            with contextlib.suppress(KeyError):
                del bucket[p]

        # Pass 2: resolve the implicitly-defined dominators in preorder.
        for i in range(1, last_semi_number + 1):
            w = self._dfs.vertex[i]

            if idom[w] != self._dfs.vertex[self._semi[w]]:
                idom[w] = idom[idom[w]]

        return idom

    def _eval(self, v: V) -> V:
        """Return the minimum-semi label on the forest path to *v*, compressing the path."""
        self._compress(v)
        return self._label[v]

    def _compress(self, v: V) -> None:
        """Iterative path compression: relabel *v*'s forest path with minimum-semi labels."""
        worklist: list[V] = [v]

        a = self._ancestor.get(v)

        while a in self._ancestor:
            worklist.append(a)
            a = self._ancestor[a]

        ancestor = worklist.pop()
        least_semi = self._semi[self._label[ancestor]]

        while worklist:
            descendent = worklist.pop()
            current_semi = self._semi[self._label[descendent]]

            if current_semi > least_semi:
                self._label[descendent] = self._label[ancestor]
            else:
                least_semi = current_semi

            ancestor = descendent
|
omlish/graphs/trees.py
CHANGED
|
@@ -79,7 +79,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
|
|
|
79
79
|
nodes: list[NodeT] = []
|
|
80
80
|
node_set: ta.MutableSet[NodeT] = self._set_fac()
|
|
81
81
|
children_by_node: ta.MutableMapping[NodeT | None, ta.Sequence[NodeT]] = self._dict_fac()
|
|
82
|
-
child_sets_by_node: ta.MutableMapping[
|
|
82
|
+
child_sets_by_node: ta.MutableMapping[NodeT | None, ta.AbstractSet[NodeT]] = self._dict_fac()
|
|
83
83
|
parents_by_node: ta.MutableMapping[NodeT, NodeT | None] = self._dict_fac()
|
|
84
84
|
|
|
85
85
|
children_by_node[None] = [root]
|
|
@@ -190,7 +190,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
|
|
|
190
190
|
e: ta.Any
|
|
191
191
|
d: ta.Any
|
|
192
192
|
if identity:
|
|
193
|
-
e, d = id, col.
|
|
193
|
+
e, d = id, col.unique_map((id(n), n) for n, _ in pairs)
|
|
194
194
|
else:
|
|
195
195
|
e, d = lang.identity, lang.identity
|
|
196
196
|
tsd = {e(n): {e(p)} for n, p in parents_by_node.items()}
|
omlish/http/__init__.py
CHANGED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
from . import consts # noqa
|
|
2
|
+
|
|
3
|
+
from .cookies import ( # noqa
|
|
4
|
+
CookieTooBigError,
|
|
5
|
+
dump_cookie,
|
|
6
|
+
parse_cookie,
|
|
7
|
+
)
|
|
8
|
+
|
|
9
|
+
from .dates import ( # noqa
|
|
10
|
+
http_date,
|
|
11
|
+
parse_date,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
from .encodings import ( # noqa
|
|
15
|
+
latin1_decode,
|
|
16
|
+
latin1_encode,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
from .json import ( # noqa
|
|
20
|
+
JSON_TAGGER,
|
|
21
|
+
JsonTag,
|
|
22
|
+
JsonTagger,
|
|
23
|
+
json_dumps,
|
|
24
|
+
json_loads,
|
|
25
|
+
)
|
omlish/http/asgi.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import logging
|
|
3
|
+
import typing as ta
|
|
4
|
+
import urllib.parse
|
|
5
|
+
|
|
6
|
+
from .. import check
|
|
7
|
+
from . import consts
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
log = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
##
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
AsgiScope: ta.TypeAlias = ta.Mapping[str, ta.Any]
|
|
17
|
+
AsgiMessage: ta.TypeAlias = ta.Mapping[str, ta.Any]
|
|
18
|
+
AsgiRecv: ta.TypeAlias = ta.Callable[[], ta.Awaitable[AsgiMessage]]
|
|
19
|
+
AsgiSend: ta.TypeAlias = ta.Callable[[AsgiMessage], ta.Awaitable[None]]
|
|
20
|
+
AsgiApp: ta.TypeAlias = ta.Callable[[AsgiScope, AsgiRecv, AsgiSend], ta.Awaitable[None]]
|
|
21
|
+
AsgiWrapper: ta.TypeAlias = ta.Callable[[AsgiApp, AsgiScope, AsgiRecv, AsgiSend], ta.Awaitable[None]]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class AsgiApp_(abc.ABC): # noqa
|
|
25
|
+
@abc.abstractmethod
|
|
26
|
+
async def __call__(self, scope: AsgiScope, recv: AsgiRecv, send: AsgiSend) -> None:
|
|
27
|
+
raise NotImplementedError
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
##
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
async def stub_lifespan(scope: AsgiScope, recv: AsgiRecv, send: AsgiSend, *, verbose: bool = False) -> None:
|
|
34
|
+
while True:
|
|
35
|
+
message = await recv()
|
|
36
|
+
if message['type'] == 'lifespan.startup':
|
|
37
|
+
if verbose:
|
|
38
|
+
log.info('Lifespan starting up')
|
|
39
|
+
await send({'type': 'lifespan.startup.complete'})
|
|
40
|
+
|
|
41
|
+
elif message['type'] == 'lifespan.shutdown':
|
|
42
|
+
if verbose:
|
|
43
|
+
log.info('Lifespan shutting down')
|
|
44
|
+
await send({'type': 'lifespan.shutdown.complete'})
|
|
45
|
+
return
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
##
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
async def start_response(
        send: AsgiSend,
        status: int,
        content_type: bytes = consts.CONTENT_TYPE_TEXT_UTF8,
        headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
) -> None:
    """Send the 'http.response.start' message with a content-type plus any extra headers."""
    header_list: list[tuple[bytes, bytes]] = [(b'content-type', content_type)]
    if headers:
        header_list.extend(headers)
    await send({
        'type': 'http.response.start',
        'status': status,
        'headers': header_list,
    })
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
async def finish_response(
|
|
68
|
+
send: AsgiSend,
|
|
69
|
+
body: bytes = b'',
|
|
70
|
+
) -> None:
|
|
71
|
+
await send({
|
|
72
|
+
'type': 'http.response.body',
|
|
73
|
+
'body': body,
|
|
74
|
+
})
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
async def send_response(
        send: AsgiSend,
        status: int,
        content_type: bytes = consts.CONTENT_TYPE_TEXT_UTF8,
        headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
        body: bytes = b'',
) -> None:
    """Convenience wrapper: start a response and immediately finish it with *body*."""
    await start_response(send, status=status, content_type=content_type, headers=headers)
    await finish_response(send, body=body)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
async def redirect_response(
        send: AsgiSend,
        url: str,
        headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
) -> None:
    """Send a complete 302 redirect to *url*, logging the destination."""
    log.info('Redirecting to %s', url)

    redirect_headers: list[tuple[bytes, bytes]] = [
        (b'content-type', consts.CONTENT_TYPE_TEXT_UTF8),
        (b'location', url.encode()),
    ]
    if headers:
        redirect_headers.extend(headers)

    await send({
        'type': 'http.response.start',
        'status': 302,
        'headers': redirect_headers,
    })
    await send({
        'type': 'http.response.body',
        'body': b'',
    })
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
##
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
async def read_body(recv: AsgiRecv) -> bytes:
|
|
120
|
+
body = b''
|
|
121
|
+
more_body = True
|
|
122
|
+
while more_body:
|
|
123
|
+
message = await recv()
|
|
124
|
+
body += message.get('body', b'')
|
|
125
|
+
more_body = message.get('more_body', False)
|
|
126
|
+
return body
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
async def read_form_body(recv: AsgiRecv) -> dict[bytes, bytes]:
    """Read the full request body and parse it as urlencoded form data, requiring exactly one value per key."""
    raw = await read_body(recv)
    parsed = urllib.parse.parse_qs(raw)  # noqa
    return {key: check.single(vals) for key, vals in parsed.items()}
|