omlish 0.0.0.dev3__py3-none-any.whl → 0.0.0.dev5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of omlish has been flagged by the registry; see the release's advisory page for details.
- omlish/__about__.py +1 -1
- omlish/__init__.py +8 -0
- omlish/asyncs/__init__.py +18 -0
- omlish/asyncs/anyio.py +66 -0
- omlish/asyncs/flavors.py +227 -0
- omlish/asyncs/trio_asyncio.py +47 -0
- omlish/c3.py +1 -1
- omlish/cached.py +1 -2
- omlish/collections/__init__.py +4 -1
- omlish/collections/cache/impl.py +1 -1
- omlish/collections/indexed.py +1 -1
- omlish/collections/utils.py +38 -6
- omlish/configs/__init__.py +5 -0
- omlish/configs/classes.py +53 -0
- omlish/configs/dotenv.py +586 -0
- omlish/configs/props.py +589 -49
- omlish/dataclasses/impl/api.py +1 -1
- omlish/dataclasses/impl/as_.py +1 -1
- omlish/dataclasses/impl/fields.py +1 -0
- omlish/dataclasses/impl/init.py +1 -1
- omlish/dataclasses/impl/main.py +1 -0
- omlish/dataclasses/impl/metaclass.py +6 -1
- omlish/dataclasses/impl/order.py +1 -1
- omlish/dataclasses/impl/reflect.py +15 -2
- omlish/defs.py +1 -1
- omlish/diag/procfs.py +29 -1
- omlish/diag/procstats.py +32 -0
- omlish/diag/replserver/console.py +3 -3
- omlish/diag/replserver/server.py +6 -5
- omlish/diag/threads.py +86 -0
- omlish/docker.py +19 -0
- omlish/dynamic.py +2 -2
- omlish/fnpairs.py +121 -24
- omlish/graphs/dags.py +113 -0
- omlish/graphs/domination.py +268 -0
- omlish/graphs/trees.py +2 -2
- omlish/http/__init__.py +25 -0
- omlish/http/asgi.py +131 -0
- omlish/http/consts.py +31 -4
- omlish/http/cookies.py +194 -0
- omlish/http/dates.py +70 -0
- omlish/http/encodings.py +6 -0
- omlish/http/json.py +273 -0
- omlish/http/sessions.py +197 -0
- omlish/inject/__init__.py +8 -2
- omlish/inject/bindings.py +3 -3
- omlish/inject/exceptions.py +3 -3
- omlish/inject/impl/elements.py +46 -25
- omlish/inject/impl/injector.py +8 -5
- omlish/inject/impl/multis.py +74 -0
- omlish/inject/impl/providers.py +19 -39
- omlish/inject/{proxy.py → impl/proxy.py} +2 -2
- omlish/inject/impl/scopes.py +4 -2
- omlish/inject/injector.py +1 -0
- omlish/inject/keys.py +3 -9
- omlish/inject/multis.py +70 -0
- omlish/inject/providers.py +23 -23
- omlish/inject/scopes.py +7 -3
- omlish/inject/types.py +0 -8
- omlish/iterators.py +13 -0
- omlish/json.py +138 -1
- omlish/lang/__init__.py +8 -0
- omlish/lang/classes/restrict.py +1 -1
- omlish/lang/classes/virtual.py +2 -2
- omlish/lang/contextmanagers.py +64 -0
- omlish/lang/datetimes.py +6 -5
- omlish/lang/functions.py +10 -0
- omlish/lang/imports.py +11 -2
- omlish/lang/sys.py +7 -0
- omlish/lang/typing.py +1 -0
- omlish/logs/utils.py +1 -1
- omlish/marshal/datetimes.py +1 -1
- omlish/reflect.py +8 -2
- omlish/sql/__init__.py +9 -0
- omlish/sql/asyncs.py +148 -0
- omlish/sync.py +70 -0
- omlish/term.py +6 -1
- omlish/testing/pydevd.py +2 -0
- omlish/testing/pytest/__init__.py +5 -0
- omlish/testing/pytest/helpers.py +0 -24
- omlish/testing/pytest/inject/harness.py +1 -1
- omlish/testing/pytest/marks.py +48 -0
- omlish/testing/pytest/plugins/__init__.py +2 -0
- omlish/testing/pytest/plugins/managermarks.py +60 -0
- omlish/testing/testing.py +10 -0
- omlish/text/delimit.py +4 -0
- {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/METADATA +4 -1
- {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/RECORD +91 -70
- {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/WHEEL +1 -1
- {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/top_level.txt +0 -0
omlish/graphs/dags.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TODO:
|
|
3
|
+
- parser?
|
|
4
|
+
- js? viz.js, d3, visjs
|
|
5
|
+
- cycle detection
|
|
6
|
+
- networkx adapter
|
|
7
|
+
- https://docs.python.org/3.9/library/graphlib.html#module-graphlib
|
|
8
|
+
"""
|
|
9
|
+
import typing as ta
|
|
10
|
+
|
|
11
|
+
from .. import check
|
|
12
|
+
from .. import lang
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
K = ta.TypeVar('K')
|
|
16
|
+
V = ta.TypeVar('V')
|
|
17
|
+
T = ta.TypeVar('T')
|
|
18
|
+
U = ta.TypeVar('U')
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def traverse_links(data: ta.Mapping[T, ta.Iterable[T]], keys: ta.Iterable[T]) -> set[T]:
    """
    Return every node transitively reachable from *keys* through the link map *data*, excluding the starting keys
    themselves. Nodes absent from *data* are treated as having no outgoing links.
    """

    roots = set(keys)
    visited: set[T] = set()
    pending = set(roots)
    while pending:
        node = pending.pop()
        visited.add(node)
        for nxt in data.get(node, []):
            if nxt not in visited:
                pending.add(nxt)
    return visited - roots
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def invert_set_map(src: ta.Mapping[K, ta.Iterable[V]]) -> dict[V, set[K]]:
    """
    Invert a key -> values mapping into a value -> set-of-keys mapping. Only values that actually occur in *src* get an
    entry in the result.
    """

    out: dict[V, set[K]] = {}
    for key, vals in src.items():
        for val in vals:
            out.setdefault(val, set()).add(key)
    return out
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def invert_symmetric_set_map(src: ta.Mapping[T, ta.Iterable[T]]) -> dict[T, set[T]]:
    """
    Invert a node -> linked-nodes mapping, guaranteeing every key of *src* appears in the result (possibly mapped to an
    empty set).

    Fix: the original did ``dst[r].add(l)``, which raised KeyError whenever a linked node ``r`` was not itself a key of
    *src* (e.g. a leaf input of a Dag). Such nodes now simply gain an entry in the result; all previously-working inputs
    produce identical output.
    """

    dst: dict[T, set[T]] = {l: set() for l in src}
    for l, rs in src.items():
        for r in rs:
            # setdefault tolerates values that never appear as keys.
            dst.setdefault(r, set()).add(l)
    return dst
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Dag(ta.Generic[T]):
    """A dag expressed as a mapping from each output node to the inputs it depends on."""

    def __init__(self, input_its_by_outputs: ta.Mapping[T, ta.Iterable[T]]) -> None:
        super().__init__()

        sets_by_output: dict[T, set[T]] = {}
        for output, inputs in input_its_by_outputs.items():
            sets_by_output[output] = set(inputs)
        self._input_sets_by_output = sets_by_output

    @property
    def input_sets_by_output(self) -> ta.Mapping[T, ta.AbstractSet[T]]:
        """The normalized output -> input-set mapping this dag was built from."""
        return self._input_sets_by_output

    @lang.cached_property
    def output_sets_by_input(self) -> ta.Mapping[T, ta.AbstractSet[T]]:
        """Lazily-computed inverse: each input mapped to the set of outputs depending on it."""
        return invert_symmetric_set_map(self._input_sets_by_output)

    def subdag(self, *args, **kwargs) -> 'Subdag[T]':
        """Construct a Subdag view of this dag; extra arguments are forwarded to Subdag."""
        return Subdag(self, *args, **kwargs)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class Subdag(ta.Generic[U]):
    """A view of a Dag restricted to *targets* plus everything transitively linked to them."""

    def __init__(
            self,
            dag: 'Dag[U]',
            targets: ta.Iterable[U],
            *,
            ignored: ta.Iterable[U] | None = None,
    ) -> None:
        super().__init__()

        self._dag: Dag[U] = check.isinstance(dag, Dag)  # type: ignore
        self._targets = set(targets)
        # Targets always win: a node cannot be both targeted and ignored.
        ignored_set = set(ignored) if ignored else set()
        self._ignored = ignored_set - self._targets

    @property
    def dag(self) -> 'Dag[U]':
        """The underlying dag."""
        return self._dag

    @property
    def targets(self) -> ta.AbstractSet[U]:
        """The nodes this subdag was built around."""
        return self._targets

    @property
    def ignored(self) -> ta.AbstractSet[U]:
        """Nodes excluded from traversal results (never overlaps targets)."""
        return self._ignored

    @lang.cached_property
    def inputs(self) -> ta.AbstractSet[U]:
        """Transitive inputs of the targets, minus ignored nodes."""
        reached = traverse_links(self.dag.input_sets_by_output, self.targets)
        return reached - self.ignored

    @lang.cached_property
    def outputs(self) -> ta.AbstractSet[U]:
        """Transitive outputs of the targets, minus ignored nodes."""
        reached = traverse_links(self.dag.output_sets_by_input, self.targets)
        return reached - self.ignored

    @lang.cached_property
    def output_inputs(self) -> ta.AbstractSet[U]:
        """Transitive inputs of the outputs, minus ignored nodes."""
        reached = traverse_links(self.dag.input_sets_by_output, self.outputs)
        return reached - self.ignored

    @lang.cached_property
    def all(self) -> ta.AbstractSet[U]:
        """Every node involved in this subdag."""
        result: set[U] = set(self.targets)
        result |= self.inputs
        result |= self.outputs
        result |= self.output_inputs
        return result
|
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import contextlib
|
|
3
|
+
import typing as ta
|
|
4
|
+
|
|
5
|
+
from .. import check
|
|
6
|
+
from .. import lang
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
V = ta.TypeVar('V')
|
|
10
|
+
MK = ta.TypeVar('MK')
|
|
11
|
+
MV = ta.TypeVar('MV')
|
|
12
|
+
SetMap = ta.Mapping[MK, ta.AbstractSet[MV]]
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DirectedGraph(ta.Generic[V], lang.Abstract):
    """Minimal read-only interface over a directed graph of vertices of type V."""

    @abc.abstractmethod
    def get_successors(self, vertex: V) -> ta.Collection[V]:
        """Return the direct successors of *vertex*."""
        raise NotImplementedError

    @abc.abstractmethod
    def yield_depth_first(self, root: V) -> ta.Iterator[V]:
        """Yield vertices reachable from *root* in depth-first order."""
        raise NotImplementedError
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class ListDictDirectedGraph(DirectedGraph[V]):
    """
    DirectedGraph backed by a dict of adjacency lists.

    Construction requires that every parent appear at most once in *items* and that every child also appear as a parent
    key (enforced via ``check``). Duplicate children of a single parent are collapsed, preserving first-seen order.
    """

    def __init__(self, items: ta.Iterable[tuple[V, ta.Iterable[V]]]) -> None:
        super().__init__()

        lst_dct: dict[V, list[V]] = {}
        all_children = set()
        for parent, children in items:
            check.not_in(parent, lst_dct)  # each parent may be listed only once
            lst = []
            seen = set()
            for child in children:
                if child not in seen:  # dedupe while preserving order
                    seen.add(child)
                    lst.append(child)
                    all_children.add(child)
            lst_dct[parent] = lst
        check.empty(all_children - set(lst_dct))  # every child must itself be a key
        self._lst_dct = lst_dct

    def get_successors(self, vertex: V) -> ta.Collection[V]:
        """Return the (deduplicated, ordered) successors of *vertex*. Raises KeyError for unknown vertices."""
        return self._lst_dct[vertex]

    def yield_depth_first(self, root: V) -> ta.Iterator[V]:
        """
        Yield each vertex reachable from *root* exactly once, in stack-based depth-first order.

        Fix: the original left *root* out of the ``seen`` set, so a back-edge to the root (a cycle through it) pushed it
        again and yielded it twice. The root is now marked seen up front.
        """
        stack: list[V] = [root]
        seen: set[V] = {root}
        while stack:
            cur = stack.pop()
            yield cur
            for child in self._lst_dct[cur]:
                if child not in seen:
                    seen.add(child)
                    stack.append(child)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class DominatorTree(ta.Generic[V]):
    """
    Dominator analysis of a directed graph rooted at *root*, computed from a depth-first pass (_Dfs) and an
    immediate-dominator computation (_ImmediateDominanceComputer). All derived views are lazily computed and cached.
    """

    def __init__(self, graph: DirectedGraph[V], root: V) -> None:
        super().__init__()

        self._graph = check.not_none(graph)
        self._root = check.not_none(root)
        # Fails fast if the root is not a known vertex of the graph.
        check.not_none(self._graph.get_successors(root))
        self._dfs = _Dfs(graph, root)

    @lang.cached_property
    def immediate_dominators(self) -> ta.Mapping[V, V]:
        """Each non-root vertex mapped to its immediate dominator."""
        return _ImmediateDominanceComputer(self._dfs).immediate_dominators

    @lang.cached_property
    def dominator_tree(self) -> SetMap[V, V]:
        """Inverse of immediate_dominators: each dominator mapped to the set of vertices it immediately dominates."""
        tree: dict[V, set[V]] = {}
        for node, dom in self.immediate_dominators.items():
            tree.setdefault(dom, set()).add(node)
        return tree

    @lang.cached_property
    def deep_dominated(self) -> SetMap[V, V]:
        """
        Each vertex mapped to everything it (transitively) dominates, computed by recursion over dominator_tree.
        Vertices dominating nothing are omitted.
        """
        seen: set[V] = set()
        ret: dict[V, set[V]] = {}

        def rec(node: V) -> ta.Collection[V]:
            # A tree walk must never revisit a node.
            check.not_in(node, seen)
            seen.add(node)
            # FIXME: pyrsistent
            st = set()
            for child in self.dominator_tree.get(node, []):
                st.add(child)
                st.update(rec(child))
            if st:
                ret[node] = st
            return st

        rec(self._root)
        return ret

    @lang.cached_property
    def dominance_frontiers(self) -> SetMap[V, V]:
        """
        Each vertex x mapped to its dominance frontier: successors (direct, or inherited from dominated children) whose
        immediate dominator is not x. Empty frontiers are dropped from the result.
        """
        dominance_frontiers: dict[V, set[V]] = {}

        for x in self.reverse_topological_traversal:
            dfx = dominance_frontiers.setdefault(x, set())

            # Local rule: successors not immediately dominated by x.
            for y in self._graph.get_successors(x):
                if self.immediate_dominators[y] != x:
                    dfx.add(y)

            # Up rule: frontier members of dominated children not immediately dominated by x.
            for z in self.dominator_tree.get(x, []):
                for y in dominance_frontiers.get(z, []):
                    if self.immediate_dominators[y] != x:
                        dfx.add(y)

        return {k: v for k, v in dominance_frontiers.items() if v}

    @lang.cached_property
    def topological_traversal(self) -> list[V]:
        """
        Vertices in DFS order, rearranged so that each vertex sits immediately after its immediate dominator when that
        dominator has already been placed; vertices with no placed dominator are appended.
        """
        # FIXME: LinkedList
        lst: list[V] = []

        for node in self._dfs.vertex:
            try:
                # KeyError: node has no recorded idom; ValueError: idom not yet in lst.
                idx = lst.index(self.immediate_dominators[node])
            except (KeyError, ValueError):
                lst.append(node)
            else:
                lst.insert(idx + 1, node)

        return lst

    @lang.cached_property
    def reverse_topological_traversal(self) -> list[V]:
        """topological_traversal reversed (children before their dominators)."""
        return list(reversed(self.topological_traversal))
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
class _Dfs(ta.Generic[V]):
    """
    Depth-first preprocessing pass over a rooted graph, recording the bookkeeping the dominance computation needs:

      - semi:   each vertex's DFS number (index in discovery order)
      - vertex: vertices in discovery order, so vertex[semi[v]] is v
      - parent: the DFS parent assigned to each vertex when it was first encountered as an unnumbered successor
      - pred:   the set of graph predecessors of each vertex
      - label:  initially the identity mapping (later mutated by _ImmediateDominanceComputer's path compression)
    """

    def __init__(self, graph: DirectedGraph[V], root: V) -> None:
        super().__init__()

        semi: dict[V, int] = {}
        vertex: list[V] = []
        parent: dict[V, V] = {}
        pred: dict[V, set[V]] = {}
        label: dict[V, V] = {}

        for node in graph.yield_depth_first(root):
            if node not in semi:  # process each yielded vertex once
                vertex.append(node)

                check.not_in(node, semi)
                semi[node] = len(semi)  # DFS number = count of previously numbered vertices
                check.not_in(node, label)
                label[node] = node

                for child in graph.get_successors(node):
                    pred.setdefault(child, set()).add(node)
                    if child not in semi:
                        # NOTE(review): this check fires if two already-numbered vertices both precede an unnumbered
                        # child — confirm the intended DFS yield order guarantees a single parent assignment here.
                        check.not_in(child, parent)
                        parent[child] = node

        self._semi = semi
        self._vertex = vertex
        self._parent = parent
        self._pred = pred
        self._label = label

    @property
    def semi(self) -> dict[V, int]:
        # Vertex -> DFS number.
        return self._semi

    @property
    def vertex(self) -> list[V]:
        # Vertices in discovery order.
        return self._vertex

    @property
    def parent(self) -> dict[V, V]:
        # Vertex -> DFS parent (root absent).
        return self._parent

    @property
    def pred(self) -> dict[V, set[V]]:
        # Vertex -> graph predecessors.
        return self._pred

    @property
    def label(self) -> dict[V, V]:
        # Vertex -> representative label (identity until compressed).
        return self._label
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
class _ImmediateDominanceComputer(ta.Generic[V]):
    """
    Computes immediate dominators from a completed _Dfs pass.

    The structure (semidominators, buckets keyed by semidominator vertex, eval/compress over an ancestor forest) follows
    the Lengauer–Tarjan algorithm, using iterative path compression without link balancing — presumably the "simple"
    O(m log n) variant; confirm against the paper if asymptotics matter.
    """

    def __init__(self, dfs: _Dfs[V]) -> None:
        super().__init__()

        self._dfs: _Dfs[V] = check.isinstance(dfs, _Dfs)  # type: ignore

        self._ancestor: dict[V, V] = {}
        # Private, mutable copies: semi is overwritten with semidominator numbers and label is path-compressed.
        self._semi = dict(self._dfs.semi)
        self._label = dict(self._dfs.label)

    @lang.cached_property
    def immediate_dominators(self) -> ta.Mapping[V, V]:
        """Each non-root vertex mapped to its immediate dominator."""
        idom: dict[V, V] = {}
        bucket: dict[V, set[V]] = {}

        last_semi_number = len(self._semi) - 1

        # Pass 1: walk vertices in reverse DFS order (excluding the root at index 0).
        for i in range(last_semi_number, 0, -1):
            w = self._dfs.vertex[i]
            p = self._dfs.parent[w]

            # Semidominator of w: minimum over predecessors v of semi[eval(v)].
            semidominator = self._semi[w]
            for v in self._dfs.pred.get(w, []):
                semidominator = min(semidominator, self._semi[self._eval(v)])

            self._semi[w] = semidominator
            # Defer w until its semidominator's bucket is drained.
            bucket.setdefault(self._dfs.vertex[semidominator], set()).add(w)

            self._ancestor[w] = p

            # Drain the parent's bucket: fix idom either to eval(v) or provisionally to p.
            for v in bucket.get(p, []):
                u = self._eval(v)

                if self._semi[u] < self._semi[v]:
                    idom[v] = u
                else:
                    idom[v] = p

            with contextlib.suppress(KeyError):
                del bucket[p]

        # Pass 2: forward sweep resolves the provisional entries.
        for i in range(1, last_semi_number + 1):
            w = self._dfs.vertex[i]

            if idom[w] != self._dfs.vertex[self._semi[w]]:
                idom[w] = idom[idom[w]]

        return idom

    def _eval(self, v: V) -> V:
        """Return the label of *v* after compressing its ancestor path."""
        self._compress(v)
        return self._label[v]

    def _compress(self, v: V) -> None:
        """
        Iterative path compression: walk the ancestor chain above *v*, then re-label each vertex on the way back down
        with the minimum-semi label seen so far.
        """
        worklist: list[V] = [v]

        a = self._ancestor.get(v)

        # Collect the chain of ancestors that themselves have ancestors.
        while a in self._ancestor:
            worklist.append(a)
            a = self._ancestor[a]

        # Topmost collected vertex seeds the running minimum.
        ancestor = worklist.pop()
        least_semi = self._semi[self._label[ancestor]]

        while worklist:
            descendent = worklist.pop()
            current_semi = self._semi[self._label[descendent]]

            if current_semi > least_semi:
                self._label[descendent] = self._label[ancestor]
            else:
                least_semi = current_semi

            ancestor = descendent
|
omlish/graphs/trees.py
CHANGED
|
@@ -79,7 +79,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
|
|
|
79
79
|
nodes: list[NodeT] = []
|
|
80
80
|
node_set: ta.MutableSet[NodeT] = self._set_fac()
|
|
81
81
|
children_by_node: ta.MutableMapping[NodeT | None, ta.Sequence[NodeT]] = self._dict_fac()
|
|
82
|
-
child_sets_by_node: ta.MutableMapping[
|
|
82
|
+
child_sets_by_node: ta.MutableMapping[NodeT | None, ta.AbstractSet[NodeT]] = self._dict_fac()
|
|
83
83
|
parents_by_node: ta.MutableMapping[NodeT, NodeT | None] = self._dict_fac()
|
|
84
84
|
|
|
85
85
|
children_by_node[None] = [root]
|
|
@@ -190,7 +190,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
|
|
|
190
190
|
e: ta.Any
|
|
191
191
|
d: ta.Any
|
|
192
192
|
if identity:
|
|
193
|
-
e, d = id, col.
|
|
193
|
+
e, d = id, col.unique_map((id(n), n) for n, _ in pairs)
|
|
194
194
|
else:
|
|
195
195
|
e, d = lang.identity, lang.identity
|
|
196
196
|
tsd = {e(n): {e(p)} for n, p in parents_by_node.items()}
|
omlish/http/__init__.py
CHANGED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
from . import consts # noqa
|
|
2
|
+
|
|
3
|
+
from .cookies import ( # noqa
|
|
4
|
+
CookieTooBigError,
|
|
5
|
+
dump_cookie,
|
|
6
|
+
parse_cookie,
|
|
7
|
+
)
|
|
8
|
+
|
|
9
|
+
from .dates import ( # noqa
|
|
10
|
+
http_date,
|
|
11
|
+
parse_date,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
from .encodings import ( # noqa
|
|
15
|
+
latin1_decode,
|
|
16
|
+
latin1_encode,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
from .json import ( # noqa
|
|
20
|
+
JSON_TAGGER,
|
|
21
|
+
JsonTag,
|
|
22
|
+
JsonTagger,
|
|
23
|
+
json_dumps,
|
|
24
|
+
json_loads,
|
|
25
|
+
)
|
omlish/http/asgi.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import logging
|
|
3
|
+
import typing as ta
|
|
4
|
+
import urllib.parse
|
|
5
|
+
|
|
6
|
+
from .. import check
|
|
7
|
+
from . import consts
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
log = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
##
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
AsgiScope: ta.TypeAlias = ta.Mapping[str, ta.Any]
|
|
17
|
+
AsgiMessage: ta.TypeAlias = ta.Mapping[str, ta.Any]
|
|
18
|
+
AsgiRecv: ta.TypeAlias = ta.Callable[[], ta.Awaitable[AsgiMessage]]
|
|
19
|
+
AsgiSend: ta.TypeAlias = ta.Callable[[AsgiMessage], ta.Awaitable[None]]
|
|
20
|
+
AsgiApp: ta.TypeAlias = ta.Callable[[AsgiScope, AsgiRecv, AsgiSend], ta.Awaitable[None]]
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class AbstractAsgiApp(abc.ABC):
    """Abstract base class for ASGI application callables."""

    @abc.abstractmethod
    async def __call__(self, scope: AsgiScope, recv: AsgiRecv, send: AsgiSend) -> None:
        """Handle one ASGI connection described by *scope*, reading messages via *recv* and replying via *send*."""
        raise NotImplementedError
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
##
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
async def stub_lifespan(scope: AsgiScope, recv: AsgiRecv, send: AsgiSend, *, verbose: bool = False) -> None:
|
|
33
|
+
while True:
|
|
34
|
+
message = await recv()
|
|
35
|
+
if message['type'] == 'lifespan.startup':
|
|
36
|
+
if verbose:
|
|
37
|
+
log.info('Lifespan starting up')
|
|
38
|
+
await send({'type': 'lifespan.startup.complete'})
|
|
39
|
+
|
|
40
|
+
elif message['type'] == 'lifespan.shutdown':
|
|
41
|
+
if verbose:
|
|
42
|
+
log.info('Lifespan shutting down')
|
|
43
|
+
await send({'type': 'lifespan.shutdown.complete'})
|
|
44
|
+
return
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
##
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
async def start_response(
        send: AsgiSend,
        status: int,
        content_type: bytes = consts.CONTENT_TYPE_TEXT_UTF8,
        headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
) -> None:
    """Send the 'http.response.start' message with *status*, a content-type header, and any extra *headers*."""
    hdrs: list[tuple[bytes, bytes]] = [(b'content-type', content_type)]
    if headers:
        hdrs.extend(headers)
    await send({
        'type': 'http.response.start',
        'status': status,
        'headers': hdrs,
    })
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
async def finish_response(
|
|
67
|
+
send: AsgiSend,
|
|
68
|
+
body: bytes = b'',
|
|
69
|
+
) -> None:
|
|
70
|
+
await send({
|
|
71
|
+
'type': 'http.response.body',
|
|
72
|
+
'body': body,
|
|
73
|
+
})
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
async def send_response(
        send: AsgiSend,
        status: int,
        content_type: bytes = consts.CONTENT_TYPE_TEXT_UTF8,
        headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
        body: bytes = b'',
) -> None:
    """Convenience wrapper sending a complete response: start_response followed by finish_response."""
    await start_response(send, status=status, content_type=content_type, headers=headers)
    await finish_response(send, body=body)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
async def redirect_response(
        send: AsgiSend,
        url: str,
        headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
) -> None:
    """Send a complete 302 redirect response pointing at *url*, with an empty body."""
    log.info('Redirecting to %s', url)
    hdrs: list[tuple[bytes, bytes]] = [
        (b'content-type', consts.CONTENT_TYPE_TEXT_UTF8),
        (b'location', url.encode()),
    ]
    if headers:
        hdrs.extend(headers)
    await send({
        'type': 'http.response.start',
        'status': 302,
        'headers': hdrs,
    })
    await send({
        'type': 'http.response.body',
        'body': b'',
    })
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
##
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
async def read_body(recv: AsgiRecv) -> bytes:
|
|
119
|
+
body = b''
|
|
120
|
+
more_body = True
|
|
121
|
+
while more_body:
|
|
122
|
+
message = await recv()
|
|
123
|
+
body += message.get('body', b'')
|
|
124
|
+
more_body = message.get('more_body', False)
|
|
125
|
+
return body
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
async def read_form_body(recv: AsgiRecv) -> dict[bytes, bytes]:
    """Read the full request body and parse it as a urlencoded form, requiring exactly one value per key."""
    raw = await read_body(recv)
    parsed = urllib.parse.parse_qs(raw)  # noqa
    out: dict[bytes, bytes] = {}
    for key, vals in parsed.items():
        out[key] = check.single(vals)
    return out
|
omlish/http/consts.py
CHANGED
|
@@ -1,20 +1,47 @@
|
|
|
1
1
|
import http # noqa
|
|
2
2
|
|
|
3
3
|
|
|
4
|
+
##
|
|
5
|
+
|
|
6
|
+
|
|
4
7
|
def format_status(status: http.HTTPStatus) -> str:
    """Render *status* in the 'NNN Phrase' form used in HTTP status lines (e.g. '200 OK')."""
    return f'{int(status)} {status.phrase}'
|
|
6
9
|
|
|
7
10
|
|
|
8
11
|
STATUS_OK = format_status(http.HTTPStatus.OK)
|
|
12
|
+
|
|
13
|
+
STATUS_FOUND = format_status(http.HTTPStatus.FOUND)
|
|
14
|
+
STATUS_TEMPORARY_REDIRECT = format_status(http.HTTPStatus.TEMPORARY_REDIRECT)
|
|
15
|
+
|
|
9
16
|
STATUS_BAD_REQUEST = format_status(http.HTTPStatus.BAD_REQUEST)
|
|
17
|
+
STATUS_UNAUTHORIZED = format_status(http.HTTPStatus.UNAUTHORIZED)
|
|
10
18
|
STATUS_FORBIDDEN = format_status(http.HTTPStatus.FORBIDDEN)
|
|
11
19
|
STATUS_NOT_FOUND = format_status(http.HTTPStatus.NOT_FOUND)
|
|
12
20
|
STATUS_METHOD_NOT_ALLOWED = format_status(http.HTTPStatus.METHOD_NOT_ALLOWED)
|
|
21
|
+
STATUS_REQUEST_TIMEOUT = format_status(http.HTTPStatus.REQUEST_TIMEOUT)
|
|
22
|
+
|
|
23
|
+
STATUS_INTERNAL_SERVER_ERROR = format_status(http.HTTPStatus.INTERNAL_SERVER_ERROR)
|
|
24
|
+
STATUS_NOT_IMPLEMENTED = format_status(http.HTTPStatus.NOT_IMPLEMENTED)
|
|
25
|
+
STATUS_BAD_GATEWAY = format_status(http.HTTPStatus.BAD_GATEWAY)
|
|
26
|
+
STATUS_SERVICE_UNAVAILABLE = format_status(http.HTTPStatus.SERVICE_UNAVAILABLE)
|
|
27
|
+
STATUS_GATEWAY_TIMEOUT = format_status(http.HTTPStatus.GATEWAY_TIMEOUT)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
##
|
|
13
31
|
|
|
14
32
|
|
|
15
33
|
HEADER_CONTENT_TYPE = b'Content-Type'
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
CONTENT_TYPE_JSON = b'application/json'
|
|
19
|
-
CONTENT_TYPE_ICON = b'image/x-icon'
|
|
34
|
+
CONTENT_CHARSET_UTF8 = b'charset=utf-8'
|
|
35
|
+
|
|
20
36
|
CONTENT_TYPE_BYTES = b'application/octet-stream'
|
|
37
|
+
|
|
38
|
+
CONTENT_TYPE_HTML = b'text/html'
|
|
39
|
+
CONTENT_TYPE_HTML_UTF8 = b'; '.join([CONTENT_TYPE_HTML, CONTENT_CHARSET_UTF8])
|
|
40
|
+
|
|
41
|
+
CONTENT_TYPE_ICON = b'image/x-icon'
|
|
42
|
+
|
|
43
|
+
CONTENT_TYPE_JSON = b'application/json'
|
|
44
|
+
CONTENT_TYPE_JSON_UTF8 = b'; '.join([CONTENT_TYPE_JSON, CONTENT_CHARSET_UTF8])
|
|
45
|
+
|
|
46
|
+
CONTENT_TYPE_TEXT = b'text/plain'
|
|
47
|
+
CONTENT_TYPE_TEXT_UTF8 = b'; '.join([CONTENT_TYPE_TEXT, CONTENT_CHARSET_UTF8])
|