omlish 0.0.0.dev4__py3-none-any.whl → 0.0.0.dev5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of omlish might be problematic. Click here for more details.
- omlish/__about__.py +1 -1
- omlish/__init__.py +1 -1
- omlish/asyncs/__init__.py +1 -4
- omlish/asyncs/anyio.py +66 -0
- omlish/asyncs/flavors.py +27 -1
- omlish/asyncs/trio_asyncio.py +24 -18
- omlish/c3.py +1 -1
- omlish/cached.py +1 -2
- omlish/collections/__init__.py +4 -1
- omlish/collections/cache/impl.py +1 -1
- omlish/collections/indexed.py +1 -1
- omlish/collections/utils.py +38 -6
- omlish/configs/__init__.py +5 -0
- omlish/configs/classes.py +53 -0
- omlish/configs/dotenv.py +586 -0
- omlish/configs/props.py +589 -49
- omlish/dataclasses/impl/api.py +1 -1
- omlish/dataclasses/impl/as_.py +1 -1
- omlish/dataclasses/impl/fields.py +1 -0
- omlish/dataclasses/impl/init.py +1 -1
- omlish/dataclasses/impl/main.py +1 -0
- omlish/dataclasses/impl/metaclass.py +6 -1
- omlish/dataclasses/impl/order.py +1 -1
- omlish/dataclasses/impl/reflect.py +15 -2
- omlish/defs.py +1 -1
- omlish/diag/procfs.py +29 -1
- omlish/diag/procstats.py +32 -0
- omlish/diag/replserver/console.py +3 -3
- omlish/diag/replserver/server.py +6 -5
- omlish/diag/threads.py +86 -0
- omlish/docker.py +19 -0
- omlish/fnpairs.py +26 -18
- omlish/graphs/dags.py +113 -0
- omlish/graphs/domination.py +268 -0
- omlish/graphs/trees.py +2 -2
- omlish/http/__init__.py +25 -0
- omlish/http/asgi.py +131 -0
- omlish/http/consts.py +31 -4
- omlish/http/cookies.py +194 -0
- omlish/http/dates.py +70 -0
- omlish/http/encodings.py +6 -0
- omlish/http/json.py +273 -0
- omlish/http/sessions.py +197 -0
- omlish/inject/__init__.py +8 -2
- omlish/inject/bindings.py +3 -3
- omlish/inject/exceptions.py +3 -3
- omlish/inject/impl/elements.py +33 -24
- omlish/inject/impl/injector.py +1 -0
- omlish/inject/impl/multis.py +74 -0
- omlish/inject/impl/providers.py +19 -39
- omlish/inject/{proxy.py → impl/proxy.py} +2 -2
- omlish/inject/impl/scopes.py +1 -0
- omlish/inject/injector.py +1 -0
- omlish/inject/keys.py +3 -9
- omlish/inject/multis.py +70 -0
- omlish/inject/providers.py +23 -23
- omlish/inject/scopes.py +7 -3
- omlish/inject/types.py +0 -8
- omlish/iterators.py +13 -0
- omlish/json.py +2 -1
- omlish/lang/__init__.py +4 -0
- omlish/lang/classes/restrict.py +1 -1
- omlish/lang/classes/virtual.py +2 -2
- omlish/lang/contextmanagers.py +64 -0
- omlish/lang/datetimes.py +6 -5
- omlish/lang/functions.py +10 -0
- omlish/lang/imports.py +11 -2
- omlish/lang/typing.py +1 -0
- omlish/logs/utils.py +1 -1
- omlish/marshal/datetimes.py +1 -1
- omlish/reflect.py +8 -2
- omlish/sync.py +70 -0
- omlish/term.py +6 -1
- omlish/testing/pytest/__init__.py +5 -0
- omlish/testing/pytest/helpers.py +0 -24
- omlish/testing/pytest/inject/harness.py +1 -1
- omlish/testing/pytest/marks.py +48 -0
- omlish/testing/pytest/plugins/__init__.py +2 -0
- omlish/testing/pytest/plugins/managermarks.py +60 -0
- omlish/testing/testing.py +10 -0
- omlish/text/delimit.py +4 -0
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev5.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev5.dist-info}/RECORD +86 -69
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev5.dist-info}/WHEEL +1 -1
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev5.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev4.dist-info → omlish-0.0.0.dev5.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import contextlib
|
|
3
|
+
import typing as ta
|
|
4
|
+
|
|
5
|
+
from .. import check
|
|
6
|
+
from .. import lang
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
V = ta.TypeVar('V')  # graph vertex type

MK = ta.TypeVar('MK')
MV = ta.TypeVar('MV')
# A mapping whose values are sets, e.g. vertex -> set of vertices.
SetMap = ta.Mapping[MK, ta.AbstractSet[MV]]
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DirectedGraph(ta.Generic[V], lang.Abstract):
    """Minimal read-only directed-graph interface over vertices of type V."""

    @abc.abstractmethod
    def get_successors(self, vertex: V) -> ta.Collection[V]:
        """Return the vertices directly reachable from *vertex*."""
        raise NotImplementedError

    @abc.abstractmethod
    def yield_depth_first(self, root: V) -> ta.Iterator[V]:
        """Yield vertices reachable from *root* in a depth-first order."""
        raise NotImplementedError
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class ListDictDirectedGraph(DirectedGraph[V]):
    """
    DirectedGraph backed by a dict mapping each vertex to its successor list.

    Construction deduplicates each vertex's successors while preserving first-seen
    order, and requires every referenced successor to also appear as a key.
    """

    def __init__(self, items: ta.Iterable[tuple[V, ta.Iterable[V]]]) -> None:
        super().__init__()

        lst_dct: dict[V, list[V]] = {}
        all_children = set()
        for parent, children in items:
            check.not_in(parent, lst_dct)
            lst = []
            seen = set()
            for child in children:
                if child not in seen:
                    seen.add(child)
                    lst.append(child)
                    all_children.add(child)
            lst_dct[parent] = lst
        # Every vertex referenced as a child must itself be declared as a key.
        check.empty(all_children - set(lst_dct))
        self._lst_dct = lst_dct

    def get_successors(self, vertex: V) -> ta.Collection[V]:
        """Return the (deduplicated) successor list of *vertex*."""
        return self._lst_dct[vertex]

    def yield_depth_first(self, root: V) -> ta.Iterator[V]:
        """
        Yield each vertex reachable from *root* exactly once, stack-based depth-first.

        Fix: *root* is seeded into ``seen`` so that a cycle leading back to the root
        cannot re-push it and yield it a second time.
        """
        stack: list[V] = [root]
        seen: set[V] = {root}
        while stack:
            cur = stack.pop()
            yield cur
            for child in self._lst_dct[cur]:
                if child not in seen:
                    seen.add(child)
                    stack.append(child)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class DominatorTree(ta.Generic[V]):
    """
    Dominance information (immediate dominators, dominator tree, dominance frontiers)
    for the vertices of a rooted directed graph. All results are lazily computed and
    cached via lang.cached_property.
    """

    def __init__(self, graph: DirectedGraph[V], root: V) -> None:
        super().__init__()

        self._graph = check.not_none(graph)
        self._root = check.not_none(root)
        # Fail fast if the root is not actually a vertex of the graph.
        check.not_none(self._graph.get_successors(root))
        self._dfs = _Dfs(graph, root)

    @lang.cached_property
    def immediate_dominators(self) -> ta.Mapping[V, V]:
        """Map of each non-root vertex to its immediate dominator."""
        return _ImmediateDominanceComputer(self._dfs).immediate_dominators

    @lang.cached_property
    def dominator_tree(self) -> SetMap[V, V]:
        """Inverse of immediate_dominators: dominator -> set of immediately dominated vertices."""
        tree: dict[V, set[V]] = {}
        for node, dom in self.immediate_dominators.items():
            tree.setdefault(dom, set()).add(node)
        return tree

    @lang.cached_property
    def deep_dominated(self) -> SetMap[V, V]:
        """Map of each vertex to all vertices it transitively dominates; empty sets omitted."""
        seen: set[V] = set()
        ret: dict[V, set[V]] = {}

        def rec(node: V) -> ta.Collection[V]:
            # The dominator tree is a tree, so each node should be visited exactly once.
            check.not_in(node, seen)
            seen.add(node)
            # FIXME: pyrsistent
            st = set()
            for child in self.dominator_tree.get(node, []):
                st.add(child)
                st.update(rec(child))
            if st:
                ret[node] = st
            return st

        rec(self._root)
        return ret

    @lang.cached_property
    def dominance_frontiers(self) -> SetMap[V, V]:
        """Map of each vertex to its dominance frontier; empty frontiers omitted."""
        dominance_frontiers: dict[V, set[V]] = {}

        for x in self.reverse_topological_traversal:
            dfx = dominance_frontiers.setdefault(x, set())

            # Local rule: direct successors x does not immediately dominate.
            for y in self._graph.get_successors(x):
                if self.immediate_dominators[y] != x:
                    dfx.add(y)

            # Up rule: inherit from dominator-tree children (already computed, because
            # children precede x in the reverse traversal).
            for z in self.dominator_tree.get(x, []):
                for y in dominance_frontiers.get(z, []):
                    if self.immediate_dominators[y] != x:
                        dfx.add(y)

        return {k: v for k, v in dominance_frontiers.items() if v}

    @lang.cached_property
    def topological_traversal(self) -> list[V]:
        """Vertices ordered so that each appears immediately after its immediate dominator."""
        # FIXME: LinkedList
        lst: list[V] = []

        for node in self._dfs.vertex:
            try:
                idx = lst.index(self.immediate_dominators[node])
            except (KeyError, ValueError):
                # KeyError: node has no idom (the root); ValueError: idom not placed yet.
                lst.append(node)
            else:
                lst.insert(idx + 1, node)

        return lst

    @lang.cached_property
    def reverse_topological_traversal(self) -> list[V]:
        """topological_traversal reversed (dominated vertices before their dominators)."""
        return list(reversed(self.topological_traversal))
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
class _Dfs(ta.Generic[V]):
    """
    Snapshot of one depth-first pass from the root: discovery order ('vertex'),
    per-vertex discovery index ('semi'), the vertex that first claimed each node as a
    child ('parent'), predecessor sets ('pred'), and an initial identity 'label' map.
    """

    def __init__(self, graph: DirectedGraph[V], root: V) -> None:
        super().__init__()

        semi: dict[V, int] = {}     # vertex -> discovery index
        vertex: list[V] = []        # vertices in discovery order
        parent: dict[V, V] = {}     # vertex -> node that first discovered it
        pred: dict[V, set[V]] = {}  # vertex -> set of graph predecessors
        label: dict[V, V] = {}      # initially each vertex maps to itself

        for node in graph.yield_depth_first(root):
            # Guard against traversals that may yield a vertex more than once.
            if node not in semi:
                vertex.append(node)

                check.not_in(node, semi)
                semi[node] = len(semi)
                check.not_in(node, label)
                label[node] = node

                for child in graph.get_successors(node):
                    pred.setdefault(child, set()).add(node)
                    if child not in semi:
                        check.not_in(child, parent)
                        parent[child] = node

        self._semi = semi
        self._vertex = vertex
        self._parent = parent
        self._pred = pred
        self._label = label

    @property
    def semi(self) -> dict[V, int]:
        # vertex -> discovery index
        return self._semi

    @property
    def vertex(self) -> list[V]:
        # vertices in discovery order
        return self._vertex

    @property
    def parent(self) -> dict[V, V]:
        # vertex -> first discoverer (root absent)
        return self._parent

    @property
    def pred(self) -> dict[V, set[V]]:
        # vertex -> graph predecessors
        return self._pred

    @property
    def label(self) -> dict[V, V]:
        # identity map; mutated later by the dominance computation's copy
        return self._label
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
class _ImmediateDominanceComputer(ta.Generic[V]):
    """
    Computes immediate dominators from a _Dfs snapshot using semidominators with
    iterative path compression. NOTE(review): this looks like the 'simple' variant of
    the Lengauer-Tarjan algorithm -- confirm against the original paper.
    """

    def __init__(self, dfs: _Dfs[V]) -> None:
        super().__init__()

        self._dfs: _Dfs[V] = check.isinstance(dfs, _Dfs)  # type: ignore

        # Mutable working state; _semi/_label start as copies of the DFS maps.
        self._ancestor: dict[V, V] = {}
        self._semi = dict(self._dfs.semi)
        self._label = dict(self._dfs.label)

    @lang.cached_property
    def immediate_dominators(self) -> ta.Mapping[V, V]:
        """Map of every vertex except the root to its immediate dominator."""
        idom: dict[V, V] = {}
        bucket: dict[V, set[V]] = {}  # semidominator vertex -> vertices it semidominates

        last_semi_number = len(self._semi) - 1

        # Pass 1: vertices in reverse discovery order, root (index 0) excluded.
        for i in range(last_semi_number, 0, -1):
            w = self._dfs.vertex[i]
            p = self._dfs.parent[w]

            # w's semidominator number: minimum over evaluated labels of its predecessors.
            semidominator = self._semi[w]
            for v in self._dfs.pred.get(w, []):
                semidominator = min(semidominator, self._semi[self._eval(v)])

            self._semi[w] = semidominator
            bucket.setdefault(self._dfs.vertex[semidominator], set()).add(w)

            self._ancestor[w] = p

            # Resolve vertices bucketed on w's parent: either their idom is known now,
            # or it is deferred to pass 2 by pointing at p.
            for v in bucket.get(p, []):
                u = self._eval(v)

                if self._semi[u] < self._semi[v]:
                    idom[v] = u
                else:
                    idom[v] = p

            with contextlib.suppress(KeyError):
                del bucket[p]

        # Pass 2: fix up deferred entries in discovery order.
        for i in range(1, last_semi_number + 1):
            w = self._dfs.vertex[i]

            if idom[w] != self._dfs.vertex[self._semi[w]]:
                idom[w] = idom[idom[w]]

        return idom

    def _eval(self, v: V) -> V:
        """Return v's label after compressing its ancestor path."""
        self._compress(v)
        return self._label[v]

    def _compress(self, v: V) -> None:
        """Iterative path compression: propagate minimal-semi labels down v's ancestor chain."""
        worklist: list[V] = [v]

        a = self._ancestor.get(v)

        # Collect the chain of ancestors above v.
        while a in self._ancestor:
            worklist.append(a)
            a = self._ancestor[a]

        ancestor = worklist.pop()
        least_semi = self._semi[self._label[ancestor]]

        # Walk back down, relabeling with the best (lowest-semi) label seen so far.
        while worklist:
            descendent = worklist.pop()
            current_semi = self._semi[self._label[descendent]]

            if current_semi > least_semi:
                self._label[descendent] = self._label[ancestor]
            else:
                least_semi = current_semi

            ancestor = descendent
|
omlish/graphs/trees.py
CHANGED
|
@@ -79,7 +79,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
|
|
|
79
79
|
nodes: list[NodeT] = []
|
|
80
80
|
node_set: ta.MutableSet[NodeT] = self._set_fac()
|
|
81
81
|
children_by_node: ta.MutableMapping[NodeT | None, ta.Sequence[NodeT]] = self._dict_fac()
|
|
82
|
-
child_sets_by_node: ta.MutableMapping[
|
|
82
|
+
child_sets_by_node: ta.MutableMapping[NodeT | None, ta.AbstractSet[NodeT]] = self._dict_fac()
|
|
83
83
|
parents_by_node: ta.MutableMapping[NodeT, NodeT | None] = self._dict_fac()
|
|
84
84
|
|
|
85
85
|
children_by_node[None] = [root]
|
|
@@ -190,7 +190,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
|
|
|
190
190
|
e: ta.Any
|
|
191
191
|
d: ta.Any
|
|
192
192
|
if identity:
|
|
193
|
-
e, d = id, col.
|
|
193
|
+
e, d = id, col.unique_map((id(n), n) for n, _ in pairs)
|
|
194
194
|
else:
|
|
195
195
|
e, d = lang.identity, lang.identity
|
|
196
196
|
tsd = {e(n): {e(p)} for n, p in parents_by_node.items()}
|
omlish/http/__init__.py
CHANGED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
from . import consts # noqa
|
|
2
|
+
|
|
3
|
+
from .cookies import ( # noqa
|
|
4
|
+
CookieTooBigError,
|
|
5
|
+
dump_cookie,
|
|
6
|
+
parse_cookie,
|
|
7
|
+
)
|
|
8
|
+
|
|
9
|
+
from .dates import ( # noqa
|
|
10
|
+
http_date,
|
|
11
|
+
parse_date,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
from .encodings import ( # noqa
|
|
15
|
+
latin1_decode,
|
|
16
|
+
latin1_encode,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
from .json import ( # noqa
|
|
20
|
+
JSON_TAGGER,
|
|
21
|
+
JsonTag,
|
|
22
|
+
JsonTagger,
|
|
23
|
+
json_dumps,
|
|
24
|
+
json_loads,
|
|
25
|
+
)
|
omlish/http/asgi.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import logging
|
|
3
|
+
import typing as ta
|
|
4
|
+
import urllib.parse
|
|
5
|
+
|
|
6
|
+
from .. import check
|
|
7
|
+
from . import consts
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
log = logging.getLogger(__name__)


##


# Structural type aliases for the ASGI callable protocol.
AsgiScope: ta.TypeAlias = ta.Mapping[str, ta.Any]    # per-connection scope dict
AsgiMessage: ta.TypeAlias = ta.Mapping[str, ta.Any]  # a single event message
AsgiRecv: ta.TypeAlias = ta.Callable[[], ta.Awaitable[AsgiMessage]]
AsgiSend: ta.TypeAlias = ta.Callable[[AsgiMessage], ta.Awaitable[None]]
AsgiApp: ta.TypeAlias = ta.Callable[[AsgiScope, AsgiRecv, AsgiSend], ta.Awaitable[None]]
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class AbstractAsgiApp(abc.ABC):
    """Abstract base for class-based ASGI applications; instances are ASGI callables."""

    @abc.abstractmethod
    async def __call__(self, scope: AsgiScope, recv: AsgiRecv, send: AsgiSend) -> None:
        raise NotImplementedError
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
##
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
async def stub_lifespan(scope: AsgiScope, recv: AsgiRecv, send: AsgiSend, *, verbose: bool = False) -> None:
    """Minimal lifespan handler: acknowledge startup and shutdown, returning on shutdown."""
    while True:
        msg_type = (await recv())['type']

        if msg_type == 'lifespan.startup':
            if verbose:
                log.info('Lifespan starting up')
            await send({'type': 'lifespan.startup.complete'})

        elif msg_type == 'lifespan.shutdown':
            if verbose:
                log.info('Lifespan shutting down')
            await send({'type': 'lifespan.shutdown.complete'})
            return
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
##
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
async def start_response(
    send: AsgiSend,
    status: int,
    content_type: bytes = consts.CONTENT_TYPE_TEXT_UTF8,
    headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
) -> None:
    """Send the 'http.response.start' message opening an HTTP response."""
    hdrs: list[tuple[bytes, bytes]] = [(b'content-type', content_type)]
    if headers:
        hdrs.extend(headers)

    await send({
        'type': 'http.response.start',
        'status': status,
        'headers': hdrs,
    })
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
async def finish_response(
    send: AsgiSend,
    body: bytes = b'',
) -> None:
    """Send the terminal 'http.response.body' message carrying *body*."""
    msg = {
        'type': 'http.response.body',
        'body': body,
    }
    await send(msg)
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
async def send_response(
    send: AsgiSend,
    status: int,
    content_type: bytes = consts.CONTENT_TYPE_TEXT_UTF8,
    headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
    body: bytes = b'',
) -> None:
    """Send a complete response: the start message followed by a single body chunk."""
    await start_response(send, status=status, content_type=content_type, headers=headers)
    await finish_response(send, body=body)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
async def redirect_response(
    send: AsgiSend,
    url: str,
    headers: ta.Sequence[tuple[bytes, bytes]] | None = None,
) -> None:
    """Send a 302 redirect to *url* with an empty body, logging the target."""
    log.info('Redirecting to %s', url)

    hdrs: list[tuple[bytes, bytes]] = [
        (b'content-type', consts.CONTENT_TYPE_TEXT_UTF8),
        (b'location', url.encode()),
    ]
    if headers:
        hdrs.extend(headers)

    await send({
        'type': 'http.response.start',
        'status': 302,
        'headers': hdrs,
    })
    await send({
        'type': 'http.response.body',
        'body': b'',
    })
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
##
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
async def read_body(recv: AsgiRecv) -> bytes:
    """Drain request messages, concatenating 'body' chunks until 'more_body' is falsy."""
    chunks: list[bytes] = []
    while True:
        message = await recv()
        chunks.append(message.get('body', b''))
        if not message.get('more_body', False):
            break
    return b''.join(chunks)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
async def read_form_body(recv: AsgiRecv) -> dict[bytes, bytes]:
    """Read the full body and parse it as urlencoded form data, one value per key."""
    parsed = urllib.parse.parse_qs(await read_body(recv))  # noqa
    return {key: check.single(vals) for key, vals in parsed.items()}
|
omlish/http/consts.py
CHANGED
|
@@ -1,20 +1,47 @@
|
|
|
1
1
|
import http # noqa
|
|
2
2
|
|
|
3
3
|
|
|
4
|
+
##
|
|
5
|
+
|
|
6
|
+
|
|
4
7
|
def format_status(status: http.HTTPStatus) -> str:
    """Render *status* as an HTTP status-line fragment such as '200 OK'."""
    return f'{int(status)} {status.phrase}'
|
|
6
9
|
|
|
7
10
|
|
|
8
11
|
# Preformatted '<code> <phrase>' status lines for commonly used statuses.

STATUS_OK = format_status(http.HTTPStatus.OK)

STATUS_FOUND = format_status(http.HTTPStatus.FOUND)
STATUS_TEMPORARY_REDIRECT = format_status(http.HTTPStatus.TEMPORARY_REDIRECT)

STATUS_BAD_REQUEST = format_status(http.HTTPStatus.BAD_REQUEST)
STATUS_UNAUTHORIZED = format_status(http.HTTPStatus.UNAUTHORIZED)
STATUS_FORBIDDEN = format_status(http.HTTPStatus.FORBIDDEN)
STATUS_NOT_FOUND = format_status(http.HTTPStatus.NOT_FOUND)
STATUS_METHOD_NOT_ALLOWED = format_status(http.HTTPStatus.METHOD_NOT_ALLOWED)
STATUS_REQUEST_TIMEOUT = format_status(http.HTTPStatus.REQUEST_TIMEOUT)

STATUS_INTERNAL_SERVER_ERROR = format_status(http.HTTPStatus.INTERNAL_SERVER_ERROR)
STATUS_NOT_IMPLEMENTED = format_status(http.HTTPStatus.NOT_IMPLEMENTED)
STATUS_BAD_GATEWAY = format_status(http.HTTPStatus.BAD_GATEWAY)
STATUS_SERVICE_UNAVAILABLE = format_status(http.HTTPStatus.SERVICE_UNAVAILABLE)
STATUS_GATEWAY_TIMEOUT = format_status(http.HTTPStatus.GATEWAY_TIMEOUT)


##


# Header names and content-type values, kept as bytes.
HEADER_CONTENT_TYPE = b'Content-Type'
CONTENT_CHARSET_UTF8 = b'charset=utf-8'

CONTENT_TYPE_BYTES = b'application/octet-stream'

CONTENT_TYPE_HTML = b'text/html'
CONTENT_TYPE_HTML_UTF8 = b'; '.join([CONTENT_TYPE_HTML, CONTENT_CHARSET_UTF8])

CONTENT_TYPE_ICON = b'image/x-icon'

CONTENT_TYPE_JSON = b'application/json'
CONTENT_TYPE_JSON_UTF8 = b'; '.join([CONTENT_TYPE_JSON, CONTENT_CHARSET_UTF8])

CONTENT_TYPE_TEXT = b'text/plain'
CONTENT_TYPE_TEXT_UTF8 = b'; '.join([CONTENT_TYPE_TEXT, CONTENT_CHARSET_UTF8])
|
omlish/http/cookies.py
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
"""
|
|
2
|
+
https://github.com/pallets/werkzeug/blob/9e050f7750214d6779636813b8d661250804e811/src/werkzeug/http.py
|
|
3
|
+
https://github.com/pallets/werkzeug/blob/9e050f7750214d6779636813b8d661250804e811/src/werkzeug/sansio/http.py
|
|
4
|
+
"""
|
|
5
|
+
# Copyright 2007 Pallets
|
|
6
|
+
#
|
|
7
|
+
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
|
|
8
|
+
# following conditions are met:
|
|
9
|
+
#
|
|
10
|
+
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
|
|
11
|
+
# disclaimer.
|
|
12
|
+
#
|
|
13
|
+
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
|
|
14
|
+
# following disclaimer in the documentation and/or other materials provided with the distribution.
|
|
15
|
+
#
|
|
16
|
+
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
|
|
17
|
+
# products derived from this software without specific prior written permission.
|
|
18
|
+
#
|
|
19
|
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
|
20
|
+
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
21
|
+
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
22
|
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
23
|
+
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
|
24
|
+
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
25
|
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
26
|
+
import datetime
|
|
27
|
+
import re
|
|
28
|
+
import typing as ta
|
|
29
|
+
import urllib.parse
|
|
30
|
+
|
|
31
|
+
from .. import collections as col
|
|
32
|
+
from .dates import http_date
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
##
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# Splits a Cookie header into ';'-terminated name/value pairs. The value part is
# optional and may be a double-quoted string (with backslash escapes) or a bare token.
_COOKIE_RE = re.compile(
    r"""
    ([^=;]*)
    (?:\s*=\s*
      (
        "(?:[^\\"]|\\.)*"
      |
        .*?
      )
    )?
    \s*;\s*
    """,
    flags=re.ASCII | re.VERBOSE,
)

# One backslash escape inside a quoted cookie value: a three-digit octal byte
# (e.g. b'\\073') or a single escaped character (e.g. b'\\"').
_COOKIE_UNSLASH_RE = re.compile(rb'\\([0-3][0-7]{2}|.)')
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _cookie_unslash_replace(m: ta.Match[bytes]) -> bytes:
|
|
56
|
+
v = m.group(1)
|
|
57
|
+
|
|
58
|
+
if len(v) == 1:
|
|
59
|
+
return v
|
|
60
|
+
|
|
61
|
+
return int(v, 8).to_bytes(1, 'big')
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def parse_cookie(
    cookie: str | None = None,
    *,
    no_latin1: bool = False,
) -> ta.MutableMapping[str, list[str]]:
    """Parse a Cookie header string into a mapping of name -> list of values."""
    if cookie and not no_latin1:
        # Undo the latin1 transport re-encoding.
        cookie = cookie.encode('latin1').decode()

    if not cookie:
        return {}

    pairs = []

    # The appended ';' lets the regex terminate the final pair.
    for raw_key, raw_val in _COOKIE_RE.findall(f'{cookie};'):
        name = raw_key.strip()
        if not name:
            continue

        val = raw_val.strip()
        if len(val) >= 2 and val[0] == val[-1] == '"':
            # Work with bytes here, since a UTF-8 character could be multiple bytes.
            val = _COOKIE_UNSLASH_RE.sub(
                _cookie_unslash_replace,
                val[1:-1].encode(),
            ).decode(errors='replace')

        pairs.append((name, val))

    return col.multi_map(pairs)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
##
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
# Values made up entirely of these characters are emitted without quoting.
_COOKIE_NO_QUOTE_RE = re.compile(r"[\w!#$%&'()*+\-./:<=>?@\[\]^`{|}~]*", re.ASCII)

# Bytes that must be backslash-escaped when a value does need quoting.
_COOKIE_SLASH_RE = re.compile(rb'[\x00-\x19\",;\\\x7f-\xff]', re.ASCII)

# '"' and '\' escape to themselves; every other escaped byte becomes a three-digit
# octal escape.
_COOKIE_SLASH_MAP = {b'"': b'\\"', b'\\': b'\\\\'}
_COOKIE_SLASH_MAP.update(
    (v.to_bytes(1, 'big'), b'\\%03o' % v)
    for v in [*range(0x20), *b',;', *range(0x7F, 256)]
)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class CookieTooBigError(Exception):
    """Raised by dump_cookie when the serialized cookie exceeds max_size."""

    pass
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def dump_cookie(
    key: str,
    value: str = '',
    *,
    max_age: datetime.timedelta | int | None = None,
    expires: str | datetime.datetime | float | None = None,
    path: str | None = '/',
    domain: str | None = None,
    secure: bool = False,
    httponly: bool = False,
    sync_expires: bool = True,
    max_size: int = 4093,
    samesite: str | None = None,
    partitioned: bool = False,
) -> str:
    """
    Serialize a Set-Cookie header value.

    Args:
        key: Cookie name; non-ASCII names are sent latin1-encoded (mojibake).
        value: Cookie value; quoted and escaped if it contains unsafe characters.
        max_age: Lifetime in seconds, or a timedelta.
        expires: Expiry as an HTTP date string, datetime, or unix timestamp.
        path: Cookie path, percent-quoted.
        domain: Cookie domain; port and leading dots stripped, IDNA-encoded.
        secure: Add the 'Secure' attribute (forced on when partitioned).
        httponly: Add the 'HttpOnly' attribute.
        sync_expires: Derive 'Expires' from max_age when expires is not given.
        max_size: Raise CookieTooBigError if the result is longer (0 disables).
        samesite: 'Strict', 'Lax', or 'None' (case-insensitive).
        partitioned: Add the 'Partitioned' attribute.

    Raises:
        ValueError: If samesite is not one of the allowed values.
        CookieTooBigError: If the serialized cookie exceeds max_size.
    """
    if path is not None:
        # safe = https://url.spec.whatwg.org/#url-path-segment-string as well as percent for things that are already
        # quoted excluding semicolon since it's part of the header syntax
        path = urllib.parse.quote(path, safe="%!$&'()*+,/:=@")

    if domain:
        domain = domain.partition(':')[0].lstrip('.').encode('idna').decode('ascii')

    if isinstance(max_age, datetime.timedelta):
        max_age = int(max_age.total_seconds())

    if expires is not None:
        if not isinstance(expires, str):
            expires = http_date(expires)
    elif max_age is not None and sync_expires:
        expires = http_date(datetime.datetime.now(tz=datetime.UTC).timestamp() + max_age)

    if samesite is not None:
        samesite = samesite.title()

        if samesite not in {'Strict', 'Lax', 'None'}:
            raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.")

    if partitioned:
        secure = True

    # Quote value if it contains characters not allowed by RFC 6265. Slash-escape with
    # three octal digits, which matches http.cookies, although the RFC suggests base64.
    if not _COOKIE_NO_QUOTE_RE.fullmatch(value):
        # Work with bytes here, since a UTF-8 character could be multiple bytes.
        value = _COOKIE_SLASH_RE.sub(
            lambda m: _COOKIE_SLASH_MAP[m.group()], value.encode(),
        ).decode('ascii')
        value = f'"{value}"'

    # Send a non-ASCII key as mojibake. Everything else should already be ASCII.
    # TODO Remove encoding dance, it seems like clients accept UTF-8 keys
    buf = [f"{key.encode().decode('latin1')}={value}"]

    for k, v in (
        ('Domain', domain),
        ('Expires', expires),
        ('Max-Age', max_age),
        ('Secure', secure),
        ('HttpOnly', httponly),
        ('Path', path),
        ('SameSite', samesite),
        ('Partitioned', partitioned),
    ):
        # None/False attributes are omitted; True attributes are bare flags.
        if v is None or v is False:
            continue

        if v is True:
            buf.append(k)
            continue

        buf.append(f'{k}={v}')

    rv = '; '.join(buf)

    # Warn if the final value of the cookie is larger than the limit. If the cookie is too large, then it may be
    # silently ignored by the browser, which can be quite hard to debug.
    cookie_size = len(rv)
    if max_size and cookie_size > max_size:
        raise CookieTooBigError(cookie_size)

    return rv
|