omlish-0.0.0.dev195-py3-none-any.whl → omlish-0.0.0.dev197-py3-none-any.whl
- omlish/__about__.py +3 -3
- omlish/asyncs/asyncio/all.py +0 -1
- omlish/asyncs/asyncio/asyncio.py +2 -6
- omlish/asyncs/bluelet/runner.py +1 -1
- omlish/asyncs/bridge.py +2 -2
- omlish/codecs/base.py +5 -5
- omlish/codecs/text.py +1 -2
- omlish/io/compress/adapters.py +4 -4
- omlish/io/compress/base.py +4 -4
- omlish/io/compress/bz2.py +4 -4
- omlish/io/compress/codecs.py +2 -2
- omlish/io/compress/gzip.py +10 -10
- omlish/io/compress/lz4.py +5 -5
- omlish/io/compress/lzma.py +4 -4
- omlish/io/compress/zlib.py +4 -4
- omlish/io/coro/__init__.py +56 -0
- omlish/io/coro/direct.py +13 -0
- omlish/io/{generators → coro}/readers.py +31 -31
- omlish/io/{generators → coro}/stepped.py +28 -28
- omlish/multiprocessing/__init__.py +32 -0
- omlish/{multiprocessing.py → multiprocessing/death.py} +3 -88
- omlish/multiprocessing/proxies.py +30 -0
- omlish/multiprocessing/spawn.py +59 -0
- omlish/os/atomics.py +2 -2
- omlish/outcome.py +250 -0
- omlish/sockets/server.py +1 -2
- omlish/term/vt100/terminal.py +1 -1
- omlish/testing/pytest/__init__.py +0 -4
- omlish/testing/pytest/plugins/asyncs/__init__.py +1 -0
- omlish/testing/pytest/plugins/asyncs/backends/__init__.py +16 -0
- omlish/testing/pytest/plugins/asyncs/backends/asyncio.py +35 -0
- omlish/testing/pytest/plugins/asyncs/backends/base.py +30 -0
- omlish/testing/pytest/plugins/asyncs/backends/trio.py +91 -0
- omlish/testing/pytest/plugins/asyncs/backends/trio_asyncio.py +89 -0
- omlish/testing/pytest/plugins/asyncs/consts.py +3 -0
- omlish/testing/pytest/plugins/asyncs/fixtures.py +273 -0
- omlish/testing/pytest/plugins/asyncs/plugin.py +182 -0
- omlish/testing/pytest/plugins/asyncs/utils.py +10 -0
- omlish/testing/pytest/plugins/managermarks.py +0 -14
- omlish/text/indent.py +1 -1
- omlish/text/minja.py +2 -2
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev197.dist-info}/METADATA +5 -5
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev197.dist-info}/RECORD +48 -36
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev197.dist-info}/WHEEL +1 -1
- omlish/io/generators/__init__.py +0 -56
- omlish/io/generators/direct.py +0 -13
- omlish/testing/pytest/marks.py +0 -18
- omlish/testing/pytest/plugins/asyncs.py +0 -162
- /omlish/io/{generators → coro}/consts.py +0 -0
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev197.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev197.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev197.dist-info}/top_level.txt +0 -0
omlish/io/{generators → coro}/stepped.py RENAMED
@@ -4,8 +4,8 @@ from ... import check
 from ... import lang
 from ..buffers import ReadableListBuffer
 from .consts import DEFAULT_BUFFER_SIZE
-from .direct import
-from .direct import
+from .direct import BytesDirectCoro
+from .direct import StrDirectCoro
 
 
 T = ta.TypeVar('T')
@@ -18,44 +18,44 @@ OF = ta.TypeVar('OF')
 OT = ta.TypeVar('OT')
 
 
-# Stepped
-#
-
+# Stepped coros accept a non-None input, then in response yield zero or more non-None outputs, until yielding None to
+# signal they need more input again.
+SteppedCoro: ta.TypeAlias = ta.Generator[O | None, I | None, R]
 
 # Conventionally, these are sent and themselves yield an empty value to signify termination.
-
-
+BytesSteppedCoro: ta.TypeAlias = SteppedCoro[bytes, bytes, R]
+StrSteppedCoro: ta.TypeAlias = SteppedCoro[str, str, R]
 
-
-
+BytesToStrSteppedCoro: ta.TypeAlias = SteppedCoro[str, bytes, R]
+StrToBytesSteppedCoro: ta.TypeAlias = SteppedCoro[bytes, str, R]
 
 
 # Stepped reader generators emit either an int or None to request input, or emit some other kind of output.
-
+SteppedReaderCoro: ta.TypeAlias = ta.Generator[int | None | O, I | None, R]
 
-
-
+BytesSteppedReaderCoro: ta.TypeAlias = SteppedReaderCoro[bytes, bytes, R]
+StrSteppedReaderCoro: ta.TypeAlias = SteppedReaderCoro[str, str, R]
 
 
 ##
 
 
 @lang.autostart
-def
+def flatmap_stepped_coro(
         fn: ta.Callable[[list[OF]], OT],
-        g:
+        g: SteppedCoro[OF, I, R],
         *,
         terminate: ta.Callable[[OF], bool] | None = None,
 ) -> ta.Generator[OT, I, lang.Maybe[R]]:
     """
-    Given a stepped
-
+    Given a stepped coro and a function taking a list, returns a direct (1:1) coro which accepts input, builds a list of
+    yielded coro output, calls the given function with that list, and yields the result.
 
     An optional terminate function may be provided which will cause this function to return early if it returns true for
     an encountered yielded value. The encountered value causing termination will be included in the list sent to the
     given fn.
 
-    Returns a Maybe of either the given
+    Returns a Maybe of either the given coro's return value or empty if the terminator was encountered.
     """
 
     l: list[OF]
@@ -110,37 +110,37 @@ def _is_empty(o: T) -> bool:
     return len(o) < 1  # type: ignore
 
 
-def
-    return
+def joined_bytes_stepped_coro(g: BytesSteppedCoro[R]) -> BytesDirectCoro[R]:
+    return flatmap_stepped_coro(_join_bytes, g, terminate=_is_empty)
 
 
-def
-    return
+def joined_str_stepped_coro(g: StrSteppedCoro[R]) -> StrDirectCoro[R]:
+    return flatmap_stepped_coro(_join_str, g, terminate=_is_empty)
 
 
 ##
 
 
-def
-        g:
+def read_into_bytes_stepped_coro(
+        g: BytesSteppedCoro,
         f: ta.IO,
         *,
         read_size: int = DEFAULT_BUFFER_SIZE,
 ) -> ta.Iterator[bytes]:
     yield from lang.genmap(  # type: ignore[misc]
-
+        joined_bytes_stepped_coro(g),
         lang.readiter(f, read_size),
     )
 
 
-def
-        g:
+def read_into_str_stepped_coro(
+        g: StrSteppedCoro,
         f: ta.TextIO,
         *,
         read_size: int = DEFAULT_BUFFER_SIZE,
 ) -> ta.Iterator[str]:
     yield from lang.genmap(
-
+        joined_str_stepped_coro(g),
        lang.readiter(f, read_size),
     )
 
@@ -149,7 +149,7 @@ def read_into_str_stepped_generator(
 
 
 @lang.autostart
-def
+def buffer_bytes_stepped_reader_coro(g: BytesSteppedReaderCoro) -> BytesSteppedCoro:
     i: bytes | None
     o = g.send(None)
     rlb = ReadableListBuffer()
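For orientation, a minimal sketch of the stepped-coro protocol described in the comments above. This is illustrative only and not part of the diff; the helper names come from the code shown, while upper_stepped is a hypothetical example coro.

    import typing as ta

    def upper_stepped() -> ta.Generator[bytes | None, bytes | None, None]:
        # Stepped protocol: yield None to request input, then emit zero or more
        # non-None outputs for that input before requesting more.
        while True:
            chunk = yield None          # yield None to ask the driver for the next chunk
            if not chunk:               # an empty value conventionally signals termination
                yield b''
                return
            yield chunk.upper()         # one output for this input

    # Per the diff above, joined_bytes_stepped_coro(upper_stepped()) would adapt this into a
    # direct (1:1) coro, and read_into_bytes_stepped_coro(...) would drive it over a binary
    # file, yielding one joined bytes value per chunk read.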
omlish/multiprocessing/__init__.py ADDED
@@ -0,0 +1,32 @@
+# ruff: noqa: I001
+import typing as _ta
+
+from .. import lang as _lang
+
+
+from .death import (  # noqa
+    BaseDeathpact,
+    Deathpact,
+    NopDeathpact,
+    PipeDeathpact,
+)
+
+from .proxies import (  # noqa
+    DummyValueProxy,
+    ValueProxy,
+)
+
+if _ta.TYPE_CHECKING:
+    from .spawn import (  # noqa
+        ExtrasSpawnContext,
+        ExtrasSpawnPosixPopen,
+        ExtrasSpawnProcess,
+        SpawnExtras,
+    )
+else:
+    _lang.proxy_init(globals(), '.spawn', [
+        'ExtrasSpawnContext',
+        'ExtrasSpawnPosixPopen',
+        'ExtrasSpawnProcess',
+        'SpawnExtras',
+    ])
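The package __init__ above uses an eager/lazy split: the death and proxies names are imported directly, while the spawn names are only statically visible to type checkers and are otherwise deferred through _lang.proxy_init. A hypothetical illustration of the intended effect, assuming proxy_init defers the submodule import until one of the listed names is first used:

    from omlish import multiprocessing as omp

    omp.PipeDeathpact   # available immediately (imported from .death at package import time)
    omp.SpawnExtras     # presumably resolved lazily, loading omlish.multiprocessing.spawn on first access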
omlish/{multiprocessing.py → multiprocessing/death.py} RENAMED
@@ -1,96 +1,11 @@
 import abc
-import dataclasses as dc
-import multiprocessing as mp
-import multiprocessing.popen_spawn_posix
 import os
 import signal
 import sys
 import time
 import typing as ta
 
-from
-from . import lang
-from . import libc
-
-
-T = ta.TypeVar('T')
-
-
-##
-
-
-@ta.runtime_checkable
-class ValueProxy(ta.Protocol[T]):
-    # value = property(get, set)
-
-    def get(self) -> T:
-        ...
-
-    def set(self, value: T) -> None:
-        ...
-
-
-@dc.dataclass()
-@lang.protocol_check(ValueProxy)
-class DummyValueProxy(ValueProxy[T]):
-    value: T
-
-    def get(self) -> T:
-        return self.value
-
-    def set(self, value: T) -> None:
-        self.value = value
-
-
-##
-
-
-@dc.dataclass(frozen=True, kw_only=True)
-class SpawnExtras:
-    pass_fds: ta.AbstractSet[int] | None = None
-    deathsig: int | None = None
-
-
-class ExtrasSpawnPosixPopen(mp.popen_spawn_posix.Popen):
-    def __init__(self, process_obj: 'ExtrasSpawnProcess', *, extras: SpawnExtras) -> None:
-        self.__extras = extras
-        self.__pass_fds = extras.pass_fds
-        super().__init__(process_obj)
-
-    def _launch(self, process_obj: 'ExtrasSpawnProcess') -> None:
-        if self.__pass_fds:
-            for fd in self.__pass_fds:
-                self.duplicate_for_child(fd)
-            self._extra_fds = None
-
-        super()._launch(process_obj)  # type: ignore  # noqa
-
-
-class ExtrasSpawnProcess(mp.context.SpawnProcess):
-    def __init__(self, *args: ta.Any, extras: SpawnExtras, **kwargs: ta.Any) -> None:
-        self.__extras = extras
-        super().__init__(*args, **kwargs)
-
-    def _Popen(self, process_obj: 'ExtrasSpawnProcess') -> ExtrasSpawnPosixPopen:  # type: ignore  # noqa
-        return ExtrasSpawnPosixPopen(
-            check.isinstance(process_obj, ExtrasSpawnProcess),
-            extras=self.__extras,
-        )
-
-    def run(self) -> None:
-        if self.__extras.deathsig is not None and sys.platform == 'linux':
-            libc.prctl(libc.PR_SET_PDEATHSIG, self.__extras.deathsig, 0, 0, 0, 0)
-
-        super().run()
-
-
-class ExtrasSpawnContext(mp.context.SpawnContext):
-    def __init__(self, extras: SpawnExtras = SpawnExtras()) -> None:
-        self.__extras = extras
-        super().__init__()
-
-    def Process(self, *args: ta.Any, **kwargs: ta.Any):  # type: ignore  # noqa
-        return ExtrasSpawnProcess(*args, extras=self.__extras, **kwargs)
+from .. import check
 
 
 ##
@@ -107,7 +22,7 @@ class NopDeathpact(Deathpact):
     pass
 
 
-
+##
 
 
 class BaseDeathpact(Deathpact, abc.ABC):
@@ -163,7 +78,7 @@ class BaseDeathpact(Deathpact, abc.ABC):
         self._last_check_t = time.monotonic()
 
 
-
+##
 
 
 class PipeDeathpact(BaseDeathpact):
omlish/multiprocessing/proxies.py ADDED
@@ -0,0 +1,30 @@
+import dataclasses as dc
+import typing as ta
+
+from .. import lang
+
+
+T = ta.TypeVar('T')
+
+
+@ta.runtime_checkable
+class ValueProxy(ta.Protocol[T]):
+    # value = property(get, set)
+
+    def get(self) -> T:
+        ...
+
+    def set(self, value: T) -> None:
+        ...
+
+
+@dc.dataclass()
+@lang.protocol_check(ValueProxy)
+class DummyValueProxy(ValueProxy[T]):
+    value: T
+
+    def get(self) -> T:
+        return self.value
+
+    def set(self, value: T) -> None:
+        self.value = value
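A small usage sketch of the proxies module added above (illustrative only; assumes omlish is importable):

    from omlish.multiprocessing.proxies import DummyValueProxy, ValueProxy

    p = DummyValueProxy(42)
    p.set(43)
    assert p.get() == 43
    assert isinstance(p, ValueProxy)  # ValueProxy is a runtime-checkable Protocol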
omlish/multiprocessing/spawn.py ADDED
@@ -0,0 +1,59 @@
+import dataclasses as dc
+import multiprocessing as mp
+import multiprocessing.popen_spawn_posix
+import sys
+import typing as ta
+
+from .. import check
+from .. import libc
+
+
+T = ta.TypeVar('T')
+
+
+@dc.dataclass(frozen=True, kw_only=True)
+class SpawnExtras:
+    pass_fds: ta.AbstractSet[int] | None = None
+    deathsig: int | None = None
+
+
+class ExtrasSpawnPosixPopen(mp.popen_spawn_posix.Popen):
+    def __init__(self, process_obj: 'ExtrasSpawnProcess', *, extras: SpawnExtras) -> None:
+        self.__extras = extras
+        self.__pass_fds = extras.pass_fds
+        super().__init__(process_obj)
+
+    def _launch(self, process_obj: 'ExtrasSpawnProcess') -> None:
+        if self.__pass_fds:
+            for fd in self.__pass_fds:
+                self.duplicate_for_child(fd)
+            self._extra_fds = None
+
+        super()._launch(process_obj)  # type: ignore  # noqa
+
+
+class ExtrasSpawnProcess(mp.context.SpawnProcess):
+    def __init__(self, *args: ta.Any, extras: SpawnExtras, **kwargs: ta.Any) -> None:
+        self.__extras = extras
+        super().__init__(*args, **kwargs)
+
+    def _Popen(self, process_obj: 'ExtrasSpawnProcess') -> ExtrasSpawnPosixPopen:  # type: ignore  # noqa
+        return ExtrasSpawnPosixPopen(
+            check.isinstance(process_obj, ExtrasSpawnProcess),
+            extras=self.__extras,
+        )
+
+    def run(self) -> None:
+        if self.__extras.deathsig is not None and sys.platform == 'linux':
+            libc.prctl(libc.PR_SET_PDEATHSIG, self.__extras.deathsig, 0, 0, 0, 0)
+
+        super().run()
+
+
+class ExtrasSpawnContext(mp.context.SpawnContext):
+    def __init__(self, extras: SpawnExtras = SpawnExtras()) -> None:
+        self.__extras = extras
+        super().__init__()
+
+    def Process(self, *args: ta.Any, **kwargs: ta.Any):  # type: ignore  # noqa
+        return ExtrasSpawnProcess(*args, extras=self.__extras, **kwargs)
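A hypothetical usage sketch of the spawn context added above (not from the diff; per the code, deathsig only takes effect on Linux, and the POSIX popen class implies a POSIX host):

    import signal

    from omlish.multiprocessing import ExtrasSpawnContext, SpawnExtras

    def _work() -> None:
        ...

    if __name__ == '__main__':
        ctx = ExtrasSpawnContext(SpawnExtras(deathsig=int(signal.SIGTERM)))
        p = ctx.Process(target=_work)
        p.start()   # the child should receive SIGTERM if this parent dies first
        p.join()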
omlish/os/atomics.py CHANGED
@@ -6,8 +6,8 @@ import shutil
 import tempfile
 import typing as ta
 
-from
-from
+from ..lite.check import check
+from ..lite.strings import attr_repr
 
 
 AtomicPathSwapKind = ta.Literal['dir', 'file']
omlish/outcome.py ADDED
@@ -0,0 +1,250 @@
+# https://github.com/python-trio/outcome/tree/6a3192f306ead4900a33fa8c47e5af5430e37692
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+import abc
+import dataclasses as dc
+import typing as ta
+
+from . import check
+
+
+ValueT_co = ta.TypeVar('ValueT_co', covariant=True)
+ResultT = ta.TypeVar('ResultT')
+ArgsT = ta.ParamSpec('ArgsT')
+
+
+##
+
+
+class AlreadyUsedError(RuntimeError):
+    """An Outcome can only be unwrapped once."""
+
+
+def _remove_tb_frames(exc: BaseException, n: int) -> BaseException:
+    tb: ta.Any = exc.__traceback__
+    for _ in range(n):
+        check.not_none(tb)
+        tb = tb.tb_next
+    return exc.with_traceback(tb)
+
+
+##
+
+
+@ta.overload
+def capture(
+        sync_fn: ta.Callable[ArgsT, ta.NoReturn],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> 'Error':
+    ...
+
+
+@ta.overload
+def capture(
+        sync_fn: ta.Callable[ArgsT, ResultT],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    ...
+
+
+def capture(
+        sync_fn: ta.Callable[ArgsT, ResultT],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    """
+    Run ``sync_fn(*args, **kwargs)`` and capture the result.
+
+    Returns:
+      Either a :class:`Value` or :class:`Error` as appropriate.
+    """
+
+    try:
+        return Value(sync_fn(*args, **kwargs))
+
+    except BaseException as exc:  # noqa
+        exc = _remove_tb_frames(exc, 1)
+        return Error(exc)
+
+
+#
+
+
+@ta.overload
+async def acapture(
+        async_fn: ta.Callable[ArgsT, ta.Awaitable[ta.NoReturn]],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> 'Error':
+    ...
+
+
+@ta.overload
+async def acapture(
+        async_fn: ta.Callable[ArgsT, ta.Awaitable[ResultT]],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    ...
+
+
+async def acapture(
+        async_fn: ta.Callable[ArgsT, ta.Awaitable[ResultT]],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    """
+    Run ``await async_fn(*args, **kwargs)`` and capture the result.
+
+    Returns:
+      Either a :class:`Value` or :class:`Error` as appropriate.
+    """
+
+    try:
+        return Value(await async_fn(*args, **kwargs))
+
+    except BaseException as exc:  # noqa
+        exc = _remove_tb_frames(exc, 1)
+        return Error(exc)
+
+
+##
+
+
+@dc.dataclass(repr=False, init=False, slots=True, frozen=True, order=True)
+class Outcome(abc.ABC, ta.Generic[ValueT_co]):
+    """
+    An abstract class representing the result of a Python computation.
+
+    This class has two concrete subclasses: :class:`Value` representing a value, and :class:`Error` representing an
+    exception.
+
+    In addition to the methods described below, comparison operators on :class:`Value` and :class:`Error` objects
+    (``==``, ``<``, etc.) check that the other object is also a :class:`Value` or :class:`Error` object respectively,
+    and then compare the contained objects.
+
+    :class:`Outcome` objects are hashable if the contained objects are hashable.
+    """
+
+    _unwrapped: bool = dc.field(default=False, compare=False, init=False)
+
+    def _set_unwrapped(self) -> None:
+        if self._unwrapped:
+            raise AlreadyUsedError
+        object.__setattr__(self, '_unwrapped', True)
+
+    @abc.abstractmethod
+    def unwrap(self) -> ValueT_co:
+        """
+        Return or raise the contained value or exception.
+
+        These two lines of code are equivalent::
+
+           x = fn(*args)
+           x = outcome.capture(fn, *args).unwrap()
+        """
+
+    @abc.abstractmethod
+    def send(self, gen: ta.Generator[ResultT, ValueT_co, object]) -> ResultT:
+        """
+        Send or throw the contained value or exception into the given generator object.
+
+        Args:
+          gen: A generator object supporting ``.send()`` and ``.throw()`` methods.
+        """
+
+    @abc.abstractmethod
+    async def asend(self, agen: ta.AsyncGenerator[ResultT, ValueT_co]) -> ResultT:
+        """
+        Send or throw the contained value or exception into the given async generator object.
+
+        Args:
+          agen: An async generator object supporting ``.asend()`` and ``.athrow()`` methods.
+        """
+
+
+@ta.final
+@dc.dataclass(frozen=True, repr=False, slots=True, order=True)
+class Value(Outcome[ValueT_co], ta.Generic[ValueT_co]):
+    """Concrete :class:`Outcome` subclass representing a regular value."""
+
+    value: ValueT_co
+
+    def __repr__(self) -> str:
+        return f'Value({self.value!r})'
+
+    def unwrap(self) -> ValueT_co:
+        self._set_unwrapped()
+        return self.value
+
+    def send(self, gen: ta.Generator[ResultT, ValueT_co, object]) -> ResultT:
+        self._set_unwrapped()
+        return gen.send(self.value)
+
+    async def asend(self, agen: ta.AsyncGenerator[ResultT, ValueT_co]) -> ResultT:
+        self._set_unwrapped()
+        return await agen.asend(self.value)
+
+
+@ta.final
+@dc.dataclass(frozen=True, repr=False, slots=True, order=True)
+class Error(Outcome[ta.NoReturn]):
+    """Concrete :class:`Outcome` subclass representing a raised exception."""
+
+    error: BaseException
+
+    def __post_init__(self) -> None:
+        if not isinstance(self.error, BaseException):
+            raise TypeError(self.error)
+
+    def __repr__(self) -> str:
+        return f'Error({self.error!r})'
+
+    def unwrap(self) -> ta.NoReturn:
+        self._set_unwrapped()
+
+        # Tracebacks show the 'raise' line below out of context, so let's give this variable a name that makes sense out
+        # of context.
+        captured_error = self.error
+
+        try:
+            raise captured_error
+
+        finally:
+            # We want to avoid creating a reference cycle here. Python does collect cycles just fine, so it wouldn't be
+            # the end of the world if we did create a cycle, but the cyclic garbage collector adds latency to Python
+            # programs, and the more cycles you create, the more often it runs, so it's nicer to avoid creating them in
+            # the first place. For more details see:
+            #
+            # https://github.com/python-trio/trio/issues/1770
+            #
+            # In particular, by deleting this local variables from the 'unwrap' methods frame, we avoid the
+            # 'captured_error' object's __traceback__ from indirectly referencing 'captured_error'.
+            del captured_error, self
+
+    def send(self, gen: ta.Generator[ResultT, ta.NoReturn, object]) -> ResultT:
+        self._set_unwrapped()
+        return gen.throw(self.error)
+
+    async def asend(self, agen: ta.AsyncGenerator[ResultT, ta.NoReturn]) -> ResultT:
+        self._set_unwrapped()
+        return await agen.athrow(self.error)
+
+
+# A convenience alias to a union of both results, allowing exhaustiveness checking.
+Maybe = Value[ValueT_co] | Error
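For context, a small usage sketch of the vendored outcome API added above (illustrative only; assumes omlish is importable):

    from omlish import outcome

    def div(a: int, b: int) -> float:
        return a / b

    ok = outcome.capture(div, 6, 3)    # Value(2.0)
    bad = outcome.capture(div, 1, 0)   # Error(ZeroDivisionError('division by zero'))

    print(ok.unwrap())                 # 2.0

    try:
        bad.unwrap()                   # re-raises the captured ZeroDivisionError
    except ZeroDivisionError:
        pass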
omlish/sockets/server.py CHANGED
omlish/term/vt100/terminal.py CHANGED
omlish/testing/pytest/plugins/asyncs/__init__.py ADDED
@@ -0,0 +1 @@
+from .plugin import AsyncsPlugin  # noqa
omlish/testing/pytest/plugins/asyncs/backends/__init__.py ADDED
@@ -0,0 +1,16 @@
+import typing as _ta
+
+from .asyncio import AsyncioAsyncsBackend  # noqa
+from .base import AsyncsBackend  # noqa
+from .trio import TrioAsyncsBackend  # noqa
+from .trio_asyncio import TrioAsyncioAsyncsBackend  # noqa
+
+
+##
+
+
+ASYNC_BACKENDS: _ta.Collection[type[AsyncsBackend]] = [
+    AsyncioAsyncsBackend,
+    TrioAsyncioAsyncsBackend,
+    TrioAsyncsBackend,
+]