omlish-0.0.0.dev195-py3-none-any.whl → omlish-0.0.0.dev196-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omlish/__about__.py +3 -3
- omlish/asyncs/bluelet/runner.py +1 -1
- omlish/codecs/base.py +5 -5
- omlish/codecs/text.py +1 -2
- omlish/io/compress/adapters.py +4 -4
- omlish/io/compress/base.py +4 -4
- omlish/io/compress/bz2.py +4 -4
- omlish/io/compress/codecs.py +2 -2
- omlish/io/compress/gzip.py +10 -10
- omlish/io/compress/lz4.py +5 -5
- omlish/io/compress/lzma.py +4 -4
- omlish/io/compress/zlib.py +4 -4
- omlish/io/coro/__init__.py +56 -0
- omlish/io/coro/direct.py +13 -0
- omlish/io/{generators → coro}/readers.py +31 -31
- omlish/io/{generators → coro}/stepped.py +28 -28
- omlish/multiprocessing/__init__.py +32 -0
- omlish/{multiprocessing.py → multiprocessing/death.py} +3 -88
- omlish/multiprocessing/proxies.py +30 -0
- omlish/multiprocessing/spawn.py +59 -0
- omlish/os/atomics.py +2 -2
- omlish/outcome.py +250 -0
- omlish/sockets/server.py +1 -2
- omlish/term/vt100/terminal.py +1 -1
- omlish/testing/pytest/plugins/asyncs/__init__.py +1 -0
- omlish/testing/pytest/plugins/asyncs/backends/__init__.py +16 -0
- omlish/testing/pytest/plugins/asyncs/backends/asyncio.py +35 -0
- omlish/testing/pytest/plugins/asyncs/backends/base.py +30 -0
- omlish/testing/pytest/plugins/asyncs/backends/trio.py +91 -0
- omlish/testing/pytest/plugins/asyncs/backends/trio_asyncio.py +89 -0
- omlish/testing/pytest/plugins/asyncs/consts.py +3 -0
- omlish/testing/pytest/plugins/asyncs/fixtures.py +273 -0
- omlish/testing/pytest/plugins/asyncs/plugin.py +182 -0
- omlish/testing/pytest/plugins/asyncs/utils.py +10 -0
- omlish/text/indent.py +1 -1
- omlish/text/minja.py +2 -2
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev196.dist-info}/METADATA +5 -5
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev196.dist-info}/RECORD +43 -30
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev196.dist-info}/WHEEL +1 -1
- omlish/io/generators/__init__.py +0 -56
- omlish/io/generators/direct.py +0 -13
- omlish/testing/pytest/plugins/asyncs.py +0 -162
- /omlish/io/{generators → coro}/consts.py +0 -0
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev196.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev196.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev195.dist-info → omlish-0.0.0.dev196.dist-info}/top_level.txt +0 -0
omlish/multiprocessing/__init__.py
ADDED
@@ -0,0 +1,32 @@
+# ruff: noqa: I001
+import typing as _ta
+
+from .. import lang as _lang
+
+
+from .death import (  # noqa
+    BaseDeathpact,
+    Deathpact,
+    NopDeathpact,
+    PipeDeathpact,
+)
+
+from .proxies import (  # noqa
+    DummyValueProxy,
+    ValueProxy,
+)
+
+if _ta.TYPE_CHECKING:
+    from .spawn import (  # noqa
+        ExtrasSpawnContext,
+        ExtrasSpawnPosixPopen,
+        ExtrasSpawnProcess,
+        SpawnExtras,
+    )
+else:
+    _lang.proxy_init(globals(), '.spawn', [
+        'ExtrasSpawnContext',
+        'ExtrasSpawnPosixPopen',
+        'ExtrasSpawnProcess',
+        'SpawnExtras',
+    ])
omlish/{multiprocessing.py → multiprocessing/death.py}
RENAMED
@@ -1,96 +1,11 @@
 import abc
-import dataclasses as dc
-import multiprocessing as mp
-import multiprocessing.popen_spawn_posix
 import os
 import signal
 import sys
 import time
 import typing as ta
 
-from
-from . import lang
-from . import libc
-
-
-T = ta.TypeVar('T')
-
-
-##
-
-
-@ta.runtime_checkable
-class ValueProxy(ta.Protocol[T]):
-    # value = property(get, set)
-
-    def get(self) -> T:
-        ...
-
-    def set(self, value: T) -> None:
-        ...
-
-
-@dc.dataclass()
-@lang.protocol_check(ValueProxy)
-class DummyValueProxy(ValueProxy[T]):
-    value: T
-
-    def get(self) -> T:
-        return self.value
-
-    def set(self, value: T) -> None:
-        self.value = value
-
-
-##
-
-
-@dc.dataclass(frozen=True, kw_only=True)
-class SpawnExtras:
-    pass_fds: ta.AbstractSet[int] | None = None
-    deathsig: int | None = None
-
-
-class ExtrasSpawnPosixPopen(mp.popen_spawn_posix.Popen):
-    def __init__(self, process_obj: 'ExtrasSpawnProcess', *, extras: SpawnExtras) -> None:
-        self.__extras = extras
-        self.__pass_fds = extras.pass_fds
-        super().__init__(process_obj)
-
-    def _launch(self, process_obj: 'ExtrasSpawnProcess') -> None:
-        if self.__pass_fds:
-            for fd in self.__pass_fds:
-                self.duplicate_for_child(fd)
-            self._extra_fds = None
-
-        super()._launch(process_obj)  # type: ignore  # noqa
-
-
-class ExtrasSpawnProcess(mp.context.SpawnProcess):
-    def __init__(self, *args: ta.Any, extras: SpawnExtras, **kwargs: ta.Any) -> None:
-        self.__extras = extras
-        super().__init__(*args, **kwargs)
-
-    def _Popen(self, process_obj: 'ExtrasSpawnProcess') -> ExtrasSpawnPosixPopen:  # type: ignore  # noqa
-        return ExtrasSpawnPosixPopen(
-            check.isinstance(process_obj, ExtrasSpawnProcess),
-            extras=self.__extras,
-        )
-
-    def run(self) -> None:
-        if self.__extras.deathsig is not None and sys.platform == 'linux':
-            libc.prctl(libc.PR_SET_PDEATHSIG, self.__extras.deathsig, 0, 0, 0, 0)
-
-        super().run()
-
-
-class ExtrasSpawnContext(mp.context.SpawnContext):
-    def __init__(self, extras: SpawnExtras = SpawnExtras()) -> None:
-        self.__extras = extras
-        super().__init__()
-
-    def Process(self, *args: ta.Any, **kwargs: ta.Any):  # type: ignore  # noqa
-        return ExtrasSpawnProcess(*args, extras=self.__extras, **kwargs)
+from .. import check
 
 
 ##
@@ -107,7 +22,7 @@ class NopDeathpact(Deathpact):
     pass
 
 
-
+##
 
 
 class BaseDeathpact(Deathpact, abc.ABC):
@@ -163,7 +78,7 @@ class BaseDeathpact(Deathpact, abc.ABC):
         self._last_check_t = time.monotonic()
 
 
-
+##
 
 
 class PipeDeathpact(BaseDeathpact):
omlish/multiprocessing/proxies.py
ADDED
@@ -0,0 +1,30 @@
+import dataclasses as dc
+import typing as ta
+
+from .. import lang
+
+
+T = ta.TypeVar('T')
+
+
+@ta.runtime_checkable
+class ValueProxy(ta.Protocol[T]):
+    # value = property(get, set)
+
+    def get(self) -> T:
+        ...
+
+    def set(self, value: T) -> None:
+        ...
+
+
+@dc.dataclass()
+@lang.protocol_check(ValueProxy)
+class DummyValueProxy(ValueProxy[T]):
+    value: T
+
+    def get(self) -> T:
+        return self.value
+
+    def set(self, value: T) -> None:
+        self.value = value
omlish/multiprocessing/spawn.py
ADDED
@@ -0,0 +1,59 @@
+import dataclasses as dc
+import multiprocessing as mp
+import multiprocessing.popen_spawn_posix
+import sys
+import typing as ta
+
+from .. import check
+from .. import libc
+
+
+T = ta.TypeVar('T')
+
+
+@dc.dataclass(frozen=True, kw_only=True)
+class SpawnExtras:
+    pass_fds: ta.AbstractSet[int] | None = None
+    deathsig: int | None = None
+
+
+class ExtrasSpawnPosixPopen(mp.popen_spawn_posix.Popen):
+    def __init__(self, process_obj: 'ExtrasSpawnProcess', *, extras: SpawnExtras) -> None:
+        self.__extras = extras
+        self.__pass_fds = extras.pass_fds
+        super().__init__(process_obj)
+
+    def _launch(self, process_obj: 'ExtrasSpawnProcess') -> None:
+        if self.__pass_fds:
+            for fd in self.__pass_fds:
+                self.duplicate_for_child(fd)
+            self._extra_fds = None
+
+        super()._launch(process_obj)  # type: ignore  # noqa
+
+
+class ExtrasSpawnProcess(mp.context.SpawnProcess):
+    def __init__(self, *args: ta.Any, extras: SpawnExtras, **kwargs: ta.Any) -> None:
+        self.__extras = extras
+        super().__init__(*args, **kwargs)
+
+    def _Popen(self, process_obj: 'ExtrasSpawnProcess') -> ExtrasSpawnPosixPopen:  # type: ignore  # noqa
+        return ExtrasSpawnPosixPopen(
+            check.isinstance(process_obj, ExtrasSpawnProcess),
+            extras=self.__extras,
+        )
+
+    def run(self) -> None:
+        if self.__extras.deathsig is not None and sys.platform == 'linux':
+            libc.prctl(libc.PR_SET_PDEATHSIG, self.__extras.deathsig, 0, 0, 0, 0)
+
+        super().run()
+
+
+class ExtrasSpawnContext(mp.context.SpawnContext):
+    def __init__(self, extras: SpawnExtras = SpawnExtras()) -> None:
+        self.__extras = extras
+        super().__init__()
+
+    def Process(self, *args: ta.Any, **kwargs: ta.Any):  # type: ignore  # noqa
+        return ExtrasSpawnProcess(*args, extras=self.__extras, **kwargs)
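As a rough usage sketch only (not part of the diff), the pieces added above fit together as follows; the import path and names come from the new modules, and the deathsig behavior is Linux-only per the sys.platform check in ExtrasSpawnProcess.run:

import signal

from omlish.multiprocessing import ExtrasSpawnContext, SpawnExtras


def _child() -> None:
    print('child running')


if __name__ == '__main__':
    # SpawnExtras.deathsig asks that the child receive SIGTERM if the parent dies (Linux only).
    ctx = ExtrasSpawnContext(SpawnExtras(deathsig=signal.SIGTERM))
    p = ctx.Process(target=_child)
    p.start()
    p.join()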
omlish/os/atomics.py
CHANGED
@@ -6,8 +6,8 @@ import shutil
 import tempfile
 import typing as ta
 
-from
-from
+from ..lite.check import check
+from ..lite.strings import attr_repr
 
 
 AtomicPathSwapKind = ta.Literal['dir', 'file']
omlish/outcome.py
ADDED
@@ -0,0 +1,250 @@
+# https://github.com/python-trio/outcome/tree/6a3192f306ead4900a33fa8c47e5af5430e37692
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+import abc
+import dataclasses as dc
+import typing as ta
+
+from . import check
+
+
+ValueT_co = ta.TypeVar('ValueT_co', covariant=True)
+ResultT = ta.TypeVar('ResultT')
+ArgsT = ta.ParamSpec('ArgsT')
+
+
+##
+
+
+class AlreadyUsedError(RuntimeError):
+    """An Outcome can only be unwrapped once."""
+
+
+def _remove_tb_frames(exc: BaseException, n: int) -> BaseException:
+    tb: ta.Any = exc.__traceback__
+    for _ in range(n):
+        check.not_none(tb)
+        tb = tb.tb_next
+    return exc.with_traceback(tb)
+
+
+##
+
+
+@ta.overload
+def capture(
+        sync_fn: ta.Callable[ArgsT, ta.NoReturn],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> 'Error':
+    ...
+
+
+@ta.overload
+def capture(
+        sync_fn: ta.Callable[ArgsT, ResultT],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    ...
+
+
+def capture(
+        sync_fn: ta.Callable[ArgsT, ResultT],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    """
+    Run ``sync_fn(*args, **kwargs)`` and capture the result.
+
+    Returns:
+      Either a :class:`Value` or :class:`Error` as appropriate.
+    """
+
+    try:
+        return Value(sync_fn(*args, **kwargs))
+
+    except BaseException as exc:  # noqa
+        exc = _remove_tb_frames(exc, 1)
+        return Error(exc)
+
+
+#
+
+
+@ta.overload
+async def acapture(
+        async_fn: ta.Callable[ArgsT, ta.Awaitable[ta.NoReturn]],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> 'Error':
+    ...
+
+
+@ta.overload
+async def acapture(
+        async_fn: ta.Callable[ArgsT, ta.Awaitable[ResultT]],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    ...
+
+
+async def acapture(
+        async_fn: ta.Callable[ArgsT, ta.Awaitable[ResultT]],
+        *args: ArgsT.args,
+        **kwargs: ArgsT.kwargs,
+) -> ta.Union['Value[ResultT]', 'Error']:
+    """
+    Run ``await async_fn(*args, **kwargs)`` and capture the result.
+
+    Returns:
+      Either a :class:`Value` or :class:`Error` as appropriate.
+    """
+
+    try:
+        return Value(await async_fn(*args, **kwargs))
+
+    except BaseException as exc:  # noqa
+        exc = _remove_tb_frames(exc, 1)
+        return Error(exc)
+
+
+##
+
+
+@dc.dataclass(repr=False, init=False, slots=True, frozen=True, order=True)
+class Outcome(abc.ABC, ta.Generic[ValueT_co]):
+    """
+    An abstract class representing the result of a Python computation.
+
+    This class has two concrete subclasses: :class:`Value` representing a value, and :class:`Error` representing an
+    exception.
+
+    In addition to the methods described below, comparison operators on :class:`Value` and :class:`Error` objects
+    (``==``, ``<``, etc.) check that the other object is also a :class:`Value` or :class:`Error` object respectively,
+    and then compare the contained objects.
+
+    :class:`Outcome` objects are hashable if the contained objects are hashable.
+    """
+
+    _unwrapped: bool = dc.field(default=False, compare=False, init=False)
+
+    def _set_unwrapped(self) -> None:
+        if self._unwrapped:
+            raise AlreadyUsedError
+        object.__setattr__(self, '_unwrapped', True)
+
+    @abc.abstractmethod
+    def unwrap(self) -> ValueT_co:
+        """
+        Return or raise the contained value or exception.
+
+        These two lines of code are equivalent::
+
+           x = fn(*args)
+           x = outcome.capture(fn, *args).unwrap()
+        """
+
+    @abc.abstractmethod
+    def send(self, gen: ta.Generator[ResultT, ValueT_co, object]) -> ResultT:
+        """
+        Send or throw the contained value or exception into the given generator object.
+
+        Args:
+          gen: A generator object supporting ``.send()`` and ``.throw()`` methods.
+        """
+
+    @abc.abstractmethod
+    async def asend(self, agen: ta.AsyncGenerator[ResultT, ValueT_co]) -> ResultT:
+        """
+        Send or throw the contained value or exception into the given async generator object.
+
+        Args:
+          agen: An async generator object supporting ``.asend()`` and ``.athrow()`` methods.
+        """
+
+
+@ta.final
+@dc.dataclass(frozen=True, repr=False, slots=True, order=True)
+class Value(Outcome[ValueT_co], ta.Generic[ValueT_co]):
+    """Concrete :class:`Outcome` subclass representing a regular value."""
+
+    value: ValueT_co
+
+    def __repr__(self) -> str:
+        return f'Value({self.value!r})'
+
+    def unwrap(self) -> ValueT_co:
+        self._set_unwrapped()
+        return self.value
+
+    def send(self, gen: ta.Generator[ResultT, ValueT_co, object]) -> ResultT:
+        self._set_unwrapped()
+        return gen.send(self.value)
+
+    async def asend(self, agen: ta.AsyncGenerator[ResultT, ValueT_co]) -> ResultT:
+        self._set_unwrapped()
+        return await agen.asend(self.value)
+
+
+@ta.final
+@dc.dataclass(frozen=True, repr=False, slots=True, order=True)
+class Error(Outcome[ta.NoReturn]):
+    """Concrete :class:`Outcome` subclass representing a raised exception."""
+
+    error: BaseException
+
+    def __post_init__(self) -> None:
+        if not isinstance(self.error, BaseException):
+            raise TypeError(self.error)
+
+    def __repr__(self) -> str:
+        return f'Error({self.error!r})'
+
+    def unwrap(self) -> ta.NoReturn:
+        self._set_unwrapped()
+
+        # Tracebacks show the 'raise' line below out of context, so let's give this variable a name that makes sense out
+        # of context.
+        captured_error = self.error
+
+        try:
+            raise captured_error
+
+        finally:
+            # We want to avoid creating a reference cycle here. Python does collect cycles just fine, so it wouldn't be
+            # the end of the world if we did create a cycle, but the cyclic garbage collector adds latency to Python
+            # programs, and the more cycles you create, the more often it runs, so it's nicer to avoid creating them in
+            # the first place. For more details see:
+            #
+            # https://github.com/python-trio/trio/issues/1770
+            #
+            # In particular, by deleting this local variables from the 'unwrap' methods frame, we avoid the
+            # 'captured_error' object's __traceback__ from indirectly referencing 'captured_error'.
+            del captured_error, self
+
+    def send(self, gen: ta.Generator[ResultT, ta.NoReturn, object]) -> ResultT:
+        self._set_unwrapped()
+        return gen.throw(self.error)
+
+    async def asend(self, agen: ta.AsyncGenerator[ResultT, ta.NoReturn]) -> ResultT:
+        self._set_unwrapped()
+        return await agen.athrow(self.error)
+
+
+# A convenience alias to a union of both results, allowing exhaustiveness checking.
+Maybe = Value[ValueT_co] | Error
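For orientation only (this example is not part of the diff), the vendored module mirrors trio's outcome API: capture wraps a call in a Value or Error, and each outcome may be unwrapped exactly once:

from omlish import outcome


def parse(s: str) -> int:
    return int(s)


ok = outcome.capture(parse, '42')     # Value(42)
bad = outcome.capture(parse, 'nope')  # Error(ValueError(...))

print(ok.unwrap())  # 42

try:
    bad.unwrap()  # re-raises the captured ValueError
except ValueError as e:
    print('captured:', e)

# A second unwrap() of the same outcome raises AlreadyUsedError.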
omlish/sockets/server.py
CHANGED
omlish/term/vt100/terminal.py
CHANGED
omlish/testing/pytest/plugins/asyncs/__init__.py
ADDED
@@ -0,0 +1 @@
+from .plugin import AsyncsPlugin  # noqa
omlish/testing/pytest/plugins/asyncs/backends/__init__.py
ADDED
@@ -0,0 +1,16 @@
+import typing as _ta
+
+from .asyncio import AsyncioAsyncsBackend  # noqa
+from .base import AsyncsBackend  # noqa
+from .trio import TrioAsyncsBackend  # noqa
+from .trio_asyncio import TrioAsyncioAsyncsBackend  # noqa
+
+
+##
+
+
+ASYNC_BACKENDS: _ta.Collection[type[AsyncsBackend]] = [
+    AsyncioAsyncsBackend,
+    TrioAsyncioAsyncsBackend,
+    TrioAsyncsBackend,
+]
omlish/testing/pytest/plugins/asyncs/backends/asyncio.py
ADDED
@@ -0,0 +1,35 @@
+import functools
+import sys
+import typing as ta
+
+from ...... import check
+from ...... import lang
+from .base import AsyncsBackend
+
+
+if ta.TYPE_CHECKING:
+    import asyncio
+else:
+    asyncio = lang.proxy_import('asyncio')
+
+
+class AsyncioAsyncsBackend(AsyncsBackend):
+    name = 'asyncio'
+
+    def is_available(self) -> bool:
+        return True
+
+    def is_imported(self) -> bool:
+        return 'asyncio' in sys.modules
+
+    #
+
+    def wrap_runner(self, fn):
+        @functools.wraps(fn)
+        def wrapper(**kwargs):
+            with asyncio.Runner(loop_factory=asyncio.get_event_loop_policy().new_event_loop) as runner:
+                loop_cls = type(runner.get_loop())
+                check.equal(loop_cls.__module__.split('.')[0], 'asyncio')
+                return runner.run(fn(**kwargs))
+
+        return wrapper
omlish/testing/pytest/plugins/asyncs/backends/base.py
ADDED
@@ -0,0 +1,30 @@
+import abc
+
+
+class AsyncsBackend(abc.ABC):
+    @property
+    @abc.abstractmethod
+    def name(self) -> str:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def is_available(self) -> bool:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def is_imported(self) -> bool:
+        raise NotImplementedError
+
+    #
+
+    def prepare_for_metafunc(self, metafunc) -> None:  # noqa
+        pass
+
+    #
+
+    @abc.abstractmethod
+    def wrap_runner(self, fn):
+        raise NotImplementedError
+
+    async def install_context(self, contextvars_ctx):  # noqa
+        pass
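A minimal sketch of what a concrete backend must supply, mirroring the asyncio backend above; the class below is hypothetical and not part of the package:

import functools
import sys

from omlish.testing.pytest.plugins.asyncs.backends import AsyncsBackend


class EchoAsyncsBackend(AsyncsBackend):  # hypothetical backend, for illustration only
    name = 'echo'

    def is_available(self) -> bool:
        return True  # no third-party dependency to probe

    def is_imported(self) -> bool:
        return 'asyncio' in sys.modules

    def wrap_runner(self, fn):
        @functools.wraps(fn)
        def wrapper(**kwargs):
            import asyncio
            return asyncio.run(fn(**kwargs))

        return wrapper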
omlish/testing/pytest/plugins/asyncs/backends/trio.py
ADDED
@@ -0,0 +1,91 @@
+# Based on pytest-trio, licensed under the MIT license, duplicated below.
+#
+# https://github.com/python-trio/pytest-trio/tree/cd6cc14b061d34f35980e38c44052108ed5402d1
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
+# Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+import functools
+import sys
+import typing as ta
+
+import pytest
+from _pytest.outcomes import Skipped  # noqa
+from _pytest.outcomes import XFailed  # noqa
+
+from ...... import lang
+from .base import AsyncsBackend
+
+
+if ta.TYPE_CHECKING:
+    import trio
+else:
+    trio = lang.proxy_import('trio', extras=['abc'])
+
+
+class TrioAsyncsBackend(AsyncsBackend):
+    name = 'trio'
+
+    def is_available(self) -> bool:
+        return lang.can_import('trio')
+
+    def is_imported(self) -> bool:
+        return 'trio' in sys.modules
+
+    #
+
+    def wrap_runner(self, fn):
+        @functools.wraps(fn)
+        def wrapper(**kwargs):
+            __tracebackhide__ = True
+
+            clocks = {k: c for k, c in kwargs.items() if isinstance(c, trio.abc.Clock)}
+            if not clocks:
+                clock = None
+            elif len(clocks) == 1:
+                clock = list(clocks.values())[0]  # noqa
+            else:
+                raise ValueError(f'Expected at most one Clock in kwargs, got {clocks!r}')
+
+            instruments = [i for i in kwargs.values() if isinstance(i, trio.abc.Instrument)]
+
+            try:
+                return trio.run(
+                    functools.partial(fn, **kwargs),
+                    clock=clock,
+                    instruments=instruments,
+                )
+
+            except BaseExceptionGroup as eg:
+                queue: list[BaseException] = [eg]
+                leaves = []
+
+                while queue:
+                    ex = queue.pop()
+                    if isinstance(ex, BaseExceptionGroup):
+                        queue.extend(ex.exceptions)
+                    else:
+                        leaves.append(ex)
+
+                if len(leaves) == 1:
+                    if isinstance(leaves[0], XFailed):
+                        pytest.xfail()
+                    if isinstance(leaves[0], Skipped):
+                        pytest.skip()
+
+                # Since our leaf exceptions don't consist of exactly one 'magic' skipped or xfailed exception, re-raise
+                # the whole group.
+                raise
+
+        return wrapper