omlish 0.0.0.dev134__py3-none-any.whl → 0.0.0.dev137__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. omlish/.manifests.json +0 -12
  2. omlish/__about__.py +2 -2
  3. omlish/cached.py +2 -2
  4. omlish/collections/mappings.py +1 -1
  5. omlish/diag/_pycharm/runhack.py +3 -0
  6. omlish/formats/json/stream/lex.py +1 -1
  7. omlish/formats/json/stream/parse.py +1 -1
  8. omlish/{genmachine.py → funcs/genmachine.py} +4 -2
  9. omlish/{matchfns.py → funcs/match.py} +1 -1
  10. omlish/{fnpairs.py → funcs/pairs.py} +3 -3
  11. omlish/http/sessions.py +1 -1
  12. omlish/io/compress/__init__.py +0 -0
  13. omlish/io/compress/abc.py +104 -0
  14. omlish/io/compress/adapters.py +147 -0
  15. omlish/io/compress/bz2.py +41 -0
  16. omlish/io/compress/gzip.py +301 -0
  17. omlish/io/compress/lzma.py +31 -0
  18. omlish/io/compress/types.py +29 -0
  19. omlish/io/generators.py +50 -0
  20. omlish/lang/__init__.py +8 -1
  21. omlish/lang/generators.py +182 -0
  22. omlish/lang/iterables.py +28 -51
  23. omlish/lang/maybes.py +4 -4
  24. omlish/lite/fdio/corohttp.py +5 -1
  25. omlish/lite/marshal.py +9 -6
  26. omlish/marshal/base.py +1 -1
  27. omlish/marshal/factories.py +1 -1
  28. omlish/marshal/forbidden.py +1 -1
  29. omlish/marshal/iterables.py +1 -1
  30. omlish/marshal/mappings.py +1 -1
  31. omlish/marshal/maybes.py +1 -1
  32. omlish/marshal/standard.py +1 -1
  33. omlish/marshal/unions.py +1 -1
  34. omlish/secrets/pwhash.py +1 -1
  35. omlish/secrets/subprocesses.py +3 -1
  36. omlish/specs/jsonrpc/marshal.py +1 -1
  37. omlish/specs/openapi/marshal.py +1 -1
  38. {omlish-0.0.0.dev134.dist-info → omlish-0.0.0.dev137.dist-info}/METADATA +1 -1
  39. {omlish-0.0.0.dev134.dist-info → omlish-0.0.0.dev137.dist-info}/RECORD +49 -47
  40. omlish/formats/json/cli/__main__.py +0 -11
  41. omlish/formats/json/cli/cli.py +0 -298
  42. omlish/formats/json/cli/formats.py +0 -71
  43. omlish/formats/json/cli/io.py +0 -74
  44. omlish/formats/json/cli/parsing.py +0 -82
  45. omlish/formats/json/cli/processing.py +0 -48
  46. omlish/formats/json/cli/rendering.py +0 -92
  47. /omlish/collections/{_abc.py → abc.py} +0 -0
  48. /omlish/{formats/json/cli → funcs}/__init__.py +0 -0
  49. /omlish/{fnpipes.py → funcs/pipes.py} +0 -0
  50. /omlish/io/{_abc.py → abc.py} +0 -0
  51. /omlish/logs/{_abc.py → abc.py} +0 -0
  52. /omlish/sql/{_abc.py → abc.py} +0 -0
  53. {omlish-0.0.0.dev134.dist-info → omlish-0.0.0.dev137.dist-info}/LICENSE +0 -0
  54. {omlish-0.0.0.dev134.dist-info → omlish-0.0.0.dev137.dist-info}/WHEEL +0 -0
  55. {omlish-0.0.0.dev134.dist-info → omlish-0.0.0.dev137.dist-info}/entry_points.txt +0 -0
  56. {omlish-0.0.0.dev134.dist-info → omlish-0.0.0.dev137.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,301 @@
1
+ # PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
2
+ # --------------------------------------------
3
+ #
4
+ # 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization
5
+ # ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated
6
+ # documentation.
7
+ #
8
+ # 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive,
9
+ # royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative
10
+ # works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License
11
+ # Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
12
+ # 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights Reserved" are retained in Python
13
+ # alone or in any derivative version prepared by Licensee.
14
+ #
15
+ # 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and
16
+ # wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in
17
+ # any such work a brief summary of the changes made to Python.
18
+ #
19
+ # 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES,
20
+ # EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY
21
+ # OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY
22
+ # RIGHTS.
23
+ #
24
+ # 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL
25
+ # DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF
26
+ # ADVISED OF THE POSSIBILITY THEREOF.
27
+ #
28
+ # 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
29
+ #
30
+ # 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint
31
+ # venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade
32
+ # name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
33
+ #
34
+ # 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
35
+ # License Agreement.
36
+ import functools
37
+ import os.path
38
+ import struct
39
+ import time
40
+ import typing as ta
41
+
42
+ from ... import cached
43
+ from ... import check
44
+ from ... import lang
45
+ from ..generators import PrependableBytesGeneratorReader
46
+ from .types import IncrementalCompressor
47
+ from .types import IncrementalDecompressor
48
+
49
+
50
+ if ta.TYPE_CHECKING:
51
+ import gzip
52
+ import zlib
53
+ else:
54
+ gzip = lang.proxy_import('gzip')
55
+ zlib = lang.proxy_import('zlib')
56
+
57
+
58
+ ##
59
+
60
+
61
# zlib compression levels, named following gzip's conventional speed/size trade-off points.
COMPRESS_LEVEL_FAST = 1
COMPRESS_LEVEL_TRADEOFF = 6
COMPRESS_LEVEL_BEST = 9
64
+
65
+
66
@cached.function
def _zero_crc() -> int:
    """Return the CRC-32 of the empty byte string (the CRC seed value), computed once and cached."""
    return zlib.crc32(b'')
69
+
70
+
71
+ ##
72
+
73
+
74
class IncrementalGzipCompressor:
    """
    Incrementally produces a gzip (RFC 1952) stream via a generator-based coroutine.

    Adapted from CPython's gzip module (see the PSF license header above). The generator returned by
    __call__ yields compressed output chunks as bytes, and yields None when it wants more input; the
    driver sends input bytes in response, with an empty bytes object signalling end-of-input.
    """

    def __init__(
            self,
            *,
            compresslevel: int = COMPRESS_LEVEL_BEST,
            name: str | bytes | None = None,
            mtime: float | None = None,
    ) -> None:
        """
        Args:
            compresslevel: zlib compression level (1-9); also determines the header XFL byte.
            name: original filename recorded in the gzip FNAME field (empty string if None).
            mtime: modification time recorded in the header; defaults to the current time when None.
        """
        super().__init__()

        self._name = name or ''
        self._compresslevel = compresslevel
        self._mtime = mtime

    def _write_gzip_header(self) -> ta.Generator[bytes, None, None]:
        """Yield the gzip member header bytes, field by field, per RFC 1952."""
        check.none((yield b'\037\213'))  # magic header
        check.none((yield b'\010'))  # compression method

        try:
            # RFC 1952 requires the FNAME field to be Latin-1. Do not include filenames that cannot be represented that
            # way.
            fname = os.path.basename(self._name)
            if not isinstance(fname, bytes):
                fname = fname.encode('latin-1')
            if fname.endswith(b'.gz'):
                fname = fname[:-3]
        except UnicodeEncodeError:
            fname = b''

        flags = 0
        if fname:
            flags = gzip.FNAME
        check.none((yield chr(flags).encode('latin-1')))

        # MTIME field: 4-byte little-endian seconds since epoch; fall back to "now" when unset.
        mtime = self._mtime
        if mtime is None:
            mtime = time.time()
        check.none((yield struct.pack('<L', int(mtime))))

        # XFL (extra flags) byte advertises the compression effort used.
        if self._compresslevel == COMPRESS_LEVEL_BEST:
            xfl = b'\002'
        elif self._compresslevel == COMPRESS_LEVEL_FAST:
            xfl = b'\004'
        else:
            xfl = b'\000'
        check.none((yield xfl))

        # OS byte: 0xff = "unknown".
        check.none((yield b'\377'))

        if fname:
            # FNAME: null-terminated Latin-1 filename.
            check.none((yield fname + b'\000'))

    def __call__(self) -> IncrementalCompressor:
        """
        Return the compressing coroutine.

        The coroutine yields header bytes, then alternates: yields None to request input, and yields
        compressed chunks as they become available. Sending b'' finishes the stream: the deflate data is
        flushed and the CRC-32 / size trailer is emitted, followed by a final b'' sentinel.
        """
        crc = _zero_crc()
        size = 0
        offset = 0  # Current file offset for seek(), tell(), etc

        # Raw deflate (negative wbits): gzip header/trailer are emitted manually above/below.
        compress = zlib.compressobj(
            self._compresslevel,
            zlib.DEFLATED,
            -zlib.MAX_WBITS,
            zlib.DEF_MEM_LEVEL,
            0,
        )

        yield from self._write_gzip_header()

        while True:
            data: ta.Any = check.isinstance((yield None), bytes)
            if not data:
                break

            # Called by our self._buffer underlying BufferedWriterDelegate.
            # NOTE(review): check.isinstance above already enforces bytes, so the memoryview fallback
            # below appears unreachable as written — retained verbatim from the gzip.py original.
            if isinstance(data, (bytes, bytearray)):
                length = len(data)
            else:
                # accept any data that supports the buffer protocol
                data = memoryview(data)
                length = data.nbytes

            if length > 0:
                if (fl := compress.compress(data)):
                    check.none((yield fl))
                size += length
                crc = zlib.crc32(data, crc)
                offset += length

        if (fl := compress.flush()):
            check.none((yield fl))

        # Trailer: CRC-32 then uncompressed size, both 4-byte little-endian.
        yield struct.pack('<L', crc)
        # size may exceed 2 GiB, or even 4 GiB
        yield struct.pack('<L', size & 0xffffffff)

        # Terminal sentinel: signals completion to the driver.
        yield b''
169
+
170
+
171
+ ##
172
+
173
+
174
class IncrementalGzipDecompressor:
    """
    Incrementally decompresses a (possibly multi-member) gzip stream via a generator-based coroutine.

    Adapted from CPython's gzip module (see the PSF license header above). The generator returned by
    __call__ yields decompressed chunks as bytes, and yields an int (exact byte count) or None (any
    amount) when it needs more input; the driver sends input bytes in response. A final b'' yield
    signals clean end-of-stream.
    """

    def __init__(self) -> None:
        super().__init__()

        # Raw deflate (negative wbits): the gzip header/trailer are parsed manually by this class.
        self._factory = functools.partial(
            zlib.decompressobj,
            wbits=-zlib.MAX_WBITS,
        )

    def _read_gzip_header(
            self,
            rdr: PrependableBytesGeneratorReader,
    ) -> ta.Generator[int | None, bytes, int | None]:
        """
        Parse one gzip member header from `rdr`.

        Returns:
            The header's MTIME field (an int, possibly 0), or None on clean EOF before a new member.

        Raises:
            gzip.BadGzipFile: on a bad magic number or unknown compression method.
        """
        magic = yield from rdr.read(2)
        if magic == b'':
            return None

        if magic != b'\037\213':
            raise gzip.BadGzipFile(f'Not a gzipped file ({magic!r})')

        buf = yield from rdr.read(8)
        method, flag, last_mtime = struct.unpack('<BBIxx', buf)
        if method != 8:
            raise gzip.BadGzipFile('Unknown compression method')

        if flag & gzip.FEXTRA:
            # Read & discard the extra field, if present
            buf = yield from rdr.read(2)
            extra_len, = struct.unpack('<H', buf)
            if extra_len:
                yield from rdr.read(extra_len)

        if flag & gzip.FNAME:
            # Read and discard a null-terminated string containing the filename
            while True:
                s = yield from rdr.read(1)
                if not s or s == b'\000':
                    break

        if flag & gzip.FCOMMENT:
            # Read and discard a null-terminated string containing a comment
            while True:
                s = yield from rdr.read(1)
                if not s or s == b'\000':
                    break

        if flag & gzip.FHCRC:
            yield from rdr.read(2)  # Read & discard the 16-bit header CRC

        return last_mtime

    def _read_eof(
            self,
            rdr: PrependableBytesGeneratorReader,
            crc: int,
            stream_size: int,
    ) -> ta.Generator[int | None, bytes, None]:
        """
        Validate the 8-byte member trailer against the running CRC and size, then skip zero padding.

        Raises:
            gzip.BadGzipFile: on CRC or length mismatch.
        """
        # We've read to the end of the file.
        # We check that the computed CRC and size of the uncompressed data matches the stored values. Note that the size
        # stored is the true file size mod 2**32.
        buf = yield from rdr.read(8)
        crc32, isize = struct.unpack('<II', buf)
        if crc32 != crc:
            raise gzip.BadGzipFile(f'CRC check failed {hex(crc32)} != {hex(crc)}')
        elif isize != (stream_size & 0xffffffff):
            raise gzip.BadGzipFile('Incorrect length of data produced')

        # Gzip files can be padded with zeroes and still have archives. Consume all zero bytes and set the file position
        # to the first non-zero byte. See http://www.gzip.org/#faq8
        c = b'\0'
        while c == b'\0':
            c = yield from rdr.read(1)
        if c:
            # Push the first non-zero byte back so the next member's header parse sees it.
            rdr.prepend(c)

    def __call__(self) -> IncrementalDecompressor:
        """
        Return the decompressing coroutine.

        Yields decompressed chunks (bytes) interleaved with input requests (int for 'exactly n bytes',
        None for 'any amount'); finishes by yielding b'' once the final member has been verified.

        Raises:
            EOFError: if the input ends before a member's end-of-stream marker.
            gzip.BadGzipFile: on malformed headers or trailer mismatches.
        """
        rdr = PrependableBytesGeneratorReader()

        pos = 0  # Current offset in decompressed stream

        crc = _zero_crc()
        stream_size = 0  # Decompressed size of unconcatenated stream
        new_member = True

        decompressor = self._factory()

        while True:
            # For certain input data, a single call to decompress() may not return any data. In this case, retry until
            # we get some data or reach EOF.
            while True:
                if decompressor.eof:
                    # Ending case: we've come to the end of a member in the file, so finish up this member, and read a
                    # new gzip header. Check the CRC and file size, and set the flag so we read a new member
                    yield from self._read_eof(rdr, crc, stream_size)
                    new_member = True
                    decompressor = self._factory()

                if new_member:
                    # If the _new_member flag is set, we have to jump to the next member, if there is one.
                    crc = _zero_crc()
                    stream_size = 0  # Decompressed size of unconcatenated stream
                    last_mtime = yield from self._read_gzip_header(rdr)
                    if last_mtime is None:
                        # Fix: must test `is None`, not truthiness — a valid header may carry an MTIME of
                        # 0 (e.g. files produced by `gzip -n`), which previously terminated decompression
                        # prematurely. Only an actual EOF (None) ends the stream.
                        check.none((yield b''))
                        return
                    new_member = False

                # Read a chunk of data from the file
                if not decompressor.unconsumed_tail:
                    buf = yield from rdr.read(None)
                    uncompress = decompressor.decompress(buf)
                else:
                    uncompress = decompressor.decompress(b'')

                if decompressor.unused_data != b'':
                    # Prepend the already read bytes to the fileobj so they can be seen by _read_eof() and
                    # _read_gzip_header()
                    rdr.prepend(decompressor.unused_data)

                if uncompress != b'':
                    break
                if buf == b'':  # noqa
                    raise EOFError('Compressed file ended before the end-of-stream marker was reached')

            crc = zlib.crc32(uncompress, crc)
            stream_size += len(uncompress)
            pos += len(uncompress)
            check.none((yield uncompress))
@@ -0,0 +1,31 @@
1
+ import typing as ta
2
+
3
+ from ... import lang
4
+ from .adapters import CompressorIncrementalAdapter
5
+ from .adapters import DecompressorIncrementalAdapter
6
+ from .types import IncrementalCompressor
7
+ from .types import IncrementalDecompressor
8
+
9
+
10
+ if ta.TYPE_CHECKING:
11
+ import lzma
12
+ else:
13
+ lzma = lang.proxy_import('lzma')
14
+
15
+
16
class IncrementalLzmaCompressor:
    """Incremental (generator-based) xz/lzma compressor, delegating to CompressorIncrementalAdapter."""

    def __init__(self) -> None:
        super().__init__()

    def __call__(self) -> IncrementalCompressor:
        adapter = CompressorIncrementalAdapter(
            lzma.LZMACompressor,  # type: ignore
        )
        return adapter()
24
+
25
+
26
class IncrementalLzmaDecompressor:
    """Incremental (generator-based) xz/lzma decompressor, delegating to DecompressorIncrementalAdapter."""

    def __call__(self) -> IncrementalDecompressor:
        adapter = DecompressorIncrementalAdapter(
            lzma.LZMADecompressor,  # type: ignore
            trailing_error=lzma.LZMAError,
        )
        return adapter()
@@ -0,0 +1,29 @@
1
+ # ruff: noqa: UP007
2
+ import typing as ta
3
+
4
+
5
# Protocol of a generator-based incremental compressor: the generator yields compressed chunks when it
# has output, or None when it needs more input; the driver sends input bytes in response (with b''
# conventionally signalling end-of-input), or None to request more output.
IncrementalCompressor: ta.TypeAlias = ta.Generator[
    ta.Union[
        bytes,  # Compressed output
        None,  # Need input
    ],
    ta.Union[
        bytes,  # Input bytes
        None,  # Need output
    ],
    None,
]


# Protocol of a generator-based incremental decompressor: like IncrementalCompressor, but the generator
# may additionally yield an int to request exactly that many input bytes (None still means 'any amount').
IncrementalDecompressor: ta.TypeAlias = ta.Generator[
    ta.Union[
        bytes,  # Uncompressed output
        int,  # Need exactly n bytes
        None,  # Need any amount of bytes
    ],
    ta.Union[
        bytes,  # Input bytes
        None,  # Need output
    ],
    None,
]
@@ -0,0 +1,50 @@
1
+ """
2
+ TODO:
3
+ - BufferedBytesGeneratorReader
4
+ """
5
+ import typing as ta
6
+
7
+ from .. import check
8
+
9
+
10
+ class PrependableBytesGeneratorReader:
11
+ def __init__(self) -> None:
12
+ super().__init__()
13
+
14
+ self._p: list[bytes] = []
15
+
16
+ def read(self, sz: int | None) -> ta.Generator[int | None, bytes, bytes]:
17
+ if not self._p:
18
+ d = check.isinstance((yield sz), bytes)
19
+ return d
20
+
21
+ if sz is None:
22
+ return self._p.pop(0)
23
+
24
+ l: list[bytes] = []
25
+ r = sz
26
+ while r > 0 and self._p:
27
+ c = self._p[0]
28
+
29
+ if len(c) > r:
30
+ l.append(c[:r])
31
+ self._p[0] = c[r:]
32
+ return b''.join(l)
33
+
34
+ l.append(c)
35
+ r -= len(c)
36
+ self._p.pop(0)
37
+
38
+ if r:
39
+ c = check.isinstance((yield r), bytes)
40
+ if not c:
41
+ return b''
42
+ if len(c) != r:
43
+ raise EOFError(f'Reader got {len(c)} bytes, expected {r}')
44
+ l.append(c)
45
+
46
+ return b''.join(l)
47
+
48
+ def prepend(self, d: bytes) -> None:
49
+ if d:
50
+ self._p.append(d)
omlish/lang/__init__.py CHANGED
@@ -120,6 +120,14 @@ from .functions import ( # noqa
120
120
  void,
121
121
  )
122
122
 
123
+ from .generators import ( # noqa
124
+ CoroutineGenerator,
125
+ Generator,
126
+ GeneratorLike,
127
+ corogen,
128
+ nextgen,
129
+ )
130
+
123
131
  from .imports import ( # noqa
124
132
  can_import,
125
133
  import_all,
@@ -136,7 +144,6 @@ from .imports import ( # noqa
136
144
 
137
145
  from .iterables import ( # noqa
138
146
  BUILTIN_SCALAR_ITERABLE_TYPES,
139
- Generator,
140
147
  asrange,
141
148
  exhaust,
142
149
  flatmap,
@@ -0,0 +1,182 @@
1
+ import abc
2
+ import typing as ta
3
+
4
+ from .maybes import Maybe
5
+
6
+
7
+ T = ta.TypeVar('T')
8
+ I = ta.TypeVar('I')
9
+ O = ta.TypeVar('O')
10
+ R = ta.TypeVar('R')
11
+ I_contra = ta.TypeVar('I_contra', contravariant=True)
12
+ O_co = ta.TypeVar('O_co', covariant=True)
13
+ R_co = ta.TypeVar('R_co', covariant=True)
14
+
15
+
16
+ ##
17
+
18
+
19
def nextgen(g: T) -> T:
    """Advance a generator one step (priming it) and hand the same generator back for chaining."""

    next(g)  # type: ignore
    return g
22
+
23
+
24
+ ##
25
+
26
+
27
@ta.runtime_checkable
class GeneratorLike(ta.Protocol[O_co, I_contra, R_co]):
    """Structural protocol for generator-like objects: anything exposing `send` and `close`."""

    def send(self, i: I_contra) -> O_co:  # Raises[StopIteration[R_co]]
        ...

    def close(self) -> None:
        ...
34
+
35
+
36
class GeneratorLike_(abc.ABC, ta.Generic[O, I, R]):  # noqa
    """Abstract-base-class counterpart to the GeneratorLike protocol, for explicit subclassing."""

    @abc.abstractmethod
    def send(self, i: I) -> O:  # Raises[StopIteration[R]]
        raise NotImplementedError

    def close(self) -> None:
        # Default close is a no-op; subclasses may override to release resources.
        pass
43
+
44
+
45
@ta.overload
def adapt_generator_like(gl: GeneratorLike_[O, I, R]) -> ta.Generator[O, I, R]:
    ...


@ta.overload
def adapt_generator_like(gl: GeneratorLike[O, I, R]) -> ta.Generator[O, I, R]:
    ...


def adapt_generator_like(gl):
    """
    Wrap a generator-like object (anything with `send`/`close`) as a true generator.

    The first value sent by the caller (after priming with next()/send(None)) is forwarded to `gl.send`;
    each subsequent send forwards its value likewise. When `gl.send` raises StopIteration, that
    exception's value becomes this generator's return value. `gl.close()` is invoked on every exit path.
    """
    try:
        # Bare `yield` primes the adapter: the driver's first real send supplies the first input.
        i = yield
        while True:
            i = yield gl.send(i)
    except StopIteration as e:
        return e.value
    finally:
        gl.close()
64
+
65
+
66
+ ##
67
+
68
+
69
class Generator(ta.Generator[O, I, R]):
    """
    A generator wrapper that captures the wrapped generator's return value.

    Once the underlying generator finishes, the value it returned (carried by StopIteration) is
    available as the `value` attribute.
    """

    def __init__(self, g: ta.Generator[O, I, R]) -> None:
        super().__init__()
        self._g = g

    @property
    def g(self) -> ta.Generator[O, I, R]:
        """The wrapped generator."""
        return self._g

    value: R  # set when the wrapped generator completes

    def __iter__(self):
        return self

    def _delegate(self, fn, *args):
        # Forward a call to the wrapped generator, recording any StopIteration's return value.
        try:
            return fn(*args)
        except StopIteration as e:
            self.value = e.value
            raise

    def __next__(self):
        return self._delegate(self._g.__next__)

    def send(self, v):
        return self._delegate(self._g.send, v)

    def throw(self, *args):
        return self._delegate(self._g.throw, *args)

    def close(self):
        self._g.close()
106
+
107
+
108
+ ##
109
+
110
+
111
class CoroutineGenerator(ta.Generic[O, I, R]):
    """
    A coroutine-style facade over a generator: `send`/`throw` return tagged Yield or Return objects
    instead of raising StopIteration, turning the 'finished' case into an ordinary value.
    """

    def __init__(self, g: ta.Generator[O, I, R]) -> None:
        super().__init__()
        self._g = g

    @property
    def g(self) -> ta.Generator[O, I, R]:
        """The wrapped generator."""
        return self._g

    #

    def close(self) -> None:
        self._g.close()

    def __enter__(self) -> ta.Self:
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Context-manager exit closes the wrapped generator; exceptions are not suppressed.
        self._g.close()

    #

    class Output(ta.NamedTuple, ta.Generic[T]):
        """Base for send/throw results: a single value `v`, tagged as yield or return via `is_return`."""

        v: T

        @property
        def is_return(self) -> bool:
            raise NotImplementedError

    class Yield(Output[T]):
        """The generator yielded `v`."""

        @property
        def is_return(self) -> bool:
            return False

    class Return(Output[T]):
        """The generator finished, returning `v`."""

        @property
        def is_return(self) -> bool:
            return True

    class Nothing:
        """Non-instantiable sentinel meaning 'no value to send' (advance with next() instead)."""

        def __new__(cls):
            raise TypeError

    #

    def send(self, /, v: I | type[Nothing] = Nothing) -> Yield[O] | Return[R]:
        """
        Send `v` into the generator, or advance it with next() when `v` is the Nothing sentinel.

        Returns Yield(output) if the generator yielded, or Return(value) if it finished.
        """
        try:
            if v is self.Nothing:
                o = next(self._g)
            else:
                o = self._g.send(v)  # type: ignore[arg-type]
        except StopIteration as e:
            return self.Return(e.value)
        else:
            return self.Yield(o)

    def send_opt(self, v: I | None) -> Yield[O] | Return[R]:
        # None is treated as 'nothing to send'.
        return self.send(v if v is not None else self.Nothing)

    def send_maybe(self, v: Maybe[I]) -> Yield[O] | Return[R]:
        # An absent Maybe is treated as 'nothing to send'.
        return self.send(v.or_else(self.Nothing))

    def throw(self, v: BaseException) -> Yield[O] | Return[R]:
        """Throw an exception into the generator; a resulting StopIteration becomes a Return."""
        try:
            o = self._g.throw(v)
        except StopIteration as e:
            return self.Return(e.value)
        else:
            return self.Yield(o)
180
+
181
+
182
corogen = CoroutineGenerator  # terse convenience alias for wrapping generators