omlish 0.0.0.dev137__py3-none-any.whl → 0.0.0.dev139__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
- __version__ = '0.0.0.dev137'
- __revision__ = '895417d0442f51853415386ace94b1c41da0c990'
+ __version__ = '0.0.0.dev139'
+ __revision__ = '943c8c72d1baaa0ef3088fcadb2933d4abc11f36'


  #
omlish/configs/flattening.py CHANGED
@@ -130,7 +130,7 @@ class Flattening:
  .split(self._index_close + self._index_open):
  yield int(p)
  else:
- check.state(')' not in part)
+ check.state(self._index_close not in part)
  yield part

  for fk, v in flattened.items():
omlish/formats/json/stream/errors.py ADDED
@@ -0,0 +1,2 @@
+ class JsonStreamError(Exception):
+     pass
omlish/formats/json/stream/lex.py CHANGED
@@ -3,6 +3,7 @@ TODO:
  - max buf size
  - max recursion depth
  - mark start pos of tokens, currently returning end
+ - _do_string inner loop optimization somehow
  """
  import dataclasses as dc
  import io
@@ -12,6 +13,7 @@ import typing as ta

  from .... import check
  from ....funcs.genmachine import GenMachine
+ from .errors import JsonStreamError


  ##
@@ -95,7 +97,7 @@ CONST_TOKENS: ta.Mapping[str, tuple[TokenKind, str | float | None]] = {


  @dc.dataclass()
- class JsonLexError(Exception):
+ class JsonStreamLexError(JsonStreamError):
  message: str

  pos: Position
@@ -160,8 +162,8 @@ class JsonStreamLexer(GenMachine[str, Token]):
  self._buf.truncate()
  return raw

- def _raise(self, msg: str) -> ta.NoReturn:
- raise JsonLexError(msg, self.pos)
+ def _raise(self, msg: str, src: Exception | None = None) -> ta.NoReturn:
+ raise JsonStreamLexError(msg, self.pos) from src

  def _do_main(self):
  while True:
@@ -202,7 +204,7 @@ class JsonStreamLexer(GenMachine[str, Token]):
  self._raise('Unexpected end of input')

  if not c:
- raise NotImplementedError
+ self._raise(f'Unterminated string literal: {self._buf.getvalue()}')

  self._buf.write(c)
  if c == '"' and last != '\\':
@@ -210,7 +212,11 @@ class JsonStreamLexer(GenMachine[str, Token]):
  last = c

  raw = self._flip_buf()
- sv = json.loads(raw)
+ try:
+     sv = json.loads(raw)
+ except json.JSONDecodeError as e:
+     self._raise(f'Invalid string literal: {raw!r}', e)
+
  yield self._make_tok('STRING', sv, raw, pos)

  return self._do_main()
omlish/formats/json/stream/parse.py CHANGED
@@ -1,9 +1,12 @@
+ import dataclasses as dc
  import typing as ta

  from .... import lang
  from ....funcs.genmachine import GenMachine
+ from .errors import JsonStreamError
  from .lex import SCALAR_VALUE_TYPES
  from .lex import VALUE_TOKEN_KINDS
+ from .lex import Position
  from .lex import ScalarValue
  from .lex import Token

@@ -79,6 +82,13 @@ def yield_parser_events(obj: ta.Any) -> ta.Generator[JsonStreamParserEvent, None
  ##


+ @dc.dataclass()
+ class JsonStreamParseError(JsonStreamError):
+     message: str
+
+     pos: Position | None = None
+
+
  class JsonStreamObject(list):
  def __repr__(self) -> str:
  return f'{self.__class__.__name__}({super().__repr__()})'
@@ -100,29 +110,29 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
  if tt == 'KEY':
  self._stack.pop()
  if not self._stack:
- raise self.StateError
+ raise JsonStreamParseError('Unexpected key')

  tt2 = self._stack[-1]
  if tt2 == 'OBJECT':
  return ((v,), self._do_after_pair())

  else:
- raise self.StateError
+ raise JsonStreamParseError('Unexpected key')

  elif tt == 'ARRAY':
  return ((v,), self._do_after_element())

  else:
- raise self.StateError
+ raise JsonStreamParseError(f'Unexpected value: {v!r}')

  #

- def _do_value(self):
+ def _do_value(self, *, must_be_present: bool = False):
  try:
  tok = yield None
  except GeneratorExit:
  if self._stack:
- raise self.StateError from None
+ raise JsonStreamParseError('Expected value') from None
  else:
  raise

@@ -141,13 +151,16 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
  yield y
  return r

+ elif must_be_present:
+     raise JsonStreamParseError('Expected value', tok.pos)
+
  elif tok.kind == 'RBRACKET':
  y, r = self._emit_end_array()
  yield y
  return r

  else:
- raise self.StateError
+ raise JsonStreamParseError('Expected value', tok.pos)

  #

@@ -157,19 +170,19 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):

  def _emit_end_object(self):
  if not self._stack:
- raise self.StateError
+ raise JsonStreamParseError('Unexpected end object')

  tt = self._stack.pop()
  if tt != 'OBJECT':
- raise self.StateError
+ raise JsonStreamParseError('Unexpected end object')

  return self._emit_event(EndObject)

- def _do_object_body(self):
+ def _do_object_body(self, *, must_be_present: bool = False):
  try:
  tok = yield None
  except GeneratorExit:
- raise self.StateError from None
+ raise JsonStreamParseError('Expected object body') from None

  if tok.kind == 'STRING':
  k = tok.value
@@ -177,30 +190,33 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
  try:
  tok = yield None
  except GeneratorExit:
- raise self.StateError from None
+ raise JsonStreamParseError('Expected key') from None
  if tok.kind != 'COLON':
- raise self.StateError
+ raise JsonStreamParseError('Expected colon', tok.pos)

  yield (Key(k),)
  self._stack.append('KEY')
  return self._do_value()

+ elif must_be_present:
+     raise JsonStreamParseError('Expected value', tok.pos)
+
  elif tok.kind == 'RBRACE':
  y, r = self._emit_end_object()
  yield y
  return r

  else:
- raise self.StateError
+ raise JsonStreamParseError('Expected value', tok.pos)

  def _do_after_pair(self):
  try:
  tok = yield None
  except GeneratorExit:
- raise self.StateError from None
+ raise JsonStreamParseError('Expected continuation') from None

  if tok.kind == 'COMMA':
- return self._do_object_body()
+ return self._do_object_body(must_be_present=True)

  elif tok.kind == 'RBRACE':
  y, r = self._emit_end_object()
@@ -208,7 +224,7 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
  return r

  else:
- raise self.StateError
+ raise JsonStreamParseError('Expected continuation', tok.pos)

  #

@@ -218,11 +234,11 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):

  def _emit_end_array(self):
  if not self._stack:
- raise self.StateError
+ raise JsonStreamParseError('Expected end array')

  tt = self._stack.pop()
  if tt != 'ARRAY':
- raise self.StateError
+ raise JsonStreamParseError('Unexpected end array')

  return self._emit_event(EndArray)

@@ -230,10 +246,10 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
  try:
  tok = yield None
  except GeneratorExit:
- raise self.StateError from None
+ raise JsonStreamParseError('Expected continuation') from None

  if tok.kind == 'COMMA':
- return self._do_value()
+ return self._do_value(must_be_present=True)

  elif tok.kind == 'RBRACKET':
  y, r = self._emit_end_array()
@@ -241,4 +257,4 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
  return r

  else:
- raise self.StateError
+ raise JsonStreamParseError('Expected continuation', tok.pos)
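
Taken together, the lex and parse changes above mean malformed or truncated input now surfaces through one catchable hierarchy (JsonStreamError with JsonStreamLexError and JsonStreamParseError, each a dataclass carrying a message and, where known, a pos) instead of NotImplementedError or the machine's internal StateError. A minimal illustration of that hierarchy, using only the classes shown in this diff:

from omlish.formats.json.stream.errors import JsonStreamError
from omlish.formats.json.stream.parse import JsonStreamParseError

try:
    # Simulate what the parser now raises on bad input instead of StateError.
    raise JsonStreamParseError('Expected value')
except JsonStreamError as e:
    # Lexer and parser failures share the JsonStreamError base, so callers can
    # handle both uniformly; pos is optional and None here.
    print(f'invalid JSON stream: {e.message} (pos={e.pos})')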
omlish/funcs/genmachine.py CHANGED
@@ -67,8 +67,7 @@ class GenMachine(ta.Generic[I, O]):
  return self

  def __exit__(self, exc_type, exc_val, exc_tb):
- if exc_type is None:
- self.close()
+ self.close()

  #

omlish/io/compress/bz2.py CHANGED
@@ -24,6 +24,7 @@ class IncrementalBz2Compressor:

  self._compresslevel = compresslevel

+ @lang.autostart
  def __call__(self) -> IncrementalCompressor:
  return CompressorIncrementalAdapter(
  functools.partial(
omlish/io/compress/gzip.py CHANGED
@@ -42,7 +42,7 @@ import typing as ta
  from ... import cached
  from ... import check
  from ... import lang
- from ..generators import PrependableBytesGeneratorReader
+ from ..generators.readers import PrependableBytesGeneratorReader
  from .types import IncrementalCompressor
  from .types import IncrementalDecompressor

@@ -123,10 +123,12 @@ class IncrementalGzipCompressor:
  if fname:
  check.none((yield fname + b'\000'))

+ @lang.autostart
  def __call__(self) -> IncrementalCompressor:
  crc = _zero_crc()
  size = 0
  offset = 0 # Current file offset for seek(), tell(), etc
+ wrote_header = False

  compress = zlib.compressobj(
  self._compresslevel,
@@ -136,10 +138,13 @@ class IncrementalGzipCompressor:
  0,
  )

- yield from self._write_gzip_header()
-
  while True:
  data: ta.Any = check.isinstance((yield None), bytes)
+
+ if not wrote_header:
+     yield from self._write_gzip_header()
+     wrote_header = True
+
  if not data:
  break

omlish/io/compress/lzma.py CHANGED
@@ -17,6 +17,7 @@ class IncrementalLzmaCompressor:
  def __init__(self) -> None:
  super().__init__()

+ @lang.autostart
  def __call__(self) -> IncrementalCompressor:
  return CompressorIncrementalAdapter(
  lzma.LZMACompressor, # type: ignore
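
With @lang.autostart applied to the compressors' __call__ methods, the returned generators come pre-primed, and the gzip change above defers the header write until the first real chunk arrives. A rough usage sketch, assuming IncrementalGzipCompressor's constructor arguments all have defaults, combined with the joined_bytes_stepped_generator helper added later in this diff:

from omlish.io.compress.gzip import IncrementalGzipCompressor
from omlish.io.generators.stepped import joined_bytes_stepped_generator

# The autostarted compressor is already primed; no gzip header is emitted
# until the first chunk of real data is sent.
gz = joined_bytes_stepped_generator(IncrementalGzipCompressor()())

out = bytearray()
for chunk in (b'hello ', b'world', b''):  # a trailing b'' flushes the stream
    out += gz.send(chunk)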
omlish/io/generators/readers.py ADDED
@@ -0,0 +1,183 @@
+ """
+ TODO:
+  - BufferedBytesGeneratorReader
+  - docstrings
+  - memoryviews
+ """
+ import abc
+ import typing as ta
+
+ from ... import check
+
+
+ T = ta.TypeVar('T')
+ I = ta.TypeVar('I')
+ R = ta.TypeVar('R')
+ AnyT = ta.TypeVar('AnyT', bound=ta.Any)
+
+
+ ReaderGenerator: ta.TypeAlias = ta.Generator[int | None, I, R]
+ ExactReaderGenerator: ta.TypeAlias = ta.Generator[int, I, R]
+
+ BytesReaderGenerator: ta.TypeAlias = ReaderGenerator[bytes, R]
+ BytesExactReaderGenerator: ta.TypeAlias = ExactReaderGenerator[bytes, R]
+
+ StrReaderGenerator: ta.TypeAlias = ReaderGenerator[str, R]
+ StrExactReaderGenerator: ta.TypeAlias = ExactReaderGenerator[str, R]
+
+
+ ##
+
+
+ class _BytesJoiner:
+     def _join(self, lst: list[bytes]) -> bytes:
+         return b''.join(lst)
+
+
+ class _StrJoiner:
+     def _join(self, lst: list[str]) -> str:
+         return ''.join(lst)
+
+
+ ##
+
+
+ class GeneratorReader(abc.ABC, ta.Generic[T]):
+     @abc.abstractmethod
+     def read(self, sz: int | None) -> ta.Generator[int | None, T, T]:
+         raise NotImplementedError
+
+     def read_exact(self, sz: int) -> ta.Generator[int | None, T, T]:
+         d: ta.Any = yield from self.read(sz)
+         if len(d) != sz:
+             raise EOFError(f'GeneratorReader got {len(d)}, expected {sz}')
+         return d
+
+
+ ##
+
+
+ class PrependableGeneratorReader(GeneratorReader[AnyT]):
+     def __init__(self) -> None:
+         super().__init__()
+
+         self._queue: list[tuple[AnyT, int]] = []
+
+     @abc.abstractmethod
+     def _join(self, lst: list[AnyT]) -> AnyT:
+         raise NotImplementedError
+
+     def read(self, sz: int | None) -> ta.Generator[int | None, AnyT, AnyT]:
+         if not self._queue:
+             d: AnyT = check.not_none((yield sz))
+             return d
+
+         if sz is None:
+             return self._queue.pop(0)[0]
+
+         lst: list[AnyT] = []
+         rem = sz
+         while rem > 0 and self._queue:
+             c, p = self._queue[0]
+
+             if len(c) - p > rem:
+                 lst.append(c[p:p + rem])
+                 self._queue[0] = (c, p + rem)
+                 return self._join(lst)
+
+             lst.append(c[p:])
+             rem -= len(c) - p
+             self._queue.pop(0)
+
+         if rem:
+             d = check.not_none((yield rem))
+             if d:
+                 lst.append(d)  # type: ignore[unreachable]
+
+         if len(lst) == 1:
+             return lst[0]
+         else:
+             return self._join(lst)
+
+     def prepend(self, d: AnyT, p: int | None = None) -> None:
+         if d:
+             self._queue.insert(0, (d, p or 0))
+
+
+ class PrependableBytesGeneratorReader(
+     _BytesJoiner,
+     PrependableGeneratorReader[bytes],
+ ):
+     pass
+
+
+ class PrependableStrGeneratorReader(
+     _StrJoiner,
+     PrependableGeneratorReader[str],
+ ):
+     pass
+
+
+ prependable_bytes_generator_reader = PrependableBytesGeneratorReader
+ prependable_str_generator_reader = PrependableStrGeneratorReader
+
+
+ ##
+
+
+ class BufferedGeneratorReader(PrependableGeneratorReader[AnyT], abc.ABC):
+     DEFAULT_BUFFER_SIZE = 4 * 0x1000
+
+     def __init__(
+         self,
+         buffer_size: int = DEFAULT_BUFFER_SIZE,
+     ) -> None:
+         check.arg(buffer_size > 0)
+
+         super().__init__()
+
+         self._buffer_size = buffer_size
+
+     def read(self, sz: int | None) -> ta.Generator[int | None, AnyT, AnyT]:
+         g = super().read(sz)
+         i: ta.Any = None
+         while True:
+             try:
+                 q = g.send(i)
+             except StopIteration as e:
+                 return e.value
+
+             check.state(not self._queue)
+
+             if q is None:
+                 i = check.not_none((yield None))
+                 continue
+
+             r = max(q, self._buffer_size)
+             d: AnyT = check.not_none((yield r))
+             if len(d) < q:
+                 i = d
+                 continue
+
+             i = d[:q]
+             self.prepend(d, q)
+
+
+ class BufferedBytesGeneratorReader(
+     _BytesJoiner,
+     BufferedGeneratorReader[bytes],
+     PrependableGeneratorReader[bytes],
+ ):
+     pass
+
+
+ class BufferedStrGeneratorReader(
+     _StrJoiner,
+     BufferedGeneratorReader[str],
+     PrependableGeneratorReader[str],
+ ):
+     pass
+
+
+ buffered_bytes_generator_reader = BufferedBytesGeneratorReader
+ buffered_str_generator_reader = BufferedStrGeneratorReader
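
The readers above follow a pull protocol: read() is itself a generator that yields the amount of input it still needs (or None for "any amount") and receives data via send(), while prepend() pushes unconsumed data back onto an internal queue. A minimal sketch of driving PrependableBytesGeneratorReader by hand, using only the API shown above:

from omlish.io.generators.readers import PrependableBytesGeneratorReader

rdr = PrependableBytesGeneratorReader()
rdr.prepend(b'abc')  # e.g. overshoot from an earlier read, pushed back

g = rdr.read(5)
need = next(g)       # the queued b'abc' covers 3 of 5 bytes, so it asks for 2
try:
    g.send(b'de')    # satisfy the request; the read completes
except StopIteration as e:
    assert e.value == b'abcde'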
omlish/io/generators/stepped.py ADDED
@@ -0,0 +1,104 @@
+ import typing as ta
+
+ from ... import lang
+
+
+ T = ta.TypeVar('T')
+ I = ta.TypeVar('I')
+ O = ta.TypeVar('O')
+ OF = ta.TypeVar('OF')
+ OT = ta.TypeVar('OT')
+ R = ta.TypeVar('R')
+
+
+ SteppedGenerator: ta.TypeAlias = ta.Generator[O | None, I | None, R]
+
+
+ ##
+
+
+ @lang.autostart
+ def flatmap_stepped_generator(
+     fn: ta.Callable[[list[OF]], OT],
+     g: SteppedGenerator[OF, I, R],
+     *,
+     terminate: ta.Callable[[OF], bool] | None = None,
+ ) -> ta.Generator[OT, I, lang.Maybe[R]]:
+     """
+     Given a 'stepped generator' - a generator which accepts input items and yields zero or more non-None values in
+     response until it signals it's ready for the next input by yielding None - and a function taking a list, returns a
+     1:1 generator which accepts input, builds a list of yielded generator output, calls the given function with that
+     list, and yields the result.
+
+     An optional terminate function may be provided which will cause this function to return early if it returns true for
+     an encountered yielded value. The encountered value causing termination will be included in the list sent to the
+     given fn.
+
+     Returns a Maybe of either the given generator's return value or empty if the terminator was encountered.
+     """
+
+     l: list[OF]
+     i: I | None = yield  # type: ignore
+     while True:
+         l = []
+
+         while True:
+             try:
+                 o = g.send(i)
+             except StopIteration as e:
+                 if l:
+                     yield fn(l)
+                 return lang.just(e.value)
+
+             i = None
+
+             if o is None:
+                 break
+
+             l.append(o)
+
+             if terminate is not None and terminate(o):
+                 yield fn(l)
+                 return lang.empty()
+
+         i = yield fn(l)
+
+
+ ##
+
+
+ def _join_bytes(l: ta.Sequence[bytes]) -> bytes:
+     if not l:
+         return b''
+     elif len(l) == 1:
+         return l[0]
+     else:
+         return b''.join(l)
+
+
+ def _join_str(l: ta.Sequence[str]) -> str:
+     if not l:
+         return ''
+     elif len(l) == 1:
+         return l[0]
+     else:
+         return ''.join(l)
+
+
+ def _is_empty(o: T) -> bool:
+     return len(o) < 1  # type: ignore
+
+
+ ##
+
+
+ def joined_bytes_stepped_generator(
+     g: ta.Generator[bytes | None, bytes | None, R],
+ ) -> ta.Generator[bytes, bytes, R]:
+     return flatmap_stepped_generator(_join_bytes, g, terminate=_is_empty)
+
+
+ def joined_str_stepped_generator(
+     g: ta.Generator[str | None, str | None, R],
+ ) -> ta.Generator[str, str, R]:
+     return flatmap_stepped_generator(_join_str, g, terminate=_is_empty)
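
A "stepped generator" yields zero or more outputs per input and then yields None to ask for the next input; flatmap_stepped_generator turns that into a 1:1 send/receive generator. A small sketch using joined_bytes_stepped_generator with dup, a hypothetical stepped generator written only for illustration:

import typing as ta

from omlish import lang
from omlish.io.generators.stepped import joined_bytes_stepped_generator


@lang.autostart
def dup() -> ta.Generator[bytes | None, bytes | None, None]:
    # Hypothetical stepped generator: yields each input chunk twice, then
    # yields None to request the next input; an empty chunk ends the stream.
    while True:
        d = yield None
        if not d:
            yield b''
            return
        yield d
        yield d


g = joined_bytes_stepped_generator(dup())
print(g.send(b'ab'))  # b'abab'
print(g.send(b'cd'))  # b'cdcd'
print(g.send(b''))    # b'' - the empty output terminates the joined generator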
omlish/lang/__init__.py CHANGED
@@ -124,7 +124,10 @@ from .generators import ( # noqa
  CoroutineGenerator,
  Generator,
  GeneratorLike,
+ GeneratorMappedIterator,
+ autostart,
  corogen,
+ genmap,
  nextgen,
  )

omlish/lang/functions.py CHANGED
@@ -82,7 +82,6 @@ def identity(obj: T) -> T:


  class constant(ta.Generic[T]): # noqa
-
  def __init__(self, obj: T) -> None:
  super().__init__()

@@ -116,7 +115,6 @@ class VoidError(Exception):


  class Void:
-
  def __new__(cls, *args: ta.Any, **kwargs: ta.Any) -> None: # type: ignore # noqa
  raise VoidError

omlish/lang/generators.py CHANGED
@@ -1,7 +1,10 @@
  import abc
+ import functools
  import typing as ta

  from .maybes import Maybe
+ from .maybes import empty
+ from .maybes import just


  T = ta.TypeVar('T')
@@ -21,6 +24,16 @@ def nextgen(g: T) -> T:
  return g


+ def autostart(fn):
+     @functools.wraps(fn)
+     def inner(*args, **kwargs):
+         g = fn(*args, **kwargs)
+         if (o := next(g)) is not None:
+             raise TypeError(o)
+         return g
+     return inner
+
+
  ##


@@ -180,3 +193,51 @@ class CoroutineGenerator(ta.Generic[O, I, R]):


  corogen = CoroutineGenerator
+
+
+ ##
+
+
+ class GeneratorMappedIterator(ta.Generic[O, I, R]):
+     """
+     Like a `map` iterator but takes a generator instead of a function. Provided generator *must* yield outputs 1:1 with
+     inputs.
+
+     Generator return value will be captured on `value` property - if present generator stopped, it absent iterator
+     stopped.
+     """
+
+     def __init__(self, g: ta.Generator[O, I, R], it: ta.Iterator[I]) -> None:
+         super().__init__()
+
+         self._g = g
+         self._it = it
+         self._value: Maybe[R] = empty()
+
+     @property
+     def g(self) -> ta.Generator[O, I, R]:
+         return self._g
+
+     @property
+     def it(self) -> ta.Iterator[I]:
+         return self._it
+
+     @property
+     def value(self) -> Maybe[R]:
+         return self._value
+
+     def __iter__(self) -> ta.Iterator[O]:
+         return self
+
+     def __next__(self) -> O:
+         i = next(self._it)
+         try:
+             o = self._g.send(i)
+         except StopIteration as e:
+             self._value = just(e.value)
+             raise StopIteration from e
+         return o
+
+
+ def genmap(g: ta.Generator[O, I, R], it: ta.Iterable[I]) -> GeneratorMappedIterator[O, I, R]:
+     return GeneratorMappedIterator(g, iter(it))
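
A short sketch of the two newly exported helpers: autostart primes a generator on construction (its first yield must produce None), and genmap drives a primed generator over an iterable 1:1, like map(). The scaled generator below is a hypothetical example, not part of the package:

import typing as ta

from omlish import lang


@lang.autostart
def scaled(factor: int) -> ta.Generator[int | None, int, None]:
    # First yield produces None (required by autostart); afterwards each sent
    # value is mapped 1:1 to an output.
    o: int | None = None
    while True:
        i = yield o
        o = i * factor


g = scaled(10)
print(g.send(3))  # 30 - no manual next() needed, the generator is pre-primed

print(list(lang.genmap(scaled(3), [1, 2, 3])))  # [3, 6, 9]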
omlish-0.0.0.dev139.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: omlish
- Version: 0.0.0.dev137
+ Version: 0.0.0.dev139
  Summary: omlish
  Author: wrmsr
  License: BSD-3-Clause
omlish-0.0.0.dev139.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
  omlish/.manifests.json,sha256=RX24SRc6DCEg77PUVnaXOKCWa5TF_c9RQJdGIf7gl9c,1135
- omlish/__about__.py,sha256=keq7273S1vADjELx7wkIYWHvdZy7TM3pyrMqOHXwNYg,3379
+ omlish/__about__.py,sha256=Hl1Jnwstv8jPOdEO_LJglXfJ8ChFupW5ZTF3kYkK_LQ,3379
  omlish/__init__.py,sha256=SsyiITTuK0v74XpKV8dqNaCmjOlan1JZKrHQv5rWKPA,253
  omlish/argparse.py,sha256=cqKGAqcxuxv_s62z0gq29L9KAvg_3-_rFvXKjVpRJjo,8126
  omlish/c3.py,sha256=ubu7lHwss5V4UznbejAI0qXhXahrU01MysuHOZI9C4U,8116
@@ -118,7 +118,7 @@ omlish/concurrent/futures.py,sha256=J2s9wYURUskqRJiBbAR0PNEAp1pXbIMYldOVBTQduQY,
  omlish/concurrent/threadlets.py,sha256=JfirbTDJgy9Ouokz_VmHeAAPS7cih8qMUJrN-owwXD4,2423
  omlish/configs/__init__.py,sha256=3uh09ezodTwkMI0nRmAMP0eEuJ_0VdF-LYyNmPjHiCE,77
  omlish/configs/classes.py,sha256=GLbB8xKjHjjoUQRCUQm3nEjM8z1qNTx9gPV7ODSt5dg,1317
- omlish/configs/flattening.py,sha256=AOlRpBHm449MxwMp3CiIRGunStOC1DUNs1f3CLou0wc,4731
+ omlish/configs/flattening.py,sha256=rVxoTqgM9te86hUwQsHJ5u94jo2PARNfhk_HkrrDT9Y,4745
  omlish/configs/strings.py,sha256=0brx1duL85r1GpfbNvbHcSvH4jWzutwuvMFXda9NeI0,2651
  omlish/dataclasses/__init__.py,sha256=AHo-tN5V_b_VYFUF7VFRmuHrjZBXS1WytRAj061MUTA,1423
  omlish/dataclasses/utils.py,sha256=lcikCPiiX5Giu0Kb1hP18loZjmm_Z9D-XtJ-ZlHq9iM,3793
@@ -194,11 +194,12 @@ omlish/formats/json/backends/std.py,sha256=PM00Kh9ZR2XzollHMEvdo35Eml1N-zFfRW-LO
  omlish/formats/json/backends/ujson.py,sha256=BNJCU4kluGHdqTUKLJEuHhE2m2TmqR7HEN289S0Eokg,2278
  omlish/formats/json/stream/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  omlish/formats/json/stream/build.py,sha256=MSxgreWSfI5CzNAdgQrArZ0yWqDsaHl-shI_jmjLDms,2505
- omlish/formats/json/stream/lex.py,sha256=_JYBFnAyHsw_3hu8I0rvZqSSkRCU1BvQzgO81KfqRBg,6489
- omlish/formats/json/stream/parse.py,sha256=s21PgiuNTcqc_i9QS1ggmEp8Qwp_hOqtosr5d0zpg_o,5204
+ omlish/formats/json/stream/errors.py,sha256=c8M8UAYmIZ-vWZLeKD2jMj4EDCJbr9QR8Jq_DyHjujQ,43
+ omlish/formats/json/stream/lex.py,sha256=bfy0fb3_Z6G18UGueX2DR6oPSVUsMoFhlbsvXC3ztzI,6793
+ omlish/formats/json/stream/parse.py,sha256=JuYmXwtTHmQJTFKoJNoEHUpCPxXdl_gvKPykVXgED34,6208
  omlish/formats/json/stream/render.py,sha256=NtmDsN92xZi5dkgSSuMeMXMAiJblmjz1arB4Ft7vBhc,3715
  omlish/funcs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- omlish/funcs/genmachine.py,sha256=XEHy8SFgHCDYSAqlRm-7wlYQX2h6UWehR2_uMw9EOXU,2509
+ omlish/funcs/genmachine.py,sha256=EY2k-IFNKMEmHo9CmGRptFvhEMZVOWzZhn0wdQGeaFM,2476
  omlish/funcs/match.py,sha256=gMLZn1enNiFvQaWrQubY300M1BrmdKWzeePihBS7Ywc,6153
  omlish/funcs/pairs.py,sha256=OzAwnALkRJXVpD47UvBZHKzQfHtFNry_EgjTcC7vgLU,10606
  omlish/funcs/pipes.py,sha256=E7Sz8Aj8ke_vCs5AMNwg1I36kRdHVGTnzxVQaDyn43U,2490
@@ -257,17 +258,19 @@ omlish/inject/impl/proxy.py,sha256=1ko0VaKqzu9UG8bIldp9xtUrAVUOFTKWKTjOCqIGr4s,1
  omlish/inject/impl/scopes.py,sha256=hKnzNieB-fJSFEXDP_QG1mCfIKoVFIfFlf9LiIt5tk4,5920
  omlish/io/__init__.py,sha256=aaIEsXTSfytW-oEkUWczdUJ_ifFY7ihIpyidIbfjkwY,56
  omlish/io/abc.py,sha256=Cxs8KB1B_69rxpUYxI-MTsilAmNooJJn3w07DKqYKkE,1255
- omlish/io/generators.py,sha256=ZlAp_t0ZD_aKztlio1i_hezmpIFFjaiXtrnY6-2QsPs,1123
  omlish/io/pyio.py,sha256=YB3g6yg64MzcFwbzKBo4adnbsbZ3FZMlOZfjNtWmYoc,95316
  omlish/io/trampoline.py,sha256=oUKTQg1F5xQS1431Kt7MbK-NZpX509ubcXU-s86xJr8,7171
  omlish/io/compress/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  omlish/io/compress/abc.py,sha256=R9ebpSjJK4VAimV3OevPJB-jSDTGB_xi2FKNZKbTdYE,3054
  omlish/io/compress/adapters.py,sha256=wS7cA_quham3C23j3_H6sf2EQ4gI0vURTQdPhapiiFE,6088
- omlish/io/compress/bz2.py,sha256=XxpAKdQ5pdWfa23a0F6ZspU_GxHhyJVVPjv2SCyw4hM,1006
- omlish/io/compress/gzip.py,sha256=TUjbE5cjiHXd2ZMsRgXBlSW3mCF7xMt_NvIRTYddBCQ,11054
- omlish/io/compress/lzma.py,sha256=rM6FXWeD6s-K-Sfxn04AaFILS-v4rliWKlPnAl_RrJw,803
+ omlish/io/compress/bz2.py,sha256=BX-xWrpYe5K9vJ28iB2wCNPtd6PW2RgCh3h6XfE4FtA,1026
+ omlish/io/compress/gzip.py,sha256=Vs8O3l1Sf50iAvBSQueMLEDLDbmh6FqPdvdzwwaDy3o,11189
+ omlish/io/compress/lzma.py,sha256=_fFNWY1R6z71zRnrejhpKs7DVCHn_Ei9ny_fxHvUwJo,823
  omlish/io/compress/types.py,sha256=IuCyxFX8v12fGqCq2ofCCRM5ZM-4zngHeeBW_PWqYbM,557
- omlish/lang/__init__.py,sha256=U6-WtzQL48e9wqWHmxuA1X2PoGlx6Di2HSAi13gTwjw,3866
+ omlish/io/generators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ omlish/io/generators/readers.py,sha256=nv6inmyJmYCJ2YFE2cv6jkVmoxGsbXzyuIki-MCjZDg,4195
+ omlish/io/generators/stepped.py,sha256=AYZmtZF1p95JW17XKe3ZIXWwWRuYmmbDLVuhB5x8Dog,2571
+ omlish/lang/__init__.py,sha256=lQ-w1MgrMtuyVNWtqODI0qNZ71SZPfvf-Lkn2LUMTec,3922
  omlish/lang/cached.py,sha256=92TvRZQ6sWlm7dNn4hgl7aWKbX0J1XUEo3DRjBpgVQk,7834
  omlish/lang/clsdct.py,sha256=AjtIWLlx2E6D5rC97zQ3Lwq2SOMkbg08pdO_AxpzEHI,1744
  omlish/lang/cmp.py,sha256=5vbzWWbqdzDmNKAGL19z6ZfUKe5Ci49e-Oegf9f4BsE,1346
@@ -275,8 +278,8 @@ omlish/lang/contextmanagers.py,sha256=NEwaTLQMfhKawD5x_0HgI2RpeLXbMa5r9NqWqfDnUX
  omlish/lang/datetimes.py,sha256=ehI_DhQRM-bDxAavnp470XcekbbXc4Gdw9y1KpHDJT0,223
  omlish/lang/descriptors.py,sha256=RRBbkMgTzg82fFFE4D0muqobpM-ZZaOta6yB1lpX3s8,6617
  omlish/lang/exceptions.py,sha256=qJBo3NU1mOWWm-NhQUHCY5feYXR3arZVyEHinLsmRH4,47
- omlish/lang/functions.py,sha256=kkPfcdocg-OmyN7skIqrFxNvqAv89Zc_kXKYAN8vw8g,3895
- omlish/lang/generators.py,sha256=AShh0x-9Z9qolAYEOZJgYJcxQuyA3HKq0c9tLwNcFs4,3766
+ omlish/lang/functions.py,sha256=tUqeqBNHtJtrwimbG6Kc1SjZQDDhqqC1o-8ANpXWn9E,3893
+ omlish/lang/generators.py,sha256=5LX17j-Ej3QXhwBgZvRTm_dq3n9veC4IOUcVmvSu2vU,5243
  omlish/lang/imports.py,sha256=TXLbj2F53LsmozlM05bQhvow9kEgWJOi9qYKsnm2D18,9258
  omlish/lang/iterables.py,sha256=1bc-Vn-b34T6Gy3li2tMNYpUvuwCC7fjg7dpjXkTfWY,1746
  omlish/lang/maybes.py,sha256=1RN7chX_x2XvgUwryZRz0W7hAX-be3eEFcFub5vvf6M,3417
@@ -489,9 +492,9 @@ omlish/text/glyphsplit.py,sha256=Ug-dPRO7x-OrNNr8g1y6DotSZ2KH0S-VcOmUobwa4B0,329
  omlish/text/indent.py,sha256=6Jj6TFY9unaPa4xPzrnZemJ-fHsV53IamP93XGjSUHs,1274
  omlish/text/parts.py,sha256=7vPF1aTZdvLVYJ4EwBZVzRSy8XB3YqPd7JwEnNGGAOo,6495
  omlish/text/random.py,sha256=jNWpqiaKjKyTdMXC-pWAsSC10AAP-cmRRPVhm59ZWLk,194
- omlish-0.0.0.dev137.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
- omlish-0.0.0.dev137.dist-info/METADATA,sha256=iU_JPwXqYl0APiFu32-g6dTXny6kbXZWslNaWevwHEs,4173
- omlish-0.0.0.dev137.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- omlish-0.0.0.dev137.dist-info/entry_points.txt,sha256=Lt84WvRZJskWCAS7xnQGZIeVWksprtUHj0llrvVmod8,35
- omlish-0.0.0.dev137.dist-info/top_level.txt,sha256=pePsKdLu7DvtUiecdYXJ78iO80uDNmBlqe-8hOzOmfs,7
- omlish-0.0.0.dev137.dist-info/RECORD,,
+ omlish-0.0.0.dev139.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+ omlish-0.0.0.dev139.dist-info/METADATA,sha256=Eje9SZhVJVmqyDQqodtffHM__nJNBuEZZPhVNuhqel0,4173
+ omlish-0.0.0.dev139.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ omlish-0.0.0.dev139.dist-info/entry_points.txt,sha256=Lt84WvRZJskWCAS7xnQGZIeVWksprtUHj0llrvVmod8,35
+ omlish-0.0.0.dev139.dist-info/top_level.txt,sha256=pePsKdLu7DvtUiecdYXJ78iO80uDNmBlqe-8hOzOmfs,7
+ omlish-0.0.0.dev139.dist-info/RECORD,,
omlish/io/generators.py DELETED
@@ -1,50 +0,0 @@
- """
- TODO:
-  - BufferedBytesGeneratorReader
- """
- import typing as ta
-
- from .. import check
-
-
- class PrependableBytesGeneratorReader:
-     def __init__(self) -> None:
-         super().__init__()
-
-         self._p: list[bytes] = []
-
-     def read(self, sz: int | None) -> ta.Generator[int | None, bytes, bytes]:
-         if not self._p:
-             d = check.isinstance((yield sz), bytes)
-             return d
-
-         if sz is None:
-             return self._p.pop(0)
-
-         l: list[bytes] = []
-         r = sz
-         while r > 0 and self._p:
-             c = self._p[0]
-
-             if len(c) > r:
-                 l.append(c[:r])
-                 self._p[0] = c[r:]
-                 return b''.join(l)
-
-             l.append(c)
-             r -= len(c)
-             self._p.pop(0)
-
-         if r:
-             c = check.isinstance((yield r), bytes)
-             if not c:
-                 return b''
-             if len(c) != r:
-                 raise EOFError(f'Reader got {len(c)} bytes, expected {r}')
-             l.append(c)
-
-         return b''.join(l)
-
-     def prepend(self, d: bytes) -> None:
-         if d:
-             self._p.append(d)