omlish 0.0.0.dev133__py3-none-any.whl → 0.0.0.dev177__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omlish/.manifests.json +265 -7
- omlish/__about__.py +5 -3
- omlish/antlr/_runtime/__init__.py +0 -22
- omlish/antlr/_runtime/_all.py +24 -0
- omlish/antlr/_runtime/atn/ParserATNSimulator.py +1 -1
- omlish/antlr/_runtime/dfa/DFASerializer.py +1 -1
- omlish/antlr/_runtime/error/DiagnosticErrorListener.py +2 -1
- omlish/antlr/_runtime/xpath/XPath.py +7 -1
- omlish/antlr/_runtime/xpath/XPathLexer.py +1 -1
- omlish/antlr/delimit.py +106 -0
- omlish/antlr/dot.py +31 -0
- omlish/antlr/errors.py +11 -0
- omlish/antlr/input.py +96 -0
- omlish/antlr/parsing.py +19 -0
- omlish/antlr/runtime.py +102 -0
- omlish/antlr/utils.py +38 -0
- omlish/argparse/all.py +45 -0
- omlish/{argparse.py → argparse/cli.py} +112 -107
- omlish/asyncs/__init__.py +0 -35
- omlish/asyncs/all.py +35 -0
- omlish/asyncs/asyncio/all.py +7 -0
- omlish/asyncs/asyncio/channels.py +40 -0
- omlish/asyncs/asyncio/streams.py +45 -0
- omlish/asyncs/asyncio/subprocesses.py +238 -0
- omlish/asyncs/asyncio/timeouts.py +16 -0
- omlish/asyncs/bluelet/LICENSE +6 -0
- omlish/asyncs/bluelet/all.py +67 -0
- omlish/asyncs/bluelet/api.py +23 -0
- omlish/asyncs/bluelet/core.py +178 -0
- omlish/asyncs/bluelet/events.py +78 -0
- omlish/asyncs/bluelet/files.py +80 -0
- omlish/asyncs/bluelet/runner.py +416 -0
- omlish/asyncs/bluelet/sockets.py +214 -0
- omlish/bootstrap/sys.py +3 -3
- omlish/cached.py +2 -2
- omlish/check.py +49 -460
- omlish/codecs/__init__.py +72 -0
- omlish/codecs/base.py +106 -0
- omlish/codecs/bytes.py +119 -0
- omlish/codecs/chain.py +23 -0
- omlish/codecs/funcs.py +39 -0
- omlish/codecs/registry.py +139 -0
- omlish/codecs/standard.py +4 -0
- omlish/codecs/text.py +217 -0
- omlish/collections/cache/impl.py +50 -57
- omlish/collections/coerce.py +1 -0
- omlish/collections/mappings.py +1 -1
- omlish/configs/flattening.py +1 -1
- omlish/defs.py +1 -1
- omlish/diag/_pycharm/runhack.py +8 -2
- omlish/diag/procfs.py +8 -8
- omlish/docker/__init__.py +0 -36
- omlish/docker/all.py +31 -0
- omlish/docker/consts.py +4 -0
- omlish/{lite/docker.py → docker/detect.py} +18 -0
- omlish/docker/{helpers.py → timebomb.py} +0 -21
- omlish/formats/cbor.py +31 -0
- omlish/formats/cloudpickle.py +31 -0
- omlish/formats/codecs.py +93 -0
- omlish/formats/json/codecs.py +29 -0
- omlish/formats/json/delimted.py +4 -0
- omlish/formats/json/stream/errors.py +2 -0
- omlish/formats/json/stream/lex.py +12 -6
- omlish/formats/json/stream/parse.py +38 -22
- omlish/formats/json5.py +31 -0
- omlish/formats/pickle.py +31 -0
- omlish/formats/repr.py +25 -0
- omlish/formats/toml.py +17 -0
- omlish/formats/yaml.py +25 -0
- omlish/funcs/__init__.py +0 -0
- omlish/{genmachine.py → funcs/genmachine.py} +5 -4
- omlish/{matchfns.py → funcs/match.py} +1 -1
- omlish/funcs/pairs.py +215 -0
- omlish/http/__init__.py +0 -48
- omlish/http/all.py +48 -0
- omlish/http/coro/__init__.py +0 -0
- omlish/{lite/fdio/corohttp.py → http/coro/fdio.py} +21 -19
- omlish/{lite/http/coroserver.py → http/coro/server.py} +20 -21
- omlish/{lite/http → http}/handlers.py +3 -2
- omlish/{lite/http → http}/parsing.py +1 -0
- omlish/http/sessions.py +1 -1
- omlish/{lite/http → http}/versions.py +1 -0
- omlish/inject/managed.py +2 -2
- omlish/io/__init__.py +0 -3
- omlish/{lite/io.py → io/buffers.py} +8 -9
- omlish/io/compress/__init__.py +9 -0
- omlish/io/compress/abc.py +104 -0
- omlish/io/compress/adapters.py +148 -0
- omlish/io/compress/base.py +24 -0
- omlish/io/compress/brotli.py +47 -0
- omlish/io/compress/bz2.py +61 -0
- omlish/io/compress/codecs.py +78 -0
- omlish/io/compress/gzip.py +350 -0
- omlish/io/compress/lz4.py +91 -0
- omlish/io/compress/lzma.py +81 -0
- omlish/io/compress/snappy.py +34 -0
- omlish/io/compress/zlib.py +74 -0
- omlish/io/compress/zstd.py +44 -0
- omlish/io/fdio/__init__.py +1 -0
- omlish/{lite → io}/fdio/handlers.py +5 -5
- omlish/{lite → io}/fdio/kqueue.py +8 -8
- omlish/{lite → io}/fdio/manager.py +7 -7
- omlish/{lite → io}/fdio/pollers.py +13 -13
- omlish/io/generators/__init__.py +56 -0
- omlish/io/generators/consts.py +1 -0
- omlish/io/generators/direct.py +13 -0
- omlish/io/generators/readers.py +189 -0
- omlish/io/generators/stepped.py +191 -0
- omlish/io/pyio.py +5 -2
- omlish/iterators/__init__.py +24 -0
- omlish/iterators/iterators.py +132 -0
- omlish/iterators/recipes.py +18 -0
- omlish/iterators/tools.py +96 -0
- omlish/iterators/unique.py +67 -0
- omlish/lang/__init__.py +13 -1
- omlish/lang/functions.py +11 -2
- omlish/lang/generators.py +243 -0
- omlish/lang/iterables.py +46 -49
- omlish/lang/maybes.py +4 -4
- omlish/lite/cached.py +39 -6
- omlish/lite/check.py +438 -75
- omlish/lite/contextmanagers.py +17 -4
- omlish/lite/dataclasses.py +42 -0
- omlish/lite/inject.py +28 -45
- omlish/lite/logs.py +0 -270
- omlish/lite/marshal.py +309 -144
- omlish/lite/pycharm.py +47 -0
- omlish/lite/reflect.py +33 -0
- omlish/lite/resources.py +8 -0
- omlish/lite/runtime.py +4 -4
- omlish/lite/shlex.py +12 -0
- omlish/lite/socketserver.py +2 -2
- omlish/lite/strings.py +31 -0
- omlish/logs/__init__.py +0 -32
- omlish/logs/{_abc.py → abc.py} +0 -1
- omlish/logs/all.py +37 -0
- omlish/logs/{formatters.py → color.py} +1 -2
- omlish/logs/configs.py +7 -38
- omlish/logs/filters.py +10 -0
- omlish/logs/handlers.py +4 -1
- omlish/logs/json.py +56 -0
- omlish/logs/proxy.py +99 -0
- omlish/logs/standard.py +128 -0
- omlish/logs/utils.py +2 -2
- omlish/manifests/__init__.py +2 -0
- omlish/manifests/load.py +209 -0
- omlish/manifests/types.py +17 -0
- omlish/marshal/base.py +1 -1
- omlish/marshal/factories.py +1 -1
- omlish/marshal/forbidden.py +1 -1
- omlish/marshal/iterables.py +1 -1
- omlish/marshal/literals.py +50 -0
- omlish/marshal/mappings.py +1 -1
- omlish/marshal/maybes.py +1 -1
- omlish/marshal/standard.py +5 -1
- omlish/marshal/unions.py +1 -1
- omlish/os/__init__.py +0 -0
- omlish/os/atomics.py +205 -0
- omlish/os/deathsig.py +23 -0
- omlish/{os.py → os/files.py} +0 -9
- omlish/{lite → os}/journald.py +2 -1
- omlish/os/linux.py +484 -0
- omlish/os/paths.py +36 -0
- omlish/{lite → os}/pidfile.py +1 -0
- omlish/os/sizes.py +9 -0
- omlish/reflect/__init__.py +3 -0
- omlish/reflect/subst.py +2 -1
- omlish/reflect/types.py +126 -44
- omlish/secrets/pwhash.py +1 -1
- omlish/secrets/subprocesses.py +3 -1
- omlish/specs/jsonrpc/marshal.py +1 -1
- omlish/specs/openapi/marshal.py +1 -1
- omlish/sql/alchemy/asyncs.py +1 -1
- omlish/sql/queries/__init__.py +9 -1
- omlish/sql/queries/building.py +3 -0
- omlish/sql/queries/exprs.py +10 -27
- omlish/sql/queries/idents.py +48 -10
- omlish/sql/queries/names.py +80 -13
- omlish/sql/queries/params.py +64 -0
- omlish/sql/queries/rendering.py +1 -1
- omlish/subprocesses.py +340 -0
- omlish/term.py +29 -14
- omlish/testing/pytest/marks.py +2 -2
- omlish/testing/pytest/plugins/asyncs.py +6 -1
- omlish/testing/pytest/plugins/logging.py +1 -1
- omlish/testing/pytest/plugins/switches.py +1 -1
- {omlish-0.0.0.dev133.dist-info → omlish-0.0.0.dev177.dist-info}/METADATA +7 -5
- {omlish-0.0.0.dev133.dist-info → omlish-0.0.0.dev177.dist-info}/RECORD +200 -117
- omlish/fnpairs.py +0 -496
- omlish/formats/json/cli/__main__.py +0 -11
- omlish/formats/json/cli/cli.py +0 -298
- omlish/formats/json/cli/formats.py +0 -71
- omlish/formats/json/cli/io.py +0 -74
- omlish/formats/json/cli/parsing.py +0 -82
- omlish/formats/json/cli/processing.py +0 -48
- omlish/formats/json/cli/rendering.py +0 -92
- omlish/iterators.py +0 -300
- omlish/lite/subprocesses.py +0 -130
- /omlish/{formats/json/cli → argparse}/__init__.py +0 -0
- /omlish/{lite/fdio → asyncs/asyncio}/__init__.py +0 -0
- /omlish/asyncs/{asyncio.py → asyncio/asyncio.py} +0 -0
- /omlish/{lite/http → asyncs/bluelet}/__init__.py +0 -0
- /omlish/collections/{_abc.py → abc.py} +0 -0
- /omlish/{fnpipes.py → funcs/pipes.py} +0 -0
- /omlish/io/{_abc.py → abc.py} +0 -0
- /omlish/sql/{_abc.py → abc.py} +0 -0
- {omlish-0.0.0.dev133.dist-info → omlish-0.0.0.dev177.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev133.dist-info → omlish-0.0.0.dev177.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev133.dist-info → omlish-0.0.0.dev177.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev133.dist-info → omlish-0.0.0.dev177.dist-info}/top_level.txt +0 -0
omlish/io/generators/stepped.py
ADDED
@@ -0,0 +1,191 @@
```python
import typing as ta

from ... import check
from ... import lang
from .consts import DEFAULT_BUFFER_SIZE
from .direct import BytesDirectGenerator
from .direct import StrDirectGenerator


T = ta.TypeVar('T')

O = ta.TypeVar('O')
I = ta.TypeVar('I')
R = ta.TypeVar('R')

OF = ta.TypeVar('OF')
OT = ta.TypeVar('OT')


# Stepped generators accept a non-None input, then in response yield zero or more non-None outputs, until yielding None
# to signal they need more input again.
SteppedGenerator: ta.TypeAlias = ta.Generator[O | None, I | None, R]

# Conventionally, these are sent and themselves yield an empty value to signify termination.
BytesSteppedGenerator: ta.TypeAlias = SteppedGenerator[bytes, bytes, R]
StrSteppedGenerator: ta.TypeAlias = SteppedGenerator[str, str, R]

BytesToStrSteppedGenerator: ta.TypeAlias = SteppedGenerator[str, bytes, R]
StrToBytesSteppedGenerator: ta.TypeAlias = SteppedGenerator[bytes, str, R]


# Stepped reader generators emit either an int or None to request input, or emit some other kind of output.
SteppedReaderGenerator: ta.TypeAlias = ta.Generator[int | None | O, I | None, R]

BytesSteppedReaderGenerator: ta.TypeAlias = SteppedReaderGenerator[bytes, bytes, R]
StrSteppedReaderGenerator: ta.TypeAlias = SteppedReaderGenerator[str, str, R]


##


@lang.autostart
def flatmap_stepped_generator(
        fn: ta.Callable[[list[OF]], OT],
        g: SteppedGenerator[OF, I, R],
        *,
        terminate: ta.Callable[[OF], bool] | None = None,
) -> ta.Generator[OT, I, lang.Maybe[R]]:
    """
    Given a stepped generator and a function taking a list, returns a direct (1:1) generator which accepts input, builds
    a list of yielded generator output, calls the given function with that list, and yields the result.

    An optional terminate function may be provided which will cause this function to return early if it returns true for
    an encountered yielded value. The encountered value causing termination will be included in the list sent to the
    given fn.

    Returns a Maybe of either the given generator's return value or empty if the terminator was encountered.
    """

    l: list[OF]
    i: I | None = yield  # type: ignore
    while True:
        l = []

        while True:
            try:
                o = g.send(i)
            except StopIteration as e:
                if l:
                    yield fn(l)
                return lang.just(e.value)

            i = None

            if o is None:
                break

            l.append(o)

            if terminate is not None and terminate(o):
                yield fn(l)
                return lang.empty()

        i = yield fn(l)


##


def _join_bytes(l: ta.Sequence[bytes]) -> bytes:
    if not l:
        return b''
    elif len(l) == 1:
        return l[0]
    else:
        return b''.join(l)


def _join_str(l: ta.Sequence[str]) -> str:
    if not l:
        return ''
    elif len(l) == 1:
        return l[0]
    else:
        return ''.join(l)


def _is_empty(o: T) -> bool:
    return len(o) < 1  # type: ignore


def joined_bytes_stepped_generator(g: BytesSteppedGenerator[R]) -> BytesDirectGenerator[R]:
    return flatmap_stepped_generator(_join_bytes, g, terminate=_is_empty)


def joined_str_stepped_generator(g: StrSteppedGenerator[R]) -> StrDirectGenerator[R]:
    return flatmap_stepped_generator(_join_str, g, terminate=_is_empty)


##


def read_into_bytes_stepped_generator(
        g: BytesSteppedGenerator,
        f: ta.IO,
        *,
        read_size: int = DEFAULT_BUFFER_SIZE,
) -> ta.Iterator[bytes]:
    yield from lang.genmap(  # type: ignore[misc]
        joined_bytes_stepped_generator(g),
        lang.readiter(f, read_size),
    )


def read_into_str_stepped_generator(
        g: StrSteppedGenerator,
        f: ta.TextIO,
        *,
        read_size: int = DEFAULT_BUFFER_SIZE,
) -> ta.Iterator[str]:
    yield from lang.genmap(
        joined_str_stepped_generator(g),
        lang.readiter(f, read_size),
    )


##


@lang.autostart
def buffer_bytes_stepped_reader_generator(g: BytesSteppedReaderGenerator) -> BytesSteppedGenerator:
    o = g.send(None)
    buf: ta.Any = None
    eof = False

    while True:
        if eof:
            raise EOFError

        if not buf:
            buf = check.isinstance((yield None), bytes)
            if not buf:
                eof = True

        if o is None:
            i = buf
            buf = None

        elif isinstance(o, int):
            while len(buf) < o:
                more = check.isinstance((yield None), bytes)
                if not more:
                    raise EOFError
                # FIXME: lol - share guts with readers
                buf += more

            i = buf[:o]
            buf = buf[o:]

        else:
            raise TypeError(o)

        while True:
            o = g.send(i)
            i = None
            if isinstance(o, bytes):
                check.none((yield o))
                if not o:
                    return
            else:
                break
```
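For orientation, here is a minimal usage sketch of the stepped-generator protocol described above. It is not part of the diff; the `upper_stepped` generator is a hypothetical example, and the `omlish.io.generators.stepped` import path is assumed from the file list.

```python
# Illustrative sketch (not from the diff): a trivial stepped generator that
# uppercases byte chunks, adapted into a direct (one-in/one-out) generator.
from omlish import lang  # assumed import path
from omlish.io.generators.stepped import joined_bytes_stepped_generator  # assumed import path


@lang.autostart  # primes the generator to its first yield, per the convention above
def upper_stepped():
    while True:
        chunk = yield None   # yield None to ask for the next input
        if not chunk:        # empty bytes conventionally signals termination
            yield b''
            return
        yield chunk.upper()  # zero or more outputs per input


g = joined_bytes_stepped_generator(upper_stepped())
print([g.send(b) for b in (b'abc', b'def')])  # expected: [b'ABC', b'DEF']
```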
omlish/io/pyio.py
CHANGED
```diff
@@ -4,7 +4,7 @@
 """
 Python implementation of the io module.
 
-https://github.com/python/cpython/blob/
+https://github.com/python/cpython/blob/8b3cccf3f9508572d85b0044519f2bd5715dacad/Lib/_pyio.py
 """
 # PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
 # --------------------------------------------
@@ -2580,8 +2580,11 @@ class TextIOWrapper(TextIOBase):
         decoder = self._decoder or self._get_decoder()
 
         if size < 0:
+            chunk = self.buffer.read()
+            if chunk is None:
+                raise BlockingIOError("Read returned None.")
             # Read everything.
-            result = self._get_decoded_chars() + decoder.decode(
+            result = self._get_decoded_chars() + decoder.decode(chunk, final=True)
             if self._snapshot is not None:
                 self._set_decoded_chars('')
                 self._snapshot = None
```
omlish/iterators/__init__.py
ADDED
@@ -0,0 +1,24 @@
```python
from .iterators import (  # noqa
    PeekIterator,
    PrefetchIterator,
    ProxyIterator,
    RetainIterator,
)

from .recipes import (  # noqa
    sliding_window,
)

from .tools import (  # noqa
    chunk,
    expand_indexed_pairs,
    merge_on,
    take,
    unzip,
)

from .unique import (  # noqa
    UniqueItem,
    UniqueIterator,
    UniqueStats,
)
```
omlish/iterators/iterators.py
ADDED
@@ -0,0 +1,132 @@
```python
import collections
import typing as ta


T = ta.TypeVar('T')


_MISSING = object()


class PeekIterator(ta.Iterator[T]):

    def __init__(self, it: ta.Iterable[T]) -> None:
        super().__init__()

        self._it = iter(it)
        self._pos = -1
        self._next_item: ta.Any = _MISSING

    _item: T

    def __iter__(self) -> ta.Self:
        return self

    @property
    def done(self) -> bool:
        try:
            self.peek()
        except StopIteration:
            return True
        else:
            return False

    def __next__(self) -> T:
        if self._next_item is not _MISSING:
            self._item = ta.cast(T, self._next_item)
            self._next_item = _MISSING
        else:
            self._item = next(self._it)
        self._pos += 1
        return self._item

    def peek(self) -> T:
        if self._next_item is not _MISSING:
            return ta.cast(T, self._next_item)
        self._next_item = next(self._it)
        return self._next_item

    def next_peek(self) -> T:
        next(self)
        return self.peek()

    def takewhile(self, fn: ta.Callable[[T], bool]) -> ta.Iterator[T]:
        while fn(self.peek()):
            yield next(self)

    def skipwhile(self, fn: ta.Callable[[T], bool]) -> None:
        while fn(self.peek()):
            next(self)

    def takeuntil(self, fn: ta.Callable[[T], bool]) -> ta.Iterator[T]:
        return self.takewhile(lambda e: not fn(e))

    def skipuntil(self, fn: ta.Callable[[T], bool]) -> None:
        self.skipwhile(lambda e: not fn(e))

    def takethrough(self, pos: int) -> ta.Iterator[T]:
        return self.takewhile(lambda _: self._pos < pos)

    def skipthrough(self, pos: int) -> None:
        self.skipwhile(lambda _: self._pos < pos)

    def taketo(self, pos: int) -> ta.Iterator[T]:
        return self.takethrough(pos - 1)

    def skipto(self, pos: int) -> None:
        self.skipthrough(pos - 1)


class ProxyIterator(ta.Iterator[T]):

    def __init__(self, fn: ta.Callable[[], T]) -> None:
        self._fn = fn

    def __iter__(self) -> ta.Self:
        return self

    def __next__(self) -> T:
        return self._fn()


class PrefetchIterator(ta.Iterator[T]):

    def __init__(self, fn: ta.Callable[[], T] | None = None) -> None:
        super().__init__()

        self._fn = fn
        self._deque: collections.deque[T] = collections.deque()

    def __iter__(self) -> ta.Self:
        return self

    def push(self, item) -> None:
        self._deque.append(item)

    def __next__(self) -> T:
        try:
            return self._deque.popleft()
        except IndexError:
            if self._fn is None:
                raise StopIteration from None
            return self._fn()


class RetainIterator(ta.Iterator[T]):

    def __init__(self, fn: ta.Callable[[], T]) -> None:
        super().__init__()

        self._fn = fn
        self._deque: collections.deque[T] = collections.deque()

    def __iter__(self) -> ta.Self:
        return self

    def pop(self) -> None:
        self._deque.popleft()

    def __next__(self) -> T:
        item = self._fn()
        self._deque.append(item)
        return item
```
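A brief usage sketch of `PeekIterator` as defined above (illustrative only, not part of the diff; it assumes the re-export from `omlish.iterators` shown earlier):

```python
from omlish.iterators import PeekIterator

it = PeekIterator(iter([1, 2, 3, 4, 5]))

print(it.peek())                             # 1 - look ahead without consuming
print(list(it.takewhile(lambda x: x < 4)))   # [1, 2, 3]
print(it.peek())                             # 4 - the first rejected item is still pending
print(list(it))                              # [4, 5]
print(it.done)                               # True
```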
omlish/iterators/recipes.py
ADDED
@@ -0,0 +1,18 @@
```python
"""
https://docs.python.org/3/library/itertools.html#itertools-recipes
"""
import collections
import itertools
import typing as ta


T = ta.TypeVar('T')


def sliding_window(it: ta.Iterable[T], n: int) -> ta.Iterator[tuple[T, ...]]:
    # sliding_window('ABCDEFG', 4) -> ABCD BCDE CDEF DEFG
    iterator = iter(it)
    window = collections.deque(itertools.islice(iterator, n - 1), maxlen=n)
    for x in iterator:
        window.append(x)
        yield tuple(window)
```
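A quick check of `sliding_window` (illustrative, not part of the diff):

```python
from omlish.iterators import sliding_window

print(list(sliding_window('ABCDEFG', 4)))
# [('A', 'B', 'C', 'D'), ('B', 'C', 'D', 'E'), ('C', 'D', 'E', 'F'), ('D', 'E', 'F', 'G')]

print(list(sliding_window([1, 2], 4)))  # [] - input shorter than the window yields nothing
```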
omlish/iterators/tools.py
ADDED
@@ -0,0 +1,96 @@
```python
import functools
import heapq
import itertools
import typing as ta

from .iterators import PeekIterator
from .iterators import PrefetchIterator


T = ta.TypeVar('T')
U = ta.TypeVar('U')


def unzip(it: ta.Iterable[T], width: int | None = None) -> list:
    if width is None:
        if not isinstance(it, PeekIterator):
            it = PeekIterator(iter(it))
        try:
            width = len(it.peek())
        except StopIteration:
            return []

    its: list[PrefetchIterator[T]] = []
    running = True

    def next_fn(idx):
        nonlocal running
        if not running:
            raise StopIteration
        try:
            items = next(it)  # type: ignore
        except StopIteration:
            running = False
            raise
        for item_idx, item in enumerate(items):
            its[item_idx].push(item)
        return next(its[idx])

    its.extend(PrefetchIterator(functools.partial(next_fn, idx)) for idx in range(width))
    return its


def take(n: int, iterable: ta.Iterable[T]) -> list[T]:
    return list(itertools.islice(iterable, n))


def chunk(n: int, iterable: ta.Iterable[T], strict: bool = False) -> ta.Iterator[list[T]]:
    iterator = iter(functools.partial(take, n, iter(iterable)), [])
    if strict:
        def ret():
            for chunk in iterator:
                if len(chunk) != n:
                    raise ValueError('iterable is not divisible by n.')
                yield chunk
        return iter(ret())
    else:
        return iterator


def merge_on(
        function: ta.Callable[[T], U],
        *its: ta.Iterable[T],
) -> ta.Iterator[tuple[U, list[tuple[int, T]]]]:
    indexed_its = [
        (
            (function(item), it_idx, item)
            for it_idx, item in zip(itertools.repeat(it_idx), it)
        )
        for it_idx, it in enumerate(its)
    ]

    grouped_indexed_its = itertools.groupby(
        heapq.merge(*indexed_its),
        key=lambda item_tuple: item_tuple[0],
    )

    return (
        (fn_item, [(it_idx, item) for _, it_idx, item in grp])
        for fn_item, grp in grouped_indexed_its
    )


def expand_indexed_pairs(
        seq: ta.Iterable[tuple[int, T]],
        default: T,
        *,
        width: int | None = None,
) -> list[T]:
    width_ = width
    if width_ is None:
        width_ = (max(idx for idx, _ in seq) + 1) if seq else 0
    result = [default] * width_
    for idx, value in seq:
        if idx < width_:
            result[idx] = value
    return result
```
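Usage sketches for the helpers above (illustrative, not part of the diff; expected outputs are annotated in comments):

```python
from omlish.iterators import chunk, merge_on, take, unzip

print(take(3, range(10)))        # [0, 1, 2]
print(list(chunk(2, 'abcde')))   # [['a', 'b'], ['c', 'd'], ['e']]

# merge_on merges already-sorted iterables, grouping by the key function and
# tagging each grouped item with the index of the iterable it came from.
print(list(merge_on(lambda x: x, [1, 3, 5], [2, 3, 6])))
# [(1, [(0, 1)]), (2, [(1, 2)]), (3, [(0, 3), (1, 3)]), (5, [(0, 5)]), (6, [(1, 6)])]

# unzip splits an iterable of fixed-width tuples into one lazy iterator per column.
a, b = unzip([(1, 'a'), (2, 'b'), (3, 'c')])
print(list(a), list(b))          # [1, 2, 3] ['a', 'b', 'c']
```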
omlish/iterators/unique.py
ADDED
@@ -0,0 +1,67 @@
```python
import dataclasses as dc
import typing as ta

from .. import lang


T = ta.TypeVar('T')


@dc.dataclass()
class UniqueStats:
    key: ta.Any
    num_seen: int
    first_idx: int
    last_idx: int


@dc.dataclass(frozen=True)
class UniqueItem(ta.Generic[T]):
    idx: int
    item: T
    stats: UniqueStats
    out: lang.Maybe[T]


class UniqueIterator(ta.Iterator[UniqueItem[T]]):
    def __init__(
            self,
            it: ta.Iterable[T],
            keyer: ta.Callable[[T], ta.Any] = lang.identity,
    ) -> None:
        super().__init__()
        self._it = enumerate(it)
        self._keyer = keyer

        self.stats: dict[ta.Any, UniqueStats] = {}

    def __next__(self) -> UniqueItem[T]:
        idx, item = next(self._it)
        key = self._keyer(item)

        try:
            stats = self.stats[key]

        except KeyError:
            stats = self.stats[key] = UniqueStats(
                key,
                num_seen=1,
                first_idx=idx,
                last_idx=idx,
            )
            return UniqueItem(
                idx,
                item,
                stats,
                lang.just(item),
            )

        else:
            stats.num_seen += 1
            stats.last_idx = idx
            return UniqueItem(
                idx,
                item,
                stats,
                lang.empty(),
            )
```
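A usage sketch of `UniqueIterator` (illustrative, not part of the diff): every element is yielded, each tagged with shared per-key statistics, and `out` carries `lang.just(item)` only on a key's first occurrence.

```python
from omlish.iterators import UniqueIterator

for u in UniqueIterator('abcab'):
    print(u.idx, u.item, u.stats.num_seen == 1)
# 0 a True
# 1 b True
# 2 c True
# 3 a False
# 4 b False
```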
omlish/lang/__init__.py
CHANGED
```diff
@@ -112,6 +112,7 @@ from .functions import (  # noqa
     issubclass_of,
     maybe_call,
     opt_coalesce,
+    opt_fn,
     periodically,
     raise_,
     raising,
@@ -120,6 +121,17 @@ from .functions import (  # noqa
     void,
 )
 
+from .generators import (  # noqa
+    CoroutineGenerator,
+    Generator,
+    GeneratorLike,
+    GeneratorMappedIterator,
+    autostart,
+    corogen,
+    genmap,
+    nextgen,
+)
+
 from .imports import (  # noqa
     can_import,
     import_all,
@@ -136,7 +148,6 @@ from .imports import (  # noqa
 
 from .iterables import (  # noqa
     BUILTIN_SCALAR_ITERABLE_TYPES,
-    Generator,
     asrange,
     exhaust,
     flatmap,
@@ -146,6 +157,7 @@ from .iterables import (  # noqa
     itergen,
     peek,
     prodrange,
+    readiter,
     renumerate,
     take,
 )
```
omlish/lang/functions.py
CHANGED
```diff
@@ -4,6 +4,7 @@ import time
 import typing as ta
 
 
+F = ta.TypeVar('F')
 T = ta.TypeVar('T')
 P = ta.ParamSpec('P')
 CallableT = ta.TypeVar('CallableT', bound=ta.Callable)
@@ -81,8 +82,17 @@ def identity(obj: T) -> T:
     return obj
 
 
-class constant(ta.Generic[T]):  # noqa
+def opt_fn(fn: ta.Callable[[F], T]) -> ta.Callable[[F | None], T | None]:
+    @functools.wraps(fn)
+    def inner(v: F | None) -> T | None:
+        if v is not None:
+            return fn(v)
+        else:
+            return None
+    return inner
 
+
+class constant(ta.Generic[T]):  # noqa
     def __init__(self, obj: T) -> None:
         super().__init__()
 
@@ -116,7 +126,6 @@ class VoidError(Exception):
 
 
 class Void:
-
     def __new__(cls, *args: ta.Any, **kwargs: ta.Any) -> None:  # type: ignore  # noqa
         raise VoidError
 
```
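For context, a usage sketch of the new `opt_fn` helper (illustrative, not part of the diff): it lifts a function over `None`, so `None` passes through instead of being handed to the wrapped callable.

```python
from omlish import lang

parse = lang.opt_fn(int)
print(parse('42'))  # 42
print(parse(None))  # None
```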