omlish-0.0.0.dev165-py3-none-any.whl → omlish-0.0.0.dev167-py3-none-any.whl
- omlish/__about__.py +2 -2
- omlish/codecs/text.py +2 -2
- omlish/io/compress/codecs.py +20 -0
- omlish/io/generators/__init__.py +3 -0
- omlish/io/generators/stepped.py +19 -3
- omlish/iterators/__init__.py +24 -0
- omlish/iterators/iterators.py +132 -0
- omlish/iterators/recipes.py +18 -0
- omlish/iterators/tools.py +96 -0
- omlish/iterators/unique.py +67 -0
- omlish/lite/strings.py +9 -0
- omlish/os/atomics.py +3 -3
- omlish/os/paths.py +32 -0
- {omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/RECORD +19 -14
- omlish/iterators.py +0 -300
- {omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/top_level.txt +0 -0
omlish/__about__.py
CHANGED
omlish/codecs/text.py
CHANGED
@@ -46,8 +46,8 @@ TextEncodingErrors: ta.TypeAlias = ta.Literal[
     'namereplace',
 
     ##
 
-    # In addition, the following error handler is specific to the given codecs:
-    # utf-
+    # In addition, the following error handler is specific to the given codecs: utf-8, utf-16, utf-32, utf-16-be,
+    # utf-16-le, utf-32-be, utf-32-le
 
     # Allow encoding and decoding surrogate code point (U+D800 - U+DFFF) as normal code point. Otherwise these codecs
     # treat the presence of surrogate code point in str as an error.
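For context (not part of the diff): the handler this comment describes is presumably 'surrogatepass', which only the UTF codecs listed above accept. A minimal illustration in plain Python:

# 'surrogatepass' lets lone surrogate code points round-trip through the listed UTF codecs.
b = '\ud800'.encode('utf-8', 'surrogatepass')   # b'\xed\xa0\x80'
s = b.decode('utf-8', 'surrogatepass')          # '\ud800'
# The default 'strict' handler would raise UnicodeEncodeError / UnicodeDecodeError here.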
omlish/io/compress/codecs.py
CHANGED
@@ -2,7 +2,9 @@ import dataclasses as dc
 import typing as ta
 
 from ... import codecs
+from ..generators import buffer_bytes_stepped_reader_generator
 from .base import Compression
+from .base import IncrementalCompression
 
 
 ##
@@ -22,6 +24,20 @@ class CompressionEagerCodec(codecs.EagerCodec[bytes, bytes]):
 ##
 
 
+@dc.dataclass(frozen=True)
+class CompressionIncrementalCodec(codecs.IncrementalCodec[bytes, bytes]):
+    compression: IncrementalCompression
+
+    def encode_incremental(self) -> ta.Generator[bytes | None, bytes, None]:
+        return self.compression.compress_incremental()
+
+    def decode_incremental(self) -> ta.Generator[bytes | None, bytes, None]:
+        return buffer_bytes_stepped_reader_generator(self.compression.decompress_incremental())
+
+
+##
+
+
 class CompressionCodec(codecs.Codec):
     pass
 
@@ -40,6 +56,10 @@ def make_compression_codec(
         output=bytes,
 
         new=lambda *args, **kwargs: CompressionEagerCodec(cls(*args, **kwargs)),
+
+        new_incremental=(
+            lambda *args, **kwargs: CompressionIncrementalCodec(cls(*args, **kwargs))  # noqa
+        ) if issubclass(cls, IncrementalCompression) else None,
     )
 
 
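A minimal driver sketch (not part of the diff) for the generators these incremental codecs return. It assumes the stepped-generator protocol implied by buffer_bytes_stepped_reader_generator below: the generator yields None when it wants more input, yields bytes when it has output (expecting None sent back), and treats an empty bytes chunk as end of input. The helper name and the local type alias are illustrative only.

import typing as ta

BytesSteppedGenerator = ta.Generator[bytes | None, bytes, None]  # assumed to match omlish.io.generators


def drive_stepped(g: BytesSteppedGenerator, chunks: ta.Iterable[bytes]) -> bytes:
    # Hypothetical helper: feed `chunks` through a stepped generator and collect its output.
    src = iter(chunks)
    out = bytearray()
    o = g.send(None)  # prime the generator
    try:
        while True:
            if o is None:
                o = g.send(next(src, b''))  # b'' signals end of input
            else:
                out.extend(o)
                o = g.send(None)  # acknowledge emitted output
    except StopIteration:
        pass
    return bytes(out)

# e.g. drive_stepped(codec.encode_incremental(), [b'abc', b'def']), where `codec` is a
# CompressionIncrementalCodec instance; the names here are not taken from the diff.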
omlish/io/generators/__init__.py
CHANGED
omlish/io/generators/stepped.py
CHANGED
@@ -151,18 +151,32 @@ def read_into_str_stepped_generator(
 def buffer_bytes_stepped_reader_generator(g: BytesSteppedReaderGenerator) -> BytesSteppedGenerator:
     o = g.send(None)
     buf: ta.Any = None
+    eof = False
 
     while True:
+        if eof:
+            raise EOFError
+
         if not buf:
             buf = check.isinstance((yield None), bytes)
+            if not buf:
+                eof = True
 
-        if o is None
+        if o is None:
             i = buf
+            buf = None
+
         elif isinstance(o, int):
-
-
+            while len(buf) < o:
+                more = check.isinstance((yield None), bytes)
+                if not more:
+                    raise EOFError
+                # FIXME: lol - share guts with readers
+                buf += more
+
             i = buf[:o]
             buf = buf[o:]
+
         else:
             raise TypeError(o)
 
@@ -171,5 +185,7 @@ def buffer_bytes_stepped_reader_generator(g: BytesSteppedReaderGenerator) -> BytesSteppedGenerator:
         i = None
         if isinstance(o, bytes):
             check.none((yield o))
+            if not o:
+                return
         else:
             break
omlish/iterators/__init__.py
ADDED
@@ -0,0 +1,24 @@
+from .iterators import (  # noqa
+    PeekIterator,
+    PrefetchIterator,
+    ProxyIterator,
+    RetainIterator,
+)
+
+from .recipes import (  # noqa
+    sliding_window,
+)
+
+from .tools import (  # noqa
+    chunk,
+    expand_indexed_pairs,
+    merge_on,
+    take,
+    unzip,
+)
+
+from .unique import (  # noqa
+    UniqueItem,
+    UniqueIterator,
+    UniqueStats,
+)
omlish/iterators/iterators.py
ADDED
@@ -0,0 +1,132 @@
+import collections
+import typing as ta
+
+
+T = ta.TypeVar('T')
+
+
+_MISSING = object()
+
+
+class PeekIterator(ta.Iterator[T]):
+
+    def __init__(self, it: ta.Iterable[T]) -> None:
+        super().__init__()
+
+        self._it = iter(it)
+        self._pos = -1
+        self._next_item: ta.Any = _MISSING
+
+    _item: T
+
+    def __iter__(self) -> ta.Self:
+        return self
+
+    @property
+    def done(self) -> bool:
+        try:
+            self.peek()
+        except StopIteration:
+            return True
+        else:
+            return False
+
+    def __next__(self) -> T:
+        if self._next_item is not _MISSING:
+            self._item = ta.cast(T, self._next_item)
+            self._next_item = _MISSING
+        else:
+            self._item = next(self._it)
+        self._pos += 1
+        return self._item
+
+    def peek(self) -> T:
+        if self._next_item is not _MISSING:
+            return ta.cast(T, self._next_item)
+        self._next_item = next(self._it)
+        return self._next_item
+
+    def next_peek(self) -> T:
+        next(self)
+        return self.peek()
+
+    def takewhile(self, fn: ta.Callable[[T], bool]) -> ta.Iterator[T]:
+        while fn(self.peek()):
+            yield next(self)
+
+    def skipwhile(self, fn: ta.Callable[[T], bool]) -> None:
+        while fn(self.peek()):
+            next(self)
+
+    def takeuntil(self, fn: ta.Callable[[T], bool]) -> ta.Iterator[T]:
+        return self.takewhile(lambda e: not fn(e))
+
+    def skipuntil(self, fn: ta.Callable[[T], bool]) -> None:
+        self.skipwhile(lambda e: not fn(e))
+
+    def takethrough(self, pos: int) -> ta.Iterator[T]:
+        return self.takewhile(lambda _: self._pos < pos)
+
+    def skipthrough(self, pos: int) -> None:
+        self.skipwhile(lambda _: self._pos < pos)
+
+    def taketo(self, pos: int) -> ta.Iterator[T]:
+        return self.takethrough(pos - 1)
+
+    def skipto(self, pos: int) -> None:
+        self.skipthrough(pos - 1)
+
+
+class ProxyIterator(ta.Iterator[T]):
+
+    def __init__(self, fn: ta.Callable[[], T]) -> None:
+        self._fn = fn
+
+    def __iter__(self) -> ta.Self:
+        return self
+
+    def __next__(self) -> T:
+        return self._fn()
+
+
+class PrefetchIterator(ta.Iterator[T]):
+
+    def __init__(self, fn: ta.Callable[[], T] | None = None) -> None:
+        super().__init__()
+
+        self._fn = fn
+        self._deque: collections.deque[T] = collections.deque()
+
+    def __iter__(self) -> ta.Self:
+        return self
+
+    def push(self, item) -> None:
+        self._deque.append(item)
+
+    def __next__(self) -> T:
+        try:
+            return self._deque.popleft()
+        except IndexError:
+            if self._fn is None:
+                raise StopIteration from None
+            return self._fn()
+
+
+class RetainIterator(ta.Iterator[T]):
+
+    def __init__(self, fn: ta.Callable[[], T]) -> None:
+        super().__init__()
+
+        self._fn = fn
+        self._deque: collections.deque[T] = collections.deque()
+
+    def __iter__(self) -> ta.Self:
+        return self
+
+    def pop(self) -> None:
+        self._deque.popleft()
+
+    def __next__(self) -> T:
+        item = self._fn()
+        self._deque.append(item)
+        return item
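A quick usage sketch of PeekIterator (not part of the diff), based only on the methods shown above:

it = PeekIterator(iter([1, 2, 3, 4, 5]))
assert it.peek() == 1                                    # look ahead without consuming
assert next(it) == 1
assert list(it.takewhile(lambda x: x < 4)) == [2, 3]     # stops before the first non-match
assert not it.done                                       # 4 and 5 are still pending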
omlish/iterators/recipes.py
ADDED
@@ -0,0 +1,18 @@
+"""
+https://docs.python.org/3/library/itertools.html#itertools-recipes
+"""
+import collections
+import itertools
+import typing as ta
+
+
+T = ta.TypeVar('T')
+
+
+def sliding_window(it: ta.Iterable[T], n: int) -> ta.Iterator[tuple[T, ...]]:
+    # sliding_window('ABCDEFG', 4) -> ABCD BCDE CDEF DEFG
+    iterator = iter(it)
+    window = collections.deque(itertools.islice(iterator, n - 1), maxlen=n)
+    for x in iterator:
+        window.append(x)
+        yield tuple(window)
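For illustration (not part of the diff), the recipe behaves like the itertools documentation example it links to:

assert list(sliding_window('ABCDE', 3)) == [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]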
omlish/iterators/tools.py
ADDED
@@ -0,0 +1,96 @@
+import functools
+import heapq
+import itertools
+import typing as ta
+
+from .iterators import PeekIterator
+from .iterators import PrefetchIterator
+
+
+T = ta.TypeVar('T')
+U = ta.TypeVar('U')
+
+
+def unzip(it: ta.Iterable[T], width: int | None = None) -> list:
+    if width is None:
+        if not isinstance(it, PeekIterator):
+            it = PeekIterator(iter(it))
+        try:
+            width = len(it.peek())
+        except StopIteration:
+            return []
+
+    its: list[PrefetchIterator[T]] = []
+    running = True
+
+    def next_fn(idx):
+        nonlocal running
+        if not running:
+            raise StopIteration
+        try:
+            items = next(it)  # type: ignore
+        except StopIteration:
+            running = False
+            raise
+        for item_idx, item in enumerate(items):
+            its[item_idx].push(item)
+        return next(its[idx])
+
+    its.extend(PrefetchIterator(functools.partial(next_fn, idx)) for idx in range(width))
+    return its
+
+
+def take(n: int, iterable: ta.Iterable[T]) -> list[T]:
+    return list(itertools.islice(iterable, n))
+
+
+def chunk(n: int, iterable: ta.Iterable[T], strict: bool = False) -> ta.Iterator[list[T]]:
+    iterator = iter(functools.partial(take, n, iter(iterable)), [])
+    if strict:
+        def ret():
+            for chunk in iterator:
+                if len(chunk) != n:
+                    raise ValueError('iterable is not divisible by n.')
+                yield chunk
+        return iter(ret())
+    else:
+        return iterator
+
+
+def merge_on(
+        function: ta.Callable[[T], U],
+        *its: ta.Iterable[T],
+) -> ta.Iterator[tuple[U, list[tuple[int, T]]]]:
+    indexed_its = [
+        (
+            (function(item), it_idx, item)
+            for it_idx, item in zip(itertools.repeat(it_idx), it)
+        )
+        for it_idx, it in enumerate(its)
+    ]
+
+    grouped_indexed_its = itertools.groupby(
+        heapq.merge(*indexed_its),
+        key=lambda item_tuple: item_tuple[0],
+    )
+
+    return (
+        (fn_item, [(it_idx, item) for _, it_idx, item in grp])
+        for fn_item, grp in grouped_indexed_its
+    )
+
+
+def expand_indexed_pairs(
+        seq: ta.Iterable[tuple[int, T]],
+        default: T,
+        *,
+        width: int | None = None,
+) -> list[T]:
+    width_ = width
+    if width_ is None:
+        width_ = (max(idx for idx, _ in seq) + 1) if seq else 0
+    result = [default] * width_
+    for idx, value in seq:
+        if idx < width_:
+            result[idx] = value
+    return result
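A few usage sketches (not part of the diff); the values follow directly from the definitions above:

assert take(3, range(10)) == [0, 1, 2]

assert list(chunk(2, [1, 2, 3, 4, 5])) == [[1, 2], [3, 4], [5]]   # last chunk may be short
# chunk(2, [1, 2, 3], strict=True) raises ValueError when it reaches the short tail.

left, right = unzip([(1, 'a'), (2, 'b'), (3, 'c')])
assert list(left) == [1, 2, 3] and list(right) == ['a', 'b', 'c']

# merge_on merges already-sorted iterables and groups equal keys, tagging each item
# with the index of the iterable it came from:
assert list(merge_on(lambda x: x, [1, 3], [2, 3])) == [
    (1, [(0, 1)]),
    (2, [(1, 2)]),
    (3, [(0, 3), (1, 3)]),
]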
omlish/iterators/unique.py
ADDED
@@ -0,0 +1,67 @@
+import dataclasses as dc
+import typing as ta
+
+from .. import lang
+
+
+T = ta.TypeVar('T')
+
+
+@dc.dataclass()
+class UniqueStats:
+    key: ta.Any
+    num_seen: int
+    first_idx: int
+    last_idx: int
+
+
+@dc.dataclass(frozen=True)
+class UniqueItem(ta.Generic[T]):
+    idx: int
+    item: T
+    stats: UniqueStats
+    out: lang.Maybe[T]
+
+
+class UniqueIterator(ta.Iterator[UniqueItem[T]]):
+    def __init__(
+            self,
+            it: ta.Iterable[T],
+            keyer: ta.Callable[[T], ta.Any] = lang.identity,
+    ) -> None:
+        super().__init__()
+        self._it = enumerate(it)
+        self._keyer = keyer
+
+        self.stats: dict[ta.Any, UniqueStats] = {}
+
+    def __next__(self) -> UniqueItem[T]:
+        idx, item = next(self._it)
+        key = self._keyer(item)
+
+        try:
+            stats = self.stats[key]
+
+        except KeyError:
+            stats = self.stats[key] = UniqueStats(
+                key,
+                num_seen=1,
+                first_idx=idx,
+                last_idx=idx,
+            )
+            return UniqueItem(
+                idx,
+                item,
+                stats,
+                lang.just(item),
+            )
+
+        else:
+            stats.num_seen += 1
+            stats.last_idx = idx
+            return UniqueItem(
+                idx,
+                item,
+                stats,
+                lang.empty(),
+            )
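A usage sketch (not part of the diff): first occurrences can be detected from the per-key stats, using only the fields defined above:

firsts = [u.item for u in UniqueIterator('abcab') if u.stats.first_idx == u.idx]
assert firsts == ['a', 'b', 'c']

# `out` carries the item as lang.just(item) on first sight and lang.empty() on repeats,
# while stats.num_seen / stats.last_idx keep running totals per key.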
omlish/lite/strings.py
CHANGED
omlish/os/atomics.py
CHANGED
@@ -122,7 +122,7 @@ class AtomicPathSwapping(abc.ABC):
 ##
 
 
-class
+class OsReplaceAtomicPathSwap(AtomicPathSwap):
     def __init__(
         self,
         kind: AtomicPathSwapKind,
@@ -150,7 +150,7 @@ class OsRenameAtomicPathSwap(AtomicPathSwap):
         return self._tmp_path
 
     def _commit(self) -> None:
-        os.
+        os.replace(self._tmp_path, self._dst_path)
 
     def _abort(self) -> None:
         shutil.rmtree(self._tmp_path, ignore_errors=True)
@@ -197,7 +197,7 @@ class TempDirAtomicPathSwapping(AtomicPathSwapping):
         else:
             raise TypeError(kind)
 
-        return
+        return OsReplaceAtomicPathSwap(
             kind,
             dst_path,
             tmp_path,
omlish/os/paths.py
ADDED
@@ -0,0 +1,32 @@
+# ruff: noqa: UP006 UP007
+# @omlish-lite
+import os.path
+import typing as ta
+
+
+def abs_real_path(p: str) -> str:
+    return os.path.abspath(os.path.realpath(p))
+
+
+def is_path_in_dir(base_dir: str, target_path: str) -> bool:
+    base_dir = abs_real_path(base_dir)
+    target_path = abs_real_path(target_path)
+
+    return target_path.startswith(base_dir + os.path.sep)
+
+
+def relative_symlink(
+        src: str,
+        dst: str,
+        *,
+        target_is_directory: bool = False,
+        dir_fd: ta.Optional[int] = None,
+        **kwargs: ta.Any,
+) -> None:
+    os.symlink(
+        os.path.relpath(src, os.path.dirname(dst)),
+        dst,
+        target_is_directory=target_is_directory,
+        dir_fd=dir_fd,
+        **kwargs,
+    )
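A brief usage sketch (not part of the diff); the paths are illustrative and a POSIX filesystem is assumed:

import os
import tempfile

# Build a scratch tree and exercise the two helpers.
base = tempfile.mkdtemp()
os.makedirs(os.path.join(base, 'releases', 'v2'))

assert is_path_in_dir(base, os.path.join(base, 'releases', 'v2'))
assert not is_path_in_dir(base, base)  # the directory itself is not "in" itself

# Creates base/current -> 'releases/v2': the source is relativized against dirname(dst).
relative_symlink(os.path.join(base, 'releases', 'v2'), os.path.join(base, 'current'), target_is_directory=True)
assert os.readlink(os.path.join(base, 'current')) == os.path.join('releases', 'v2')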
{omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 omlish/.manifests.json,sha256=0BnQGD2dcXEma0Jop2ZesvDNzSj3CAJBNq8aTGuBz9A,7276
-omlish/__about__.py,sha256=
+omlish/__about__.py,sha256=6MZuOZq8DwHRIcwPmVPdl5xZJjs_D7eV9-nFWn00wgI,3409
 omlish/__init__.py,sha256=SsyiITTuK0v74XpKV8dqNaCmjOlan1JZKrHQv5rWKPA,253
 omlish/c3.py,sha256=ubu7lHwss5V4UznbejAI0qXhXahrU01MysuHOZI9C4U,8116
 omlish/cached.py,sha256=UI-XTFBwA6YXWJJJeBn-WkwBkfzDjLBBaZf4nIJA9y0,510
@@ -7,7 +7,6 @@ omlish/check.py,sha256=RzMJhp8_dDnpHXnPiHDyZTW13KGDTopeuMgpwKCowPs,1988
 omlish/datetimes.py,sha256=HajeM1kBvwlTa-uR1TTZHmZ3zTPnnUr1uGGQhiO1XQ0,2152
 omlish/defs.py,sha256=9uUjJuVIbCBL3g14fyzAp-9gH935MFofvlfOGwcBIaM,4913
 omlish/dynamic.py,sha256=35C_cCX_Vq2HrHzGk5T-zbrMvmUdiIiwDzDNixczoDo,6541
-omlish/iterators.py,sha256=GGLC7RIT86uXMjhIIIqnff_Iu5SI_b9rXYywYGFyzmo,7292
 omlish/libc.py,sha256=8r7Ejyhttk9ruCfBkxNTrlzir5WPbDE2vmY7VPlceMA,15362
 omlish/multiprocessing.py,sha256=QZT4C7I-uThCAjaEY3xgUYb-5GagUlnE4etN01LDyU4,5186
 omlish/runmodule.py,sha256=PWvuAaJ9wQQn6bx9ftEL3_d04DyotNn8dR_twm2pgw0,700
@@ -116,7 +115,7 @@ omlish/codecs/chain.py,sha256=DrBi5vbaFfObfoppo6alwOmyW2XbrH2051cjExwr2Gs,527
 omlish/codecs/funcs.py,sha256=p4imNt7TobyZVXWC-WhntHVu9KfJrO4QwdtPRh-cVOk,850
 omlish/codecs/registry.py,sha256=8ySUG-kwGJoUN1HCRnz8VjcykB0wlIzoWF5WTAE1ny0,3860
 omlish/codecs/standard.py,sha256=eiZ4u9ep0XrA4Z_D1zJI0vmWyuN8HLrX4Se_r_Cq_ZM,60
-omlish/codecs/text.py,sha256=
+omlish/codecs/text.py,sha256=8inuZbi_ODJB4G6eZfj2wBvCLgtpvhcTebKdGSori5c,5728
 omlish/collections/__init__.py,sha256=zeUvcAz073ekko37QKya6sElTMfKTuF1bKrdbMtaRpI,2142
 omlish/collections/abc.py,sha256=sP7BpTVhx6s6C59mTFeosBi4rHOWC6tbFBYbxdZmvh0,2365
 omlish/collections/coerce.py,sha256=g68ROb_-5HgH-vI8612mU2S0FZ8-wp2ZHK5_Zy_kVC0,7037
@@ -311,7 +310,7 @@ omlish/io/compress/adapters.py,sha256=DQvbiqTiaVE-GsawpOpUTX07Gjg125Iuwd6IhjuSax
 omlish/io/compress/base.py,sha256=yx8ifzs_j8y66gMCQcssjZ936NNBFhYn_kBRwNh3SWQ,647
 omlish/io/compress/brotli.py,sha256=Q2t9uRqBEgRyJCSPsTaJv5w7d-rhsjDMluA4VRBHa_A,1182
 omlish/io/compress/bz2.py,sha256=2ULaZwcpAkHKV-JargrLoXgL9G-gxrVnPqE_pGqNUrg,1566
-omlish/io/compress/codecs.py,sha256=
+omlish/io/compress/codecs.py,sha256=ySLHZdNKLx_CiBW9aGIu0gHbALGzF-uyH0usloU7s-8,1820
 omlish/io/compress/gzip.py,sha256=ZKZdg0wg_nIgFdkfemOv8xZpHneBXZAiCH0n6gIibWY,12281
 omlish/io/compress/lz4.py,sha256=4kppXZCXpSAQw6wJvCs9LLHFzukekENja7RiwmN8uMc,2790
 omlish/io/compress/lzma.py,sha256=4bWNKk7uTFiRT_HogW2ZldgaNy1IukmqfVDVkf5M2Ok,2501
@@ -323,11 +322,16 @@ omlish/io/fdio/handlers.py,sha256=OOQhiazbhNMwxLwyzf8KUQrBQSuHIm-UqAMpXmmHGFQ,13
 omlish/io/fdio/kqueue.py,sha256=YgGBQibkAUYODYDiGl7Enjtx1oQsJXuDsBLBXgqlLQw,3832
 omlish/io/fdio/manager.py,sha256=q4wWf7nKrNtjx6yPEvrVnFt4UtK_BTvVlquEGw7poEo,1250
 omlish/io/fdio/pollers.py,sha256=yNadAt3W5wd90PFmd3vD77bq5QwoVb2A6SM2JjZpKRs,5507
-omlish/io/generators/__init__.py,sha256=
+omlish/io/generators/__init__.py,sha256=YsSLJY9uw72eX3iXd_A0pM69g7EvEqMFdCdR_BBD4RA,1216
 omlish/io/generators/consts.py,sha256=4r6IMLBMic6MJHVn9UiORIkkPAuxsqtzFT3KV0fatC0,33
 omlish/io/generators/direct.py,sha256=A9VJB1rNKU3l-NatpYIwyCLI3R_ybGglmdx6sAtoTo4,324
 omlish/io/generators/readers.py,sha256=MolTFCzcnD5XoP0su0YUNHJ0xlHC3KTihvWAi75y8Bo,4336
-omlish/io/generators/stepped.py,sha256=
+omlish/io/generators/stepped.py,sha256=sl-3-hNVYi7qGYZjwBPHs0hKxmz7XkfDMosCXbhIYlE,5025
+omlish/iterators/__init__.py,sha256=yMavf5FofiS1EU4UFuWPXiFZ03W0H-y7MuMxW8FUaEE,358
+omlish/iterators/iterators.py,sha256=ghI4dO6WPyyFOLTIIMaHQ_IOy2xXaFpGPqveZ5YGIBU,3158
+omlish/iterators/recipes.py,sha256=53mkexitMhkwXQZbL6DrhpT0WePQ_56uXd5Jaw3DfzI,467
+omlish/iterators/tools.py,sha256=SvXyyQJh7aceLYhRl6pQB-rfSaXw5IMIWukeEeOZt-0,2492
+omlish/iterators/unique.py,sha256=0jAX3kwzVfRNhe0Tmh7kVP_Q2WBIn8POo_O-rgFV0rQ,1390
 omlish/lang/__init__.py,sha256=wMfsQjBFNsZ_Y4352iEr0DlEnNebf26JmR4ETtDsQow,3948
 omlish/lang/cached.py,sha256=92TvRZQ6sWlm7dNn4hgl7aWKbX0J1XUEo3DRjBpgVQk,7834
 omlish/lang/clsdct.py,sha256=AjtIWLlx2E6D5rC97zQ3Lwq2SOMkbg08pdO_AxpzEHI,1744
@@ -377,7 +381,7 @@ omlish/lite/runtime.py,sha256=XQo408zxTdJdppUZqOWHyeUR50VlCpNIExNGHz4U6O4,459
 omlish/lite/secrets.py,sha256=3Mz3V2jf__XU9qNHcH56sBSw95L3U2UPL24bjvobG0c,816
 omlish/lite/socket.py,sha256=7OYgkXTcQv0wq7TQuLnl9y6dJA1ZT6Vbc1JH59QlxgY,1792
 omlish/lite/socketserver.py,sha256=doTXIctu_6c8XneFtzPFVG_Wq6xVmA3p9ymut8IvBoU,1586
-omlish/lite/strings.py,sha256=
+omlish/lite/strings.py,sha256=SkCQPtw1grKGr1KFgJr3CL3ocvCEcPwH7XIzb-JxFAY,1610
 omlish/lite/typing.py,sha256=U3-JaEnkDSYxK4tsu_MzUn3RP6qALBe5FXQXpD-licE,1090
 omlish/logs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 omlish/logs/abc.py,sha256=ho4ABKYMKX-V7g4sp1BByuOLzslYzLlQ0MESmjEpT-o,8005
@@ -429,11 +433,12 @@ omlish/math/bits.py,sha256=yip1l8agOYzT7bFyMGc0RR3XlnGCfHMpjw_SECLLh1I,3477
 omlish/math/floats.py,sha256=UimhOT7KRl8LXTzOI5cQWoX_9h6WNWe_3vcOuO7-h_8,327
 omlish/math/stats.py,sha256=MegzKVsmv2kra4jDWLOUgV0X7Ee2Tbl5u6ql1v4-dEY,10053
 omlish/os/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-omlish/os/atomics.py,sha256=
+omlish/os/atomics.py,sha256=NQfwifLu48ZniOvaqAuDy8dBTB3tKZntL22T0-8NYwM,5074
 omlish/os/deathsig.py,sha256=hk9Yq2kyDdI-cI7OQH7mOfpRbOKzY_TfPKEqgrjVYbA,641
 omlish/os/files.py,sha256=1tNy1z5I_CgYKA5c6lOfsXc-hknP4tQDbSShdz8HArw,1308
 omlish/os/journald.py,sha256=2nI8Res1poXkbLc31--MPUlzYMESnCcPUkIxDOCjZW0,3903
 omlish/os/linux.py,sha256=whJ6scwMKSFBdXiVhJW0BCpJV4jOGMr-a_a3Bhwz6Ls,18938
+omlish/os/paths.py,sha256=o1vTpQgbOQR0X6Wtb_7oqajxykMy58yJ0WCQIaY9gAA,735
 omlish/os/pidfile.py,sha256=S4Nbe00oSxckY0qCC9AeTEZe7NSw4eJudnQX7wCXzks,1738
 omlish/os/sizes.py,sha256=ohkALLvqSqBX4iR-7DMKJ4pfOCRdZXV8htH4QywUNM0,152
 omlish/reflect/__init__.py,sha256=4-EuCSX1qpEWfScCFzAJv_XghHFu4cXxpxKeBKrosQ4,720
@@ -552,9 +557,9 @@ omlish/text/glyphsplit.py,sha256=Ug-dPRO7x-OrNNr8g1y6DotSZ2KH0S-VcOmUobwa4B0,329
 omlish/text/indent.py,sha256=6Jj6TFY9unaPa4xPzrnZemJ-fHsV53IamP93XGjSUHs,1274
 omlish/text/parts.py,sha256=7vPF1aTZdvLVYJ4EwBZVzRSy8XB3YqPd7JwEnNGGAOo,6495
 omlish/text/random.py,sha256=jNWpqiaKjKyTdMXC-pWAsSC10AAP-cmRRPVhm59ZWLk,194
-omlish-0.0.0.
-omlish-0.0.0.
-omlish-0.0.0.
-omlish-0.0.0.
-omlish-0.0.0.
-omlish-0.0.0.
+omlish-0.0.0.dev167.dist-info/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+omlish-0.0.0.dev167.dist-info/METADATA,sha256=XQoZ5IEtdiX_6-I2wMBv-GB3-Nz6-WbQLpzCKbEjxkI,4264
+omlish-0.0.0.dev167.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+omlish-0.0.0.dev167.dist-info/entry_points.txt,sha256=Lt84WvRZJskWCAS7xnQGZIeVWksprtUHj0llrvVmod8,35
+omlish-0.0.0.dev167.dist-info/top_level.txt,sha256=pePsKdLu7DvtUiecdYXJ78iO80uDNmBlqe-8hOzOmfs,7
+omlish-0.0.0.dev167.dist-info/RECORD,,
omlish/iterators.py
DELETED
@@ -1,300 +0,0 @@
-import collections
-import dataclasses as dc
-import functools
-import heapq
-import itertools
-import typing as ta
-
-# from . import check
-from . import lang
-
-
-T = ta.TypeVar('T')
-U = ta.TypeVar('U')
-
-_MISSING = object()
-
-
-class PeekIterator(ta.Iterator[T]):
-
-    def __init__(self, it: ta.Iterable[T]) -> None:
-        super().__init__()
-
-        self._it = iter(it)
-        self._pos = -1
-        self._next_item: ta.Any = _MISSING
-
-    _item: T
-
-    def __iter__(self) -> ta.Self:
-        return self
-
-    @property
-    def done(self) -> bool:
-        try:
-            self.peek()
-        except StopIteration:
-            return True
-        else:
-            return False
-
-    def __next__(self) -> T:
-        if self._next_item is not _MISSING:
-            self._item = ta.cast(T, self._next_item)
-            self._next_item = _MISSING
-        else:
-            self._item = next(self._it)
-        self._pos += 1
-        return self._item
-
-    def peek(self) -> T:
-        if self._next_item is not _MISSING:
-            return ta.cast(T, self._next_item)
-        self._next_item = next(self._it)
-        return self._next_item
-
-    def next_peek(self) -> T:
-        next(self)
-        return self.peek()
-
-    def takewhile(self, fn: ta.Callable[[T], bool]) -> ta.Iterator[T]:
-        while fn(self.peek()):
-            yield next(self)
-
-    def skipwhile(self, fn: ta.Callable[[T], bool]) -> None:
-        while fn(self.peek()):
-            next(self)
-
-    def takeuntil(self, fn: ta.Callable[[T], bool]) -> ta.Iterator[T]:
-        return self.takewhile(lambda e: not fn(e))
-
-    def skipuntil(self, fn: ta.Callable[[T], bool]) -> None:
-        self.skipwhile(lambda e: not fn(e))
-
-    def takethrough(self, pos: int) -> ta.Iterator[T]:
-        return self.takewhile(lambda _: self._pos < pos)
-
-    def skipthrough(self, pos: int) -> None:
-        self.skipwhile(lambda _: self._pos < pos)
-
-    def taketo(self, pos: int) -> ta.Iterator[T]:
-        return self.takethrough(pos - 1)
-
-    def skipto(self, pos: int) -> None:
-        self.skipthrough(pos - 1)
-
-
-class ProxyIterator(ta.Iterator[T]):
-
-    def __init__(self, fn: ta.Callable[[], T]) -> None:
-        self._fn = fn
-
-    def __iter__(self) -> ta.Self:
-        return self
-
-    def __next__(self) -> T:
-        return self._fn()
-
-
-class PrefetchIterator(ta.Iterator[T]):
-
-    def __init__(self, fn: ta.Callable[[], T] | None = None) -> None:
-        super().__init__()
-
-        self._fn = fn
-        self._deque: collections.deque[T] = collections.deque()
-
-    def __iter__(self) -> ta.Self:
-        return self
-
-    def push(self, item) -> None:
-        self._deque.append(item)
-
-    def __next__(self) -> T:
-        try:
-            return self._deque.popleft()
-        except IndexError:
-            if self._fn is None:
-                raise StopIteration from None
-            return self._fn()
-
-
-class RetainIterator(ta.Iterator[T]):
-
-    def __init__(self, fn: ta.Callable[[], T]) -> None:
-        super().__init__()
-
-        self._fn = fn
-        self._deque: collections.deque[T] = collections.deque()
-
-    def __iter__(self) -> ta.Self:
-        return self
-
-    def pop(self) -> None:
-        self._deque.popleft()
-
-    def __next__(self) -> T:
-        item = self._fn()
-        self._deque.append(item)
-        return item
-
-
-def unzip(it: ta.Iterable[T], width: int | None = None) -> list:
-    if width is None:
-        if not isinstance(it, PeekIterator):
-            it = PeekIterator(iter(it))
-        try:
-            width = len(it.peek())
-        except StopIteration:
-            return []
-
-    its: list[PrefetchIterator[T]] = []
-    running = True
-
-    def next_fn(idx):
-        nonlocal running
-        if not running:
-            raise StopIteration
-        try:
-            items = next(it)  # type: ignore
-        except StopIteration:
-            running = False
-            raise
-        for item_idx, item in enumerate(items):
-            its[item_idx].push(item)
-        return next(its[idx])
-
-    its.extend(PrefetchIterator(functools.partial(next_fn, idx)) for idx in range(width))
-    return its
-
-
-def take(n: int, iterable: ta.Iterable[T]) -> list[T]:
-    return list(itertools.islice(iterable, n))
-
-
-def chunk(n: int, iterable: ta.Iterable[T], strict: bool = False) -> ta.Iterator[list[T]]:
-    iterator = iter(functools.partial(take, n, iter(iterable)), [])
-    if strict:
-        def ret():
-            for chunk in iterator:
-                if len(chunk) != n:
-                    raise ValueError('iterable is not divisible by n.')
-                yield chunk
-        return iter(ret())
-    else:
-        return iterator
-
-
-def merge_on(
-        function: ta.Callable[[T], U],
-        *its: ta.Iterable[T],
-) -> ta.Iterator[tuple[U, list[tuple[int, T]]]]:
-    indexed_its = [
-        (
-            (function(item), it_idx, item)
-            for it_idx, item in zip(itertools.repeat(it_idx), it)
-        )
-        for it_idx, it in enumerate(its)
-    ]
-
-    grouped_indexed_its = itertools.groupby(
-        heapq.merge(*indexed_its),
-        key=lambda item_tuple: item_tuple[0],
-    )
-
-    return (
-        (fn_item, [(it_idx, item) for _, it_idx, item in grp])
-        for fn_item, grp in grouped_indexed_its
-    )
-
-
-def expand_indexed_pairs(
-        seq: ta.Iterable[tuple[int, T]],
-        default: T,
-        *,
-        width: int | None = None,
-) -> list[T]:
-    width_ = width
-    if width_ is None:
-        width_ = (max(idx for idx, _ in seq) + 1) if seq else 0
-    result = [default] * width_
-    for idx, value in seq:
-        if idx < width_:
-            result[idx] = value
-    return result
-
-
-##
-# https://docs.python.org/3/library/itertools.html#itertools-recipes
-
-
-def sliding_window(it: ta.Iterable[T], n: int) -> ta.Iterator[tuple[T, ...]]:
-    # sliding_window('ABCDEFG', 4) -> ABCD BCDE CDEF DEFG
-    iterator = iter(it)
-    window = collections.deque(itertools.islice(iterator, n - 1), maxlen=n)
-    for x in iterator:
-        window.append(x)
-        yield tuple(window)
-
-
-##
-
-
-@dc.dataclass()
-class UniqueStats:
-    key: ta.Any
-    num_seen: int
-    first_idx: int
-    last_idx: int
-
-
-@dc.dataclass(frozen=True)
-class UniqueItem(ta.Generic[T]):
-    idx: int
-    item: T
-    stats: UniqueStats
-    out: lang.Maybe[T]
-
-
-class UniqueIterator(ta.Iterator[UniqueItem[T]]):
-    def __init__(
-            self,
-            it: ta.Iterable[T],
-            keyer: ta.Callable[[T], ta.Any] = lang.identity,
-    ) -> None:
-        super().__init__()
-        self._it = enumerate(it)
-        self._keyer = keyer
-
-        self.stats: dict[ta.Any, UniqueStats] = {}
-
-    def __next__(self) -> UniqueItem[T]:
-        idx, item = next(self._it)
-        key = self._keyer(item)
-
-        try:
-            stats = self.stats[key]
-
-        except KeyError:
-            stats = self.stats[key] = UniqueStats(
-                key,
-                num_seen=1,
-                first_idx=idx,
-                last_idx=idx,
-            )
-            return UniqueItem(
-                idx,
-                item,
-                stats,
-                lang.just(item),
-            )
-
-        else:
-            stats.num_seen += 1
-            stats.last_idx = idx
-            return UniqueItem(
-                idx,
-                item,
-                stats,
-                lang.empty(),
-            )
{omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/LICENSE
File without changes
{omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/WHEEL
File without changes
{omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/entry_points.txt
File without changes
{omlish-0.0.0.dev165.dist-info → omlish-0.0.0.dev167.dist-info}/top_level.txt
File without changes