pyochain 0.5.1__py3-none-any.whl → 0.5.32__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

pyochain/_iter/_main.py CHANGED
@@ -1,14 +1,22 @@
 from __future__ import annotations
 
-from collections.abc import Callable, Collection, Generator, Iterable, Iterator
-from typing import TYPE_CHECKING, Any, Concatenate
+import itertools
+from collections.abc import (
+    Callable,
+    Generator,
+    Iterable,
+    Iterator,
+    Sequence,
+)
+from typing import TYPE_CHECKING, Any, Concatenate, overload, override
+
+import cytoolz as cz
 
 from ._aggregations import BaseAgg
 from ._booleans import BaseBool
-from ._constructors import IterConstructors
+from ._dicts import BaseDict
 from ._eager import BaseEager
 from ._filters import BaseFilter
-from ._groups import BaseGroups
 from ._joins import BaseJoins
 from ._lists import BaseList
 from ._maps import BaseMap
@@ -21,8 +29,15 @@ if TYPE_CHECKING:
     from .._dict import Dict
 
 
+class CommonMethods[T](BaseAgg[T], BaseEager[T], BaseDict[T]):
+    pass
+
+
+def _convert_data[T](data: Iterable[T] | T, *more_data: T) -> Iterable[T]:
+    return data if cz.itertoolz.isiterable(data) else (data, *more_data)
+
+
 class Iter[T](
-    BaseAgg[T],
     BaseBool[T],
     BaseFilter[T],
     BaseProcess[T],
@@ -32,18 +47,15 @@ class Iter[T](
     BaseTuples[T],
     BasePartitions[T],
     BaseJoins[T],
-    BaseGroups[T],
-    BaseEager[T],
-    IterConstructors,
+    CommonMethods[T],
 ):
     """
-    A wrapper around Python's built-in iterable types, providing a rich set of functional programming tools.
+    A wrapper around Python's built-in Iterators/Generators types, providing a rich set of functional programming tools.
 
-    It supports lazy evaluation, allowing for efficient processing of large datasets.
+    It's designed around lazy evaluation, allowing for efficient processing of large datasets.
 
-    It is not a collection itself, but a wrapper that provides additional methods for working with iterables.
-
-    It can be constructed from any iterable, including `lists`, `tuples`, `sets`, and `generators`.
+    - To instantiate from a lazy Iterator/Generator, simply pass it to the standard constructor.
+    - To instantiate from an eager Sequence (like a list or set), use the `from_` class method.
     """
 
     __slots__ = ("_data",)
@@ -51,8 +63,157 @@ class Iter[T](
     def __init__(self, data: Iterator[T] | Generator[T, Any, Any]) -> None:
         self._data = data
 
-    def __repr__(self) -> str:
-        return f"{self.__class__.__name__}({self.unwrap().__repr__()})"
+    @staticmethod
+    def from_count(start: int = 0, step: int = 1) -> Iter[int]:
+        """
+        Create an infinite iterator of evenly spaced values.
+
+        **Warning** ⚠️
+        This creates an infinite iterator.
+        Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken.
+
+        Args:
+            start: Starting value of the sequence. Defaults to 0.
+            step: Difference between consecutive values. Defaults to 1.
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> pc.Iter.from_count(10, 2).take(3).into(list)
+        [10, 12, 14]
+
+        ```
+        """
+
+        return Iter(itertools.count(start, step))
+
+    @staticmethod
+    def from_func[U](func: Callable[[U], U], input: U) -> Iter[U]:
+        """
+        Create an infinite iterator by repeatedly applying a function on an original input.
+
+        **Warning** ⚠️
+        This creates an infinite iterator.
+        Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken.
+
+        Args:
+            func: Function to apply repeatedly.
+            input: Initial value to start the iteration.
+
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> pc.Iter.from_func(lambda x: x + 1, 0).take(3).into(list)
+        [0, 1, 2]
+
+        ```
+        """
+
+        return Iter(cz.itertoolz.iterate(func, input))
+
+    @overload
+    @staticmethod
+    def from_[U](data: Iterable[U]) -> Iter[U]: ...
+    @overload
+    @staticmethod
+    def from_[U](data: U, *more_data: U) -> Iter[U]: ...
+    @staticmethod
+    def from_[U](data: Iterable[U] | U, *more_data: U) -> Iter[U]:
+        """
+        Create an iterator from any Iterable, or from unpacked values.
+
+        - An Iterable is any object capable of returning its members one at a time, permitting it to be iterated over in a for-loop.
+        - An Iterator is an object representing a stream of data; returned by calling `iter()` on an Iterable.
+        - Once an Iterator is exhausted, it cannot be reused or reset.
+
+        If you need to reuse the data, consider collecting it into a list first with `.collect()`.
+
+        In general, avoid intermediate references when dealing with lazy iterators, and prioritize method chaining instead.
+
+        Args:
+            data: Iterable to convert into an iterator, or a single value.
+            more_data: Additional values to include if 'data' is not an Iterable.
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> data: tuple[int, ...] = (1, 2, 3)
+        >>> iterator = pc.Iter.from_(data)
+        >>> iterator.unwrap().__class__.__name__
+        'tuple_iterator'
+        >>> mapped = iterator.map(lambda x: x * 2)
+        >>> mapped.unwrap().__class__.__name__
+        'map'
+        >>> mapped.collect(tuple).unwrap()
+        (2, 4, 6)
+        >>> # iterator is now exhausted
+        >>> iterator.collect().unwrap()
+        []
+        >>> # Creating from unpacked values
+        >>> pc.Iter.from_(1, 2, 3).collect(tuple).unwrap()
+        (1, 2, 3)
+
+        ```
+        """
+
+        return Iter(iter(_convert_data(data, *more_data)))
+
+    @staticmethod
+    def unfold[S, V](seed: S, generator: Callable[[S], tuple[V, S] | None]) -> Iter[V]:
+        """
+        Create an iterator by repeatedly applying a generator function to an initial state.
+
+        The `generator` function takes the current state and must return:
+
+        - A tuple `(value, new_state)` to emit the `value` and continue with the `new_state`.
+        - `None` to stop the generation.
+
+        This is functionally equivalent to a state-based `while` loop.
+
+        **Warning** ⚠️
+        If the `generator` function never returns `None`, it creates an infinite iterator.
+        Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken if necessary.
+
+        Args:
+            seed: Initial state for the generator.
+            generator: Function that generates the next value and state.
+
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> # Example 1: Simple counter up to 5
+        >>> def counter_generator(state: int) -> tuple[int, int] | None:
+        ...     if state < 5:
+        ...         return (state * 10, state + 1)
+        ...     return None
+        >>> pc.Iter.unfold(seed=0, generator=counter_generator).into(list)
+        [0, 10, 20, 30, 40]
+        >>> # Example 2: Fibonacci sequence up to 100
+        >>> type FibState = tuple[int, int]
+        >>> def fib_generator(state: FibState) -> tuple[int, FibState] | None:
+        ...     a, b = state
+        ...     if a > 100:
+        ...         return None
+        ...     return (a, (b, a + b))
+        >>> pc.Iter.unfold(seed=(0, 1), generator=fib_generator).into(list)
+        [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
+        >>> # Example 3: Infinite iterator (requires take())
+        >>> pc.Iter.unfold(seed=1, generator=lambda s: (s, s * 2)).take(5).into(list)
+        [1, 2, 4, 8, 16]
+
+        ```
+        """
+        from ._main import Iter
+
+        def _unfold() -> Iterator[V]:
+            current_seed: S = seed
+            while True:
+                result: tuple[V, S] | None = generator(current_seed)
+                if result is None:
+                    break
+                value, next_seed = result
+                yield value
+                current_seed = next_seed
+
+        return Iter(_unfold())
 
     def itr[**P, R, U: Iterable[Any]](
         self: Iter[U],
@@ -86,9 +247,9 @@
         """
 
         def _itr(data: Iterable[U]) -> Generator[R, None, None]:
-            return (func(Iter.from_(x), *args, **kwargs) for x in data)
+            return (func(Iter(iter(x)), *args, **kwargs) for x in data)
 
-        return self.apply(_itr)
+        return self._lazy(_itr)
 
     def struct[**P, R, K, V](
         self: Iter[dict[K, V]],
@@ -100,6 +261,7 @@
         Apply a function to each element after wrapping it in a Dict.
 
         This is a convenience method for the common pattern of mapping a function over an iterable of dictionaries.
+
        Args:
             func: Function to apply to each wrapped dictionary.
             *args: Positional arguments to pass to the function.
@@ -134,15 +296,9 @@
         ...     .struct(lambda d: d.drop("Continent").unwrap())
         ...     .into(list)
         ... ) # doctest: +NORMALIZE_WHITESPACE
-        Dict({
-            'America': [
-                {'Name': 'Alice', 'City': 'New York'},
-                {'Name': 'Charlie', 'City': 'New York'}
-            ],
-            'Europe': [
-                {'Name': 'David', 'City': 'Paris'}
-            ]
-        })
+        {'America': [{'City': 'New York', 'Name': 'Alice'},
+                     {'City': 'New York', 'Name': 'Charlie'}],
+         'Europe': [{'City': 'Paris', 'Name': 'David'}]}
 
         ```
         """
@@ -151,74 +307,155 @@
         def _struct(data: Iterable[dict[K, V]]) -> Generator[R, None, None]:
             return (func(Dict(x), *args, **kwargs) for x in data)
 
-        return self.apply(_struct)
+        return self._lazy(_struct)
 
-    def with_keys[K](self, keys: Iterable[K]) -> Dict[K, T]:
+    def apply[**P, R](
+        self,
+        func: Callable[Concatenate[Iterable[T], P], Iterator[R]],
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> Iter[R]:
         """
-        Create a Dict by zipping the iterable with keys.
+        Apply a function to the underlying Iterator and return a new Iter instance.
+
+        Allow to pass user defined functions that transform the iterable while retaining the Iter wrapper.
 
         Args:
-            keys: Iterable of keys to pair with the values.
+            func: Function to apply to the underlying iterable.
+            *args: Positional arguments to pass to the function.
+            **kwargs: Keyword arguments to pass to the function.
+
         Example:
         ```python
         >>> import pyochain as pc
-        >>> keys = ["a", "b", "c"]
-        >>> values = [1, 2, 3]
-        >>> pc.Iter.from_(values).with_keys(keys).unwrap()
-        {'a': 1, 'b': 2, 'c': 3}
-        >>> # This is equivalent to:
-        >>> pc.Iter.from_(keys).zip(values).pipe(
-        ...     lambda x: pc.Dict(x.into(dict)).unwrap()
-        ... )
-        {'a': 1, 'b': 2, 'c': 3}
+        >>> def double(data: Iterable[int]) -> Iterator[int]:
+        ...     return (x * 2 for x in data)
+        >>> pc.Iter.from_([1, 2, 3]).apply(double).into(list)
+        [2, 4, 6]
 
         ```
         """
-        from .._dict import Dict
-
-        return Dict(dict(zip(keys, self.unwrap())))
+        return self._lazy(func, *args, **kwargs)
 
-    def with_values[V](self, values: Iterable[V]) -> Dict[T, V]:
+    def collect(self, factory: Callable[[Iterable[T]], Sequence[T]] = list) -> Seq[T]:
         """
-        Create a Dict by zipping the iterable with values.
+        Collect the elements into a sequence, using the provided factory.
 
         Args:
-            values: Iterable of values to pair with the keys.
+            factory: A callable that takes an iterable and returns a Sequence. Defaults to list.
+
         Example:
         ```python
         >>> import pyochain as pc
-        >>> keys = [1, 2, 3]
-        >>> values = ["a", "b", "c"]
-        >>> pc.Iter.from_(keys).with_values(values).unwrap()
-        {1: 'a', 2: 'b', 3: 'c'}
-        >>> # This is equivalent to:
-        >>> pc.Iter.from_(keys).zip(values).pipe(
-        ...     lambda x: pc.Dict(x.into(dict)).unwrap()
-        ... )
-        {1: 'a', 2: 'b', 3: 'c'}
+        >>> pc.Iter.from_(range(5)).collect().unwrap()
+        [0, 1, 2, 3, 4]
 
         ```
         """
-        from .._dict import Dict
+        return self._eager(factory)
 
-        return Dict(dict(zip(self.unwrap(), values)))
+    @override
+    def unwrap(self) -> Iterator[T]:
+        """
+        Unwrap and return the underlying Iterator.
 
+        ```python
+        >>> import pyochain as pc
+        >>> iterator = pc.Iter.from_([1, 2, 3])
+        >>> unwrapped = iterator.unwrap()
+        >>> list(unwrapped)
+        [1, 2, 3]
 
-class Seq[T](BaseAgg[T], BaseEager[T]):
+        ```
+        """
+        return self._data # type: ignore[return-value]
+
+
+class Seq[T](CommonMethods[T]):
     """
-    pyochain.Seq represent an in memory collection.
+    pyochain.Seq represent an in memory Sequence.
 
     Provides a subset of pyochain.Iter methods with eager evaluation, and is the return type of pyochain.Iter.collect().
     """
 
     __slots__ = ("_data",)
 
-    def __init__(self, data: Collection[T]) -> None:
+    def __init__(self, data: Sequence[T]) -> None:
         self._data = data
 
+    @overload
+    @staticmethod
+    def from_[U](data: Sequence[U]) -> Seq[U]: ...
+    @overload
+    @staticmethod
+    def from_[U](data: U, *more_data: U) -> Seq[U]: ...
+    @staticmethod
+    def from_[U](data: Sequence[U] | U, *more_data: U) -> Seq[U]:
+        """
+        Create a Seq from a Sequence or unpacked values.
+
+        Args:
+            data: Sequence of items or a single item.
+            more_data: Additional item to include if 'data' is not a Sequence.
+
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> pc.Seq.from_([1, 2, 3]).unwrap()
+        [1, 2, 3]
+        >>> pc.Seq.from_(1, 2).unwrap()
+        (1, 2)
+
+        ```
+
+        """
+        return Seq(_convert_data(data, *more_data)) # type: ignore[return-value]
+
     def iter(self) -> Iter[T]:
         """
         Get an iterator over the sequence.
         Call this to switch to lazy evaluation.
         """
-        return Iter.from_(self.unwrap())
+        return self._lazy(iter)
+
+    def apply[**P, R](
+        self,
+        func: Callable[Concatenate[Iterable[T], P], Sequence[R]],
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> Seq[R]:
+        """
+        Apply a function to the underlying Sequence and return a Seq instance.
+
+        Allow to pass user defined functions that transform the Sequence while retaining the Seq wrapper.
+
+        Args:
+            func: Function to apply to the underlying Sequence.
+            *args: Positional arguments to pass to the function.
+            **kwargs: Keyword arguments to pass to the function.
+
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> def double(data: Iterable[int]) -> Sequence[int]:
+        ...     return [x * 2 for x in data]
+        >>> pc.Seq([1, 2, 3]).apply(double).into(list)
+        [2, 4, 6]
+
+        ```
+        """
+        return self._eager(func, *args, **kwargs)
+
+    @override
+    def unwrap(self) -> Sequence[T]:
+        """
+        Unwrap and return the underlying Sequence.
+
+        ```python
+        >>> import pyochain as pc
+        >>> pc.Seq([1, 2, 3]).unwrap()
+        [1, 2, 3]
+
+        ```
+        """
+        return self._data # type: ignore[return-value]
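
The doctests above already show each method in isolation; the following is only a minimal usage sketch that strings them together, assuming pyochain 0.5.32 behaves as the docstrings in this diff describe (variable names are illustrative):

```python
# Minimal sketch based on the doctests above; assumes pyochain 0.5.32 as shown in this diff.
import pyochain as pc

# Lazy sources go straight to the constructor; eager data goes through from_().
lazy = pc.Iter(x * x for x in range(5))   # wraps an existing generator
eager = pc.Iter.from_([1, 2, 3])          # wraps iter([1, 2, 3])

# Infinite constructors must be bounded before being consumed.
evens = pc.Iter.from_count(0, 2).take(3).into(list)  # [0, 2, 4]

# collect() returns a Seq; unwrap() gives back the underlying Sequence.
print(eager.map(lambda x: x * 10).collect().unwrap())  # [10, 20, 30]
print(lazy.collect().unwrap(), evens)                   # [0, 1, 4, 9, 16] [0, 2, 4]
```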
pyochain/_iter/_maps.py CHANGED
@@ -1,9 +1,9 @@
 from __future__ import annotations
 
 import itertools
-from collections.abc import Callable, Collection, Generator, Iterable, Iterator, Mapping
+from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence
 from functools import partial
-from typing import TYPE_CHECKING, Any, Concatenate, Self, overload
+from typing import TYPE_CHECKING, Any, Concatenate, overload
 
 import cytoolz as cz
 import more_itertools as mit
@@ -20,29 +20,31 @@ class BaseMap[T](IterWrapper[T]):
         func: Callable[Concatenate[T, P], Any],
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> Self:
+    ) -> None:
         """
-        Apply a function to each element in the iterable.
+        Consume the Iterator by applying a function to each element in the iterable.
 
         Args:
             func: Function to apply to each element.
             args: Positional arguments for the function.
             kwargs: Keyword arguments for the function.
 
-        Can be used for side effects such as printing or logging.
+        Is a terminal operation, and is useful for functions that have side effects, or when you want to force evaluation of a lazy iterable.
         ```python
         >>> import pyochain as pc
-        >>> pc.Iter.from_([1, 2, 3]).for_each(lambda x: print(x)).collect().unwrap()
+        >>> pc.Iter.from_([1, 2, 3]).for_each(lambda x: print(x))
         1
         2
         3
-        []
 
         ```
         """
-        for v in self.unwrap():
-            func(v, *args, **kwargs)
-        return self
+
+        def _for_each(data: Iterable[T]) -> None:
+            for v in data:
+                func(v, *args, **kwargs)
+
+        return self.into(_for_each)
 
     def map[R](self, func: Callable[[T], R]) -> Iter[R]:
         """
@@ -58,23 +60,33 @@ class BaseMap[T](IterWrapper[T]):
 
         ```
         """
-        return self.apply(partial(map, func))
+        return self._lazy(partial(map, func))
 
     @overload
     def flat_map[U, R](
-        self: IterWrapper[Iterable[Iterable[Iterable[U]]]],
-        func: Callable[[T], Iterable[Iterable[R]]],
-    ) -> Iter[Iterable[Iterable[R]]]: ...
+        self: IterWrapper[Iterable[U]],
+        func: Callable[[U], R],
+    ) -> Iter[R]: ...
+    @overload
+    def flat_map[U, R](
+        self: IterWrapper[Iterator[U]], func: Callable[[U], R]
+    ) -> Iter[R]: ...
+    @overload
+    def flat_map[U, R](
+        self: IterWrapper[Sequence[U]], func: Callable[[U], R]
+    ) -> Iter[R]: ...
     @overload
     def flat_map[U, R](
-        self: IterWrapper[Iterable[Iterable[U]]], func: Callable[[T], Iterable[R]]
-    ) -> Iter[Iterable[R]]: ...
+        self: IterWrapper[list[U]], func: Callable[[U], R]
+    ) -> Iter[R]: ...
     @overload
     def flat_map[U, R](
-        self: IterWrapper[Iterable[U]], func: Callable[[T], R]
+        self: IterWrapper[tuple[U, ...]], func: Callable[[U], R]
     ) -> Iter[R]: ...
+    @overload
+    def flat_map[R](self: IterWrapper[range], func: Callable[[int], R]) -> Iter[R]: ...
     def flat_map[U: Iterable[Any], R](
-        self: IterWrapper[U], func: Callable[[T], R]
+        self: IterWrapper[U], func: Callable[[Any], R]
     ) -> Iter[Any]:
         """
         Map each element through func and flatten the result by one level.
@@ -94,7 +106,7 @@ class BaseMap[T](IterWrapper[T]):
         def _flat_map(data: Iterable[U]) -> map[R]:
             return map(func, itertools.chain.from_iterable(data))
 
-        return self.apply(_flat_map)
+        return self._lazy(_flat_map)
 
     def map_star[U: Iterable[Any], R](
         self: IterWrapper[U], func: Callable[..., R]
@@ -123,11 +135,12 @@ class BaseMap[T](IterWrapper[T]):
         ['blue-S', 'blue-M', 'red-S', 'red-M']
 
         ```
+
         - Use map_star when the performance matters (it is faster).
         - Use map with unpacking when readability matters (the types can be inferred).
         """
 
-        return self.apply(partial(itertools.starmap, func))
+        return self._lazy(partial(itertools.starmap, func))
 
     def map_if[R](
         self,
@@ -165,7 +178,7 @@ class BaseMap[T](IterWrapper[T]):
 
         ```
         """
-        return self.apply(mit.map_if, predicate, func, func_else=func_else)
+        return self._lazy(mit.map_if, predicate, func, func_else=func_else)
 
     def map_except[R](
         self, func: Callable[[T], R], *exceptions: type[BaseException]
@@ -192,17 +205,17 @@ class BaseMap[T](IterWrapper[T]):
         def _map_except(data: Iterable[T]) -> Iterator[R]:
             return mit.map_except(func, data, *exceptions)
 
-        return self.apply(_map_except)
+        return self._lazy(_map_except)
 
     def repeat(
-        self, n: int, factory: Callable[[Iterable[T]], Collection[T]] = tuple
+        self, n: int, factory: Callable[[Iterable[T]], Sequence[T]] = tuple
     ) -> Iter[Iterable[T]]:
         """
-        Repeat the entire iterable n times (as elements) and return Iter.
+        Repeat the entire iterable n times (as elements).
 
         Args:
             n: Number of repetitions.
-            factory: Factory to create the repeated collection (default: tuple).
+            factory: Factory to create the repeated Sequence (default: tuple).
 
         ```python
         >>> import pyochain as pc
@@ -217,7 +230,7 @@ class BaseMap[T](IterWrapper[T]):
         def _repeat(data: Iterable[T]) -> Iterator[Iterable[T]]:
             return itertools.repeat(factory(data), n)
 
-        return self.apply(_repeat)
+        return self._lazy(_repeat)
 
     @overload
     def repeat_last(self, default: T) -> Iter[T]: ...
@@ -247,7 +260,7 @@ class BaseMap[T](IterWrapper[T]):
 
         ```
         """
-        return self.apply(mit.repeat_last, default)
+        return self._lazy(mit.repeat_last, default)
 
     def ichunked(self, n: int) -> Iter[Iterator[T]]:
         """
@@ -275,17 +288,21 @@ class BaseMap[T](IterWrapper[T]):
 
         ```
         """
-        return self.apply(mit.ichunked, n)
+        return self._lazy(mit.ichunked, n)
 
     @overload
-    def flatten[U](
-        self: IterWrapper[Iterable[Iterable[Iterable[U]]]],
-    ) -> Iter[Iterable[Iterable[U]]]: ...
+    def flatten[U](self: IterWrapper[Iterable[U]]) -> Iter[U]: ...
     @overload
-    def flatten[U](self: IterWrapper[Iterable[Iterable[U]]]) -> Iter[Iterable[U]]: ...
+    def flatten[U](self: IterWrapper[Iterator[U]]) -> Iter[U]: ...
     @overload
-    def flatten[U](self: IterWrapper[Iterable[U]]) -> Iter[U]: ...
-    def flatten(self: IterWrapper[Iterable[Any]]) -> Iter[Any]:
+    def flatten[U](self: IterWrapper[Sequence[U]]) -> Iter[U]: ...
+    @overload
+    def flatten[U](self: IterWrapper[list[U]]) -> Iter[U]: ...
+    @overload
+    def flatten[U](self: IterWrapper[tuple[U, ...]]) -> Iter[U]: ...
+    @overload
+    def flatten(self: IterWrapper[range]) -> Iter[int]: ...
+    def flatten[U: Iterable[Any]](self: IterWrapper[U]) -> Iter[Any]:
         """
         Flatten one level of nesting and return a new Iterable wrapper.
 
@@ -297,7 +314,7 @@ class BaseMap[T](IterWrapper[T]):
 
         ```
         """
-        return self.apply(itertools.chain.from_iterable)
+        return self._lazy(itertools.chain.from_iterable)
 
     def pluck[U: Mapping[Any, Any]](
         self: IterWrapper[U], *keys: str | int
@@ -333,17 +350,16 @@ class BaseMap[T](IterWrapper[T]):
         """
 
         getter = partial(cz.dicttoolz.get_in, keys)
-        return self.apply(partial(map, getter))
+        return self._lazy(partial(map, getter))
 
     def round[U: float | int](
         self: IterWrapper[U], ndigits: int | None = None
     ) -> Iter[float]:
         """
-        Round each element in the iterable to the given number of decimal places and return Iter.
+        Round each element in the iterable to the given number of decimal places.
 
         Args:
             ndigits: Number of decimal places to round to.
-
         ```python
         >>> import pyochain as pc
         >>> pc.Iter.from_([1.2345, 2.3456, 3.4567]).round(2).into(list)
@@ -355,4 +371,4 @@ class BaseMap[T](IterWrapper[T]):
         def _round(data: Iterable[U]) -> Generator[float | int, None, None]:
             return (round(x, ndigits) for x in data)
 
-        return self.apply(_round)
+        return self._lazy(_round)
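
For the _maps.py changes, a short hedged sketch of the reworked behaviour as documented above, assuming pyochain 0.5.32 (`for_each` is now a terminal operation that returns `None`, and `_flat_map` flattens one level before mapping):

```python
# Sketch based on the docstrings above; assumes pyochain 0.5.32 as shown in this diff.
import pyochain as pc

# for_each consumes the iterator for its side effects and returns None.
result = pc.Iter.from_([1, 2, 3]).for_each(print)  # prints 1, 2, 3
assert result is None

# flatten() removes one level of nesting; flat_map() flattens one level,
# then maps the function over the inner elements (see _flat_map above).
print(pc.Iter.from_([[1, 2], [3]]).flatten().into(list))                  # [1, 2, 3]
print(pc.Iter.from_([[1, 2], [3]]).flat_map(lambda x: x * 2).into(list))  # [2, 4, 6]
```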