pyochain 0.5.1__py3-none-any.whl → 0.5.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

pyochain/_iter/_main.py CHANGED
@@ -1,14 +1,22 @@
  from __future__ import annotations

- from collections.abc import Callable, Collection, Generator, Iterable, Iterator
- from typing import TYPE_CHECKING, Any, Concatenate
+ import itertools
+ from collections.abc import (
+     Callable,
+     Generator,
+     Iterable,
+     Iterator,
+     Sequence,
+ )
+ from typing import TYPE_CHECKING, Any, Concatenate, overload, override
+
+ import cytoolz as cz

  from ._aggregations import BaseAgg
  from ._booleans import BaseBool
- from ._constructors import IterConstructors
+ from ._dicts import BaseDict
  from ._eager import BaseEager
  from ._filters import BaseFilter
- from ._groups import BaseGroups
  from ._joins import BaseJoins
  from ._lists import BaseList
  from ._maps import BaseMap
@@ -21,8 +29,11 @@ if TYPE_CHECKING:
      from .._dict import Dict


+ class CommonMethods[T](BaseAgg[T], BaseEager[T], BaseDict[T]):
+     pass
+
+
  class Iter[T](
-     BaseAgg[T],
      BaseBool[T],
      BaseFilter[T],
      BaseProcess[T],
@@ -32,18 +43,15 @@ class Iter[T](
      BaseTuples[T],
      BasePartitions[T],
      BaseJoins[T],
-     BaseGroups[T],
-     BaseEager[T],
-     IterConstructors,
+     CommonMethods[T],
  ):
      """
-     A wrapper around Python's built-in iterable types, providing a rich set of functional programming tools.
+     A wrapper around Python's built-in Iterators/Generators types, providing a rich set of functional programming tools.

-     It supports lazy evaluation, allowing for efficient processing of large datasets.
+     It's designed around lazy evaluation, allowing for efficient processing of large datasets.

-     It is not a collection itself, but a wrapper that provides additional methods for working with iterables.
-
-     It can be constructed from any iterable, including `lists`, `tuples`, `sets`, and `generators`.
+     - To instantiate from a lazy Iterator/Generator, simply pass it to the standard constructor.
+     - To instantiate from an eager Sequence (like a list or set), use the `from_` class method.
      """

      __slots__ = ("_data",)
@@ -51,8 +59,163 @@ class Iter[T](
      def __init__(self, data: Iterator[T] | Generator[T, Any, Any]) -> None:
          self._data = data

-     def __repr__(self) -> str:
-         return f"{self.__class__.__name__}({self.unwrap().__repr__()})"
+     @staticmethod
+     def from_count(start: int = 0, step: int = 1) -> Iter[int]:
+         """
+         Create an infinite iterator of evenly spaced values.
+
+         **Warning** ⚠️
+         This creates an infinite iterator.
+         Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken.
+
+         Args:
+             start: Starting value of the sequence. Defaults to 0.
+             step: Difference between consecutive values. Defaults to 1.
+         Example:
+         ```python
+         >>> import pyochain as pc
+         >>> pc.Iter.from_count(10, 2).take(3).into(list)
+         [10, 12, 14]
+
+         ```
+         """
+
+         return Iter(itertools.count(start, step))
+
+     @staticmethod
+     def from_func[U](func: Callable[[U], U], input: U) -> Iter[U]:
+         """
+         Create an infinite iterator by repeatedly applying a function on an original input.
+
+         **Warning** ⚠️
+         This creates an infinite iterator.
+         Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken.
+
+         Args:
+             func: Function to apply repeatedly.
+             input: Initial value to start the iteration.
+
+         Example:
+         ```python
+         >>> import pyochain as pc
+         >>> pc.Iter.from_func(lambda x: x + 1, 0).take(3).into(list)
+         [0, 1, 2]
+
+         ```
+         """
+
+         return Iter(cz.itertoolz.iterate(func, input))
+
+     @overload
+     @staticmethod
+     def from_[U](data: Iterable[U]) -> Iter[U]: ...
+     @overload
+     @staticmethod
+     def from_[U](data: U, *more_data: U) -> Iter[U]: ...
+     @staticmethod
+     def from_[U](data: Iterable[U] | U, *more_data: U) -> Iter[U]:
+         """
+         Create an iterator from any Iterable, or from unpacked values.
+
+         - An Iterable is any object capable of returning its members one at a time, permitting it to be iterated over in a for-loop.
+         - An Iterator is an object representing a stream of data; returned by calling `iter()` on an Iterable.
+         - Once an Iterator is exhausted, it cannot be reused or reset.
+
+         If you need to reuse the data, consider collecting it into a list first with `.collect()`.
+
+         In general, avoid intermediate references when dealing with lazy iterators, and prioritize method chaining instead.
+
+         Args:
+             data: Iterable to convert into an iterator, or a single value.
+             more_data: Additional values to include if 'data' is not an Iterable.
+         Example:
+         ```python
+         >>> import pyochain as pc
+         >>> data: tuple[int, ...] = (1, 2, 3)
+         >>> iterator = pc.Iter.from_(data)
+         >>> iterator.unwrap().__class__.__name__
+         'tuple_iterator'
+         >>> mapped = iterator.map(lambda x: x * 2)
+         >>> mapped.unwrap().__class__.__name__
+         'map'
+         >>> mapped.collect(tuple).unwrap()
+         (2, 4, 6)
+         >>> # iterator is now exhausted
+         >>> iterator.collect().unwrap()
+         []
+         >>> # Creating from unpacked values
+         >>> pc.Iter.from_(1, 2, 3).collect(tuple).unwrap()
+         (1, 2, 3)
+
+         ```
+         """
+
+         def _convert_data() -> Sequence[Any]:
+             if cz.itertoolz.isiterable(data):
+                 return data
+             else:
+                 return (data, *more_data)
+
+         return Iter(iter(_convert_data()))
+
+     @staticmethod
+     def unfold[S, V](seed: S, generator: Callable[[S], tuple[V, S] | None]) -> Iter[V]:
+         """
+         Create an iterator by repeatedly applying a generator function to an initial state.
+
+         The `generator` function takes the current state and must return:
+
+         - A tuple `(value, new_state)` to emit the `value` and continue with the `new_state`.
+         - `None` to stop the generation.
+
+         This is functionally equivalent to a state-based `while` loop.
+
+         **Warning** ⚠️
+         If the `generator` function never returns `None`, it creates an infinite iterator.
+         Be sure to use `Iter.take()` or `Iter.slice()` to limit the number of items taken if necessary.
+
+         Args:
+             seed: Initial state for the generator.
+             generator: Function that generates the next value and state.
+
+         Example:
+         ```python
+         >>> import pyochain as pc
+         >>> # Example 1: Simple counter up to 5
+         >>> def counter_generator(state: int) -> tuple[int, int] | None:
+         ...     if state < 5:
+         ...         return (state * 10, state + 1)
+         ...     return None
+         >>> pc.Iter.unfold(seed=0, generator=counter_generator).into(list)
+         [0, 10, 20, 30, 40]
+         >>> # Example 2: Fibonacci sequence up to 100
+         >>> type FibState = tuple[int, int]
+         >>> def fib_generator(state: FibState) -> tuple[int, FibState] | None:
+         ...     a, b = state
+         ...     if a > 100:
+         ...         return None
+         ...     return (a, (b, a + b))
+         >>> pc.Iter.unfold(seed=(0, 1), generator=fib_generator).into(list)
+         [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
+         >>> # Example 3: Infinite iterator (requires take())
+         >>> pc.Iter.unfold(seed=1, generator=lambda s: (s, s * 2)).take(5).into(list)
+         [1, 2, 4, 8, 16]
+
+         ```
+         """
+         from ._main import Iter
+
+         def _unfold() -> Iterator[V]:
+             current_seed: S = seed
+             while True:
+                 result: tuple[V, S] | None = generator(current_seed)
+                 if result is None:
+                     break
+                 value, next_seed = result
+                 yield value
+                 current_seed = next_seed
+
+         return Iter(_unfold())

      def itr[**P, R, U: Iterable[Any]](
          self: Iter[U],
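
Note: the `unfold` docstring above calls it functionally equivalent to a state-based `while` loop. A short sketch of that equivalence, reusing the Fibonacci step function from the doctest; the loop version is an illustration, not part of the package:

```python
import pyochain as pc

def fib_step(state: tuple[int, int]) -> tuple[int, tuple[int, int]] | None:
    a, b = state
    return None if a > 100 else (a, (b, a + b))

# unfold-based version, as in the doctest above.
via_unfold = pc.Iter.unfold(seed=(0, 1), generator=fib_step).into(list)

# The state-based while loop it stands in for.
via_loop: list[int] = []
state = (0, 1)
while (step := fib_step(state)) is not None:
    value, state = step
    via_loop.append(value)

assert via_unfold == via_loop == [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
```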
@@ -86,9 +249,9 @@ class Iter[T](
          """

          def _itr(data: Iterable[U]) -> Generator[R, None, None]:
-             return (func(Iter.from_(x), *args, **kwargs) for x in data)
+             return (func(Iter(iter(x)), *args, **kwargs) for x in data)

-         return self.apply(_itr)
+         return self._lazy(_itr)

      def struct[**P, R, K, V](
          self: Iter[dict[K, V]],
@@ -100,6 +263,7 @@ class Iter[T](
          Apply a function to each element after wrapping it in a Dict.

          This is a convenience method for the common pattern of mapping a function over an iterable of dictionaries.
+
          Args:
              func: Function to apply to each wrapped dictionary.
              *args: Positional arguments to pass to the function.
@@ -134,15 +298,9 @@ class Iter[T](
          ...     .struct(lambda d: d.drop("Continent").unwrap())
          ...     .into(list)
          ... ) # doctest: +NORMALIZE_WHITESPACE
-         Dict({
-             'America': [
-                 {'Name': 'Alice', 'City': 'New York'},
-                 {'Name': 'Charlie', 'City': 'New York'}
-             ],
-             'Europe': [
-                 {'Name': 'David', 'City': 'Paris'}
-             ]
-         })
+         {'America': [{'City': 'New York', 'Name': 'Alice'},
+                      {'City': 'New York', 'Name': 'Charlie'}],
+          'Europe': [{'City': 'Paris', 'Name': 'David'}]}

          ```
          """
@@ -151,74 +309,158 @@ class Iter[T](
          def _struct(data: Iterable[dict[K, V]]) -> Generator[R, None, None]:
              return (func(Dict(x), *args, **kwargs) for x in data)

-         return self.apply(_struct)
+         return self._lazy(_struct)

-     def with_keys[K](self, keys: Iterable[K]) -> Dict[K, T]:
+     def apply[**P, R](
+         self,
+         func: Callable[Concatenate[Iterable[T], P], Iterator[R]],
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> Iter[R]:
          """
-         Create a Dict by zipping the iterable with keys.
+         Apply a function to the underlying Iterator and return a new Iter instance.
+
+         Allow to pass user defined functions that transform the iterable while retaining the Iter wrapper.

          Args:
-             keys: Iterable of keys to pair with the values.
+             func: Function to apply to the underlying iterable.
+             *args: Positional arguments to pass to the function.
+             **kwargs: Keyword arguments to pass to the function.
+
          Example:
          ```python
          >>> import pyochain as pc
-         >>> keys = ["a", "b", "c"]
-         >>> values = [1, 2, 3]
-         >>> pc.Iter.from_(values).with_keys(keys).unwrap()
-         {'a': 1, 'b': 2, 'c': 3}
-         >>> # This is equivalent to:
-         >>> pc.Iter.from_(keys).zip(values).pipe(
-         ...     lambda x: pc.Dict(x.into(dict)).unwrap()
-         ... )
-         {'a': 1, 'b': 2, 'c': 3}
+         >>> def double(data: Iterable[int]) -> Iterator[int]:
+         ...     return (x * 2 for x in data)
+         >>> pc.Iter.from_([1, 2, 3]).apply(double).into(list)
+         [2, 4, 6]

          ```
          """
-         from .._dict import Dict
-
-         return Dict(dict(zip(keys, self.unwrap())))
+         return self._lazy(func, *args, **kwargs)

-     def with_values[V](self, values: Iterable[V]) -> Dict[T, V]:
+     def collect(self, factory: Callable[[Iterable[T]], Sequence[T]] = list) -> Seq[T]:
          """
-         Create a Dict by zipping the iterable with values.
+         Collect the elements into a sequence, using the provided factory.

          Args:
-             values: Iterable of values to pair with the keys.
+             factory: A callable that takes an iterable and returns a Sequence. Defaults to list.
+
          Example:
          ```python
          >>> import pyochain as pc
-         >>> keys = [1, 2, 3]
-         >>> values = ["a", "b", "c"]
-         >>> pc.Iter.from_(keys).with_values(values).unwrap()
-         {1: 'a', 2: 'b', 3: 'c'}
-         >>> # This is equivalent to:
-         >>> pc.Iter.from_(keys).zip(values).pipe(
-         ...     lambda x: pc.Dict(x.into(dict)).unwrap()
-         ... )
-         {1: 'a', 2: 'b', 3: 'c'}
+         >>> pc.Iter.from_(range(5)).collect().unwrap()
+         [0, 1, 2, 3, 4]

          ```
          """
-         from .._dict import Dict
+         return self._eager(factory)

-         return Dict(dict(zip(self.unwrap(), values)))
+     @override
+     def unwrap(self) -> Iterator[T]:
+         """
+         Unwrap and return the underlying Iterator.

+         ```python
+         >>> import pyochain as pc
+         >>> iterator = pc.Iter.from_([1, 2, 3])
+         >>> unwrapped = iterator.unwrap()
+         >>> list(unwrapped)
+         [1, 2, 3]

- class Seq[T](BaseAgg[T], BaseEager[T]):
+         ```
+         """
+         return self._data  # type: ignore[return-value]
+
+
+ class Seq[T](CommonMethods[T]):
      """
-     pyochain.Seq represent an in memory collection.
+     pyochain.Seq represent an in memory Sequence.

      Provides a subset of pyochain.Iter methods with eager evaluation, and is the return type of pyochain.Iter.collect().
      """

      __slots__ = ("_data",)

-     def __init__(self, data: Collection[T]) -> None:
+     def __init__(self, data: Sequence[T]) -> None:
          self._data = data

+     @overload
+     @staticmethod
+     def from_[U](data: Sequence[U]) -> Seq[U]: ...
+     @overload
+     @staticmethod
+     def from_[U](data: U, *more_data: U) -> Seq[U]: ...
+     @staticmethod
+     def from_[U](data: Sequence[U] | U, *more_data: U) -> Seq[U]:
+         """
+         Create a Seq from a Sequence or unpacked values.
+
+         Args:
+             data: Sequence of items or a single item.
+             more_data: Additional item to include if 'data' is not a Sequence.
+
+         Example:
+         ```python
+         >>> import pyochain as pc
+         >>> pc.Seq.from_([1, 2, 3]).unwrap()
+         [1, 2, 3]
+         >>> pc.Seq.from_(1, 2).unwrap()
+         (1, 2)
+
+         ```
+
+         """
+         if cz.itertoolz.isiterable(data):
+             return Seq(data)
+         else:
+             return Seq((data, *more_data))
+
      def iter(self) -> Iter[T]:
          """
          Get an iterator over the sequence.
          Call this to switch to lazy evaluation.
          """
-         return Iter.from_(self.unwrap())
+         return self._lazy(iter)
+
+     def apply[**P, R](
+         self,
+         func: Callable[Concatenate[Iterable[T], P], Sequence[R]],
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> Seq[R]:
+         """
+         Apply a function to the underlying Sequence and return a Seq instance.
+
+         Allow to pass user defined functions that transform the Sequence while retaining the Seq wrapper.
+
+         Args:
+             func: Function to apply to the underlying Sequence.
+             *args: Positional arguments to pass to the function.
+             **kwargs: Keyword arguments to pass to the function.
+
+         Example:
+         ```python
+         >>> import pyochain as pc
+         >>> def double(data: Iterable[int]) -> Sequence[int]:
+         ...     return [x * 2 for x in data]
+         >>> pc.Seq([1, 2, 3]).apply(double).into(list)
+         [2, 4, 6]
+
+         ```
+         """
+         return self._eager(func, *args, **kwargs)
+
+     @override
+     def unwrap(self) -> Sequence[T]:
+         """
+         Unwrap and return the underlying Sequence.
+
+         ```python
+         >>> import pyochain as pc
+         >>> pc.Seq([1, 2, 3]).unwrap()
+         [1, 2, 3]
+
+         ```
+         """
+         return self._data  # type: ignore[return-value]
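
Note: taken together, this hunk splits the API into a lazy `Iter` and an eager `Seq`: `collect()` materializes, `Seq.iter()` goes back to lazy, and each side gets its own `apply`/`unwrap`. A small usage sketch based only on the methods shown above:

```python
import pyochain as pc

# collect() materializes a lazy Iter into an eager Seq (list by default).
seq = pc.Iter.from_(range(5)).map(lambda x: x + 1).collect()
print(seq.unwrap())  # [1, 2, 3, 4, 5]

# The factory argument picks the Sequence type.
print(pc.Iter.from_(range(3)).collect(tuple).unwrap())  # (0, 1, 2)

# Seq.iter() switches back to lazy evaluation for further chaining.
print(seq.iter().map(lambda x: x * 10).into(list))  # [10, 20, 30, 40, 50]
```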
pyochain/_iter/_maps.py CHANGED
@@ -1,9 +1,9 @@
  from __future__ import annotations

  import itertools
- from collections.abc import Callable, Collection, Generator, Iterable, Iterator, Mapping
+ from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence
  from functools import partial
- from typing import TYPE_CHECKING, Any, Concatenate, Self, overload
+ from typing import TYPE_CHECKING, Any, Concatenate, overload

  import cytoolz as cz
  import more_itertools as mit
@@ -20,29 +20,31 @@ class BaseMap[T](IterWrapper[T]):
          func: Callable[Concatenate[T, P], Any],
          *args: P.args,
          **kwargs: P.kwargs,
-     ) -> Self:
+     ) -> None:
          """
-         Apply a function to each element in the iterable.
+         Consume the Iterator by applying a function to each element in the iterable.

          Args:
              func: Function to apply to each element.
              args: Positional arguments for the function.
              kwargs: Keyword arguments for the function.

-         Can be used for side effects such as printing or logging.
+         Is a terminal operation, and is useful for functions that have side effects, or when you want to force evaluation of a lazy iterable.
          ```python
          >>> import pyochain as pc
-         >>> pc.Iter.from_([1, 2, 3]).for_each(lambda x: print(x)).collect().unwrap()
+         >>> pc.Iter.from_([1, 2, 3]).for_each(lambda x: print(x))
          1
          2
          3
-         []

          ```
          """
-         for v in self.unwrap():
-             func(v, *args, **kwargs)
-         return self
+
+         def _for_each(data: Iterable[T]) -> None:
+             for v in data:
+                 func(v, *args, **kwargs)
+
+         return self.into(_for_each)

      def map[R](self, func: Callable[[T], R]) -> Iter[R]:
          """
@@ -58,23 +60,33 @@ class BaseMap[T](IterWrapper[T]):

          ```
          """
-         return self.apply(partial(map, func))
+         return self._lazy(partial(map, func))

      @overload
      def flat_map[U, R](
-         self: IterWrapper[Iterable[Iterable[Iterable[U]]]],
-         func: Callable[[T], Iterable[Iterable[R]]],
-     ) -> Iter[Iterable[Iterable[R]]]: ...
+         self: IterWrapper[Iterable[U]],
+         func: Callable[[U], R],
+     ) -> Iter[R]: ...
+     @overload
+     def flat_map[U, R](
+         self: IterWrapper[Iterator[U]], func: Callable[[U], R]
+     ) -> Iter[R]: ...
+     @overload
+     def flat_map[U, R](
+         self: IterWrapper[Sequence[U]], func: Callable[[U], R]
+     ) -> Iter[R]: ...
      @overload
      def flat_map[U, R](
-         self: IterWrapper[Iterable[Iterable[U]]], func: Callable[[T], Iterable[R]]
-     ) -> Iter[Iterable[R]]: ...
+         self: IterWrapper[list[U]], func: Callable[[U], R]
+     ) -> Iter[R]: ...
      @overload
      def flat_map[U, R](
-         self: IterWrapper[Iterable[U]], func: Callable[[T], R]
+         self: IterWrapper[tuple[U, ...]], func: Callable[[U], R]
      ) -> Iter[R]: ...
+     @overload
+     def flat_map[R](self: IterWrapper[range], func: Callable[[int], R]) -> Iter[R]: ...
      def flat_map[U: Iterable[Any], R](
-         self: IterWrapper[U], func: Callable[[T], R]
+         self: IterWrapper[U], func: Callable[[Any], R]
      ) -> Iter[Any]:
          """
          Map each element through func and flatten the result by one level.
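
Note: the new `flat_map` overloads above pin the element type of the inner iterables, so `func` receives the inner elements after one level of flattening. A minimal sketch of the behaviour (values are illustrative):

```python
import pyochain as pc

# One level of nesting is removed, then func is applied to each inner element.
nested = [[1, 2], [3, 4]]
print(pc.Iter.from_(nested).flat_map(lambda x: x * 10).into(list))  # [10, 20, 30, 40]
```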
@@ -94,7 +106,7 @@ class BaseMap[T](IterWrapper[T]):
          def _flat_map(data: Iterable[U]) -> map[R]:
              return map(func, itertools.chain.from_iterable(data))

-         return self.apply(_flat_map)
+         return self._lazy(_flat_map)

      def map_star[U: Iterable[Any], R](
          self: IterWrapper[U], func: Callable[..., R]
@@ -123,11 +135,12 @@ class BaseMap[T](IterWrapper[T]):
          ['blue-S', 'blue-M', 'red-S', 'red-M']

          ```
+
          - Use map_star when the performance matters (it is faster).
          - Use map with unpacking when readability matters (the types can be inferred).
          """

-         return self.apply(partial(itertools.starmap, func))
+         return self._lazy(partial(itertools.starmap, func))

      def map_if[R](
          self,
@@ -165,7 +178,7 @@ class BaseMap[T](IterWrapper[T]):

          ```
          """
-         return self.apply(mit.map_if, predicate, func, func_else=func_else)
+         return self._lazy(mit.map_if, predicate, func, func_else=func_else)

      def map_except[R](
          self, func: Callable[[T], R], *exceptions: type[BaseException]
@@ -192,17 +205,17 @@ class BaseMap[T](IterWrapper[T]):
          def _map_except(data: Iterable[T]) -> Iterator[R]:
              return mit.map_except(func, data, *exceptions)

-         return self.apply(_map_except)
+         return self._lazy(_map_except)

      def repeat(
-         self, n: int, factory: Callable[[Iterable[T]], Collection[T]] = tuple
+         self, n: int, factory: Callable[[Iterable[T]], Sequence[T]] = tuple
      ) -> Iter[Iterable[T]]:
          """
-         Repeat the entire iterable n times (as elements) and return Iter.
+         Repeat the entire iterable n times (as elements).

          Args:
              n: Number of repetitions.
-             factory: Factory to create the repeated collection (default: tuple).
+             factory: Factory to create the repeated Sequence (default: tuple).

          ```python
          >>> import pyochain as pc
@@ -247,7 +260,7 @@ class BaseMap[T](IterWrapper[T]):

          ```
          """
-         return self.apply(mit.repeat_last, default)
+         return self._lazy(mit.repeat_last, default)

      def ichunked(self, n: int) -> Iter[Iterator[T]]:
          """
@@ -275,17 +288,21 @@ class BaseMap[T](IterWrapper[T]):

          ```
          """
-         return self.apply(mit.ichunked, n)
+         return self._lazy(mit.ichunked, n)

      @overload
-     def flatten[U](
-         self: IterWrapper[Iterable[Iterable[Iterable[U]]]],
-     ) -> Iter[Iterable[Iterable[U]]]: ...
+     def flatten[U](self: IterWrapper[Iterable[U]]) -> Iter[U]: ...
      @overload
-     def flatten[U](self: IterWrapper[Iterable[Iterable[U]]]) -> Iter[Iterable[U]]: ...
+     def flatten[U](self: IterWrapper[Iterator[U]]) -> Iter[U]: ...
      @overload
-     def flatten[U](self: IterWrapper[Iterable[U]]) -> Iter[U]: ...
-     def flatten(self: IterWrapper[Iterable[Any]]) -> Iter[Any]:
+     def flatten[U](self: IterWrapper[Sequence[U]]) -> Iter[U]: ...
+     @overload
+     def flatten[U](self: IterWrapper[list[U]]) -> Iter[U]: ...
+     @overload
+     def flatten[U](self: IterWrapper[tuple[U, ...]]) -> Iter[U]: ...
+     @overload
+     def flatten(self: IterWrapper[range]) -> Iter[int]: ...
+     def flatten[U: Iterable[Any]](self: IterWrapper[U]) -> Iter[Any]:
          """
          Flatten one level of nesting and return a new Iterable wrapper.

@@ -297,7 +314,7 @@ class BaseMap[T](IterWrapper[T]):

          ```
          """
-         return self.apply(itertools.chain.from_iterable)
+         return self._lazy(itertools.chain.from_iterable)

      def pluck[U: Mapping[Any, Any]](
          self: IterWrapper[U], *keys: str | int
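
Note: like `flat_map`, the rewritten `flatten` overloads above describe removing exactly one level of nesting. A short sketch under that assumption:

```python
import pyochain as pc

# Exactly one level of nesting is removed.
print(pc.Iter.from_([[1, 2], [3, 4]]).flatten().into(list))     # [1, 2, 3, 4]
print(pc.Iter.from_([[[1], [2]], [[3]]]).flatten().into(list))  # [[1], [2], [3]]
```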
@@ -333,17 +350,16 @@ class BaseMap[T](IterWrapper[T]):
          """

          getter = partial(cz.dicttoolz.get_in, keys)
-         return self.apply(partial(map, getter))
+         return self._lazy(partial(map, getter))

      def round[U: float | int](
          self: IterWrapper[U], ndigits: int | None = None
      ) -> Iter[float]:
          """
-         Round each element in the iterable to the given number of decimal places and return Iter.
+         Round each element in the iterable to the given number of decimal places.

          Args:
              ndigits: Number of decimal places to round to.
-
          ```python
          >>> import pyochain as pc
          >>> pc.Iter.from_([1.2345, 2.3456, 3.4567]).round(2).into(list)
@@ -355,4 +371,4 @@ class BaseMap[T](IterWrapper[T]):
          def _round(data: Iterable[U]) -> Generator[float | int, None, None]:
              return (round(x, ndigits) for x in data)

-         return self.apply(_round)
+         return self._lazy(_round)