pyochain 0.5.0__py3-none-any.whl → 0.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


pyochain/_core/_main.py CHANGED
@@ -1,7 +1,7 @@
  from __future__ import annotations

  from abc import ABC, abstractmethod
- from collections.abc import Callable, Collection, Iterable, Iterator
+ from collections.abc import Callable, Iterable, Iterator, Sequence
  from typing import TYPE_CHECKING, Any, Concatenate, Self

  if TYPE_CHECKING:
@@ -53,9 +53,9 @@ class CommonBase[T](ABC, Pipeable):
  from pprint import pprint

  if pretty:
- pprint(self.unwrap(), sort_dicts=False)
+ self.into(pprint, sort_dicts=False)
  else:
- print(self.unwrap())
+ self.into(print)
  return self

  def unwrap(self) -> T:
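The two replaced calls rely on `into` forwarding the wrapped value to the callable. Judging from its other uses in this diff (`self.into(print)`, `return Iter(self.into(func, *args, **kwargs))`), `into(func, *args, **kwargs)` amounts to `func(self.unwrap(), *args, **kwargs)`. A minimal sketch of what the new lines end up doing, with a hypothetical payload:

```python
# Sketch only: what self.into(pprint, sort_dicts=False) reduces to,
# assuming into(func, *args, **kwargs) calls func(self.unwrap(), *args, **kwargs).
from pprint import pprint

data = {"b": [1, 2, 3], "a": {"nested": True}}  # hypothetical wrapped payload
pprint(data, sort_dicts=False)  # pretty branch
print(data)                     # plain branch
```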
@@ -90,49 +90,34 @@ class CommonBase[T](ABC, Pipeable):
  class IterWrapper[T](CommonBase[Iterable[T]]):
  _data: Iterable[T]

- def apply[**P, R](
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self.unwrap().__repr__()})"
+
+ def _eager[**P, U](
  self,
- func: Callable[Concatenate[Iterable[T], P], Iterator[R]],
+ factory: Callable[Concatenate[Iterable[T], P], Sequence[U]],
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> Iter[R]:
- """
- Apply a function to the underlying iterable and return an Iter of the result.
- Allow to pass user defined functions that transform the iterable while retaining the Iter wrapper.
- Args:
- func: Function to apply to the underlying iterable.
- *args: Positional arguments to pass to the function.
- **kwargs: Keyword arguments to pass to the function.
+ ) -> Seq[U]:
+ from .._iter import Seq

- Example:
- ```python
- >>> import pyochain as pc
- >>> def double(data: Iterable[int]) -> Iterator[int]:
- ... return (x * 2 for x in data)
- >>> pc.Iter.from_([1, 2, 3]).apply(double).into(list)
- [2, 4, 6]
- """
- from .._iter import Iter
+ def _(data: Iterable[T]):
+ return Seq(factory(data, *args, **kwargs))

- return Iter(self.into(func, *args, **kwargs))
+ return self.into(_)

- def collect(self, factory: Callable[[Iterable[T]], Collection[T]] = list) -> Seq[T]:
- """
- Collect the elements into a sequence.
- Args:
- factory: A callable that takes an iterable and returns a collection. Defaults to list.
-
- Example:
- ```python
- >>> import pyochain as pc
- >>> pc.Iter.from_(range(5)).collect().unwrap()
- [0, 1, 2, 3, 4]
+ def _lazy[**P, U](
+ self,
+ factory: Callable[Concatenate[Iterable[T], P], Iterator[U]],
+ *args: P.args,
+ **kwargs: P.kwargs,
+ ) -> Iter[U]:
+ from .._iter import Iter

- ```
- """
- from .._iter import Seq
+ def _(data: Iterable[T]):
+ return Iter(factory(data, *args, **kwargs))

- return Seq(self.into(factory))
+ return self.into(_)


  class MappingWrapper[K, V](CommonBase[dict[K, V]]):
@@ -147,6 +132,7 @@ class MappingWrapper[K, V](CommonBase[dict[K, V]]):
  """
  Apply a function to the underlying dict and return a Dict of the result.
  Allow to pass user defined functions that transform the dict while retaining the Dict wrapper.
+
  Args:
  func: Function to apply to the underlying dict.
  *args: Positional arguments to pass to the function.
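Net effect of this file's changes, as far as the diff shows: the public `apply`/`collect` pair on `IterWrapper` is replaced by private `_eager`/`_lazy` helpers (plus a `__repr__`), with eager results wrapped in `Seq` and lazy ones in `Iter`. A rough standalone sketch of that factory pattern; the `*Sketch` classes are illustrative stand-ins, not pyochain's classes:

```python
# Illustrative sketch of the eager/lazy helper pattern introduced here.
from collections.abc import Callable, Iterable, Iterator, Sequence


class SeqSketch[T]:
    """Stands in for pyochain's Seq: holds an already-materialised sequence."""
    def __init__(self, data: Sequence[T]) -> None:
        self.data = data


class IterSketch[T]:
    """Stands in for pyochain's Iter: holds a still-lazy iterator."""
    def __init__(self, data: Iterator[T]) -> None:
        self.data = data


class WrapperSketch[T]:
    def __init__(self, data: Iterable[T]) -> None:
        self._data = data

    def _eager[U](self, factory: Callable[[Iterable[T]], Sequence[U]]) -> SeqSketch[U]:
        # materialise through `factory`, then wrap the concrete sequence
        return SeqSketch(factory(self._data))

    def _lazy[U](self, factory: Callable[[Iterable[T]], Iterator[U]]) -> IterSketch[U]:
        # defer work: wrap the iterator returned by `factory`
        return IterSketch(factory(self._data))


w = WrapperSketch([3, 1, 2])
print(w._eager(sorted).data)      # [1, 2, 3] -- eager path, like tail/top_n/sorted
print(list(w._lazy(iter).data))   # [3, 1, 2] -- lazy path, wraps an iterator
```

The later hunks in `pyochain/_iter/_eager.py` switch their `self.collect(...)` calls over to this `self._eager(...)` helper.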
pyochain/_dict/_exprs.py CHANGED
@@ -17,6 +17,7 @@ class Expr(Pipeable):
  An Expr encapsulates a sequence of operations to be applied to keys on a python dict.

  Each Expr instance maintains:
+
  - A list of tokens representing the keys to access in the dict (the first being the input given to the `key` function),
  - A tuple of operations to apply to the accessed data
  - An alias for the expression (default to the last token).
@@ -112,6 +112,7 @@ class FilterDict[K, V](MappingWrapper[K, V]):
  Filter values that have a given attribute.

  This does not enforce type checking at runtime for performance considerations.
+
  Args:
  attr: Attribute name to check for.
  dtype: Optional expected type of the attribute for type hinting.
pyochain/_dict/_groups.py CHANGED
@@ -143,8 +143,7 @@ class GroupsDict[K, V](MappingWrapper[K, V]):
  ... agg_func=lambda d: d.iter_keys().count(),
  ... ).unwrap()
  {'A': 2, 'B': 1}
- >>>
- >>> # --- Exemple 2: Agrégation plus complexe ---
+ >>> # Second example
  >>> sales_data = {
  ... "store_1": "Electronics",
  ... "store_2": "Groceries",
pyochain/_dict/_iter.py CHANGED
@@ -43,7 +43,7 @@ class IterDict[K, V](MappingWrapper[K, V]):

  def _itr(data: Mapping[K, Iterable[U]]) -> dict[K, R]:
  def _(v: Iterable[U]) -> R:
- return func(Iter.from_(v), *args, **kwargs)
+ return func(Iter(iter(v)), *args, **kwargs)

  return cz.dicttoolz.valmap(_, data)

@@ -61,7 +61,10 @@ class IterDict[K, V](MappingWrapper[K, V]):
  """
  from .._iter import Iter

- return Iter.from_(self.unwrap().keys())
+ def _keys(data: dict[K, V]) -> Iter[K]:
+ return Iter(iter(data.keys()))
+
+ return self.into(_keys)

  def iter_values(self) -> Iter[V]:
  """
@@ -75,7 +78,10 @@ class IterDict[K, V](MappingWrapper[K, V]):
  """
  from .._iter import Iter

- return Iter.from_(self.unwrap().values())
+ def _values(data: dict[K, V]) -> Iter[V]:
+ return Iter(iter(data.values()))
+
+ return self.into(_values)

  def iter_items(self) -> Iter[tuple[K, V]]:
  """
@@ -89,4 +95,7 @@ class IterDict[K, V](MappingWrapper[K, V]):
  """
  from .._iter import Iter

- return Iter.from_(self.unwrap().items())
+ def _items(data: dict[K, V]) -> Iter[tuple[K, V]]:
+ return Iter(iter(data.items()))
+
+ return self.into(_items)
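The three `iter_*` accessors now build the `Iter` from `iter(...)` over the dict view instead of `Iter.from_`. A small reminder of why the explicit `iter()` matters: `.keys()`, `.values()` and `.items()` return views, which are iterable but are not iterators themselves. Plain-dict sketch, not pyochain code:

```python
# dict views vs. iterators (sketch)
d = {"a": 1, "b": 2}

view = d.items()     # a view: iterable and reusable, but has no __next__
it = iter(view)      # what the new _items helper effectively hands to Iter(...)
print(next(it))      # ('a', 1)
print(list(view))    # the view can still be iterated again: [('a', 1), ('b', 2)]
```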
pyochain/_dict/_main.py CHANGED
@@ -300,8 +300,5 @@ class Dict[K, V](

  ```
  """
- return (
- self.unwrap() == other.unwrap()
- if isinstance(other, Dict)
- else self.unwrap() == other
- )
+ other_data = other.unwrap() if isinstance(other, Dict) else other
+ return self.unwrap() == other_data
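The `__eq__` rewrite is behaviour-preserving: unwrap the other operand when it is a `Dict`, then compare plain dicts. A minimal stand-in (hypothetical `DictSketch`, not the real class) showing the flattened form:

```python
# Hypothetical stand-in for the Dict equality logic shown above (sketch only).
class DictSketch:
    def __init__(self, data: dict) -> None:
        self._data = data

    def unwrap(self) -> dict:
        return self._data

    def __eq__(self, other: object) -> bool:
        other_data = other.unwrap() if isinstance(other, DictSketch) else other
        return self._data == other_data


assert DictSketch({"a": 1}) == DictSketch({"a": 1})
assert DictSketch({"a": 1}) == {"a": 1}
assert DictSketch({"a": 1}) != {"a": 2}
```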
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Any, Concatenate

  import cytoolz as cz

- from .._core import MappingWrapper
+ from .._core import MappingWrapper, SupportsRichComparison

  if TYPE_CHECKING:
  from ._main import Dict
@@ -169,3 +169,24 @@ class ProcessDict[K, V](MappingWrapper[K, V]):
  return dict(sorted(data.items(), reverse=reverse))

  return self.apply(_sort)
+
+ def sort_values[U: SupportsRichComparison[Any]](
+ self: ProcessDict[K, U], reverse: bool = False
+ ) -> Dict[K, U]:
+ """
+ Sort the dictionary by its values and return a new Dict.
+
+ Args:
+ reverse: Whether to sort in descending order. Defaults to False.
+ ```python
+ >>> import pyochain as pc
+ >>> pc.Dict({"a": 2, "b": 1}).sort_values().unwrap()
+ {'b': 1, 'a': 2}
+
+ ```
+ """
+
+ def _sort_values(data: dict[K, U]) -> dict[K, U]:
+ return dict(sorted(data.items(), key=lambda item: item[1], reverse=reverse))
+
+ return self.apply(_sort_values)
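The new `sort_values` is a thin wrapper over sorting the items by value. Plain-Python equivalent of the inner `_sort_values` closure, reusing the docstring's data (sketch):

```python
# What sort_values builds internally, on plain dicts (sketch).
data = {"a": 2, "b": 1}
print(dict(sorted(data.items(), key=lambda item: item[1])))
# {'b': 1, 'a': 2}
print(dict(sorted(data.items(), key=lambda item: item[1], reverse=True)))
# {'a': 2, 'b': 1}
```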
@@ -30,6 +30,7 @@ class BaseAgg[T](IterWrapper[T]):

  - one from the left elements of the pairs
  - one from the right elements.
+
  This function is, in some sense, the opposite of zip.
  ```python
  >>> import pyochain as pc
@@ -21,6 +21,7 @@ class BaseBool[T](IterWrapper[T]):
  If any of them return false, it returns false.

  An empty iterator returns true.
+
  Args:
  predicate: Function to evaluate each item. Defaults to checking truthiness.
  Example:
@@ -57,6 +58,7 @@ class BaseBool[T](IterWrapper[T]):
  If they all return false, it returns false.

  An empty iterator returns false.
+
  Args:
  predicate: Function to evaluate each item. Defaults to checking truthiness.
  Example:
@@ -200,6 +202,7 @@ class BaseBool[T](IterWrapper[T]):
  - Returning the first element that satisfies the `predicate`.

  If all the elements return false, `Iter.find()` returns the default value.
+
  Args:
  default: Value to return if no element satisfies the predicate. Defaults to None.
  predicate: Function to evaluate each item. Defaults to checking truthiness.
@@ -1,20 +1,73 @@
  from __future__ import annotations

- from collections.abc import Callable, Iterable, Iterator
- from functools import partial
- from typing import TYPE_CHECKING, Any, overload
+ from collections.abc import Callable, Iterable
+ from typing import TYPE_CHECKING

  import cytoolz as cz
- import more_itertools as mit

  from .._core import IterWrapper

  if TYPE_CHECKING:
  from .._dict import Dict
- from ._main import Iter


- class BaseGroups[T](IterWrapper[T]):
+ class BaseDict[T](IterWrapper[T]):
+ def with_keys[K](self, keys: Iterable[K]) -> Dict[K, T]:
+ """
+ Create a Dict by zipping the iterable with keys.
+
+ Args:
+ keys: Iterable of keys to pair with the values.
+ Example:
+ ```python
+ >>> import pyochain as pc
+ >>> keys = ["a", "b", "c"]
+ >>> values = [1, 2, 3]
+ >>> pc.Iter.from_(values).with_keys(keys).unwrap()
+ {'a': 1, 'b': 2, 'c': 3}
+ >>> # This is equivalent to:
+ >>> pc.Iter.from_(keys).zip(values).pipe(
+ ... lambda x: pc.Dict(x.into(dict)).unwrap()
+ ... )
+ {'a': 1, 'b': 2, 'c': 3}
+
+ ```
+ """
+ from .._dict import Dict
+
+ def _with_keys(data: Iterable[T]) -> Dict[K, T]:
+ return Dict(dict(zip(keys, data)))
+
+ return self.into(_with_keys)
+
+ def with_values[V](self, values: Iterable[V]) -> Dict[T, V]:
+ """
+ Create a Dict by zipping the iterable with values.
+
+ Args:
+ values: Iterable of values to pair with the keys.
+ Example:
+ ```python
+ >>> import pyochain as pc
+ >>> keys = [1, 2, 3]
+ >>> values = ["a", "b", "c"]
+ >>> pc.Iter.from_(keys).with_values(values).unwrap()
+ {1: 'a', 2: 'b', 3: 'c'}
+ >>> # This is equivalent to:
+ >>> pc.Iter.from_(keys).zip(values).pipe(
+ ... lambda x: pc.Dict(x.into(dict)).unwrap()
+ ... )
+ {1: 'a', 2: 'b', 3: 'c'}
+
+ ```
+ """
+ from .._dict import Dict
+
+ def _with_values(data: Iterable[T]) -> Dict[T, V]:
+ return Dict(dict(zip(data, values)))
+
+ return self.into(_with_values)
+
  def reduce_by[K](
  self, key: Callable[[T], K], binop: Callable[[T, T], T]
  ) -> Dict[K, T]:
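Both new constructors reduce to a `zip` feeding `dict`: `with_keys` zips the supplied keys against the wrapped elements, while `with_values` uses the wrapped elements as keys and zips them against the supplied values. Plain-Python equivalents of the two inner closures (sketch):

```python
# Plain-Python equivalents of the _with_keys / _with_values closures above (sketch).
wrapped = [1, 2, 3]        # stands for the data inside the Iter
other = ["a", "b", "c"]    # the argument passed to the method

# with_keys(other): `other` supplies the keys, the wrapped data supplies the values
print(dict(zip(other, wrapped)))   # {'a': 1, 'b': 2, 'c': 3}

# with_values(other): the wrapped data supplies the keys, `other` the values
print(dict(zip(wrapped, other)))   # {1: 'a', 2: 'b', 3: 'c'}
```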
@@ -58,7 +111,10 @@ class BaseGroups[T](IterWrapper[T]):
  """
  from .._dict import Dict

- return Dict(self.into(partial(cz.itertoolz.reduceby, key, binop)))
+ def _reduce_by(data: Iterable[T]) -> Dict[K, T]:
+ return Dict(cz.itertoolz.reduceby(key, binop, data))
+
+ return self.into(_reduce_by)

  def group_by[K](self, on: Callable[[T], K]) -> Dict[K, list[T]]:
  """
@@ -117,7 +173,10 @@ class BaseGroups[T](IterWrapper[T]):
  """
  from .._dict import Dict

- return Dict(self.into(partial(cz.itertoolz.groupby, on)))
+ def _group_by(data: Iterable[T]) -> Dict[K, list[T]]:
+ return Dict(cz.itertoolz.groupby(on, data))
+
+ return self.into(_group_by)

  def frequencies(self) -> Dict[T, int]:
  """
@@ -132,7 +191,10 @@ class BaseGroups[T](IterWrapper[T]):
  """
  from .._dict import Dict

- return Dict(self.into(cz.itertoolz.frequencies))
+ def _frequencies(data: Iterable[T]) -> Dict[T, int]:
+ return Dict(cz.itertoolz.frequencies(data))
+
+ return self.into(_frequencies)

  def count_by[K](self, key: Callable[[T], K]) -> Dict[K, int]:
  """
@@ -154,111 +216,7 @@ class BaseGroups[T](IterWrapper[T]):
  """
  from .._dict import Dict

- return Dict(self.into(partial(cz.recipes.countby, key)))
-
- @overload
- def group_by_transform(
- self,
- keyfunc: None = None,
- valuefunc: None = None,
- reducefunc: None = None,
- ) -> Iter[tuple[T, Iterator[T]]]: ...
- @overload
- def group_by_transform[U](
- self,
- keyfunc: Callable[[T], U],
- valuefunc: None,
- reducefunc: None,
- ) -> Iter[tuple[U, Iterator[T]]]: ...
- @overload
- def group_by_transform[V](
- self,
- keyfunc: None,
- valuefunc: Callable[[T], V],
- reducefunc: None,
- ) -> Iter[tuple[T, Iterator[V]]]: ...
- @overload
- def group_by_transform[U, V](
- self,
- keyfunc: Callable[[T], U],
- valuefunc: Callable[[T], V],
- reducefunc: None,
- ) -> Iter[tuple[U, Iterator[V]]]: ...
- @overload
- def group_by_transform[W](
- self,
- keyfunc: None,
- valuefunc: None,
- reducefunc: Callable[[Iterator[T]], W],
- ) -> Iter[tuple[T, W]]: ...
- @overload
- def group_by_transform[U, W](
- self,
- keyfunc: Callable[[T], U],
- valuefunc: None,
- reducefunc: Callable[[Iterator[T]], W],
- ) -> Iter[tuple[U, W]]: ...
- @overload
- def group_by_transform[V, W](
- self,
- keyfunc: None,
- valuefunc: Callable[[T], V],
- reducefunc: Callable[[Iterator[V]], W],
- ) -> Iter[tuple[T, W]]: ...
- @overload
- def group_by_transform[U, V, W](
- self,
- keyfunc: Callable[[T], U],
- valuefunc: Callable[[T], V],
- reducefunc: Callable[[Iterator[V]], W],
- ) -> Iter[tuple[U, W]]: ...
- def group_by_transform[U, V](
- self,
- keyfunc: Callable[[T], U] | None = None,
- valuefunc: Callable[[T], V] | None = None,
- reducefunc: Any = None,
- ) -> Iter[tuple[Any, ...]]:
- """
- An extension of itertools.groupby that can apply transformations to the grouped data.
-
- Args:
- keyfunc: Function to compute the key for grouping. Defaults to None.
- valuefunc: Function to transform individual items after grouping. Defaults to None.
- reducefunc: Function to transform each group of items. Defaults to None.
-
- Example:
- ```python
- >>> import pyochain as pc
- >>> data = pc.Iter.from_("aAAbBBcCC")
- >>> data.group_by_transform(
- ... lambda k: k.upper(), lambda v: v.lower(), lambda g: "".join(g)
- ... ).into(list)
- [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]
-
- ```
- Each optional argument defaults to an identity function if not specified.
-
- group_by_transform is useful when grouping elements of an iterable using a separate iterable as the key.
-
- To do this, zip the iterables and pass a keyfunc that extracts the first element and a valuefunc that extracts the second element:
-
- Note that the order of items in the iterable is significant.
-
- Only adjacent items are grouped together, so if you don't want any duplicate groups, you should sort the iterable by the key function.
-
- Example:
- ```python
- >>> from operator import itemgetter
- >>> data = pc.Iter.from_([0, 0, 1, 1, 1, 2, 2, 2, 3])
- >>> data.zip("abcdefghi").group_by_transform(itemgetter(0), itemgetter(1)).map(
- ... lambda kv: (kv[0], "".join(kv[1]))
- ... ).into(list)
- [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
-
- ```
- """
-
- def _group_by_transform(data: Iterable[T]) -> Iterator[tuple[Any, ...]]:
- return mit.groupby_transform(data, keyfunc, valuefunc, reducefunc)
+ def _count_by(data: Iterable[T]) -> Dict[K, int]:
+ return Dict(cz.recipes.countby(key, data))

- return self.apply(_group_by_transform)
+ return self.into(_count_by)
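The grouping helpers keep the same cytoolz calls; only the plumbing changes from `functools.partial` to named inner closures passed through `self.into`. The heavily overloaded `group_by_transform` (a wrapper over `more_itertools.groupby_transform`) is removed outright, along with this module's `more_itertools` import, so callers would have to use `more_itertools` directly. A direct-use sketch of the cytoolz calls that remain, assuming `cytoolz` is installed:

```python
# Direct cytoolz usage matching the calls kept by this refactor (sketch).
import cytoolz as cz

words = ["apple", "avocado", "banana", "blueberry", "cherry"]
print(cz.itertoolz.groupby(lambda w: w[0], words))
# {'a': ['apple', 'avocado'], 'b': ['banana', 'blueberry'], 'c': ['cherry']}
print(cz.itertoolz.frequencies("abracadabra"))
# {'a': 5, 'b': 2, 'r': 2, 'c': 1, 'd': 1}
print(cz.recipes.countby(len, words))
# {5: 1, 7: 1, 6: 2, 9: 1}
print(cz.itertoolz.reduceby(len, lambda acc, w: acc + "|" + w, words, ""))
# {5: '|apple', 7: '|avocado', 6: '|banana|cherry', 9: '|blueberry'}
```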
pyochain/_iter/_eager.py CHANGED
@@ -38,7 +38,7 @@ class BaseEager[T](IterWrapper[T]):
  def _sort(data: Iterable[U]) -> list[U]:
  return sorted(data, reverse=reverse, key=key)

- return self.collect(_sort)
+ return self._eager(_sort)

  def tail(self, n: int) -> Seq[T]:
  """
@@ -54,7 +54,7 @@ class BaseEager[T](IterWrapper[T]):

  ```
  """
- return self.collect(partial(cz.itertoolz.tail, n))
+ return self._eager(partial(cz.itertoolz.tail, n))

  def top_n(self, n: int, key: Callable[[T], Any] | None = None) -> Seq[T]:
  """
@@ -71,7 +71,7 @@ class BaseEager[T](IterWrapper[T]):

  ```
  """
- return self.collect(partial(cz.itertoolz.topk, n, key=key))
+ return self._eager(partial(cz.itertoolz.topk, n, key=key))

  def union(self, *others: Iterable[T]) -> Seq[T]:
  """
@@ -91,10 +91,10 @@ class BaseEager[T](IterWrapper[T]):
  ```
  """

- def _union(data: Iterable[T]) -> set[T]:
- return set(data).union(*others)
+ def _union(data: Iterable[T]) -> list[T]:
+ return list(set(data).union(*others))

- return self.collect(_union)
+ return self._eager(_union)

  def intersection(self, *others: Iterable[T]) -> Seq[T]:
  """
@@ -109,15 +109,15 @@ class BaseEager[T](IterWrapper[T]):
  ```python
  >>> import pyochain as pc
  >>> pc.Iter.from_([1, 2, 2]).intersection([2, 3], [2]).unwrap()
- {2}
+ [2]

  ```
  """

- def _intersection(data: Iterable[T]) -> set[T]:
- return set(data).intersection(*others)
+ def _intersection(data: Iterable[T]) -> list[T]:
+ return list(set(data).intersection(*others))

- return self.collect(_intersection)
+ return self._eager(_intersection)

  def diff_unique(self, *others: Iterable[T]) -> Seq[T]:
  """
@@ -133,15 +133,15 @@ class BaseEager[T](IterWrapper[T]):
  ```python
  >>> import pyochain as pc
  >>> pc.Iter.from_([1, 2, 2]).diff_unique([2, 3]).unwrap()
- {1}
+ [1]

  ```
  """

- def _difference(data: Iterable[T]) -> set[T]:
- return set(data).difference(*others)
+ def _difference(data: Iterable[T]) -> list[T]:
+ return list(set(data).difference(*others))

- return self.collect(_difference)
+ return self._eager(_difference)

  def diff_symmetric(self, *others: Iterable[T]) -> Seq[T]:
  """
@@ -163,10 +163,10 @@ class BaseEager[T](IterWrapper[T]):
  ```
  """

- def _symmetric_difference(data: Iterable[T]) -> set[T]:
- return set(data).symmetric_difference(*others)
+ def _symmetric_difference(data: Iterable[T]) -> list[T]:
+ return list(set(data).symmetric_difference(*others))

- return self.collect(_symmetric_difference)
+ return self._eager(_symmetric_difference)

  def most_common(self, n: int | None = None) -> Seq[tuple[T, int]]:
  """
@@ -176,7 +176,6 @@ class BaseEager[T](IterWrapper[T]):

  Args:
  n: Number of most common elements to return. Defaults to None (all elements).
-
  Example:
  ```python
  >>> import pyochain as pc
@@ -187,9 +186,7 @@ class BaseEager[T](IterWrapper[T]):
  """
  from collections import Counter

- from ._main import Seq
-
  def _most_common(data: Iterable[T]) -> list[tuple[T, int]]:
  return Counter(data).most_common(n)

- return Seq(self.into(_most_common))
+ return self._eager(_most_common)
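Beyond the `collect` → `_eager` renaming, note the visible behaviour change in this file: the set-operation helpers (`union`, `intersection`, `diff_unique`, `diff_symmetric`) now materialise a `list` from the intermediate `set`, and the doctests change accordingly (`{2}` → `[2]`, `{1}` → `[1]`). A plain-Python sketch of the before/after results:

```python
# Result-type change in the set-operation helpers (plain-Python sketch).
data = [1, 2, 2]
print(set(data).intersection([2, 3], [2]))         # 0.5.0-style result: {2}
print(list(set(data).intersection([2, 3], [2])))   # 0.5.2-style result: [2]
print(set(data).difference([2, 3]))                # 0.5.0 diff_unique: {1}
print(list(set(data).difference([2, 3])))          # 0.5.2 diff_unique: [1]
```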