pyochain 0.5.1__tar.gz → 0.5.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyochain might be problematic.
- {pyochain-0.5.1 → pyochain-0.5.2}/PKG-INFO +4 -6
- {pyochain-0.5.1 → pyochain-0.5.2}/README.md +2 -4
- {pyochain-0.5.1 → pyochain-0.5.2}/pyproject.toml +4 -4
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_core/_main.py +24 -38
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_exprs.py +1 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_filters.py +1 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_groups.py +1 -2
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_iter.py +13 -4
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_main.py +2 -5
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_process.py +22 -1
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_aggregations.py +1 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_booleans.py +3 -0
- pyochain-0.5.1/src/pyochain/_iter/_groups.py → pyochain-0.5.2/src/pyochain/_iter/_dicts.py +74 -116
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_eager.py +18 -21
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_filters.py +31 -24
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_joins.py +10 -8
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_lists.py +11 -9
- pyochain-0.5.2/src/pyochain/_iter/_main.py +469 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_maps.py +28 -26
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_partitions.py +11 -14
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_process.py +25 -31
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_rolling.py +22 -28
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/_tuples.py +119 -14
- pyochain-0.5.1/src/pyochain/_iter/_constructors.py +0 -155
- pyochain-0.5.1/src/pyochain/_iter/_main.py +0 -224
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/__init__.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_core/__init__.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_core/_protocols.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/__init__.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_funcs.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_joins.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_dict/_nested.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/_iter/__init__.py +0 -0
- {pyochain-0.5.1 → pyochain-0.5.2}/src/pyochain/py.typed +0 -0
@@ -1,7 +1,7 @@
 Metadata-Version: 2.3
 Name: pyochain
-Version: 0.5.1
-Summary:
+Version: 0.5.2
+Summary: Method chaining for iterables and dictionaries in Python.
 Requires-Dist: cytoolz>=1.0.1
 Requires-Dist: more-itertools>=10.8.0
 Requires-Dist: rolling>=0.5.0
@@ -33,7 +33,7 @@ The full API reference can be found at:

 * **Declarative over Imperative:** Replace explicit `for` and `while` loops with sequences of high-level operations (map, filter, group, join...).
 * **Fluent Chaining:** Each method transforms the data and returns a new wrapper instance, allowing for seamless chaining.
-* **Lazy and Eager:** `Iter` operates lazily for efficiency on large or infinite sequences, while `Seq` represents materialized
+* **Lazy and Eager:** `Iter` operates lazily for efficiency on large or infinite sequences, while `Seq` represents materialized sequences for eager operations.
 * **100% Type-safe:** Extensive use of generics and overloads ensures type safety and improves developer experience.
 * **Documentation-first:** Each method is thoroughly documented with clear explanations, and usage examples. Before any commit is made, each docstring is automatically tested to ensure accuracy. This also allows for a convenient experience in IDEs, where developers can easily access documentation with a simple hover of the mouse.
 * **Functional paradigm:** Design encourages building complex data transformations by composing simple, reusable functions on known buildings blocks, rather than implementing customs classes each time.
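The lazy/eager split and the fluent chaining style can be seen in the doctests elsewhere in this diff; a minimal sketch, assuming only the `Iter.from_`, `frequencies`, and `unwrap` calls that appear in the changed files:

```python
>>> import pyochain as pc
>>> pc.Iter.from_([1, 2, 2, 3]).frequencies().unwrap()
{1: 1, 2: 2, 3: 1}
```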
@@ -57,7 +57,7 @@ Provides a vast array of methods for transformation, filtering, aggregation, joi

 #### `Seq[T]`

-Wraps a Python `
+Wraps a Python `Sequence` (`list`, `tuple`...), and represents **eagerly** evaluated data.

 Exposes a subset of the `Iter` methods who operate on the full dataset (e.g., `sort`, `union`) or who aggregate it.

@@ -122,8 +122,6 @@ Each method and class make extensive use of generics, type hints, and overloads

 Since there's much less need for intermediate variables, the developper don't have to annotate them as much, whilst still keeping a type-safe codebase.

-Target: modern Python 3.13 syntax (PEP 695 generics, updated collections.abc types).
-
 ### Expressions for Dict ``pyochain.key``

 Compute new fields from existing nested data with key() and Expr.apply(), either selecting a new dict or merging into the root.
@@ -23,7 +23,7 @@ The full API reference can be found at:

 * **Declarative over Imperative:** Replace explicit `for` and `while` loops with sequences of high-level operations (map, filter, group, join...).
 * **Fluent Chaining:** Each method transforms the data and returns a new wrapper instance, allowing for seamless chaining.
-* **Lazy and Eager:** `Iter` operates lazily for efficiency on large or infinite sequences, while `Seq` represents materialized
+* **Lazy and Eager:** `Iter` operates lazily for efficiency on large or infinite sequences, while `Seq` represents materialized sequences for eager operations.
 * **100% Type-safe:** Extensive use of generics and overloads ensures type safety and improves developer experience.
 * **Documentation-first:** Each method is thoroughly documented with clear explanations, and usage examples. Before any commit is made, each docstring is automatically tested to ensure accuracy. This also allows for a convenient experience in IDEs, where developers can easily access documentation with a simple hover of the mouse.
 * **Functional paradigm:** Design encourages building complex data transformations by composing simple, reusable functions on known buildings blocks, rather than implementing customs classes each time.
@@ -47,7 +47,7 @@ Provides a vast array of methods for transformation, filtering, aggregation, joi

 #### `Seq[T]`

-Wraps a Python `
+Wraps a Python `Sequence` (`list`, `tuple`...), and represents **eagerly** evaluated data.

 Exposes a subset of the `Iter` methods who operate on the full dataset (e.g., `sort`, `union`) or who aggregate it.

@@ -112,8 +112,6 @@ Each method and class make extensive use of generics, type hints, and overloads

 Since there's much less need for intermediate variables, the developper don't have to annotate them as much, whilst still keeping a type-safe codebase.

-Target: modern Python 3.13 syntax (PEP 695 generics, updated collections.abc types).
-
 ### Expressions for Dict ``pyochain.key``

 Compute new fields from existing nested data with key() and Expr.apply(), either selecting a new dict or merging into the root.
@@ -1,15 +1,14 @@
 [project]
-description = "
+description = "Method chaining for iterables and dictionaries in Python."
 name = "pyochain"
 readme = "README.md"
 requires-python = ">=3.12"
-version = "0.5.1"
+version = "0.5.2"

 dependencies = ["cytoolz>=1.0.1", "more-itertools>=10.8.0", "rolling>=0.5.0"]

 [dependency-groups]
 dev = [
-    "cytoolz-stubs",
     "doctester",
     "griffe>=1.14.0",
     "mkdocs>=1.6.1",
@@ -21,13 +20,14 @@ dev = [
     "polars>=1.33.1",
     "ruff>=0.14.1",
     "rolling @ git+https://github.com/OutSquareCapital/rolling.git@add-type-stubs",
+    "cytoolz-stubs",
 ]

 [tool.ruff.format]
 docstring-code-format = true

 [tool.uv.sources]
-cytoolz-stubs = { git = "https://github.com/
+cytoolz-stubs = { git = "https://github.com/OutSquareCapital/cytoolz-stubs.git" }
 doctester = { git = "https://github.com/OutSquareCapital/doctester.git" }

 [build-system]
@@ -1,7 +1,7 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from collections.abc import Callable,
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from typing import TYPE_CHECKING, Any, Concatenate, Self

 if TYPE_CHECKING:
@@ -53,9 +53,9 @@ class CommonBase[T](ABC, Pipeable):
         from pprint import pprint

         if pretty:
-
+            self.into(pprint, sort_dicts=False)
         else:
-
+            self.into(print)
         return self

     def unwrap(self) -> T:
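The new branch bodies delegate to `pprint` and `print` through `into`; the `sort_dicts=False` flag is what keeps dictionary keys in insertion order rather than sorting them, as the standard-library example below shows:

```python
from pprint import pprint

data = {"b": 2, "a": 1}
pprint(data, sort_dicts=False)  # {'b': 2, 'a': 1} -- insertion order preserved
pprint(data)                    # {'a': 1, 'b': 2} -- keys sorted by default
```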
@@ -90,49 +90,34 @@ class CommonBase[T](ABC, Pipeable):
 class IterWrapper[T](CommonBase[Iterable[T]]):
     _data: Iterable[T]

-    def
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.unwrap().__repr__()})"
+
+    def _eager[**P, U](
         self,
-
+        factory: Callable[Concatenate[Iterable[T], P], Sequence[U]],
         *args: P.args,
         **kwargs: P.kwargs,
-    ) ->
-
-        Apply a function to the underlying iterable and return an Iter of the result.
-        Allow to pass user defined functions that transform the iterable while retaining the Iter wrapper.
-        Args:
-            func: Function to apply to the underlying iterable.
-            *args: Positional arguments to pass to the function.
-            **kwargs: Keyword arguments to pass to the function.
+    ) -> Seq[U]:
+        from .._iter import Seq

-
-
-        >>> import pyochain as pc
-        >>> def double(data: Iterable[int]) -> Iterator[int]:
-        ...     return (x * 2 for x in data)
-        >>> pc.Iter.from_([1, 2, 3]).apply(double).into(list)
-        [2, 4, 6]
-        """
-        from .._iter import Iter
+        def _(data: Iterable[T]):
+            return Seq(factory(data, *args, **kwargs))

-        return
+        return self.into(_)

-    def
-
-
-
-
-
-
-        ```python
-        >>> import pyochain as pc
-        >>> pc.Iter.from_(range(5)).collect().unwrap()
-        [0, 1, 2, 3, 4]
+    def _lazy[**P, U](
+        self,
+        factory: Callable[Concatenate[Iterable[T], P], Iterator[U]],
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> Iter[U]:
+        from .._iter import Iter

-
-
-        from .._iter import Seq
+        def _(data: Iterable[T]):
+            return Iter(factory(data, *args, **kwargs))

-        return
+        return self.into(_)


 class MappingWrapper[K, V](CommonBase[dict[K, V]]):
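The `_eager`/`_lazy` helpers above centralize the rewrap step: a factory receives the underlying iterable plus extra arguments, and the result comes back wrapped as `Seq` or `Iter`. A hypothetical sketch of a method built on `_lazy` (the `take` name and the `islice` factory are illustrative, not taken from the package):

```python
from collections.abc import Iterable, Iterator
from itertools import islice


def take_factory(data: Iterable[int], n: int) -> Iterator[int]:
    """Yield at most the first n items, lazily."""
    return islice(data, n)


# A wrapper method could then reduce to a one-liner:
#     def take(self, n: int) -> Iter[T]:
#         return self._lazy(take_factory, n)
```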
@@ -147,6 +132,7 @@ class MappingWrapper[K, V](CommonBase[dict[K, V]]):
         """
         Apply a function to the underlying dict and return a Dict of the result.
         Allow to pass user defined functions that transform the dict while retaining the Dict wrapper.
+
         Args:
             func: Function to apply to the underlying dict.
             *args: Positional arguments to pass to the function.
@@ -17,6 +17,7 @@ class Expr(Pipeable):
     An Expr encapsulates a sequence of operations to be applied to keys on a python dict.

     Each Expr instance maintains:
+
     - A list of tokens representing the keys to access in the dict (the first being the input given to the `key` function),
     - A tuple of operations to apply to the accessed data
     - An alias for the expression (default to the last token).
@@ -112,6 +112,7 @@ class FilterDict[K, V](MappingWrapper[K, V]):
         Filter values that have a given attribute.

         This does not enforce type checking at runtime for performance considerations.
+
         Args:
             attr: Attribute name to check for.
             dtype: Optional expected type of the attribute for type hinting.
@@ -143,8 +143,7 @@ class GroupsDict[K, V](MappingWrapper[K, V]):
         ...     agg_func=lambda d: d.iter_keys().count(),
         ... ).unwrap()
         {'A': 2, 'B': 1}
-        >>>
-        >>> # --- Exemple 2: Agrégation plus complexe ---
+        >>> # Second example
         >>> sales_data = {
         ...     "store_1": "Electronics",
         ...     "store_2": "Groceries",
@@ -43,7 +43,7 @@ class IterDict[K, V](MappingWrapper[K, V]):

         def _itr(data: Mapping[K, Iterable[U]]) -> dict[K, R]:
             def _(v: Iterable[U]) -> R:
-                return func(Iter
+                return func(Iter(iter(v)), *args, **kwargs)

             return cz.dicttoolz.valmap(_, data)

@@ -61,7 +61,10 @@ class IterDict[K, V](MappingWrapper[K, V]):
         """
         from .._iter import Iter

-
+        def _keys(data: dict[K, V]) -> Iter[K]:
+            return Iter(iter(data.keys()))
+
+        return self.into(_keys)

     def iter_values(self) -> Iter[V]:
         """
@@ -75,7 +78,10 @@ class IterDict[K, V](MappingWrapper[K, V]):
         """
         from .._iter import Iter

-
+        def _values(data: dict[K, V]) -> Iter[V]:
+            return Iter(iter(data.values()))
+
+        return self.into(_values)

     def iter_items(self) -> Iter[tuple[K, V]]:
         """
@@ -89,4 +95,7 @@ class IterDict[K, V](MappingWrapper[K, V]):
         """
         from .._iter import Iter

-
+        def _items(data: dict[K, V]) -> Iter[tuple[K, V]]:
+            return Iter(iter(data.items()))
+
+        return self.into(_items)
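The three `iter_*` methods now wrap the corresponding dict view in a plain iterator before handing it to `Iter`; behaviourally this matches the standard dict views, as in this illustrative comparison:

```python
data = {"a": 1, "b": 2}

assert list(iter(data.keys())) == ["a", "b"]
assert list(iter(data.values())) == [1, 2]
assert list(iter(data.items())) == [("a", 1), ("b", 2)]
```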
@@ -300,8 +300,5 @@ class Dict[K, V](

         ```
         """
-
-
-            if isinstance(other, Dict)
-            else self.unwrap() == other
-        )
+        other_data = other.unwrap() if isinstance(other, Dict) else other
+        return self.unwrap() == other_data
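The rewritten `__eq__` compares the wrapped dict either against another `Dict`'s payload or against a plain mapping; a short sketch of the expected behaviour, assuming the `pc.Dict` constructor used in the doctests above:

```python
>>> import pyochain as pc
>>> pc.Dict({"a": 1}) == pc.Dict({"a": 1})
True
>>> pc.Dict({"a": 1}) == {"a": 1}
True
>>> pc.Dict({"a": 1}) == {"a": 2}
False
```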
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Any, Concatenate

 import cytoolz as cz

-from .._core import MappingWrapper
+from .._core import MappingWrapper, SupportsRichComparison

 if TYPE_CHECKING:
     from ._main import Dict
@@ -169,3 +169,24 @@ class ProcessDict[K, V](MappingWrapper[K, V]):
             return dict(sorted(data.items(), reverse=reverse))

         return self.apply(_sort)
+
+    def sort_values[U: SupportsRichComparison[Any]](
+        self: ProcessDict[K, U], reverse: bool = False
+    ) -> Dict[K, U]:
+        """
+        Sort the dictionary by its values and return a new Dict.
+
+        Args:
+            reverse: Whether to sort in descending order. Defaults to False.
+        ```python
+        >>> import pyochain as pc
+        >>> pc.Dict({"a": 2, "b": 1}).sort_values().unwrap()
+        {'b': 1, 'a': 2}
+
+        ```
+        """
+
+        def _sort_values(data: dict[K, U]) -> dict[K, U]:
+            return dict(sorted(data.items(), key=lambda item: item[1], reverse=reverse))
+
+        return self.apply(_sort_values)
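The added `sort_values` is a thin wrapper around sorting `dict.items()` by value; the plain-Python equivalent of its doctest is:

```python
data = {"a": 2, "b": 1}
by_value = dict(sorted(data.items(), key=lambda item: item[1]))
assert by_value == {"b": 1, "a": 2}
assert list(by_value) == ["b", "a"]  # insertion order now follows ascending values
```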
@@ -21,6 +21,7 @@ class BaseBool[T](IterWrapper[T]):
         If any of them return false, it returns false.

         An empty iterator returns true.
+
         Args:
             predicate: Function to evaluate each item. Defaults to checking truthiness.
         Example:
@@ -57,6 +58,7 @@ class BaseBool[T](IterWrapper[T]):
         If they all return false, it returns false.

         An empty iterator returns false.
+
         Args:
             predicate: Function to evaluate each item. Defaults to checking truthiness.
         Example:
@@ -200,6 +202,7 @@ class BaseBool[T](IterWrapper[T]):
         - Returning the first element that satisfies the `predicate`.

         If all the elements return false, `Iter.find()` returns the default value.
+
         Args:
             default: Value to return if no element satisfies the predicate. Defaults to None.
             predicate: Function to evaluate each item. Defaults to checking truthiness.
@@ -1,20 +1,73 @@
 from __future__ import annotations

-from collections.abc import Callable, Iterable
-from
-from typing import TYPE_CHECKING, Any, overload
+from collections.abc import Callable, Iterable
+from typing import TYPE_CHECKING

 import cytoolz as cz
-import more_itertools as mit

 from .._core import IterWrapper

 if TYPE_CHECKING:
     from .._dict import Dict
-    from ._main import Iter


-class
+class BaseDict[T](IterWrapper[T]):
+    def with_keys[K](self, keys: Iterable[K]) -> Dict[K, T]:
+        """
+        Create a Dict by zipping the iterable with keys.
+
+        Args:
+            keys: Iterable of keys to pair with the values.
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> keys = ["a", "b", "c"]
+        >>> values = [1, 2, 3]
+        >>> pc.Iter.from_(values).with_keys(keys).unwrap()
+        {'a': 1, 'b': 2, 'c': 3}
+        >>> # This is equivalent to:
+        >>> pc.Iter.from_(keys).zip(values).pipe(
+        ...     lambda x: pc.Dict(x.into(dict)).unwrap()
+        ... )
+        {'a': 1, 'b': 2, 'c': 3}
+
+        ```
+        """
+        from .._dict import Dict
+
+        def _with_keys(data: Iterable[T]) -> Dict[K, T]:
+            return Dict(dict(zip(keys, data)))
+
+        return self.into(_with_keys)
+
+    def with_values[V](self, values: Iterable[V]) -> Dict[T, V]:
+        """
+        Create a Dict by zipping the iterable with values.
+
+        Args:
+            values: Iterable of values to pair with the keys.
+        Example:
+        ```python
+        >>> import pyochain as pc
+        >>> keys = [1, 2, 3]
+        >>> values = ["a", "b", "c"]
+        >>> pc.Iter.from_(keys).with_values(values).unwrap()
+        {1: 'a', 2: 'b', 3: 'c'}
+        >>> # This is equivalent to:
+        >>> pc.Iter.from_(keys).zip(values).pipe(
+        ...     lambda x: pc.Dict(x.into(dict)).unwrap()
+        ... )
+        {1: 'a', 2: 'b', 3: 'c'}
+
+        ```
+        """
+        from .._dict import Dict
+
+        def _with_values(data: Iterable[T]) -> Dict[T, V]:
+            return Dict(dict(zip(data, values)))
+
+        return self.into(_with_values)
+
     def reduce_by[K](
         self, key: Callable[[T], K], binop: Callable[[T, T], T]
     ) -> Dict[K, T]:
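Both new constructors reduce to `dict(zip(...))`, differing only in which side of the pair the wrapped iterable supplies; a plain-Python restatement of the doctests above:

```python
keys = ["a", "b", "c"]
values = [1, 2, 3]

assert dict(zip(keys, values)) == {"a": 1, "b": 2, "c": 3}  # with_keys: wrapped data provide the values
assert dict(zip(values, keys)) == {1: "a", 2: "b", 3: "c"}  # with_values: wrapped data provide the keys
```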
@@ -58,7 +111,10 @@ class BaseGroups[T](IterWrapper[T]):
         """
         from .._dict import Dict

-
+        def _reduce_by(data: Iterable[T]) -> Dict[K, T]:
+            return Dict(cz.itertoolz.reduceby(key, binop, data))
+
+        return self.into(_reduce_by)

     def group_by[K](self, on: Callable[[T], K]) -> Dict[K, list[T]]:
         """
@@ -117,7 +173,10 @@ class BaseGroups[T](IterWrapper[T]):
         """
         from .._dict import Dict

-
+        def _group_by(data: Iterable[T]) -> Dict[K, list[T]]:
+            return Dict(cz.itertoolz.groupby(on, data))
+
+        return self.into(_group_by)

     def frequencies(self) -> Dict[T, int]:
         """
@@ -132,7 +191,10 @@ class BaseGroups[T](IterWrapper[T]):
         """
         from .._dict import Dict

-
+        def _frequencies(data: Iterable[T]) -> Dict[T, int]:
+            return Dict(cz.itertoolz.frequencies(data))
+
+        return self.into(_frequencies)

     def count_by[K](self, key: Callable[[T], K]) -> Dict[K, int]:
         """
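The grouping helpers delegate directly to `cytoolz`, so the underlying calls can be exercised on their own; a small sketch using the same `cytoolz` functions referenced in this diff:

```python
import cytoolz as cz

words = ["apple", "avocado", "banana", "blueberry"]

# group_by: map each computed key to the list of items that produced it
assert cz.itertoolz.groupby(lambda w: w[0], words) == {
    "a": ["apple", "avocado"],
    "b": ["banana", "blueberry"],
}

# frequencies: count occurrences of each distinct item
assert cz.itertoolz.frequencies("abbc") == {"a": 1, "b": 2, "c": 1}

# reduce_by: fold each group with a binary operation (here, sum by parity)
assert cz.itertoolz.reduceby(lambda x: x % 2, lambda a, b: a + b, [1, 2, 3, 4]) == {1: 4, 0: 6}
```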
@@ -154,111 +216,7 @@ class BaseGroups[T](IterWrapper[T]):
         """
         from .._dict import Dict

-
-
-    @overload
-    def group_by_transform(
-        self,
-        keyfunc: None = None,
-        valuefunc: None = None,
-        reducefunc: None = None,
-    ) -> Iter[tuple[T, Iterator[T]]]: ...
-    @overload
-    def group_by_transform[U](
-        self,
-        keyfunc: Callable[[T], U],
-        valuefunc: None,
-        reducefunc: None,
-    ) -> Iter[tuple[U, Iterator[T]]]: ...
-    @overload
-    def group_by_transform[V](
-        self,
-        keyfunc: None,
-        valuefunc: Callable[[T], V],
-        reducefunc: None,
-    ) -> Iter[tuple[T, Iterator[V]]]: ...
-    @overload
-    def group_by_transform[U, V](
-        self,
-        keyfunc: Callable[[T], U],
-        valuefunc: Callable[[T], V],
-        reducefunc: None,
-    ) -> Iter[tuple[U, Iterator[V]]]: ...
-    @overload
-    def group_by_transform[W](
-        self,
-        keyfunc: None,
-        valuefunc: None,
-        reducefunc: Callable[[Iterator[T]], W],
-    ) -> Iter[tuple[T, W]]: ...
-    @overload
-    def group_by_transform[U, W](
-        self,
-        keyfunc: Callable[[T], U],
-        valuefunc: None,
-        reducefunc: Callable[[Iterator[T]], W],
-    ) -> Iter[tuple[U, W]]: ...
-    @overload
-    def group_by_transform[V, W](
-        self,
-        keyfunc: None,
-        valuefunc: Callable[[T], V],
-        reducefunc: Callable[[Iterator[V]], W],
-    ) -> Iter[tuple[T, W]]: ...
-    @overload
-    def group_by_transform[U, V, W](
-        self,
-        keyfunc: Callable[[T], U],
-        valuefunc: Callable[[T], V],
-        reducefunc: Callable[[Iterator[V]], W],
-    ) -> Iter[tuple[U, W]]: ...
-    def group_by_transform[U, V](
-        self,
-        keyfunc: Callable[[T], U] | None = None,
-        valuefunc: Callable[[T], V] | None = None,
-        reducefunc: Any = None,
-    ) -> Iter[tuple[Any, ...]]:
-        """
-        An extension of itertools.groupby that can apply transformations to the grouped data.
-
-        Args:
-            keyfunc: Function to compute the key for grouping. Defaults to None.
-            valuefunc: Function to transform individual items after grouping. Defaults to None.
-            reducefunc: Function to transform each group of items. Defaults to None.
-
-        Example:
-        ```python
-        >>> import pyochain as pc
-        >>> data = pc.Iter.from_("aAAbBBcCC")
-        >>> data.group_by_transform(
-        ...     lambda k: k.upper(), lambda v: v.lower(), lambda g: "".join(g)
-        ... ).into(list)
-        [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]
-
-        ```
-        Each optional argument defaults to an identity function if not specified.
-
-        group_by_transform is useful when grouping elements of an iterable using a separate iterable as the key.
-
-        To do this, zip the iterables and pass a keyfunc that extracts the first element and a valuefunc that extracts the second element:
-
-        Note that the order of items in the iterable is significant.
-
-        Only adjacent items are grouped together, so if you don't want any duplicate groups, you should sort the iterable by the key function.
-
-        Example:
-        ```python
-        >>> from operator import itemgetter
-        >>> data = pc.Iter.from_([0, 0, 1, 1, 1, 2, 2, 2, 3])
-        >>> data.zip("abcdefghi").group_by_transform(itemgetter(0), itemgetter(1)).map(
-        ...     lambda kv: (kv[0], "".join(kv[1]))
-        ... ).into(list)
-        [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]
-
-        ```
-        """
-
-        def _group_by_transform(data: Iterable[T]) -> Iterator[tuple[Any, ...]]:
-            return mit.groupby_transform(data, keyfunc, valuefunc, reducefunc)
+        def _count_by(data: Iterable[T]) -> Dict[K, int]:
+            return Dict(cz.recipes.countby(key, data))

-        return self.
+        return self.into(_count_by)