mappingtools 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -0,0 +1,459 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mappingtools
|
|
3
|
+
Version: 0.3.0
|
|
4
|
+
Summary: MappingTools. Do stuff with Mappings and more
|
|
5
|
+
Project-URL: Homepage, https://erivlis.github.io/mappingtools
|
|
6
|
+
Project-URL: Bug Tracker, https://github.com/erivlis/mappingtools/issues
|
|
7
|
+
Project-URL: Source, https://github.com/erivlis/mappingtools
|
|
8
|
+
Author-email: Eran Rivlis <eran@rivlis.info>
|
|
9
|
+
License-File: LICENSE
|
|
10
|
+
Keywords: Mapping,manipulate,mutate,transform
|
|
11
|
+
Classifier: Development Status :: 4 - Beta
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: Intended Audience :: Information Technology
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Natural Language :: English
|
|
16
|
+
Classifier: Operating System :: OS Independent
|
|
17
|
+
Classifier: Programming Language :: Python :: 3
|
|
18
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
23
|
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
24
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
25
|
+
Classifier: Typing :: Typed
|
|
26
|
+
Requires-Python: >=3.10
|
|
27
|
+
Provides-Extra: dev
|
|
28
|
+
Requires-Dist: ruff; extra == 'dev'
|
|
29
|
+
Provides-Extra: docs
|
|
30
|
+
Requires-Dist: mkdocs-gen-files; extra == 'docs'
|
|
31
|
+
Requires-Dist: mkdocs-git-revision-date-localized-plugin; extra == 'docs'
|
|
32
|
+
Requires-Dist: mkdocs-glightbox; extra == 'docs'
|
|
33
|
+
Requires-Dist: mkdocs-literate-nav; extra == 'docs'
|
|
34
|
+
Requires-Dist: mkdocs-material; extra == 'docs'
|
|
35
|
+
Requires-Dist: mkdocs-section-index; extra == 'docs'
|
|
36
|
+
Requires-Dist: mkdocstrings-python; extra == 'docs'
|
|
37
|
+
Provides-Extra: test
|
|
38
|
+
Requires-Dist: pytest; extra == 'test'
|
|
39
|
+
Requires-Dist: pytest-cov; extra == 'test'
|
|
40
|
+
Requires-Dist: pytest-randomly; extra == 'test'
|
|
41
|
+
Requires-Dist: pytest-xdist; extra == 'test'
|
|
42
|
+
Description-Content-Type: text/markdown
|
|
43
|
+
|
|
44
|
+
# MappingTools
|
|
45
|
+
|
|
46
|
+
> Do stuff with Mappings and more!!!
|
|
47
|
+
|
|
48
|
+
This library provides utility functions for manipulating and transforming data structures which have or include
|
|
49
|
+
Mapping-like characteristics, including inverting dictionaries, converting class-like objects to dictionaries, creating
|
|
50
|
+
nested defaultdicts, and unwrapping complex objects.
|
|
51
|
+
|
|
52
|
+
<table>
|
|
53
|
+
<tr style="vertical-align: middle;">
|
|
54
|
+
<td>Package</td>
|
|
55
|
+
<td>
|
|
56
|
+
<img alt="PyPI - version" src="https://img.shields.io/pypi/v/mappingtools">
|
|
57
|
+
<img alt="PyPI - Status" src="https://img.shields.io/pypi/status/mappingtools">
|
|
58
|
+
<img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/mappingtools">
|
|
59
|
+
<img alt="PyPI - Downloads" src="https://img.shields.io/pypi/dd/mappingtools">
|
|
60
|
+
<br>
|
|
61
|
+
<img alt="GitHub" src="https://img.shields.io/github/license/erivlis/mappingtools">
|
|
62
|
+
<img alt="GitHub repo size" src="https://img.shields.io/github/repo-size/erivlis/mappingtools">
|
|
63
|
+
<img alt="GitHub last commit (by committer)" src="https://img.shields.io/github/last-commit/erivlis/mappingtools">
|
|
64
|
+
<a href="https://github.com/erivlis/mappingtools/graphs/contributors"><img alt="Contributors" src="https://img.shields.io/github/contributors/erivlis/mappingtools.svg"></a>
|
|
65
|
+
</td>
|
|
66
|
+
</tr>
|
|
67
|
+
<tr>
|
|
68
|
+
<td>Tools</td>
|
|
69
|
+
<td>
|
|
70
|
+
<a href="https://www.jetbrains.com/pycharm/"><img alt="PyCharm" src="https://img.shields.io/badge/PyCharm-FCF84A.svg?logo=PyCharm&logoColor=black&labelColor=21D789&color=FCF84A"></a>
|
|
71
|
+
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
|
|
72
|
+
<a href="https://github.com/astral-sh/uv"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/uv/main/assets/badge/v0.json" alt="uv" style="max-width:100%;"></a>
|
|
73
|
+
<!-- a href="https://squidfunk.github.io/mkdocs-material/"><img src="https://img.shields.io/badge/Material_for_MkDocs-526CFE?&logo=MaterialForMkDocs&logoColor=white&labelColor=grey"></a -->
|
|
74
|
+
</td>
|
|
75
|
+
</tr>
|
|
76
|
+
<tr>
|
|
77
|
+
<td>CI/CD</td>
|
|
78
|
+
<td>
|
|
79
|
+
<a href="https://github.com/erivlis/mappingtools/actions/workflows/test.yml"><img alt="Tests" src="https://github.com/erivlis/mappingtools/actions/workflows/test.yml/badge.svg?branch=main"></a>
|
|
80
|
+
<a href="https://github.com/erivlis/mappingtools/actions/workflows/publish.yml"><img alt="Publish" src="https://github.com/erivlis/mappingtools/actions/workflows/publish.yml/badge.svg"></a>
|
|
81
|
+
<!--a href="https://github.com/erivlis/mappingtools/actions/workflows/publish-docs.yaml"><img alt="Publish Docs" src="https://github.com/erivlis/mappingtools/actions/workflows/publish-docs.yaml/badge.svg"></a-->
|
|
82
|
+
</td>
|
|
83
|
+
</tr>
|
|
84
|
+
<tr>
|
|
85
|
+
<td>Scans</td>
|
|
86
|
+
<td>
|
|
87
|
+
<a href="https://codecov.io/gh/erivlis/mappingtools"><img alt="Coverage" src="https://codecov.io/gh/erivlis/mappingtools/graph/badge.svg?token=POODT8M9NV"/></a>
|
|
88
|
+
<br>
|
|
89
|
+
<a href="https://sonarcloud.io/summary/new_code?id=erivlis_mappingtools"><img alt="Quality Gate Status" src="https://sonarcloud.io/api/project_badges/measure?project=erivlis_mappingtools&metric=alert_status"></a>
|
|
90
|
+
<a href="https://sonarcloud.io/summary/new_code?id=erivlis_mappingtools"><img alt="Security Rating" src="https://sonarcloud.io/api/project_badges/measure?project=erivlis_mappingtools&metric=security_rating"></a>
|
|
91
|
+
<a href="https://sonarcloud.io/summary/new_code?id=erivlis_mappingtools"><img alt="Maintainability Rating" src="https://sonarcloud.io/api/project_badges/measure?project=erivlis_mappingtools&metric=sqale_rating"></a>
|
|
92
|
+
<a href="https://sonarcloud.io/summary/new_code?id=erivlis_mappingtools"><img alt="Reliability Rating" src="https://sonarcloud.io/api/project_badges/measure?project=erivlis_mappingtools&metric=reliability_rating"></a>
|
|
93
|
+
<br>
|
|
94
|
+
<a href="https://sonarcloud.io/summary/new_code?id=erivlis_mappingtools"><img alt="Lines of Code" src="https://sonarcloud.io/api/project_badges/measure?project=erivlis_mappingtools&metric=ncloc"></a>
|
|
95
|
+
<a href="https://sonarcloud.io/summary/new_code?id=erivlis_mappingtools"><img alt="Vulnerabilities" src="https://sonarcloud.io/api/project_badges/measure?project=erivlis_mappingtools&metric=vulnerabilities"></a>
|
|
96
|
+
<a href="https://sonarcloud.io/summary/new_code?id=erivlis_mappingtools"><img alt="Bugs" src="https://sonarcloud.io/api/project_badges/measure?project=erivlis_mappingtools&metric=bugs"></a>
|
|
97
|
+
<br>
|
|
98
|
+
<a href="https://app.codacy.com/gh/erivlis/mappingtools/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade"><img alt="Codacy Quality" src="https://app.codacy.com/project/badge/Grade/8b83a99f939b4883ae2f37d7ec3419d1"></a>
|
|
99
|
+
<a href="https://app.codacy.com/gh/erivlis/mappingtools/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_coverage"><img alt="Codacy Coverage" src="https://app.codacy.com/project/badge/Coverage/8b83a99f939b4883ae2f37d7ec3419d1"/></a>
|
|
100
|
+
</td>
|
|
101
|
+
</tr>
|
|
102
|
+
</table>
|
|
103
|
+
|
|
104
|
+
## Usage
|
|
105
|
+
|
|
106
|
+
### Transformers
|
|
107
|
+
|
|
108
|
+
#### `distinct`
|
|
109
|
+
|
|
110
|
+
Yields distinct values for a specified key across multiple mappings.
|
|
111
|
+
|
|
112
|
+
<!-- name: test_distinct -->
|
|
113
|
+
|
|
114
|
+
```python
|
|
115
|
+
from mappingtools import distinct
|
|
116
|
+
|
|
117
|
+
mappings = [
|
|
118
|
+
{'a': 1, 'b': 2},
|
|
119
|
+
{'a': 2, 'b': 3},
|
|
120
|
+
{'a': 1, 'b': 4}
|
|
121
|
+
]
|
|
122
|
+
distinct_values = list(distinct('a', *mappings))
|
|
123
|
+
print(distinct_values)
|
|
124
|
+
# Output: [1, 2]
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
#### `keep`
|
|
128
|
+
|
|
129
|
+
Yields subsets of mappings by retaining only the specified keys.
|
|
130
|
+
|
|
131
|
+
<!-- name: test_keep -->
|
|
132
|
+
|
|
133
|
+
```python
|
|
134
|
+
from mappingtools import keep
|
|
135
|
+
|
|
136
|
+
mappings = [
|
|
137
|
+
{'a': 1, 'b': 2, 'c': 3},
|
|
138
|
+
{'a': 4, 'b': 5, 'd': 6}
|
|
139
|
+
]
|
|
140
|
+
keys_to_keep = ['a', 'b']
|
|
141
|
+
result = list(keep(keys_to_keep, *mappings))
|
|
142
|
+
# result: [{'a': 1, 'b': 2}, {'a': 4, 'b': 5}]
|
|
143
|
+
```
|
|
144
|
+
|
|
145
|
+
#### `remove`
|
|
146
|
+
|
|
147
|
+
Yields mappings with specified keys removed. It takes an iterable of keys and multiple mappings, and returns a generator
|
|
148
|
+
of mappings with those keys excluded.
|
|
149
|
+
|
|
150
|
+
<!-- name: test_remove -->
|
|
151
|
+
|
|
152
|
+
```python
|
|
153
|
+
from mappingtools import remove
|
|
154
|
+
|
|
155
|
+
mappings = [
|
|
156
|
+
{'a': 1, 'b': 2, 'c': 3},
|
|
157
|
+
{'a': 4, 'b': 5, 'd': 6}
|
|
158
|
+
]
|
|
159
|
+
keys_to_remove = ['a', 'b']
|
|
160
|
+
result = list(remove(keys_to_remove, *mappings))
|
|
161
|
+
# result: [{'c': 3}, {'d': 6}]
|
|
162
|
+
```
|
|
163
|
+
|
|
164
|
+
#### `inverse`
|
|
165
|
+
|
|
166
|
+
Swaps keys and values in a dictionary.
|
|
167
|
+
|
|
168
|
+
<!-- name: test_inverse -->
|
|
169
|
+
|
|
170
|
+
```python
|
|
171
|
+
from mappingtools import inverse
|
|
172
|
+
|
|
173
|
+
original_mapping = {'a': {1, 2}, 'b': {3}}
|
|
174
|
+
inverted_mapping = inverse(original_mapping)
|
|
175
|
+
print(inverted_mapping)
|
|
176
|
+
# Output: defaultdict(<class 'set'>, {1: {'a'}, 2: {'a'}, 3: {'b'}})
|
|
177
|
+
```
|
|
178
|
+
|
|
179
|
+
#### `flattened`
|
|
180
|
+
|
|
181
|
+
The flattened function takes a nested mapping structure and converts it into a single-level dictionary by flattening the
|
|
182
|
+
keys into tuples.
|
|
183
|
+
|
|
184
|
+
<!-- name: test_flattened -->
|
|
185
|
+
|
|
186
|
+
```python
|
|
187
|
+
from mappingtools import flattened
|
|
188
|
+
|
|
189
|
+
nested_dict = {
|
|
190
|
+
'a': {'b': 1, 'c': {'d': 2}},
|
|
191
|
+
'e': 3
|
|
192
|
+
}
|
|
193
|
+
flat_dict = flattened(nested_dict)
|
|
194
|
+
# Expected output: {('a', 'b'): 1, ('a', 'c', 'd'): 2, ('e',): 3}
|
|
195
|
+
```
|
|
196
|
+
|
|
197
|
+
#### `listify`
|
|
198
|
+
|
|
199
|
+
Transforms complex objects into a list of dictionaries with key and value pairs.
|
|
200
|
+
|
|
201
|
+
<!-- name: test_listify -->
|
|
202
|
+
|
|
203
|
+
```python
|
|
204
|
+
from mappingtools import listify
|
|
205
|
+
|
|
206
|
+
wrapped_data = {'key1': {'subkey': 'value'}, 'key2': ['item1', 'item2']}
|
|
207
|
+
unwrapped_data = listify(wrapped_data)
|
|
208
|
+
print(unwrapped_data)
|
|
209
|
+
# Output: [{'key': 'key1', 'value': [{'key': 'subkey', 'value': 'value'}]}, {'key': 'key2', 'value': ['item1', 'item2']}]
|
|
210
|
+
```
|
|
211
|
+
|
|
212
|
+
#### `simplify`
|
|
213
|
+
|
|
214
|
+
Converts objects to strictly structured dictionaries.
|
|
215
|
+
|
|
216
|
+
<!-- name: test_simplify -->
|
|
217
|
+
|
|
218
|
+
```python
|
|
219
|
+
from collections import Counter
|
|
220
|
+
from dataclasses import dataclass
|
|
221
|
+
from datetime import datetime
|
|
222
|
+
from typing import Mapping
|
|
223
|
+
|
|
224
|
+
from mappingtools import simplify
|
|
225
|
+
|
|
226
|
+
data = {'key1': 'value1', 'key2': ['item1', 'item2']}
|
|
227
|
+
simplified_data = simplify(data)
|
|
228
|
+
print(simplified_data)
|
|
229
|
+
# Output: {'key1': 'value1', 'key2': ['item1', 'item2']}
|
|
230
|
+
|
|
231
|
+
counter = Counter({'a': 1, 'b': 2})
|
|
232
|
+
print(counter)
|
|
233
|
+
# Output: Counter({'b': 2, 'a': 1})
|
|
234
|
+
|
|
235
|
+
simplified_counter = simplify(counter)
|
|
236
|
+
print(simplified_counter)
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
# Output: {'a': 1, 'b': 2}
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
@dataclass
|
|
243
|
+
class SampleDataClass:
|
|
244
|
+
a: int
|
|
245
|
+
b: int
|
|
246
|
+
aa: str
|
|
247
|
+
bb: str
|
|
248
|
+
c: list[int]
|
|
249
|
+
d: Mapping
|
|
250
|
+
e: datetime
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
sample_datetime = datetime(2024, 7, 22, 21, 42, 17, 314159)
|
|
254
|
+
sample_dataclass = SampleDataClass(1, 2, '11', '22', [1, 2], {'aaa': 111, 'bbb': '222'}, sample_datetime)
|
|
255
|
+
|
|
256
|
+
print(sample_dataclass)
|
|
257
|
+
# Output: SampleDataClass(a=1, b=2, aa='11', bb='22', c=[1, 2], d={'aaa': 111, 'bbb': '222'}, e=datetime.datetime(2024, 7, 22, 21, 42, 17, 314159))
|
|
258
|
+
|
|
259
|
+
simplified_sample_dataclass = simplify(sample_dataclass)
|
|
260
|
+
print(simplified_sample_dataclass)
|
|
261
|
+
# Output: {'a': 1, 'aa': '11', 'b': 2, 'bb': '22', 'c': [1, 2], 'd': {'aaa': 111, 'bbb': '222'}, 'e': datetime.datetime(2024, 7, 22, 21, 42, 17, 314159)}
|
|
262
|
+
```
|
|
263
|
+
|
|
264
|
+
#### `strictify`
|
|
265
|
+
|
|
266
|
+
Applies a strict structural conversion to an object using optional converters for keys and values.
|
|
267
|
+
|
|
268
|
+
<!-- name: test_strictify -->
|
|
269
|
+
|
|
270
|
+
```python
|
|
271
|
+
from mappingtools import strictify
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def uppercase_key(key):
|
|
275
|
+
return key.upper()
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def double_value(value):
|
|
279
|
+
return value * 2
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
data = {'a': 1, 'b': 2}
|
|
283
|
+
result = strictify(data, key_converter=uppercase_key, value_converter=double_value)
|
|
284
|
+
print(result)
|
|
285
|
+
# Output: {'A': 2, 'B': 4}
|
|
286
|
+
```
|
|
287
|
+
|
|
288
|
+
#### `stringify`
|
|
289
|
+
|
|
290
|
+
Converts an object into a string representation by recursively processing it based on its type.
|
|
291
|
+
|
|
292
|
+
<!-- Name: test_stringify -->
|
|
293
|
+
|
|
294
|
+
```python
|
|
295
|
+
from mappingtools import stringify
|
|
296
|
+
|
|
297
|
+
data = {'key1': 'value1', 'key2': 'value2'}
|
|
298
|
+
result = stringify(data)
|
|
299
|
+
|
|
300
|
+
print(result)
|
|
301
|
+
# Output: "key1=value1, key2=value2"
|
|
302
|
+
|
|
303
|
+
data = [1, 2, 3]
|
|
304
|
+
result = stringify(data)
|
|
305
|
+
|
|
306
|
+
print(result)
|
|
307
|
+
# Output: "[1, 2, 3]"
|
|
308
|
+
```
|
|
309
|
+
|
|
310
|
+
#### `stream`
|
|
311
|
+
|
|
312
|
+
Takes a mapping and an optional item factory function, and generates items from the mapping.
|
|
313
|
+
If the item factory is provided, it applies the factory to each key-value pair before yielding.
|
|
314
|
+
|
|
315
|
+
<!-- name: test_stream -->
|
|
316
|
+
|
|
317
|
+
```python
|
|
318
|
+
from collections import namedtuple
|
|
319
|
+
|
|
320
|
+
from mappingtools import stream
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
def custom_factory(key, value):
|
|
324
|
+
return f"{key}: {value}"
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
my_mapping = {'a': 1, 'b': 2, 'c': 3}
|
|
328
|
+
|
|
329
|
+
for item in stream(my_mapping, custom_factory):
|
|
330
|
+
print(item)
|
|
331
|
+
# Output:
|
|
332
|
+
# a: 1
|
|
333
|
+
# b: 2
|
|
334
|
+
# c: 3
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
MyTuple = namedtuple('MyTuple', ['key', 'value'])
|
|
338
|
+
data = {'a': 1, 'b': 2}
|
|
339
|
+
|
|
340
|
+
for item in stream(data, MyTuple):
|
|
341
|
+
print(item)
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
# Output:
|
|
345
|
+
# MyTuple(key='a', value=1)
|
|
346
|
+
# MyTuple(key='b', value=2)
|
|
347
|
+
|
|
348
|
+
|
|
349
|
+
def record(k, v):
|
|
350
|
+
return {'key': k, 'value': v}
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
for item in stream(data, record):
|
|
354
|
+
print(item)
|
|
355
|
+
# output:
|
|
356
|
+
# {'key': 'a', 'value': 1}
|
|
357
|
+
# {'key': 'b', 'value': 2}
|
|
358
|
+
```
|
|
359
|
+
|
|
360
|
+
#### `stream_dict_records`
|
|
361
|
+
|
|
362
|
+
Generates dictionary records from a given mapping, where each record contains a key-value pair from the mapping with
|
|
363
|
+
customizable key and value names.
|
|
364
|
+
|
|
365
|
+
<!-- name: test_stream_dict_records -->
|
|
366
|
+
|
|
367
|
+
```python
|
|
368
|
+
from mappingtools import stream_dict_records
|
|
369
|
+
|
|
370
|
+
mapping = {'a': 1, 'b': 2}
|
|
371
|
+
records = stream_dict_records(mapping, key_name='letter', value_name='number')
|
|
372
|
+
for record in records:
|
|
373
|
+
print(record)
|
|
374
|
+
# Output:
|
|
375
|
+
# {'letter': 'a', 'number': 1}
|
|
376
|
+
# {'letter': 'b', 'number': 2}
|
|
377
|
+
```
|
|
378
|
+
|
|
379
|
+
### Collectors
|
|
380
|
+
|
|
381
|
+
#### `nested_defaultdict`
|
|
382
|
+
|
|
383
|
+
Creates a nested defaultdict with specified depth and factory.
|
|
384
|
+
|
|
385
|
+
<!-- name: test_nested_defaultdict -->
|
|
386
|
+
|
|
387
|
+
```python
|
|
388
|
+
from mappingtools import nested_defaultdict
|
|
389
|
+
|
|
390
|
+
nested_dd = nested_defaultdict(1, list)
|
|
391
|
+
nested_dd[0][1].append('value')
|
|
392
|
+
print(nested_dd)
|
|
393
|
+
# Output: defaultdict(<function nested_defaultdict.<locals>.factory at ...>, {0: defaultdict(<function nested_defaultdict.<locals>.factory at ...>, {1: ['value']})})
|
|
394
|
+
```
|
|
395
|
+
|
|
396
|
+
#### `CategoryCounter`
|
|
397
|
+
|
|
398
|
+
The CategoryCounter class extends a dictionary to count occurrences of data items categorized by multiple categories.
|
|
399
|
+
It maintains a total count of all data items and allows categorization using direct values or functions.
|
|
400
|
+
|
|
401
|
+
<!-- name: test_category_counter -->
|
|
402
|
+
|
|
403
|
+
```python
|
|
404
|
+
from mappingtools import CategoryCounter
|
|
405
|
+
|
|
406
|
+
counter = CategoryCounter()
|
|
407
|
+
|
|
408
|
+
for fruit in ['apple', 'banana', 'apple']:
|
|
409
|
+
counter.update({fruit: 1}, type='fruit', char_count=len(fruit), unique_char_count=len(set(fruit)))
|
|
410
|
+
|
|
411
|
+
print(counter.total)
|
|
412
|
+
# Output: Counter({'apple': 2, 'banana': 1})
|
|
413
|
+
|
|
414
|
+
print(counter)
|
|
415
|
+
# Output: CategoryCounter({'type': defaultdict(<class 'collections.Counter'>, {'fruit': Counter({'apple': 2, 'banana': 1})}), 'char_count': defaultdict(<class 'collections.Counter'>, {5: Counter({'apple': 2}), 6: Counter({'banana': 1})}), 'unique_char_count': defaultdict(<class 'collections.Counter'>, {4: Counter({'apple': 2}), 3: Counter({'banana': 1})})})
|
|
416
|
+
```
|
|
417
|
+
|
|
418
|
+
#### `MappingCollector`
|
|
419
|
+
|
|
420
|
+
A class designed to collect key-value pairs into an internal mapping based on different modes.
|
|
421
|
+
It supports modes like ALL, COUNT, DISTINCT, FIRST, and LAST, each dictating how key-value pairs are
|
|
422
|
+
collected.
|
|
423
|
+
|
|
424
|
+
<!-- name: test_mapping_collector -->
|
|
425
|
+
|
|
426
|
+
```python
|
|
427
|
+
from mappingtools import MappingCollector, MappingCollectorMode
|
|
428
|
+
|
|
429
|
+
collector = MappingCollector(MappingCollectorMode.ALL)
|
|
430
|
+
collector.add('a', 1)
|
|
431
|
+
collector.add('a', 2)
|
|
432
|
+
collector.collect([('b', 3), ('b', 4)])
|
|
433
|
+
print(collector.mapping)
|
|
434
|
+
# Output: {'a': [1, 2], 'b': [3, 4]}
|
|
435
|
+
```
|
|
436
|
+
|
|
437
|
+
## Development
|
|
438
|
+
|
|
439
|
+
### Ruff
|
|
440
|
+
|
|
441
|
+
```shell
|
|
442
|
+
ruff check src
|
|
443
|
+
|
|
444
|
+
ruff check tests
|
|
445
|
+
```
|
|
446
|
+
|
|
447
|
+
### Test
|
|
448
|
+
|
|
449
|
+
#### Standard (cobertura) XML Coverage Report
|
|
450
|
+
|
|
451
|
+
```shell
|
|
452
|
+
python -m pytest tests -n auto --cov=src --cov-branch --doctest-modules --cov-report=xml
|
|
453
|
+
```
|
|
454
|
+
|
|
455
|
+
#### HTML Coverage Report
|
|
456
|
+
|
|
457
|
+
```shell
|
|
458
|
+
python -m pytest tests -n auto --cov=src --cov-branch --doctest-modules --cov-report=html
|
|
459
|
+
```
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
mappingtools.py,sha256=e1igUpsD30fOT4ZvCYv4ZeZP59TR8tEVjJgml5RqLr8,19026
|
|
2
|
+
mappingtools-0.3.0.dist-info/METADATA,sha256=A25MAlpiPTEMb_AVxs5yrL4aKJN9iLre3aJ2O9CVu4s,15181
|
|
3
|
+
mappingtools-0.3.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
4
|
+
mappingtools-0.3.0.dist-info/licenses/LICENSE,sha256=fiCxD4qmBY6VdONaK43ANsvpmf9oxSpFLHaFaij0Jx4,1068
|
|
5
|
+
mappingtools-0.3.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Eran Rivlis
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
mappingtools.py
ADDED
|
@@ -0,0 +1,538 @@
|
|
|
1
|
+
import dataclasses
|
|
2
|
+
import inspect
|
|
3
|
+
from collections import Counter, defaultdict
|
|
4
|
+
from collections.abc import Callable, Generator, Iterable, Mapping
|
|
5
|
+
from enum import Enum, auto
|
|
6
|
+
from itertools import chain
|
|
7
|
+
from typing import Any, TypeVar
|
|
8
|
+
|
|
9
|
+
CIRCULAR_REFERENCE = '...'
|
|
10
|
+
|
|
11
|
+
K = TypeVar('K')
|
|
12
|
+
KT = TypeVar('KT')
|
|
13
|
+
VT = TypeVar('VT')
|
|
14
|
+
VT_co = TypeVar('VT_co')
|
|
15
|
+
|
|
16
|
+
Category = TypeVar('Category', bound=str | tuple | int | float)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class CategoryCounter(dict[str, defaultdict[Category, Counter]]):
|
|
20
|
+
|
|
21
|
+
def __init__(self):
|
|
22
|
+
super().__init__()
|
|
23
|
+
self.total = Counter()
|
|
24
|
+
|
|
25
|
+
def __repr__(self):
|
|
26
|
+
return f"CategoryCounter({super().__repr__()})"
|
|
27
|
+
|
|
28
|
+
def update(self, data, **categories: Category | Callable[[Any], Category]):
|
|
29
|
+
"""
|
|
30
|
+
Updates a CategoryCounter object with data and corresponding categories.
|
|
31
|
+
|
|
32
|
+
Parameters:
|
|
33
|
+
data: Any - The data to update the counter with (see Counter update method documentation).
|
|
34
|
+
**categories: Category | Callable[[Any], Category] - categories to associate the data with.
|
|
35
|
+
The categories can be either a direct value or a function that extracts the category from the data.
|
|
36
|
+
|
|
37
|
+
Returns:
|
|
38
|
+
None
|
|
39
|
+
"""
|
|
40
|
+
self.total.update(data)
|
|
41
|
+
for category_name, category_value in categories.items():
|
|
42
|
+
category_value = category_value(data) if callable(category_value) else category_value
|
|
43
|
+
if category_name not in self:
|
|
44
|
+
self[category_name] = defaultdict(Counter)
|
|
45
|
+
self[category_name][category_value].update(data)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class MappingCollectorMode(Enum):
    """
    Enumeration of the collection strategies used by MappingCollector.

    Attributes:
        ALL: Collect all values for each key.
        COUNT: Count the occurrences of each value for each key.
        DISTINCT: Collect distinct values for each key.
        FIRST: Collect the first value for each key.
        LAST: Collect the last value for each key.
    """
    ALL = auto()
    COUNT = auto()
    DISTINCT = auto()
    FIRST = auto()
    LAST = auto()
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class MappingCollector:
|
|
69
|
+
|
|
70
|
+
def __init__(self, mode: MappingCollectorMode = MappingCollectorMode.ALL, **kwargs):
|
|
71
|
+
"""
|
|
72
|
+
Initialize the MappingCollector with the specified mode.
|
|
73
|
+
|
|
74
|
+
Args:
|
|
75
|
+
mode (MappingCollectorMode): The mode for collecting mappings.
|
|
76
|
+
*args: Variable positional arguments used to initialize the internal mapping.
|
|
77
|
+
**kwargs: Variable keyword arguments used to initialize the internal mapping.
|
|
78
|
+
"""
|
|
79
|
+
self._mapping: Mapping[KT, VT_co]
|
|
80
|
+
|
|
81
|
+
self.mode = mode
|
|
82
|
+
|
|
83
|
+
match self.mode:
|
|
84
|
+
case MappingCollectorMode.ALL:
|
|
85
|
+
self._mapping = defaultdict(list, **kwargs)
|
|
86
|
+
case MappingCollectorMode.COUNT:
|
|
87
|
+
self._mapping = defaultdict(Counter, **kwargs)
|
|
88
|
+
case MappingCollectorMode.DISTINCT:
|
|
89
|
+
self._mapping = defaultdict(set, **kwargs)
|
|
90
|
+
case MappingCollectorMode.FIRST | MappingCollectorMode.LAST:
|
|
91
|
+
self._mapping = dict(**kwargs)
|
|
92
|
+
case _:
|
|
93
|
+
raise ValueError("Invalid mode")
|
|
94
|
+
|
|
95
|
+
def __repr__(self):
|
|
96
|
+
return f'MappingCollector(mode={self.mode}, mapping={self.mapping})'
|
|
97
|
+
|
|
98
|
+
@property
|
|
99
|
+
def mapping(self) -> Mapping[KT, VT_co]:
|
|
100
|
+
"""
|
|
101
|
+
Return a shallow copy of the internal mapping.
|
|
102
|
+
|
|
103
|
+
Returns:
|
|
104
|
+
Mapping[KT, VT_co]: A shallow copy of the internal mapping.
|
|
105
|
+
"""
|
|
106
|
+
return dict(self._mapping)
|
|
107
|
+
|
|
108
|
+
def add(self, key: KT, value: VT):
|
|
109
|
+
"""
|
|
110
|
+
Add a key-value pair to the internal mapping based on the specified mode.
|
|
111
|
+
|
|
112
|
+
Args:
|
|
113
|
+
key: The key to be added to the mapping.
|
|
114
|
+
value: The value corresponding to the key.
|
|
115
|
+
|
|
116
|
+
Returns:
|
|
117
|
+
None
|
|
118
|
+
"""
|
|
119
|
+
match self.mode:
|
|
120
|
+
case MappingCollectorMode.ALL:
|
|
121
|
+
self._mapping[key].append(value)
|
|
122
|
+
case MappingCollectorMode.COUNT:
|
|
123
|
+
self._mapping[key].update({value: 1})
|
|
124
|
+
case MappingCollectorMode.DISTINCT:
|
|
125
|
+
self._mapping[key].add(value)
|
|
126
|
+
case MappingCollectorMode.FIRST if key not in self.mapping:
|
|
127
|
+
self._mapping[key] = value
|
|
128
|
+
case MappingCollectorMode.LAST:
|
|
129
|
+
self._mapping[key] = value
|
|
130
|
+
|
|
131
|
+
def collect(self, iterable: Iterable[tuple[KT, VT]]):
|
|
132
|
+
"""
|
|
133
|
+
Collect key-value pairs from the given iterable and add them to the internal mapping
|
|
134
|
+
based on the specified mode.
|
|
135
|
+
|
|
136
|
+
Args:
|
|
137
|
+
iterable (Iterable[tuple[KT, VT]]): An iterable containing key-value pairs to collect.
|
|
138
|
+
|
|
139
|
+
Returns:
|
|
140
|
+
None
|
|
141
|
+
"""
|
|
142
|
+
for k, v in iterable:
|
|
143
|
+
self.add(k, v)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def distinct(key: K, *mappings: Mapping[K, Any]) -> Generator[Any, Any, None]:
    """
    Yield distinct values for the specified key across multiple mappings.

    Values are deduplicated by (value, type), so values that compare equal
    but differ in type (e.g. 1 and True) are yielded separately.

    Args:
        key (K): The key to extract distinct values from the mappings.
        *mappings (Mapping[K, Any]): Variable number of mappings to search for distinct values.

    Yields:
        Generator[K, Any, None]: A generator of distinct values extracted from the mappings.
    """
    seen: set = set()
    for mapping in mappings:
        # Membership check first: avoids fetching (and pairing with its type)
        # a value for mappings that do not contain the key at all.
        if key not in mapping:
            continue
        value = mapping[key]
        marker = (value, type(value))
        if marker not in seen:
            seen.add(marker)
            yield value
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def _take(keys: Iterable[K], mapping: Mapping[K, Any], exclude: bool = False) -> dict[K, Any]:
|
|
167
|
+
"""
|
|
168
|
+
Return a dictionary pertaining to the specified keys and their corresponding values from the mapping.
|
|
169
|
+
|
|
170
|
+
Args:
|
|
171
|
+
keys (Iterable[K]): The keys to include in the resulting dictionary.
|
|
172
|
+
mapping (Mapping[K, Any]): The mapping to extract key-value pairs from.
|
|
173
|
+
exclude (bool, optional): If True, exclude the specified keys from the mapping. Defaults to False.
|
|
174
|
+
|
|
175
|
+
Returns:
|
|
176
|
+
dict[K, Any]: A dictionary with the selected keys and their values from the mapping.
|
|
177
|
+
"""
|
|
178
|
+
|
|
179
|
+
if not isinstance(mapping, Mapping):
|
|
180
|
+
raise TypeError(f"Parameter 'mapping' should be of type 'Mapping', but instead is type '{type(mapping)}'")
|
|
181
|
+
|
|
182
|
+
mapping_keys = set(mapping.keys())
|
|
183
|
+
keys = set(keys) & mapping_keys # intersection with keys to get actual existing keys
|
|
184
|
+
if exclude:
|
|
185
|
+
keys = mapping_keys - keys
|
|
186
|
+
|
|
187
|
+
return {k: mapping.get(k) for k in keys}
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def keep(keys: Iterable[K], *mappings: Mapping[K, Any]) -> Generator[Mapping[K, Any], Any, None]:
    """
    Yield a subset of mappings by keeping only the specified keys.

    Args:
        keys (Iterable[K]): The keys to keep in the mappings.
        *mappings (Mapping[K, Any]): Variable number of mappings to filter.

    Yields:
        Generator[Mapping[K, Any], Any, None]: A generator of mappings with only the specified keys.
    """
    for mapping in mappings:
        yield _take(keys, mapping)
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def remove(keys: Iterable[K], *mappings: Mapping[K, Any]) -> Generator[Mapping[K, Any], Any, None]:
    """
    Yield a subset of mappings by removing the specified keys.

    Args:
        keys (Iterable[K]): The keys to remove from the mappings.
        *mappings (Mapping[K, Any]): Variable number of mappings to filter.

    Yields:
        Generator[Mapping[K, Any], Any, None]: A generator of mappings with specified keys removed.
    """
    for mapping in mappings:
        yield _take(keys, mapping, exclude=True)
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def inverse(mapping: Mapping[Any, set]) -> Mapping[Any, set]:
    """
    Return a new mapping with keys and values swapped from the input mapping.

    Each element of every value-set becomes a key, mapped to the set of
    original keys under which it appeared.

    Args:
        mapping (Mapping[Any, set]): The input mapping to invert.

    Returns:
        Mapping: A defaultdict(set) with values as keys and keys as values.
    """
    inverted = defaultdict(set)
    for key, values in mapping.items():
        for item in values:
            inverted[item].add(key)

    return inverted
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def nested_defaultdict(nesting_depth: int = 0, default_factory: Callable | None = None, **kwargs) -> defaultdict:
|
|
237
|
+
"""Return a nested defaultdict with the specified nesting depth and default factory.
|
|
238
|
+
A nested_defaultdict with nesting_depth=0 is equivalent to builtin 'collections.defaultdict'.
|
|
239
|
+
For each increment in nesting_depth an additional item accessor is added.
|
|
240
|
+
|
|
241
|
+
Args:
|
|
242
|
+
nesting_depth (int): The depth of nesting for the defaultdict (default is 0);
|
|
243
|
+
default_factory (Callable): The default factory function for the defaultdict (default is None).
|
|
244
|
+
**kwargs: Additional keyword arguments to initialize the most nested defaultdict.
|
|
245
|
+
|
|
246
|
+
Returns:
|
|
247
|
+
defaultdict: A nested defaultdict based on the specified parameters.
|
|
248
|
+
"""
|
|
249
|
+
|
|
250
|
+
if nesting_depth < 0:
|
|
251
|
+
raise ValueError("'nesting_depth' must be zero or more.")
|
|
252
|
+
|
|
253
|
+
if default_factory is not None and not callable(default_factory):
|
|
254
|
+
raise TypeError("default_factory argument must be Callable or None")
|
|
255
|
+
|
|
256
|
+
def factory():
|
|
257
|
+
if nesting_depth > 0:
|
|
258
|
+
return nested_defaultdict(nesting_depth=nesting_depth - 1, default_factory=default_factory, **kwargs)
|
|
259
|
+
else:
|
|
260
|
+
return default_factory() if default_factory else None
|
|
261
|
+
|
|
262
|
+
return defaultdict(factory, **kwargs)
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
def flattened(mapping: Mapping[Any, Any]) -> dict[tuple, Any]:
    """
    Flatten a nested mapping structure into a single-level dictionary.

    Keys along the nesting path are accumulated into tuples; a key that is
    itself a non-text iterable is spliced into the path element-wise.

    :param mapping: A nested mapping structure to be flattened.
    :return: A dictionary mapping key-path tuples to leaf values.
    """

    def walk(path: tuple, node: Any):
        if isinstance(node, Mapping):
            for key, value in node.items():
                next_path = (*path, *key) if _is_strict_iterable(key) else (*path, key)
                yield from walk(next_path, value)
        else:
            yield path, node

    return dict(walk((), mapping))
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def _is_strict_iterable(obj: Iterable) -> bool:
|
|
285
|
+
return isinstance(obj, Iterable) and not isinstance(obj, str | bytes | bytearray)
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def _is_class_instance(obj) -> bool:
|
|
289
|
+
return (dataclasses.is_dataclass(obj) and not isinstance(obj, type)) or hasattr(obj, '__dict__')
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
def _class_generator(obj):
|
|
293
|
+
yield from ((k, v) for k, v in inspect.getmembers(obj) if not k.startswith('_'))
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
class Processor:
    """
    A class to process objects recursively based on their type.

    Each handler receives the object, this processor (so handlers can recurse),
    and the extra *args/**kwargs supplied at construction. Objects are tracked
    by id() so that a circular reference is short-circuited instead of
    recursing forever.
    """

    def __init__(self,
                 mapping_handler: Callable | None = None,
                 iterable_handler: Callable | None = None,
                 class_handler: Callable | None = None,
                 default_handler: Callable | None = None,
                 *args,
                 **kwargs):
        """
        Initialize the Processor with optional handlers for different types of objects.

        Args:
            mapping_handler (Optional[Callable]): Handler for mapping objects.
            iterable_handler (Optional[Callable]): Handler for iterable objects.
            class_handler (Optional[Callable]): Handler for class instances.
            default_handler (Optional[Callable]): Default handler for other objects.
            *args: Additional positional arguments forwarded to the handlers.
            **kwargs: Additional keyword arguments forwarded to the handlers.
        """

        self.mapping_handler = mapping_handler
        self.iterable_handler = iterable_handler
        self.class_handler = class_handler
        self.default_handler = default_handler
        self.args = args
        self.kwargs = kwargs

        # objects_counter counts encounters per object id; objects memoizes
        # processed results. Leaf objects are evicted from the counter inside
        # _process, so only container-like objects persist here.
        self.objects_counter = Counter()
        self.objects = {}

    def __call__(self, obj: Any):
        """
        Process the given object using the appropriate handler.

        Args:
            obj (Any): The object to process.

        Returns:
            Any: The processed object.
        """
        obj_id = id(obj)
        self.objects_counter[obj_id] += 1
        if self.objects_counter[obj_id] == 1:
            # First encounter: dispatch to a handler and memoize the result.
            processed_obj = self._process(obj)
            self.objects[obj_id] = processed_obj
            return self.objects[obj_id]
        elif self.objects_counter[obj_id] == 2:
            # Second encounter: if processing of the first encounter has not
            # finished yet there is no memoized result, which indicates a
            # circular reference (CIRCULAR_REFERENCE is a module-level marker).
            return self.objects.get(obj_id, CIRCULAR_REFERENCE)
        # NOTE(review): a third or later encounter falls through and returns
        # None implicitly — confirm whether that is intentional.

    def _process(self, obj: Any):
        # Dispatch order: mapping, then generic (non-text) iterable, then class
        # instance, then the default handler, finally the object unchanged.
        if callable(self.mapping_handler) and isinstance(obj, Mapping):
            return self.mapping_handler(obj, self, *self.args, **self.kwargs)
        elif callable(self.iterable_handler) and _is_strict_iterable(obj):
            return self.iterable_handler(obj, self, *self.args, **self.kwargs)
        elif callable(self.class_handler) and _is_class_instance(obj):
            return self.class_handler(obj, self, *self.args, **self.kwargs)
        elif callable(self.default_handler):
            # Leaf object: drop its encounter count so a repeated leaf is not
            # mistaken for a circular reference on a later call.
            self.objects_counter.pop(id(obj))
            return self.default_handler(obj)
        else:
            self.objects_counter.pop(id(obj))
            return obj
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def _strictify_mapping(obj, processor, key_converter, value_converter):
|
|
365
|
+
return {
|
|
366
|
+
(key_converter(k) if key_converter else k): processor(value_converter(v) if value_converter else v)
|
|
367
|
+
for k, v in obj.items()}
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
def _strictify_iterable(obj, processor, key_converter, value_converter):
|
|
371
|
+
return [processor(value_converter(v) if value_converter else v) for v in obj]
|
|
372
|
+
|
|
373
|
+
|
|
374
|
+
def _strictify_class(obj, processor, key_converter, value_converter):
    """Convert an object's public members to a dict, converting keys/values and recursing."""
    result = {}
    for member_name, member_value in _class_generator(obj):
        new_key = key_converter(member_name) if key_converter else member_name
        new_value = value_converter(member_value) if value_converter else member_value
        result[new_key] = processor(new_value)
    return result
|
|
379
|
+
|
|
380
|
+
|
|
381
|
+
def strictify(obj: Any,
              key_converter: Callable[[Any], str] | None = None,
              value_converter: Callable[[Any], Any] | None = None) -> Any:
    """Applies strict structural conversion to the given object using optional specific converters for keys and values.

    Args:
        obj: The object to be converted.
        key_converter: A function to convert keys (optional).
        value_converter: A function to convert values (optional).

    Returns:
        The object content after applying the conversion.
    """
    # Handlers turn mappings/iterables/class instances into plain dicts/lists.
    handlers = {
        'mapping_handler': _strictify_mapping,
        'iterable_handler': _strictify_iterable,
        'class_handler': _strictify_class,
    }
    processor = Processor(key_converter=key_converter, value_converter=value_converter, **handlers)
    return processor(obj)
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def _listify_mapping(obj: Mapping, processor, key_name, value_name) -> list[dict]:
|
|
405
|
+
return [{key_name: k, value_name: processor(v)} for k, v in obj.items()]
|
|
406
|
+
|
|
407
|
+
|
|
408
|
+
def _listify_iterable(obj: Iterable, processor, key_name, value_name) -> list:
|
|
409
|
+
return [processor(v) for v in obj]
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
def _listify_class(obj, processor, key_name, value_name):
    """Turn an object's public members into a list of {key_name: name, value_name: processed value} records.

    Uses _class_generator for member discovery, consistent with
    _strictify_class and _stringify_class, instead of duplicating the
    inspect.getmembers filtering inline.
    """
    return [{key_name: k, value_name: processor(v)} for k, v in _class_generator(obj)]
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
def listify(obj: Any, key_name: str = 'key', value_name: str = 'value') -> Any:
    """
    Recursively convert mappings and class instances into lists of key/value records.

    Args:
        obj (Any): The object to convert.
        key_name (str): The key field name.
        value_name (str): The value field name.

    Returns:
        Any: The listified object.
    """
    # The Processor recurses; record field names are forwarded to each handler.
    return Processor(mapping_handler=_listify_mapping,
                     iterable_handler=_listify_iterable,
                     class_handler=_listify_class,
                     key_name=key_name,
                     value_name=value_name)(obj)
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
def simplify(obj: Any) -> Any:
    """Simplify recursively the given object into plain structures with stringified keys.

    Delegates to strictify with str as the key converter.

    Args:
        obj (Any): The object to be simplified.

    Returns:
        The simplified object.
    """
    return strictify(obj, key_converter=str)
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
def _stringify_kv_stream(iterable: Iterable[tuple[Any, Any]],
|
|
451
|
+
processor,
|
|
452
|
+
kv_delimiter,
|
|
453
|
+
item_delimiter,
|
|
454
|
+
key_converter,
|
|
455
|
+
*args,
|
|
456
|
+
**kwargs):
|
|
457
|
+
items = (f"{key_converter(k)}{kv_delimiter}{processor(v)}" for k, v in iterable)
|
|
458
|
+
return item_delimiter.join(items)
|
|
459
|
+
|
|
460
|
+
|
|
461
|
+
def _stringify_mapping(obj, *args, **kwargs):
    """Stringify a mapping by streaming its items through _stringify_kv_stream."""
    items = obj.items()
    return _stringify_kv_stream(items, *args, **kwargs)
|
|
463
|
+
|
|
464
|
+
|
|
465
|
+
def _stringify_iterable(obj, processor, kv_delimiter, item_delimiter, *args, **kwargs):
|
|
466
|
+
return f'[{item_delimiter.join(processor(v) for v in obj)}]'
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
def _stringify_class(obj, processor, kv_delimiter, item_delimiter, *args, **kwargs):
    """Stringify an object's public members as key/value pairs."""
    members = _class_generator(obj)
    return _stringify_kv_stream(members, processor, kv_delimiter, item_delimiter, *args, **kwargs)
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
def stringify(obj: Any, kv_delimiter: str = '=', item_delimiter: str = ', ') -> str:
    """Stringify recursively the given object.

    Args:
        obj (Any): The object to be stringified.
        kv_delimiter (str): The key-value delimiter. Defaults to '='.
        item_delimiter (str): The item delimiter. Defaults to ', '.

    Returns:
        str: The stringified object.
    """
    # str is both the default handler (leaves) and the key converter.
    processor = Processor(
        mapping_handler=_stringify_mapping,
        iterable_handler=_stringify_iterable,
        class_handler=_stringify_class,
        default_handler=str,
        kv_delimiter=kv_delimiter,
        item_delimiter=item_delimiter,
        key_converter=str,
    )
    return processor(obj)
|
|
494
|
+
|
|
495
|
+
|
|
496
|
+
def stream(mapping: Mapping, item_factory: Callable[[Any, Any], Any] | None = None) -> Generator[Any, Any, None]:
|
|
497
|
+
"""
|
|
498
|
+
Generate a stream of items from a mapping.
|
|
499
|
+
|
|
500
|
+
Args:
|
|
501
|
+
mapping (Mapping): The mapping object to stream items from.
|
|
502
|
+
item_factory (Callable[[Any, Any], Any], optional): A function that transforms each key-value pair from
|
|
503
|
+
the mapping. Defaults to None.
|
|
504
|
+
|
|
505
|
+
Yields:
|
|
506
|
+
The streamed items from the mapping.
|
|
507
|
+
"""
|
|
508
|
+
|
|
509
|
+
items = mapping.items() if item_factory is None else iter(item_factory(k, v) for k, v in mapping.items())
|
|
510
|
+
yield from items
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
def stream_dict_records(mapping: Mapping,
                        key_name: str = 'key',
                        value_name: str = 'value') -> Generator[Mapping[str, Any], Any, None]:
    """
    Generate dictionary records from a mapping.

    Args:
        mapping (Mapping): The input mapping to generate records from.
        key_name (str): The name to use for the key in the generated records. Defaults to 'key'.
        value_name (str): The name to use for the value in the generated records. Defaults to 'value'.

    Yields:
        dictionary records based on the input mapping.
    """
    # Each item becomes a two-field dict built by the stream item factory.
    yield from stream(mapping, lambda k, v: {key_name: k, value_name: v})
|
|
532
|
+
|
|
533
|
+
|
|
534
|
+
# Public names exported by wildcard import of this module.
__all__ = (
    'Category', 'CategoryCounter', 'MappingCollector', 'MappingCollectorMode', 'distinct', 'flattened', 'inverse',
    'keep', 'listify', 'nested_defaultdict', 'remove', 'simplify', 'stream', 'stream_dict_records', 'strictify',
    'stringify'
)
|