adiumentum 0.1.1__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. adiumentum-0.3.0/PKG-INFO +61 -0
  2. adiumentum-0.3.0/README.md +48 -0
  3. {adiumentum-0.1.1 → adiumentum-0.3.0}/pyproject.toml +60 -89
  4. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/__init__.py +35 -14
  5. adiumentum-0.3.0/src/adiumentum/dependency_sorting.py +84 -0
  6. adiumentum-0.3.0/src/adiumentum/display.py +49 -0
  7. adiumentum-0.3.0/src/adiumentum/functional.py +95 -0
  8. adiumentum-0.3.0/src/adiumentum/io_utils.py +63 -0
  9. adiumentum-0.3.0/src/adiumentum/markers.py +100 -0
  10. adiumentum-0.3.0/src/adiumentum/merge.py +113 -0
  11. adiumentum-0.3.0/src/adiumentum/paths_manager.py +19 -0
  12. adiumentum-0.3.0/src/adiumentum/pydantic_extensions.md +839 -0
  13. adiumentum-0.3.0/src/adiumentum/pydantic_extensions.py +410 -0
  14. adiumentum-0.1.1/src/adiumentum/string.py → adiumentum-0.3.0/src/adiumentum/string_utils.py +10 -2
  15. adiumentum-0.3.0/src/adiumentum/typing_utils.py +132 -0
  16. adiumentum-0.1.1/PKG-INFO +0 -236
  17. adiumentum-0.1.1/README.md +0 -224
  18. adiumentum-0.1.1/src/adiumentum/functional.py +0 -44
  19. adiumentum-0.1.1/src/adiumentum/io.py +0 -33
  20. adiumentum-0.1.1/src/adiumentum/markers.py +0 -117
  21. adiumentum-0.1.1/src/adiumentum/typing_utils.py +0 -19
  22. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/color.py +0 -0
  23. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/comparison.py +0 -0
  24. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/converters.py +0 -0
  25. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/elementary_types.py +0 -0
  26. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/exceptions.py +0 -0
  27. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/file_modification_time.py +0 -0
  28. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/frozendict.py +0 -0
  29. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/numerical.py +0 -0
  30. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/performance_logging.py +0 -0
  31. {adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/timestamping.py +0 -0
adiumentum-0.3.0/PKG-INFO
@@ -0,0 +1,61 @@
+ Metadata-Version: 2.3
+ Name: adiumentum
+ Version: 0.3.0
+ Summary:
+ Author: Isaac Riley
+ Author-email: Isaac Riley <yelircaasi@proton.me>
+ Requires-Dist: pydantic>=2.11
+ Requires-Dist: multipledispatch>=1
+ Requires-Dist: loguru>=0.7.3
+ Requires-Dist: datethyme>=0.4.0
+ Requires-Python: >=3.11, <3.15
+ Description-Content-Type: text/markdown
+
+ # adiumentum
+
+
+ With Nix installed, you can enter a development environment with all dependencies installed:
+
+ ```sh
+ nix develop
+ ```
+
+ Once in this dev shell, you have a number of development utils you can try out (via just):
+
+ ```sh
+ ✔just
+ ✔just format
+ ✔just check
+ ✔just fix
+ ✔just typecheck
+ ✔just lint
+ ✔just deal
+ ✔just vulture
+ ✔just pydeps-full
+ ✔just pydeps
+ ✔just pydeps-simple
+ ✔just view-deps
+ ✔just snakefood
+ ✔just deply
+ ✔just bandit
+ ✔just bandit-html
+ ✔just bandit-view
+ ✔just pyflame
+ ✔just flamegraph
+ ✔just perf-flamegraph
+ ✔just check-structure
+ ✔just check-imports
+ ✔just smoke
+ ✔just unit
+ ✔just test
+ ✔just test-cov
+ ✔just docs
+ ✔just scalene
+ ✔just view-cov
+ ✔just view-docs
+ ✔just view-flamegraphs
+ ✔just sbom
+
+ lefthook validate
+ lefthook run all
+ ```

adiumentum-0.3.0/README.md
@@ -0,0 +1,48 @@
+ # adiumentum
+
+
+ With Nix installed, you can enter a development environment with all dependencies installed:
+
+ ```sh
+ nix develop
+ ```
+
+ Once in this dev shell, you have a number of development utils you can try out (via just):
+
+ ```sh
+ ✔just
+ ✔just format
+ ✔just check
+ ✔just fix
+ ✔just typecheck
+ ✔just lint
+ ✔just deal
+ ✔just vulture
+ ✔just pydeps-full
+ ✔just pydeps
+ ✔just pydeps-simple
+ ✔just view-deps
+ ✔just snakefood
+ ✔just deply
+ ✔just bandit
+ ✔just bandit-html
+ ✔just bandit-view
+ ✔just pyflame
+ ✔just flamegraph
+ ✔just perf-flamegraph
+ ✔just check-structure
+ ✔just check-imports
+ ✔just smoke
+ ✔just unit
+ ✔just test
+ ✔just test-cov
+ ✔just docs
+ ✔just scalene
+ ✔just view-cov
+ ✔just view-docs
+ ✔just view-flamegraphs
+ ✔just sbom
+
+ lefthook validate
+ lefthook run all
+ ```

{adiumentum-0.1.1 → adiumentum-0.3.0}/pyproject.toml
@@ -1,16 +1,27 @@
  [project]
  name = "adiumentum"
- version = "0.1.1"
+ version = "0.3.0"
  description = ""
  authors = [{name = "Isaac Riley", email = "yelircaasi@proton.me"}]
  readme = "README.md"
- requires-python = ">=3.12,<3.14"
+ requires-python = ">=3.11,<3.15"
  dependencies = [
+     "pydantic >= 2.11 ",
      "multipledispatch >= 1 ",
      "loguru >= 0.7.3",
      "datethyme >= 0.4.0",
  ]

+ [project.scripts]
+ adiumentum = "adiumentum.__main__:main"
+
+ [build-system]
+ requires = ["uv_build>=0.7.5,<0.8.0"]
+ build-backend = "uv_build"
+
+ [tool.uv.build-backend]
+ module-name = "adiumentum"
+
  [dependency-groups]
  test = [
      "pytest >= 8.3 ",
@@ -57,10 +68,6 @@ cli-utils = [
      "ty >= 0.0.1a5",
  ]

- [build-system]
- requires = ["uv_build>=0.7.5,<0.8.0"]
- build-backend = "uv_build"
-
  [tool.bandit]
  targets = ["src/adiumentum", "codeqa/scripts/"]
  exclude_dirs = []
@@ -248,97 +255,61 @@ suppress-none-returning = true
  [tool.ruff.lint.pydocstyle]
  convention = "google"

- [tool.scripts.check_imports]
- primitive_modules = ["utils"] # the utils module may not import from any other module in the project (external imports ok)
- external_allowed_everywhere = []
- internal_allowed_everywhere = []
-
- [tool.scripts.check_imports.disallowed.internal] # for a given module, allow all internal imports except those explicitly named
- utils = ["utils"] # don’t let the project’s root module import directly from the utils module
-
- [tool.scripts.check_imports.disallowed.external] # for a given module, allow all internal imports except those explicitly named
- utils = ["pydantic"] # don’t let the utils module import from pydantic
+ [tool.structlint]
+ root_dir = "."
+ module_name = "consilium.notes"
+ module_root_dir = "src/consilium/notes"

- [tool.scripts.check_imports.allowed.internal] # for a given module, disallow all imports except those explicitly named and those allowed everywhere
-
- [tool.scripts.check_imports.allowed.external] # for a given module, disallow all imports except those explicitly named and those allowed everywhere
- utils = ["deal"]
+ [tool.structlint.docs]
+ md_dir = "docs/md"
+ allow_additional = "(?!)"
+ ignore = "(?!)"
+ order_ignore = "(?!)"
+ file_per_directory = "(?!)"
+ file_per_class = "(?!)"
+ replace_double_underscore = false

- [tool.scripts.check_method_order]
- init = 0
- abstract_property = 0.0401
- property = 0.041
- abstract_private_property = 0.042
- private_property = 0.003
- abstract_dunder = 0.8
- abstract_classmethod = 0.81
- abstract_static = 0.82
- abstract = 0.9
- abstract_private = 0.91
- dunder = 1
- classmethod = 2
- static = 3.5
- final = 3.9
- normal = 4
- private = 5
- mangled = 6
+ [tool.structlint.imports]
+ internal_allowed_everywhere = []
+ external_allowed_everywhere = []
+ grimp_cache = ".grimp_cache"

- [tool.scripts.check_method_order.regex]
- "@model_validator|model_validate" = 0
- "_pydantic_" = 0.01
- " adapter(" = 0.011
- "@field_validator" = 0.1
- "model_serializ|model_dump" = 0.00001
- "@field_serializer" = 0.3
- "__call__" = 0.99
- "check_.+" = 9
- " read[^ ]+(" = 3.98
- " write[^ ]+(" = 3.99
- "[^_][a-z_]+_hook$" = 9
+ [tool.structlint.imports.internal.allowed]
+ datamodels = []
+ utils = []
+ types = ["datamodels"]

- [tool.scripts.check_structure]
- checks = [
-     "tests",
-     "mkdocs",
- ]
- module_root_dir = "src/adiumentum"
+ [tool.structlint.imports.internal.disallowed]
+ conversion = ["datamodels"]

- [tool.scripts.check_structure.docs]
- allow_additional = "index"
- file_per_class = ""
- file_per_directory = "|utils"
- ignore = "exceptions.py|:_[A-Z]"
- keep_double_underscore = true
- md_dir = "docs/md"
+ [tool.structlint.methods.builtins_order]
+ init = 0.0
+ abstract_property = 1.0
+ property = 2.0
+ abstract_private_property = 3.0
+ private_property = 4.0
+ abstract_dunder = 5.0
+ dunder = 6.0
+ abstract_classmethod = 7.0
+ classmethod = 8.0
+ abstract = 9.0
+ final = 11.0
+ abstract_static = 12.0
+ static = 13.0
+ abstract_private = 14.0
+ private = 15.0
+ mangled = 16.0

- [tool.scripts.check_structure.tests]
- allow_additional = ".*"
- file_per_class = ""
- file_per_directory = "|utils"
- function_for_class = ""
- ignore = ":test__[A-Z]|__init__$|_abcs|exceptions|__get_pydantic_core_schema__"
- keep_double_underscore = true
+ [tool.structlint.tests]
  unit_dir = "tests/unit"
  use_filename_suffix = true
-
- [tool.tomlsort]
- all = false
- in_place = true
- no_comments = false
- no_header_comments = false
- no_footer_comments = false
- no_inline_comments = false
- no_block_comments = false
- no_sort_tables = true
- sort_first = ["key1", "key2"]
- sort_table_keys = false
- sort_inline_tables = false
- sort_inline_arrays = false
- spaces_before_inline_comment = 2
- spaces_indent_inline_array = 4
- trailing_comma_inline_array = true
- check = false
- ignore_case = false
+ allow_additional = "_tmp"
+ ignore = "(?!)"
+ order_ignore = "(?!)"
+ file_per_directory = "(?!)"
+ file_per_class = "(?!)"
+ function_per_class = "(?!)"
+ replace_double_underscore = false

  [tool.vulture]
  exclude = []

{adiumentum-0.1.1 → adiumentum-0.3.0}/src/adiumentum/__init__.py
@@ -11,15 +11,22 @@ from .file_modification_time import (
  from .frozendict import FrozenDefaultDict
  from .functional import (
      dmap,
+     endofilter,
+     endomap,
      fold_dictionaries,
      identity,
+     kfilter,
      kmap,
+     lfilter,
      lmap,
+     sfilter,
      smap,
+     tfilter,
      tmap,
+     vfilter,
      vmap,
  )
- from .io import (
+ from .io_utils import (
      list_full,
      read_json,
      read_raw,
@@ -28,20 +35,24 @@ from .io import (
      write_raw_bytes,
  )
  from .markers import (
-     helper,
+     endo,
      impure,
      mutates,
      mutates_and_returns_instance,
      mutates_instance,
      pure,
      refactor,
-     step_data,
-     step_transition,
-     validator,
+ )
+ from .merge import (
+     join_as_sequence,
+     make_hashable,
+     merge_dicts,
  )
  from .numerical import evenly_spaced, ihash, round5
- from .performance_logging import log_perf
- from .string import (
+ from .paths_manager import PathsManager
+ from .performance_logging import log_perf  # type: ignore
+ from .pydantic_extensions import BaseDict, BaseList, BaseSet
+ from .string_utils import (
      MixedValidated,
      PromptTypeName,
      as_json,
@@ -49,6 +60,7 @@ from .string import (
      flexsplit,
      indent_lines,
      parse_sequence,
+     re_split,
  )
  from .timestamping import insert_timestamp, make_timestamp
  from .typing_utils import (
@@ -61,55 +73,64 @@ DELIMITER = "᜶"

  __all__ = [
      "DELIMITER",
+     "BaseDict",
+     "BaseList",
+     "BaseSet",
      "Colorizer",
      "CustomValidationError",
      "FrozenDefaultDict",
      "MixedValidated",
-     "NoneDate",
-     "NoneTime",
+     "PathsManager",
      "PromptTypeName",
      "areinstances",
-     "args_to_dict",
      "as_json",
      "call_fallback_if_none",
      "cast_as",
      "dmap",
+     "endo",
+     "endofilter",
+     "endomap",
      "equal_within",
      "evenly_spaced",
      "fallback_if_none",
      "first_newer",
      "flexsplit",
      "fold_dictionaries",
-     "helper",
      "identity",
      "ihash",
      "impure",
      "indent_lines",
      "insert_timestamp",
+     "join_as_sequence",
+     "kfilter",
      "kmap",
+     "lfilter",
      "list_full",
      "lmap",
      "log_perf",
+     "make_hashable",
      "make_timestamp",
+     "merge_dicts",
      "mutates",
      "mutates_and_returns_instance",
      "mutates_instance",
      "nearly_equal",
      "parse_sequence",
      "pure",
+     "re_split",
      "read_json",
      "read_raw",
      "refactor",
      "round5",
+     "sfilter",
      "smap",
-     "step_data",
-     "step_transition",
+     "tfilter",
      "time_created",
      "time_created_readable",
      "time_modified",
      "time_modified_readable",
      "tmap",
-     "validator",
+     "vfilter",
      "vmap",
      "write_json",
      "write_raw",

adiumentum-0.3.0/src/adiumentum/dependency_sorting.py
@@ -0,0 +1,84 @@
+ # for tests
+ sample = [
+     {"id": "a", "priority": 0.4, "prerequisites": []},
+     {"id": "b", "priority": 0.7, "prerequisites": []},
+     {"id": "c", "priority": 0.4, "prerequisites": []},
+     {"id": "d", "priority": 0.3, "prerequisites": []},
+     {"id": "e", "priority": 0.8, "prerequisites": ["a", "b"]},
+     {"id": "f", "priority": 0.9, "prerequisites": []},
+     {"id": "g", "priority": 0.1, "prerequisites": ["i", "m"]},
+     {"id": "h", "priority": 0.5, "prerequisites": []},
+     {"id": "i", "priority": 0.45, "prerequisites": ["j"]},
+     {"id": "j", "priority": 0.3, "prerequisites": []},
+     {"id": "k", "priority": 0.8, "prerequisites": ["l", "m", "d"]},
+     {"id": "l", "priority": 0.6, "prerequisites": ["o"]},
+     {"id": "m", "priority": 0.9, "prerequisites": []},
+     {"id": "n", "priority": 0.4, "prerequisites": ["o"]},
+     {"id": "o", "priority": 0.2, "prerequisites": []},
+     {"id": "p", "priority": 0.5, "prerequisites": []},
+ ]
+
+
+ def dep_sort(task_list: list[dict]) -> list[dict]:
+     """
+     Sort list of dictionaries such that
+     1) dependency constraints are satisfied and
+     2) priority ordering is satisfied subject to (1)
+     """
+     # sort on priority to preserve priority order in the output
+     task_list.sort(key=lambda t: t["priority"], reverse=True)
+     task_ids = [t["id"] for t in task_list]
+
+     # important to make changes in reverse priority order so that insertion preserves priority order
+     dep_dict = {t["id"]: t["prerequisites"] for t in reversed(task_list) if t["prerequisites"]}
+     print(dep_dict)
+
+     def place_after(to_move: str, deps: list[str], id_list: list) -> bool:
+         if not deps:
+             return False
+         idx1 = id_list.index(to_move)
+         idx2 = max(map(id_list.index, deps))
+         if idx1 > idx2:
+             return False
+         id_list.pop(idx1)
+         id_list.insert(idx2, to_move)
+         return True
+
+     print(task_ids)
+     count = 0
+     maxcount = sum(range(len(task_ids) + 1))
+     while count < maxcount:
+         change_tracker = []
+         for task_id, task_deps in dep_dict.items():
+             changed = place_after(task_id, task_deps, task_ids)
+             change_tracker.append(changed)
+         unchanged = not any(change_tracker)
+         print(task_ids)
+         if unchanged:
+             return sorted(task_list, key=lambda t: task_ids.index(t["id"]))
+         count += 1
+
+     print("ERROR ---------------------------------------------------")
+     str(task_ids)
+     for task_id, task_deps in dep_dict.items():
+         changed = place_after(task_id, task_deps, task_ids)
+         if changed:
+             after = str(task_ids)
+             print(after)
+
+     raise ValueError("Graph contains a cycle.")
+
+ # levels = {t: 0 for t in task_ids}
+ # roots = list(filter(lambda t: t["depencies"] == [], task_list))
+ # depended_on = {t: set() for t in task_ids}
+ # for t in task_list:
+ # for d in t["prerequisites"]:
+ # depended_on[d].add(t)
+ # dict_rep = json.dumps(levels)
+ # new_rep = ""
+ # while new_rep != dict_rep:
+ # dict_rep = json.dumps(levels)
+ # for t in task_list:
+ # levels[t["id"]] = max([levels[d] for d in t["prerequisites"]]) + 1
+ # new_rep = json.dumps(levels)
+ # return sorted(task_list, key=lambda t: levels[t["id"]])

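The new `dep_sort` helper combines a descending priority sort with repeated reinsertion until every task sits after its prerequisites, raising on a cycle; the current implementation also prints intermediate state as it goes. A minimal usage sketch (not part of the package, shown only to illustrate the contract described in the docstring) against the module's own `sample` data:

```python
from adiumentum.dependency_sorting import dep_sort, sample

# dep_sort sorts its argument in place, so work on copies of the sample tasks
ordered = dep_sort([dict(task) for task in sample])

# every task should appear after all of its prerequisites
position = {task["id"]: i for i, task in enumerate(ordered)}
assert all(
    position[prereq] < position[task["id"]]
    for task in ordered
    for prereq in task["prerequisites"]
)
```
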
adiumentum-0.3.0/src/adiumentum/display.py
@@ -0,0 +1,49 @@
+ import re
+ from collections import defaultdict
+ from collections.abc import Callable
+
+
+ def display_counts(key: str, depth: int, dl: list[str]) -> None:
+     print(f"\n=== {key} ===")
+     for d in dl:
+         if key not in d:
+             print(d["name"])
+     cats = [d[key] for d in dl]
+     cats = [".".join(re.split(r"\.|, ", c)[:depth]) for c in cats]
+     counts = sorted([(cats.count(c), c) for c in sorted(set(cats))], reverse=True)
+     for count, item in counts:
+         print(f"{count:>4} {item}")
+
+
+ def print_tree(strings):
+     # Nested dictionary to hold tree structure
+     def tree() -> defaultdict:
+         return defaultdict(tree)
+
+     root = tree()
+
+     # Build the tree
+     for _string in strings:
+         parts = _string.split(".")
+         current_level = root
+         for part in parts:
+             current_level = current_level[part]
+
+     # Function to print the tree recursively
+     def print_subtree(node, prefix=""):
+         children = list(node.keys())
+         for i, child in enumerate(children):
+             is_last = i == len(children) - 1
+             if is_last:
+                 print(prefix + "└─ " + child)
+                 new_prefix = prefix + " "
+             else:
+                 print(prefix + "├─ " + child)
+                 new_prefix = prefix + "│ "
+             print_subtree(node[child], new_prefix)
+
+     # Print the root
+     print_subtree(root)
+
+
+ def wrap_line(line: str, length: int, formatter: Callable) -> str: ...

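A brief hypothetical call, not part of the package, showing how `print_tree` turns dotted names into nested levels; the exact glyph spacing follows the prefix strings in `print_subtree` above:

```python
from adiumentum.display import print_tree

# each dot-separated segment becomes one node in the printed tree
print_tree([
    "adiumentum.functional.lmap",
    "adiumentum.functional.vmap",
    "adiumentum.io_utils.read_json",
])
```
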
adiumentum-0.3.0/src/adiumentum/functional.py
@@ -0,0 +1,95 @@
+ from collections.abc import Callable, Hashable, Iterable
+ from functools import reduce
+ from typing import TypeVar, overload
+
+ T = TypeVar("T")
+ TPost = TypeVar("TPost")
+ TPre = TypeVar("TPre")
+ K = TypeVar("K", bound=Hashable)
+ V = TypeVar("V")
+ type Filterer[T] = Callable[[T], bool]
+
+
+ @overload
+ def endomap(callable_: Callable[[TPre], TPost], sequence: list[TPre]) -> list[TPost]: ...
+ @overload
+ def endomap(callable_: Callable[[TPre], TPost], sequence: set[TPre]) -> set[TPost]: ...
+ @overload
+ def endomap(
+     callable_: Callable[[TPre], TPost], sequence: tuple[TPre, ...]
+ ) -> tuple[TPost, ...]: ...
+
+
+ def endomap(callable_, sequence):
+     return type(sequence)(map(callable_, sequence))
+
+
+ @overload
+ def endofilter(callable_: Callable[[T], bool], sequence: list[T]) -> list[T]: ...
+ @overload
+ def endofilter(callable_: Callable[[T], bool], sequence: set[T]) -> set[T]: ...
+ @overload
+ def endofilter(callable_: Callable[[T], bool], sequence: tuple[T, ...]) -> tuple[T, ...]: ...
+
+
+ def endofilter(callable_, sequence):
+     return type(sequence)(filter(callable_, sequence))
+
+
+ def lmap(callable_: Callable[[TPre], TPost], iterable: Iterable[TPre]) -> list[TPost]:
+     return list(map(callable_, iterable))
+
+
+ def smap(callable_: Callable[[TPre], TPost], iterable: Iterable[TPre]) -> set[TPost]:
+     return set(map(callable_, iterable))
+
+
+ def tmap(callable_: Callable[[TPre], TPost], iterable: Iterable[TPre]) -> tuple[TPost, ...]:
+     return tuple(map(callable_, iterable))
+
+
+ def vmap(callable_: Callable[[TPre], TPost], dictionary: dict[K, TPre]) -> dict[K, TPost]:
+     return {k: callable_(v) for k, v in dictionary.items()}
+
+
+ def kmap(callable_: Callable[[TPre], TPost], dictionary: dict[TPre, V]) -> dict[TPost, V]:
+     return {callable_(k): v for k, v in dictionary.items()}
+
+
+ def dmap(callable_: Callable[[TPre], TPost], dictionary: dict[TPre, TPre]) -> dict[TPost, TPost]:
+     return {callable_(k): callable_(v) for k, v in dictionary.items()}
+
+
+ def lfilter(filterer: Filterer[T], iterable: Iterable[T]) -> list[T]:
+     return list(filter(filterer, iterable))
+
+
+ def sfilter(filterer: Filterer[T], iterable: Iterable[T]) -> set[T]:
+     return set(filter(filterer, iterable))
+
+
+ def tfilter(filterer: Filterer[T], iterable: Iterable[T]) -> tuple[T, ...]:
+     return tuple(filter(filterer, iterable))
+
+
+ def vfilter(filterer: Filterer[V], dictionary: dict[K, V]) -> dict[K, V]:
+     return {k: v for k, v in dictionary.items() if filterer(v)}
+
+
+ def kfilter(filterer: Filterer[K], dictionary: dict[K, V]) -> dict[K, V]:
+     return {k: v for k, v in dictionary.items() if filterer(k)}
+
+
+ def dfilter(filterer: Filterer[T], dictionary: dict[T, T]) -> dict[T, T]:
+     return {k: v for k, v in dictionary.items() if filterer(k) and filterer(v)}
+
+
+ def identity[T](x: T) -> T:
+     return x
+
+
+ def fold_dictionaries[K, V](dicts: Iterable[dict[K, V]]) -> dict[K, V]:
+     def _or(dict1: dict[K, V], dict2: dict[K, V]) -> dict[K, V]:
+         return dict1 | dict2
+
+     return reduce(_or, dicts)

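The map/filter helpers above are thin wrappers that fix the return container. A short illustrative sketch; the results follow directly from the definitions in the diff, and the snippet itself is not part of the package:

```python
from adiumentum import endomap, fold_dictionaries, lmap, vfilter

endomap(str.upper, ("a", "b"))                      # ("A", "B") -- keeps the input's container type
lmap(len, ["spam", "eggs", "ni"])                   # [4, 4, 2]
vfilter(lambda v: v > 0, {"x": 1, "y": -2})         # {"x": 1}
fold_dictionaries([{"a": 1}, {"b": 2}, {"a": 3}])   # {"a": 3, "b": 2} -- later dicts win
```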