lionagi 0.16.2__py3-none-any.whl → 0.16.3__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between those versions.
Files changed (44)
  1. lionagi/adapters/_utils.py +0 -14
  2. lionagi/ln/__init__.py +4 -0
  3. lionagi/ln/fuzzy/__init__.py +4 -1
  4. lionagi/ln/fuzzy/_fuzzy_validate.py +109 -0
  5. lionagi/ln/fuzzy/_to_dict.py +388 -0
  6. lionagi/models/__init__.py +0 -2
  7. lionagi/operations/communicate/communicate.py +1 -1
  8. lionagi/operations/parse/parse.py +1 -1
  9. lionagi/protocols/generic/pile.py +1 -1
  10. lionagi/protocols/operatives/operative.py +2 -2
  11. lionagi/service/connections/match_endpoint.py +2 -10
  12. lionagi/service/connections/providers/types.py +1 -3
  13. lionagi/service/hooks/hook_event.py +1 -1
  14. lionagi/service/hooks/hook_registry.py +1 -1
  15. lionagi/service/rate_limited_processor.py +1 -1
  16. lionagi/utils.py +3 -335
  17. lionagi/version.py +1 -1
  18. {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/METADATA +3 -12
  19. {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/RECORD +21 -43
  20. lionagi/adapters/postgres_model_adapter.py +0 -131
  21. lionagi/libs/concurrency.py +0 -1
  22. lionagi/libs/nested/__init__.py +0 -3
  23. lionagi/libs/nested/flatten.py +0 -172
  24. lionagi/libs/nested/nfilter.py +0 -59
  25. lionagi/libs/nested/nget.py +0 -45
  26. lionagi/libs/nested/ninsert.py +0 -104
  27. lionagi/libs/nested/nmerge.py +0 -158
  28. lionagi/libs/nested/npop.py +0 -69
  29. lionagi/libs/nested/nset.py +0 -94
  30. lionagi/libs/nested/unflatten.py +0 -83
  31. lionagi/libs/nested/utils.py +0 -189
  32. lionagi/libs/parse.py +0 -31
  33. lionagi/libs/schema/json_schema.py +0 -231
  34. lionagi/libs/unstructured/__init__.py +0 -0
  35. lionagi/libs/unstructured/pdf_to_image.py +0 -45
  36. lionagi/libs/unstructured/read_image_to_base64.py +0 -33
  37. lionagi/libs/validate/fuzzy_match_keys.py +0 -7
  38. lionagi/libs/validate/fuzzy_validate_mapping.py +0 -144
  39. lionagi/libs/validate/string_similarity.py +0 -7
  40. lionagi/libs/validate/xml_parser.py +0 -203
  41. lionagi/models/note.py +0 -387
  42. lionagi/service/connections/providers/claude_code_.py +0 -299
  43. {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/WHEEL +0 -0
  44. {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/licenses/LICENSE +0 -0
--- a/lionagi/adapters/postgres_model_adapter.py
+++ /dev/null
@@ -1,131 +0,0 @@
-"""
-Clean LionAGI PostgreSQL adapter for integration into lionagi core.
-
-This adapter handles the metadata field conflict and provides seamless
-PostgreSQL persistence for lionagi Nodes.
-"""
-
-from __future__ import annotations
-
-from typing import Union, get_args, get_origin
-
-from pydantic import BaseModel
-
-from ._utils import check_postgres_available
-
-_POSTGRES_AVAILABLE = check_postgres_available()
-if isinstance(_POSTGRES_AVAILABLE, ImportError):
-    raise _POSTGRES_AVAILABLE
-
-from pydapter.model_adapters.postgres_model import PostgresModelAdapter
-from sqlalchemy import String
-from sqlalchemy.orm import DeclarativeBase
-
-
-class LionAGIPostgresAdapter(PostgresModelAdapter):
-    """
-    PostgreSQL adapter for lionagi Nodes with automatic metadata field mapping.
-
-    Solves the core issue where lionagi's 'metadata' field conflicts with
-    SQLAlchemy's reserved 'metadata' attribute by automatically mapping it
-    to 'node_metadata' in the database schema.
-
-    Features:
-    - Automatic metadata field mapping (metadata → node_metadata)
-    - Handles Union types like list[float] | None for embedding fields
-    - Preserves all PostgreSQL-specific type support from parent
-    - Transparent to lionagi users - Elements work seamlessly
-    """
-
-    # Core field mapping to resolve SQLAlchemy conflicts
-    FIELD_MAPPINGS = {"metadata": "node_metadata"}
-
-    def __init__(self):
-        super().__init__()
-        self._register_lionagi_types()
-
-    def _register_lionagi_types(self):
-        """Register lionagi-specific type mappings."""
-        try:
-            # Handle lionagi IDType as String (UUID)
-            from lionagi.protocols.generic.element import IDType
-
-            self.register_type_mapping(
-                python_type=IDType,
-                sql_type_factory=lambda: String(36),  # UUID string length
-                python_to_sql=lambda x: str(x),
-                sql_to_python=lambda x: IDType.validate(x) if x else None,
-            )
-        except ImportError:
-            pass  # lionagi not available
-
-    @classmethod
-    def pydantic_model_to_sql(
-        cls,
-        model: type[BaseModel],
-        *,
-        table_name: str | None = None,
-        pk_field: str = "id",
-        schema: str | None = None,
-    ) -> type[DeclarativeBase]:
-        """
-        Generate SQLAlchemy model with lionagi field mapping.
-
-        Automatically handles:
-        - metadata → node_metadata mapping
-        - Union type resolution (e.g., list[float] | None → list[float])
-        - Standard lionagi Node field structure
-        """
-
-        # Create modified field mapping for lionagi compatibility
-        modified_fields = {}
-
-        for name, info in model.model_fields.items():
-            # Apply field name mapping
-            field_name = cls.FIELD_MAPPINGS.get(name, name)
-
-            # Resolve Union types by extracting non-None type
-            annotation = info.annotation
-            origin = get_origin(annotation)
-
-            if origin is Union or (
-                hasattr(annotation, "__class__")
-                and annotation.__class__.__name__ == "UnionType"
-            ):
-                args = get_args(annotation)
-                non_none_args = [arg for arg in args if arg is not type(None)]
-                if len(non_none_args) == 1:
-                    annotation = non_none_args[0]
-
-            # Create field info with resolved annotation
-            from pydantic.fields import FieldInfo
-
-            modified_fields[field_name] = FieldInfo(
-                annotation=annotation,
-                default=info.default,
-                default_factory=info.default_factory,
-                alias=info.alias,
-                title=info.title,
-                description=info.description,
-                json_schema_extra=info.json_schema_extra,
-                frozen=info.frozen,
-                validate_default=info.validate_default,
-                repr=info.repr,
-                init_var=info.init_var,
-                kw_only=info.kw_only,
-            )
-
-        # Create temporary model with mapped fields
-        class ModifiedModel(BaseModel):
-            model_config = getattr(model, "model_config", {})
-
-        ModifiedModel.model_fields = modified_fields
-        ModifiedModel.__name__ = model.__name__
-
-        # Generate SQLAlchemy model with parent's logic
-        return super().pydantic_model_to_sql(
-            ModifiedModel,
-            table_name=table_name,
-            pk_field=pk_field,
-            schema=schema,
-        )
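The core of the removed adapter is the field-resolution step in `pydantic_model_to_sql`: rename `metadata` to `node_metadata` (to avoid SQLAlchemy's reserved attribute) and unwrap `X | None` annotations to their concrete type before columns are generated. Below is a minimal, self-contained sketch of just that step; it does not touch pydapter or SQLAlchemy, and the `Node` model here is a hypothetical stand-in, not lionagi's actual Node class.

```python
from typing import Union, get_args, get_origin

from pydantic import BaseModel

FIELD_MAPPINGS = {"metadata": "node_metadata"}  # sidestep SQLAlchemy's reserved name


class Node(BaseModel):
    """Hypothetical stand-in for a lionagi Node-like model."""

    id: str
    content: str | None = None
    metadata: dict | None = None
    embedding: list[float] | None = None


def resolved_columns(model: type[BaseModel]) -> dict[str, object]:
    """Rename mapped fields and unwrap `X | None` annotations to `X`."""
    columns: dict[str, object] = {}
    for name, info in model.model_fields.items():
        column_name = FIELD_MAPPINGS.get(name, name)
        annotation = info.annotation
        # PEP 604 unions (`str | None`) are types.UnionType; older style is typing.Union
        if get_origin(annotation) is Union or type(annotation).__name__ == "UnionType":
            non_none = [a for a in get_args(annotation) if a is not type(None)]
            if len(non_none) == 1:
                annotation = non_none[0]
        columns[column_name] = annotation
    return columns


print(resolved_columns(Node))
# e.g. {'id': str, 'content': str, 'node_metadata': dict, 'embedding': list[float]}
```

The deleted adapter then handed the renamed, unwrapped fields to pydapter's PostgresModelAdapter to build the actual SQLAlchemy model.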
--- a/lionagi/libs/concurrency.py
+++ /dev/null
@@ -1 +0,0 @@
-from ..ln.concurrency import *  # backward compatibility
--- a/lionagi/libs/nested/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
--- a/lionagi/libs/nested/flatten.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from collections import deque
-from collections.abc import Mapping, Sequence
-from typing import Any, Literal, TypeVar, overload
-
-T = TypeVar("T")
-
-
-@overload
-def flatten(
-    nested_structure: T,
-    /,
-    *,
-    parent_key: tuple = (),
-    sep: str = "|",
-    coerce_keys: Literal[True] = True,
-    dynamic: bool = True,
-    coerce_sequence: Literal["dict", None] = None,
-    max_depth: int | None = None,
-) -> dict[str, Any] | None: ...
-
-
-@overload
-def flatten(
-    nested_structure: T,
-    /,
-    *,
-    parent_key: tuple = (),
-    sep: str = "|",
-    coerce_keys: Literal[False],
-    dynamic: bool = True,
-    coerce_sequence: Literal["dict", "list", None] = None,
-    max_depth: int | None = None,
-) -> dict[tuple, Any] | None: ...
-
-
-def flatten(
-    nested_structure: Any,
-    /,
-    *,
-    parent_key: tuple = (),
-    sep: str = "|",
-    coerce_keys: bool = True,
-    dynamic: bool = True,
-    coerce_sequence: Literal["dict", "list"] | None = None,
-    max_depth: int | None = None,
-) -> dict[tuple | str, Any] | None:
-    """Flatten a nested structure into a single-level dictionary.
-
-    Recursively traverses the input, creating keys that represent the path
-    to each value in the flattened result.
-
-    Args:
-        nested_structure: The nested structure to flatten.
-        parent_key: Base key for the current recursion level. Default: ().
-        sep: Separator for joining keys. Default: "|".
-        coerce_keys: Join keys into strings if True, keep as tuples if False.
-            Default: True.
-        dynamic: Handle sequences (except strings) dynamically if True.
-            Default: True.
-        coerce_sequence: Force sequences to be treated as dicts or lists.
-            Options: "dict", "list", or None. Default: None.
-        max_depth: Maximum depth to flatten. None for complete flattening.
-            Default: None.
-
-    Returns:
-        A flattened dictionary with keys as tuples or strings (based on
-        coerce_keys) representing the path to each value.
-
-    Raises:
-        ValueError: If coerce_sequence is "list" and coerce_keys is True.
-
-    Example:
-        >>> nested = {"a": 1, "b": {"c": 2, "d": [3, 4]}}
-        >>> flatten(nested)
-        {'a': 1, 'b|c': 2, 'b|d|0': 3, 'b|d|1': 4}
-
-    Note:
-        - Preserves order of keys in dicts and indices in sequences.
-        - With dynamic=True, treats sequences (except strings) as nestable.
-        - coerce_sequence allows forcing sequence handling for homogeneity.
-    """
-
-    if coerce_keys and coerce_sequence == "list":
-        raise ValueError(
-            "coerce_sequence cannot be 'list' when coerce_keys is True"
-        )
-
-    coerce_sequence_to_list = None
-    coerce_sequence_to_dict = None
-
-    if dynamic and coerce_sequence:
-        if coerce_sequence == "dict":
-            coerce_sequence_to_dict = True
-        elif coerce_sequence == "list":
-            coerce_sequence_to_list = True
-
-    return _flatten_iterative(
-        obj=nested_structure,
-        parent_key=parent_key,
-        sep=sep,
-        coerce_keys=coerce_keys,
-        dynamic=dynamic,
-        coerce_sequence_to_list=coerce_sequence_to_list,
-        coerce_sequence_to_dict=coerce_sequence_to_dict,
-        max_depth=max_depth,
-    )
-
-
-def _flatten_iterative(
-    obj: Any,
-    parent_key: tuple,
-    sep: str,
-    coerce_keys: bool,
-    dynamic: bool,
-    coerce_sequence_to_list: bool = False,
-    coerce_sequence_to_dict: bool = False,
-    max_depth: int | None = None,
-) -> dict[tuple | str, Any]:
-    stack = deque([(obj, parent_key, 0)])
-    result = {}
-
-    while stack:
-        current_obj, current_key, depth = stack.pop()
-
-        if max_depth is not None and depth >= max_depth:
-            result[_format_key(current_key, sep, coerce_keys)] = current_obj
-            continue
-
-        if isinstance(current_obj, Mapping):
-            for k, v in current_obj.items():
-                new_key = current_key + (k,)
-                if (
-                    v
-                    and isinstance(v, (Mapping, Sequence))
-                    and not isinstance(v, (str, bytes, bytearray))
-                ):
-                    stack.appendleft((v, new_key, depth + 1))
-                else:
-                    result[_format_key(new_key, sep, coerce_keys)] = v
-
-        elif (
-            dynamic
-            and isinstance(current_obj, Sequence)
-            and not isinstance(current_obj, (str, bytes, bytearray))
-        ):
-            if coerce_sequence_to_dict:
-                dict_obj = {str(i): v for i, v in enumerate(current_obj)}
-                for k, v in dict_obj.items():
-                    new_key = current_key + (k,)
-                    stack.appendleft((v, new_key, depth + 1))
-            elif coerce_sequence_to_list:
-                for i, v in enumerate(current_obj):
-                    new_key = current_key + (i,)
-                    stack.appendleft((v, new_key, depth + 1))
-            else:
-                for i, v in enumerate(current_obj):
-                    new_key = current_key + (str(i),)
-                    stack.appendleft((v, new_key, depth + 1))
-        else:
-            result[_format_key(current_key, sep, coerce_keys)] = current_obj
-
-    return result
-
-
-def _format_key(key: tuple, sep: str, coerce_keys: bool, /) -> tuple | str:
-    if not key:
-        return key
-    return sep.join(map(str, key)) if coerce_keys else key
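The removed flatten avoided recursion: `_flatten_iterative` walks the structure with a deque, pushing child containers back onto the stack and recording leaves under their joined key path. A minimal self-contained sketch of that approach, reproducing the docstring example above (it omits the coerce_keys, coerce_sequence, and max_depth options):

```python
from collections import deque
from collections.abc import Mapping, Sequence
from typing import Any


def flatten_simple(obj: Any, sep: str = "|") -> dict[str, Any]:
    """Stack-based flattening with string keys joined by `sep`."""
    stack = deque([(obj, ())])  # (value, key-path) pairs still to expand
    flat: dict[str, Any] = {}
    while stack:
        current, path = stack.pop()
        if isinstance(current, Mapping) and current:
            for k, v in current.items():
                stack.appendleft((v, path + (str(k),)))
        elif (
            isinstance(current, Sequence)
            and not isinstance(current, (str, bytes, bytearray))
            and current
        ):
            for i, v in enumerate(current):
                stack.appendleft((v, path + (str(i),)))
        else:
            flat[sep.join(path)] = current  # leaf (or empty container)
    return flat


print(flatten_simple({"a": 1, "b": {"c": 2, "d": [3, 4]}}))
# {'a': 1, 'b|c': 2, 'b|d|0': 3, 'b|d|1': 4}
```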
--- a/lionagi/libs/nested/nfilter.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from collections.abc import Callable
-from typing import Any
-
-
-def nfilter(
-    nested_structure: dict[Any, Any] | list[Any],
-    /,
-    condition: Callable[[Any], bool],
-) -> dict[Any, Any] | list[Any]:
-    """Filter elements in a nested structure based on a condition.
-
-    Args:
-        nested_structure: The nested structure (dict or list) to filter.
-        condition: Function returning True for elements to keep, False to
-            discard.
-
-    Returns:
-        The filtered nested structure.
-
-    Raises:
-        TypeError: If nested_structure is not a dict or list.
-
-    Example:
-        >>> data = {"a": 1, "b": {"c": 2, "d": 3}, "e": [4, 5, 6]}
-        >>> nfilter(data, lambda x: isinstance(x, int) and x > 2)
-        {'b': {'d': 3}, 'e': [4, 5, 6]}
-    """
-    if isinstance(nested_structure, dict):
-        return _filter_dict(nested_structure, condition)
-    elif isinstance(nested_structure, list):
-        return _filter_list(nested_structure, condition)
-    else:
-        raise TypeError(
-            "The nested_structure must be either a dict or a list."
-        )
-
-
-def _filter_dict(
-    dictionary: dict[Any, Any], condition: Callable[[tuple[Any, Any]], bool]
-) -> dict[Any, Any]:
-    return {
-        k: nfilter(v, condition) if isinstance(v, dict | list) else v
-        for k, v in dictionary.items()
-        if condition(v) or isinstance(v, dict | list)
-    }
-
-
-def _filter_list(
-    lst: list[Any], condition: Callable[[Any], bool]
-) -> list[Any]:
-    return [
-        nfilter(item, condition) if isinstance(item, dict | list) else item
-        for item in lst
-        if condition(item) or isinstance(item, dict | list)
-    ]
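Note that the condition is applied to values, while nested containers are always kept and filtered recursively. A usage sketch against lionagi 0.16.2, where this module still exists (it is deleted in 0.16.3):

```python
from lionagi.libs.nested.nfilter import nfilter

data = {"a": 1, "b": {"c": 2, "d": 3}, "e": [4, 5, 6]}

# Leaf values are tested against the condition; dicts/lists are retained and
# filtered recursively, so "a" is dropped while "b" keeps only "d".
print(nfilter(data, lambda x: isinstance(x, int) and x > 2))
# {'b': {'d': 3}, 'e': [4, 5, 6]}
```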
--- a/lionagi/libs/nested/nget.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from typing import Any
-
-from lionagi.utils import UNDEFINED
-
-from .utils import get_target_container
-
-
-def nget(
-    nested_structure: dict[Any, Any] | list[Any],
-    /,
-    indices: list[int | str],
-    default: Any = UNDEFINED,
-) -> Any:
-    try:
-        target_container = get_target_container(nested_structure, indices[:-1])
-        last_index = indices[-1]
-
-        if (
-            isinstance(target_container, list)
-            and isinstance(last_index, int)
-            and last_index < len(target_container)
-        ):
-            return target_container[last_index]
-        elif (
-            isinstance(target_container, dict)
-            and last_index in target_container
-        ):
-            return target_container[last_index]
-        elif default is not UNDEFINED:
-            return default
-        else:
-            raise LookupError(
-                "Target not found and no default value provided."
-            )
-    except (IndexError, KeyError, TypeError):
-        if default is not UNDEFINED:
-            return default
-        else:
-            raise LookupError(
-                "Target not found and no default value provided."
-            )
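nget resolves a path of keys and indices and either returns the value, falls back to `default`, or raises LookupError when no default is given. A usage sketch against lionagi 0.16.2 (the module, along with its `get_target_container` helper from the also-deleted utils.py, is gone in 0.16.3):

```python
from lionagi.libs.nested.nget import nget

data = {"a": {"b": [10, 20, 30]}}

print(nget(data, ["a", "b", 1]))           # 20
print(nget(data, ["a", "x"], default=0))   # 0 -- missing key, default supplied
# nget(data, ["a", "x"]) with no default raises LookupError instead.
```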
--- a/lionagi/libs/nested/ninsert.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from typing import Any
-
-from lionagi.utils import to_list
-
-
-def ninsert(
-    nested_structure: dict[Any, Any] | list[Any],
-    /,
-    indices: list[str | int],
-    value: Any,
-    *,
-    current_depth: int = 0,
-) -> None:
-    """
-    Inserts a value into a nested structure at a specified path.
-
-    Navigates a nested dictionary or list based on a sequence of indices or
-    keys and inserts `value` at the final location. This method can create
-    intermediate dictionaries or lists as needed.
-
-    Args:
-        nested_structure: The nested structure to modify.
-        indices: The sequence of keys or indices defining the insertion path.
-        value: The value to insert at the specified location.
-        current_depth: Internal use only; tracks the current depth during
-            recursive calls.
-
-    Raises:
-        ValueError: If the indices list is empty.
-        TypeError: If an invalid key or container type is encountered.
-
-    Examples:
-        >>> subject_ = {'a': {'b': [1, 2]}}
-        >>> ninsert(subject_, ['a', 'b', 2], 3)
-        >>> assert subject_ == {'a': {'b': [1, 2, 3]}}
-
-        >>> subject_ = []
-        >>> ninsert(subject_, [0, 'a'], 1)
-        >>> assert subject_ == [{'a': 1}]
-    """
-    if not indices:
-        raise ValueError("Indices list cannot be empty")
-
-    indices = to_list(indices)
-    for i, part in enumerate(indices[:-1]):
-        if isinstance(part, int):
-            if isinstance(nested_structure, dict):
-                raise TypeError(
-                    f"Unsupported key type: {type(part).__name__}.Only string keys are acceptable.",
-                )
-            while len(nested_structure) <= part:
-                nested_structure.append(None)
-            if nested_structure[part] is None or not isinstance(
-                nested_structure[part], (dict, list)
-            ):
-                next_part = indices[i + 1]
-                nested_structure[part] = (
-                    [] if isinstance(next_part, int) else {}
-                )
-        elif isinstance(nested_structure, dict):
-            if part is None:
-                raise TypeError("Cannot use NoneType as a key in a dictionary")
-            if isinstance(part, (float, complex)):
-                raise TypeError(
-                    f"Unsupported key type: {type(part).__name__}.Only string keys are acceptable.",
-                )
-            if part not in nested_structure:
-                next_part = indices[i + 1]
-                nested_structure[part] = (
-                    [] if isinstance(next_part, int) else {}
-                )
-        else:
-            raise TypeError(
-                f"Invalid container type: {type(nested_structure)} encountered during insertion"
-            )
-
-        nested_structure = nested_structure[part]
-        current_depth += 1
-
-    last_part = indices[-1]
-    if isinstance(last_part, int):
-        if isinstance(nested_structure, dict):
-            raise TypeError(
-                f"Unsupported key type: {type(last_part).__name__}."
-                "Only string keys are acceptable.",
-            )
-        while len(nested_structure) <= last_part:
-            nested_structure.append(None)
-        nested_structure[last_part] = value
-    elif isinstance(nested_structure, list):
-        raise TypeError("Cannot use non-integer index on a list")
-    else:
-        if last_part is None:
-            raise TypeError("Cannot use NoneType as a key in a dictionary")
-        if isinstance(last_part, (float, complex)):
-            raise TypeError(
-                f"Unsupported key type: {type(last_part).__name__}."
-                "Only string keys are acceptable.",
-            )
-        nested_structure[last_part] = value
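The docstring examples above cover the basic behavior; the other detail worth showing is that intermediate containers are created on demand, with an int index implying a list and any other key implying a dict. A usage sketch against lionagi 0.16.2 (this module is deleted in 0.16.3):

```python
from lionagi.libs.nested.ninsert import ninsert

data = {"a": {"b": [1, 2]}}
ninsert(data, ["a", "b", 2], 3)            # extend the existing list at index 2
assert data == {"a": {"b": [1, 2, 3]}}

data = {}
ninsert(data, ["x", 0, "y"], 5)            # "x" -> new list, 0 -> new dict, then "y" = 5
assert data == {"x": [{"y": 5}]}
```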
--- a/lionagi/libs/nested/nmerge.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from collections import defaultdict
-from collections.abc import Callable, Sequence
-from itertools import chain
-from typing import Any
-
-from .utils import is_homogeneous
-
-
-def nmerge(
-    nested_structure: Sequence[dict[str, Any] | list[Any]],
-    /,
-    *,
-    overwrite: bool = False,
-    dict_sequence: bool = False,
-    sort_list: bool = False,
-    custom_sort: Callable[[Any], Any] | None = None,
-) -> dict[str, Any] | list[Any]:
-    """
-    Merge multiple dictionaries, lists, or sequences into a unified structure.
-
-    Args:
-        nested_structure: A sequence containing dictionaries, lists, or other
-            iterable objects to merge.
-        overwrite: If True, overwrite existing keys in dictionaries with
-            those from subsequent dictionaries.
-        dict_sequence: Enables unique key generation for duplicate keys by
-            appending a sequence number. Applicable only if `overwrite` is
-            False.
-        sort_list: When True, sort the resulting list after merging. It does
-            not affect dictionaries.
-        custom_sort: An optional callable that defines custom sorting logic
-            for the merged list.
-
-    Returns:
-        A merged dictionary or list, depending on the types present in
-        `nested_structure`.
-
-    Raises:
-        TypeError: If `nested_structure` contains objects of incompatible
-            types that cannot be merged.
-    """
-    if not isinstance(nested_structure, list):
-        raise TypeError("Please input a list")
-    if is_homogeneous(nested_structure, dict):
-        return _merge_dicts(nested_structure, overwrite, dict_sequence)
-    elif is_homogeneous(nested_structure, list):
-        return _merge_sequences(nested_structure, sort_list, custom_sort)
-    else:
-        raise TypeError(
-            "All items in the input list must be of the same type, either dict, list, or Iterable."
-        )
-
-
-def _deep_merge_dicts(
-    dict1: dict[str, Any], dict2: dict[str, Any]
-) -> dict[str, Any]:
-    """
-    Recursively merges two dictionaries, combining values where keys overlap.
-
-    Args:
-        dict1: The first dictionary.
-        dict2: The second dictionary.
-
-    Returns:
-        The merged dictionary.
-    """
-    for key in dict2:
-        if key in dict1:
-            if isinstance(dict1[key], dict) and isinstance(dict2[key], dict):
-                _deep_merge_dicts(dict1[key], dict2[key])
-            else:
-                if not isinstance(dict1[key], list):
-                    dict1[key] = [dict1[key]]
-                dict1[key].append(dict2[key])
-        else:
-            dict1[key] = dict2[key]
-    return dict1
-
-
-def _merge_dicts(
-    iterables: list[dict[str, Any]],
-    dict_update: bool,
-    dict_sequence: bool,
-) -> dict[str, Any]:
-    """
-    Merges a list of dictionaries into a single dictionary, with options for
-    handling duplicate keys and sequences.
-
-    Args:
-        iterables: A list of dictionaries to merge.
-        dict_update: If True, overwrite existing keys in dictionaries
-            with those from subsequent dictionaries.
-        dict_sequence: Enables unique key generation for duplicate keys
-            by appending a sequence number
-
-    Returns:
-        The merged dictionary.
-    """
-    merged_dict = {}  # {'a': [1, 2]}
-    sequence_counters = defaultdict(int)
-    list_values = {}
-
-    for d in iterables:  # [{'a': [1, 2]}, {'a': [3, 4]}]
-        for key, value in d.items():  # {'a': [3, 4]}
-            if key not in merged_dict or dict_update:
-                if (
-                    key in merged_dict
-                    and isinstance(merged_dict[key], dict)
-                    and isinstance(value, dict)
-                ):
-                    _deep_merge_dicts(merged_dict[key], value)
-                else:
-                    merged_dict[key] = value  # {'a': [1, 2]}
-                    if isinstance(value, list):
-                        list_values[key] = True
-            elif dict_sequence:
-                sequence_counters[key] += 1
-                new_key = f"{key}{sequence_counters[key]}"
-                merged_dict[new_key] = value
-            else:
-                if not isinstance(merged_dict[key], list) or list_values.get(
-                    key, False
-                ):
-                    merged_dict[key] = [merged_dict[key]]
-                merged_dict[key].append(value)
-
-    return merged_dict
-
-
-def _merge_sequences(
-    iterables: list[list[Any]],
-    sort_list: bool,
-    custom_sort: Callable[[Any], Any] | None = None,
-) -> list[Any]:
-    """
-    Merges a list of lists into a single list, with options for sorting and
-    custom sorting logic.
-
-    Args:
-        iterables: A list of lists to merge.
-        sort_list: When True, sort the resulting list after merging.
-        custom_sort: An optional callable that defines custom sorting logic
-            for the merged list.
-
-    Returns:
-        The merged list.
-    """
-    merged_list = list(chain(*iterables))
-    if sort_list:
-        if custom_sort:
-            return sorted(merged_list, key=custom_sort)
-        else:
-            return sorted(merged_list, key=lambda x: (isinstance(x, str), x))
-    return merged_list
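By default duplicate dictionary keys are collected into lists; `overwrite=True` lets the later value win, and `dict_sequence=True` instead suffixes duplicates with a counter. A usage sketch against lionagi 0.16.2 (this module is deleted in 0.16.3):

```python
from lionagi.libs.nested.nmerge import nmerge

dicts = [{"a": 1, "b": 2}, {"a": 3}]

print(nmerge(dicts))                      # {'a': [1, 3], 'b': 2}   duplicates collected
print(nmerge(dicts, overwrite=True))      # {'a': 3, 'b': 2}        later value wins
print(nmerge(dicts, dict_sequence=True))  # {'a': 1, 'b': 2, 'a1': 3}

print(nmerge([[3, 1], [2]], sort_list=True))  # [1, 2, 3]
```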