lionagi 0.16.2__py3-none-any.whl → 0.16.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lionagi/adapters/_utils.py +0 -14
- lionagi/ln/__init__.py +4 -0
- lionagi/ln/fuzzy/__init__.py +4 -1
- lionagi/ln/fuzzy/_fuzzy_validate.py +109 -0
- lionagi/ln/fuzzy/_to_dict.py +388 -0
- lionagi/models/__init__.py +0 -2
- lionagi/operations/communicate/communicate.py +1 -1
- lionagi/operations/parse/parse.py +1 -1
- lionagi/protocols/generic/pile.py +1 -1
- lionagi/protocols/operatives/operative.py +2 -2
- lionagi/service/connections/match_endpoint.py +2 -10
- lionagi/service/connections/providers/types.py +1 -3
- lionagi/service/hooks/hook_event.py +1 -1
- lionagi/service/hooks/hook_registry.py +1 -1
- lionagi/service/rate_limited_processor.py +1 -1
- lionagi/utils.py +3 -335
- lionagi/version.py +1 -1
- {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/METADATA +3 -12
- {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/RECORD +21 -43
- lionagi/adapters/postgres_model_adapter.py +0 -131
- lionagi/libs/concurrency.py +0 -1
- lionagi/libs/nested/__init__.py +0 -3
- lionagi/libs/nested/flatten.py +0 -172
- lionagi/libs/nested/nfilter.py +0 -59
- lionagi/libs/nested/nget.py +0 -45
- lionagi/libs/nested/ninsert.py +0 -104
- lionagi/libs/nested/nmerge.py +0 -158
- lionagi/libs/nested/npop.py +0 -69
- lionagi/libs/nested/nset.py +0 -94
- lionagi/libs/nested/unflatten.py +0 -83
- lionagi/libs/nested/utils.py +0 -189
- lionagi/libs/parse.py +0 -31
- lionagi/libs/schema/json_schema.py +0 -231
- lionagi/libs/unstructured/__init__.py +0 -0
- lionagi/libs/unstructured/pdf_to_image.py +0 -45
- lionagi/libs/unstructured/read_image_to_base64.py +0 -33
- lionagi/libs/validate/fuzzy_match_keys.py +0 -7
- lionagi/libs/validate/fuzzy_validate_mapping.py +0 -144
- lionagi/libs/validate/string_similarity.py +0 -7
- lionagi/libs/validate/xml_parser.py +0 -203
- lionagi/models/note.py +0 -387
- lionagi/service/connections/providers/claude_code_.py +0 -299
- {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/WHEEL +0 -0
- {lionagi-0.16.2.dist-info → lionagi-0.16.3.dist-info}/licenses/LICENSE +0 -0
lionagi/adapters/_utils.py
CHANGED
@@ -10,17 +10,3 @@ def check_async_postgres_available():
             "This adapter requires postgres option to be installed. "
             'Please install them using `uv pip install "lionagi[postgres]"`.'
         )
-
-
-def check_postgres_available():
-    try:
-        from pydapter.model_adapters.postgres_model import PostgresModelAdapter
-        from sqlalchemy import String
-        from sqlalchemy.orm import DeclarativeBase
-
-        return True
-    except Exception:
-        return ImportError(
-            "This adapter requires postgres option to be installed. "
-            'Please install them using `uv pip install "lionagi[postgres]"`.'
-        )
lionagi/ln/__init__.py
CHANGED
@@ -37,8 +37,10 @@ from .fuzzy import (
     extract_json,
     fuzzy_json,
     fuzzy_match_keys,
+    fuzzy_validate_mapping,
     fuzzy_validate_pydantic,
     string_similarity,
+    to_dict,
 )
 from .types import is_sentinel, not_sentinel
 
@@ -80,4 +82,6 @@ __all__ = (
     "string_similarity",
     "is_sentinel",
     "not_sentinel",
+    "to_dict",
+    "fuzzy_validate_mapping",
 )
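
Both new symbols are re-exported at the top of `lionagi.ln`. A minimal import sketch based only on the exports added above (illustrative, not taken from the package documentation):

from lionagi.ln import fuzzy_validate_mapping, to_dict

payload = '{"name": "lion", "agi": true}'
as_dict = to_dict(payload)                                    # JSON string -> dict
validated = fuzzy_validate_mapping(payload, ["name", "agi"])  # dict with the expected keys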
lionagi/ln/fuzzy/__init__.py
CHANGED
@@ -1,10 +1,12 @@
 from ._extract_json import extract_json
 from ._fuzzy_json import fuzzy_json
 from ._fuzzy_match import FuzzyMatchKeysParams, fuzzy_match_keys
-from ._fuzzy_validate import fuzzy_validate_pydantic
+from ._fuzzy_validate import fuzzy_validate_mapping, fuzzy_validate_pydantic
 from ._string_similarity import SIMILARITY_TYPE, string_similarity
+from ._to_dict import to_dict
 
 __all__ = (
+    "to_dict",
     "fuzzy_json",
     "fuzzy_match_keys",
     "extract_json",
@@ -12,4 +14,5 @@ __all__ = (
     "SIMILARITY_TYPE",
     "fuzzy_validate_pydantic",
     "FuzzyMatchKeysParams",
+    "fuzzy_validate_mapping",
 )
lionagi/ln/fuzzy/_fuzzy_validate.py
CHANGED
@@ -1,9 +1,15 @@
+from collections.abc import Callable, Sequence
+from typing import Any, Literal
+
 from pydantic import BaseModel
 
 from lionagi._errors import ValidationError
 
+from ..types import KeysDict
 from ._extract_json import extract_json
 from ._fuzzy_match import FuzzyMatchKeysParams, fuzzy_match_keys
+from ._string_similarity import SIMILARITY_TYPE
+from ._to_dict import to_dict
 
 __all__ = ("fuzzy_validate_pydantic",)
 
@@ -44,3 +50,106 @@ def fuzzy_validate_pydantic(
         return model_type.model_validate(model_data)
     except Exception as e:
         raise ValidationError(f"Validation failed: {e}") from e
+
+
+def fuzzy_validate_mapping(
+    d: Any,
+    keys: Sequence[str] | KeysDict,
+    /,
+    *,
+    similarity_algo: (
+        SIMILARITY_TYPE | Callable[[str, str], float]
+    ) = "jaro_winkler",
+    similarity_threshold: float = 0.85,
+    fuzzy_match: bool = True,
+    handle_unmatched: Literal[
+        "ignore", "raise", "remove", "fill", "force"
+    ] = "ignore",
+    fill_value: Any = None,
+    fill_mapping: dict[str, Any] | None = None,
+    strict: bool = False,
+    suppress_conversion_errors: bool = False,
+) -> dict[str, Any]:
+    """
+    Validate and correct any input into a dictionary with expected keys.
+
+    Args:
+        d: Input to validate. Can be:
+            - Dictionary
+            - JSON string or markdown code block
+            - XML string
+            - Object with to_dict/model_dump method
+            - Any type convertible to dictionary
+        keys: List of expected keys or dictionary mapping keys to types.
+        similarity_algo: String similarity algorithm or custom function.
+        similarity_threshold: Minimum similarity score for fuzzy matching.
+        fuzzy_match: If True, use fuzzy matching for key correction.
+        handle_unmatched: How to handle unmatched keys:
+            - "ignore": Keep unmatched keys
+            - "raise": Raise error for unmatched keys
+            - "remove": Remove unmatched keys
+            - "fill": Fill missing keys with default values
+            - "force": Combine "fill" and "remove" behaviors
+        fill_value: Default value for filling unmatched keys.
+        fill_mapping: Dictionary mapping keys to default values.
+        strict: Raise error if any expected key is missing.
+        suppress_conversion_errors: Return empty dict on conversion errors.
+
+    Returns:
+        Validated and corrected dictionary.
+
+    Raises:
+        ValueError: If input cannot be converted or validation fails.
+        TypeError: If input types are invalid.
+    """
+    if d is None:
+        raise TypeError("Input cannot be None")
+
+    # Try converting to dictionary
+    try:
+        if isinstance(d, str):
+            # First try to_json for JSON strings and code blocks
+            try:
+                json_result = extract_json(
+                    d, fuzzy_parse=True, return_one_if_single=True
+                )
+                dict_input = (
+                    json_result[0]
+                    if isinstance(json_result, list)
+                    else json_result
+                )
+            except Exception:
+                dict_input = to_dict(
+                    d, str_type="json", fuzzy_parse=True, suppress=True
+                )
+        else:
+            dict_input = to_dict(
+                d, use_model_dump=True, fuzzy_parse=True, suppress=True
+            )
+
+        if not isinstance(dict_input, dict):
+            if suppress_conversion_errors:
+                dict_input = {}
+            else:
+                raise ValueError(
+                    f"Failed to convert input to dictionary: {type(dict_input)}"
+                )
+
+    except Exception as e:
+        if suppress_conversion_errors:
+            dict_input = {}
+        else:
+            raise ValueError(f"Failed to convert input to dictionary: {e}")
+
+    # Validate the dictionary
+    return fuzzy_match_keys(
+        dict_input,
+        keys,
+        similarity_algo=similarity_algo,
+        similarity_threshold=similarity_threshold,
+        fuzzy_match=fuzzy_match,
+        handle_unmatched=handle_unmatched,
+        fill_value=fill_value,
+        fill_mapping=fill_mapping,
+        strict=strict,
+    )
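
The new `fuzzy_validate_mapping` first coerces its input to a dict (via `extract_json`/`to_dict`) and then delegates key correction to `fuzzy_match_keys`. A hedged usage sketch; the exact corrected output depends on the similarity algorithm and is not taken from the package docs:

from lionagi.ln.fuzzy import fuzzy_validate_mapping

# A model response wrapped in a fenced code block, with one misspelled key.
raw = '```json\n{"naem": "lion", "age": 3}\n```'

corrected = fuzzy_validate_mapping(
    raw,
    ["name", "age"],            # expected keys
    similarity_threshold=0.8,
    handle_unmatched="remove",  # drop keys that cannot be matched
)
# With the default jaro_winkler algorithm, "naem" should be mapped back to
# "name", giving something like {"name": "lion", "age": 3}.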
lionagi/ln/fuzzy/_to_dict.py
ADDED
@@ -0,0 +1,388 @@
+from __future__ import annotations
+
+import contextlib
+import dataclasses
+import json
+from collections.abc import Callable, Iterable, Mapping, Sequence
+from enum import Enum as _Enum
+from typing import Any, Literal
+
+from ._fuzzy_json import fuzzy_json
+
+# ----------------------------
+# Helpers (small, tight, local)
+# ----------------------------
+
+
+def _is_na(obj: Any) -> bool:
+    """None / Pydantic undefined sentinels -> treat as NA."""
+    if obj is None:
+        return True
+    # Avoid importing pydantic types; match by typename to stay lightweight
+    tname = type(obj).__name__
+    return tname in {
+        "Undefined",
+        "UndefinedType",
+        "PydanticUndefined",
+        "PydanticUndefinedType",
+    }
+
+
+def _enum_class_to_dict(
+    enum_cls: type[_Enum], use_enum_values: bool
+) -> dict[str, Any]:
+    members = dict(enum_cls.__members__)  # cheap, stable
+    if use_enum_values:
+        return {k: v.value for k, v in members.items()}
+    return {k: v for k, v in members.items()}
+
+
+def _parse_str(
+    s: str,
+    *,
+    fuzzy_parse: bool,
+    str_type: Literal["json", "xml"] | None,
+    parser: Callable[[str], Any] | None,
+    **kwargs: Any,
+) -> Any:
+    """Parse str -> Python object. Keep imports local to avoid cold start overhead."""
+    if parser is not None:
+        return parser(s, **kwargs)
+
+    if str_type == "xml":
+        # xmltodict is optional; import only if needed
+        import xmltodict
+
+        return xmltodict.parse(s, **kwargs)
+
+    # JSON path
+    if fuzzy_parse:
+        # If the caller supplied a fuzzy parser in scope, use it; otherwise fallback.
+        # We intentionally do not import anything heavy here.
+        with contextlib.suppress(NameError):
+            return fuzzy_json(s, **kwargs)  # type: ignore[name-defined]
+    return json.loads(s, **kwargs)
+
+
+def _object_to_mapping_like(
+    obj: Any,
+    *,
+    use_model_dump: bool,
+    **kwargs: Any,
+) -> Mapping | dict | Any:
+    """
+    Convert 'custom' objects to mapping-like, if possible.
+    Order:
+      1) Pydantic v2 'model_dump' (duck-typed)
+      2) Common methods: to_dict, dict, to_json/json (parsed if string)
+      3) Dataclass
+      4) __dict__
+      5) dict(obj)
+    """
+    # 1) Pydantic v2
+    if use_model_dump and hasattr(obj, "model_dump"):
+        return obj.model_dump(**kwargs)
+
+    # 2) Common methods
+    for name in ("to_dict", "dict", "to_json", "json"):
+        if hasattr(obj, name):
+            res = getattr(obj, name)(**kwargs)
+            return json.loads(res) if isinstance(res, str) else res
+
+    # 3) Dataclass
+    if dataclasses.is_dataclass(obj):
+        # asdict is already recursive; keep it (fast enough & simple)
+        return dataclasses.asdict(obj)
+
+    # 4) __dict__
+    if hasattr(obj, "__dict__"):
+        return obj.__dict__
+
+    # 5) Try dict() fallback
+    return dict(obj)  # may raise -> handled by caller
+
+
+def _enumerate_iterable(it: Iterable) -> dict[int, Any]:
+    return {i: v for i, v in enumerate(it)}
+
+
+# ---------------------------------------
+# Recursive pre-processing (single pass)
+# ---------------------------------------
+
+
+def _preprocess_recursive(
+    obj: Any,
+    *,
+    depth: int,
+    max_depth: int,
+    recursive_custom_types: bool,
+    str_parse_opts: dict[str, Any],
+    use_model_dump: bool,
+) -> Any:
+    """
+    Recursively process nested structures:
+      - Parse strings (JSON/XML/custom parser)
+      - Recurse into dict/list/tuple/set/etc.
+      - If recursive_custom_types=True, convert custom objects to mapping-like then continue
+    Containers retain their original types (dict stays dict, list stays list, set stays set, etc.)
+    """
+    if depth >= max_depth:
+        return obj
+
+    # Fast paths by exact type where possible
+    t = type(obj)
+
+    # Strings: try to parse; on failure, keep as-is
+    if t is str:
+        try:
+            parsed = _parse_str(obj, **str_parse_opts)
+        except Exception:
+            return obj
+        return _preprocess_recursive(
+            parsed,
+            depth=depth + 1,
+            max_depth=max_depth,
+            recursive_custom_types=recursive_custom_types,
+            str_parse_opts=str_parse_opts,
+            use_model_dump=use_model_dump,
+        )
+
+    # Dict-like
+    if isinstance(obj, Mapping):
+        # Recurse only into values (keys kept as-is)
+        return {
+            k: _preprocess_recursive(
+                v,
+                depth=depth + 1,
+                max_depth=max_depth,
+                recursive_custom_types=recursive_custom_types,
+                str_parse_opts=str_parse_opts,
+                use_model_dump=use_model_dump,
+            )
+            for k, v in obj.items()
+        }
+
+    # Sequence/Set-like (but not str)
+    if isinstance(obj, (list, tuple, set, frozenset)):
+        items = [
+            _preprocess_recursive(
+                v,
+                depth=depth + 1,
+                max_depth=max_depth,
+                recursive_custom_types=recursive_custom_types,
+                str_parse_opts=str_parse_opts,
+                use_model_dump=use_model_dump,
+            )
+            for v in obj
+        ]
+        if t is list:
+            return items
+        if t is tuple:
+            return tuple(items)
+        if t is set:
+            return set(items)
+        if t is frozenset:
+            return frozenset(items)
+
+    # Enum *class* (rare in values, but preserve your original attempt)
+    if isinstance(obj, type) and issubclass(obj, _Enum):
+        try:
+            enum_map = _enum_class_to_dict(
+                obj,
+                use_enum_values=str_parse_opts.get("use_enum_values", True),
+            )
+            return _preprocess_recursive(
+                enum_map,
+                depth=depth + 1,
+                max_depth=max_depth,
+                recursive_custom_types=recursive_custom_types,
+                str_parse_opts=str_parse_opts,
+                use_model_dump=use_model_dump,
+            )
+        except Exception:
+            return obj
+
+    # Custom objects
+    if recursive_custom_types:
+        with contextlib.suppress(Exception):
+            mapped = _object_to_mapping_like(
+                obj, use_model_dump=use_model_dump
+            )
+            return _preprocess_recursive(
+                mapped,
+                depth=depth + 1,
+                max_depth=max_depth,
+                recursive_custom_types=recursive_custom_types,
+                str_parse_opts=str_parse_opts,
+                use_model_dump=use_model_dump,
+            )
+
+    return obj
+
+
+# ---------------------------------------
+# Top-level conversion (non-recursive)
+# ---------------------------------------
+
+
+def _convert_top_level_to_dict(
+    obj: Any,
+    *,
+    fuzzy_parse: bool,
+    str_type: Literal["json", "xml"] | None,
+    parser: Callable[[str], Any] | None,
+    use_model_dump: bool,
+    use_enum_values: bool,
+    **kwargs: Any,
+) -> dict[str, Any]:
+    """
+    Convert a *single* object to dict using the 'brute force' rules.
+    Mirrors your original order, with fixes & optimizations.
+    """
+    # Set -> {v: v}
+    if isinstance(obj, set):
+        return {v: v for v in obj}
+
+    # Enum class -> members mapping
+    if isinstance(obj, type) and issubclass(obj, _Enum):
+        return _enum_class_to_dict(obj, use_enum_values)
+
+    # Mapping -> copy to plain dict (preserve your copy semantics)
+    if isinstance(obj, Mapping):
+        return dict(obj)
+
+    # None / pydantic undefined -> {}
+    if _is_na(obj):
+        return {}
+
+    # str -> parse (and return *as parsed*, which may be list, dict, etc.)
+    if isinstance(obj, str):
+        return _parse_str(
+            obj,
+            fuzzy_parse=fuzzy_parse,
+            str_type=str_type,
+            parser=parser,
+            **kwargs,
+        )
+
+    # Try "custom" object conversions
+    # (Covers BaseModel via model_dump, dataclasses, __dict__, json-strings, etc.)
+    try:
+        # If it's *not* a Sequence (e.g., numbers, objects) we try object conversion first,
+        # faithfully following your previous "non-Sequence -> model path" behavior.
+        if not isinstance(obj, Sequence):
+            converted = _object_to_mapping_like(
+                obj, use_model_dump=use_model_dump, **kwargs
+            )
+            # If conversion returned a string, try to parse JSON to mapping; else pass-through
+            if isinstance(converted, str):
+                return _parse_str(
+                    converted,
+                    fuzzy_parse=fuzzy_parse,
+                    str_type="json",
+                    parser=None,
+                )
+            if isinstance(converted, Mapping):
+                return dict(converted)
+            # If it's a list/tuple/etc., enumerate (your original did that after the fact)
+            if isinstance(converted, Iterable) and not isinstance(
+                converted, (str, bytes, bytearray)
+            ):
+                return _enumerate_iterable(converted)
+            # Best effort final cast
+            return dict(converted)
+
+    except Exception:
+        # Fall through to other strategies
+        pass
+
+    # Iterable (list/tuple/namedtuple/frozenset/…): enumerate
+    if isinstance(obj, Iterable) and not isinstance(
+        obj, (str, bytes, bytearray)
+    ):
+        return _enumerate_iterable(obj)
+
+    # Dataclass fallback (reachable only if it wasn't caught above)
+    with contextlib.suppress(Exception):
+        if dataclasses.is_dataclass(obj):
+            return dataclasses.asdict(obj)
+
+    # Last-ditch attempt
+    return dict(obj)  # may raise, handled by top-level try/except
+
+
+# ---------------
+# Public function
+# ---------------
+
+
+def to_dict(
+    input_: Any,
+    /,
+    *,
+    use_model_dump: bool = True,
+    fuzzy_parse: bool = False,
+    suppress: bool = False,
+    str_type: Literal["json", "xml"] | None = "json",
+    parser: Callable[[str], Any] | None = None,
+    recursive: bool = False,
+    max_recursive_depth: int | None = None,
+    recursive_python_only: bool = True,
+    use_enum_values: bool = False,
+    **kwargs: Any,
+) -> dict[str, Any]:
+    """
+    Convert various input types to a dictionary, with optional recursive processing.
+    Semantics preserved from original implementation.
+    """
+    try:
+        # Clamp recursion depth (match your constraints)
+        if not isinstance(max_recursive_depth, int):
+            max_depth = 5
+        else:
+            if max_recursive_depth < 0:
+                raise ValueError(
+                    "max_recursive_depth must be a non-negative integer"
+                )
+            if max_recursive_depth > 10:
+                raise ValueError(
+                    "max_recursive_depth must be less than or equal to 10"
+                )
+            max_depth = max_recursive_depth
+
+        # Prepare one small dict to avoid repeated arg passing and lookups
+        str_parse_opts = {
+            "fuzzy_parse": fuzzy_parse,
+            "str_type": str_type,
+            "parser": parser,
+            "use_enum_values": use_enum_values,  # threaded for enum class in recursion
+            **kwargs,
+        }
+
+        obj = input_
+        if recursive:
+            obj = _preprocess_recursive(
+                obj,
+                depth=0,
+                max_depth=max_depth,
+                recursive_custom_types=not recursive_python_only,
+                str_parse_opts=str_parse_opts,
+                use_model_dump=use_model_dump,
+            )
+
+        # Final top-level conversion
+        return _convert_top_level_to_dict(
+            obj,
+            fuzzy_parse=fuzzy_parse,
+            str_type=str_type,
+            parser=parser,
+            use_model_dump=use_model_dump,
+            use_enum_values=use_enum_values,
+            **kwargs,
+        )

+    except Exception as e:
+        if suppress or input_ == "":
+            return {}
+        raise e
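
The behaviour of the new `to_dict` can be summarised in a few calls. A rough sketch inferred from the code above (`Point` is a hypothetical example type, not part of lionagi):

from dataclasses import dataclass

from lionagi.ln import to_dict


@dataclass
class Point:  # hypothetical example type
    x: int
    y: int


to_dict(Point(1, 2))                        # dataclass -> {'x': 1, 'y': 2} via dataclasses.asdict
to_dict('{"a": {"b": 1}}')                  # JSON string -> parsed dict
to_dict(None)                               # None / pydantic-undefined sentinels -> {}
to_dict("not json", suppress=True)          # conversion errors swallowed -> {}
to_dict({"k": '{"n": 1}'}, recursive=True)  # opt in to recursive pre-processing of nested values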
lionagi/models/__init__.py
CHANGED
@@ -5,7 +5,6 @@
 from .field_model import FieldModel
 from .hashable_model import HashableModel
 from .model_params import ModelParams
-from .note import Note
 from .operable_model import OperableModel
 from .schema_model import SchemaModel
 
@@ -13,7 +12,6 @@ __all__ = (
     "FieldModel",
     "ModelParams",
     "OperableModel",
-    "Note",
     "SchemaModel",
     "HashableModel",
 )
lionagi/operations/communicate/communicate.py
CHANGED
@@ -5,7 +5,7 @@
 import logging
 from typing import TYPE_CHECKING
 
-from lionagi.
+from lionagi.ln.fuzzy._fuzzy_validate import fuzzy_validate_mapping
 from lionagi.utils import UNDEFINED
 
 if TYPE_CHECKING:
lionagi/operations/parse/parse.py
CHANGED
@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any, Literal
 
 from pydantic import BaseModel
 
-from lionagi.
+from lionagi.ln.fuzzy._fuzzy_validate import fuzzy_validate_mapping
 from lionagi.protocols.types import Operative
 from lionagi.utils import breakdown_pydantic_annotation
 
lionagi/protocols/generic/pile.py
CHANGED
@@ -25,7 +25,7 @@ from pydapter import Adaptable, AsyncAdaptable
 from typing_extensions import Self, deprecated, override
 
 from lionagi._errors import ItemExistsError, ItemNotFoundError, ValidationError
-from lionagi.
+from lionagi.ln.concurrency import Lock as ConcurrencyLock
 from lionagi.utils import (
     UNDEFINED,
     is_same_dtype,
lionagi/protocols/operatives/operative.py
CHANGED
@@ -2,12 +2,12 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from typing import Any
+from typing import Any
 
 from pydantic import BaseModel
 from pydantic.fields import FieldInfo
 
-from lionagi.
+from lionagi.ln.fuzzy._fuzzy_match import fuzzy_match_keys
 from lionagi.models import FieldModel, ModelParams, OperableModel
 from lionagi.utils import UNDEFINED, to_json
 
lionagi/service/connections/match_endpoint.py
CHANGED
@@ -57,17 +57,9 @@ def match_endpoint(
 
         return NvidiaNimChatEndpoint(None, **kwargs)
     if provider == "claude_code":
-
-        from .providers.claude_code_cli import ClaudeCodeCLIEndpoint
+        from .providers.claude_code_cli import ClaudeCodeCLIEndpoint
 
-
-
-        if "query" in endpoint or "code" in endpoint:
-            from lionagi.service.connections.providers.claude_code_ import (
-                ClaudeCodeEndpoint,
-            )
-
-            return ClaudeCodeEndpoint(None, **kwargs)
+        return ClaudeCodeCLIEndpoint(None, **kwargs)
 
     from .providers.oai_ import OpenaiChatEndpoint
 
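
With `ClaudeCodeEndpoint` removed, every `claude_code` request is now routed to the CLI endpoint regardless of the endpoint name. A hedged sketch of the effect; the `match_endpoint` signature and any required kwargs are inferred from the hunk above, not documented here:

from lionagi.service.connections.match_endpoint import match_endpoint
from lionagi.service.connections.providers.types import ClaudeCodeCLIEndpoint

# On 0.16.2 an endpoint name containing "query" or "code" returned ClaudeCodeEndpoint;
# on 0.16.3 the CLI endpoint is returned in every case (hypothetical call).
ep = match_endpoint(provider="claude_code", endpoint="query")
assert isinstance(ep, ClaudeCodeCLIEndpoint)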
lionagi/service/connections/providers/types.py
CHANGED
@@ -1,6 +1,5 @@
 from .anthropic_ import AnthropicMessagesEndpoint
-from .
-from .claude_code_cli import ClaudeCodeCLIEndpoint
+from .claude_code_cli import ClaudeCodeCLIEndpoint, ClaudeCodeRequest
 from .exa_ import ExaSearchEndpoint, ExaSearchRequest
 from .oai_ import (
     GroqChatEndpoint,
@@ -14,7 +13,6 @@ from .perplexity_ import PerplexityChatEndpoint, PerplexityChatRequest
 
 __all__ = (
     "AnthropicMessagesEndpoint",
-    "ClaudeCodeEndpoint",
     "ClaudeCodeRequest",
     "ClaudeCodeCLIEndpoint",
     "ExaSearchEndpoint",
lionagi/service/hooks/hook_event.py
CHANGED
@@ -8,7 +8,7 @@ from typing import Any
 import anyio
 from pydantic import Field, PrivateAttr
 
-from lionagi.
+from lionagi.ln.concurrency import fail_after, get_cancelled_exc_class
 from lionagi.protocols.types import Event, EventStatus
 
 from ._types import AssosiatedEventInfo, HookEventTypes
lionagi/service/hooks/hook_registry.py
CHANGED
@@ -5,7 +5,7 @@ from __future__ import annotations
 
 from typing import Any, TypeVar
 
-from lionagi.
+from lionagi.ln.concurrency import get_cancelled_exc_class
 from lionagi.protocols.types import Event, EventStatus
 from lionagi.utils import UNDEFINED
 
lionagi/service/rate_limited_processor.py
CHANGED
@@ -8,7 +8,7 @@ from typing import Any
 
 from typing_extensions import Self, override
 
-from lionagi.
+from lionagi.ln.concurrency import CapacityLimiter, Lock, move_on_after
 from lionagi.protocols.types import Executor, Processor
 
 from .connections.api_calling import APICalling