lionherd_core-1.0.0a3-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. lionherd_core/__init__.py +84 -0
  2. lionherd_core/base/__init__.py +30 -0
  3. lionherd_core/base/_utils.py +295 -0
  4. lionherd_core/base/broadcaster.py +128 -0
  5. lionherd_core/base/element.py +300 -0
  6. lionherd_core/base/event.py +322 -0
  7. lionherd_core/base/eventbus.py +112 -0
  8. lionherd_core/base/flow.py +236 -0
  9. lionherd_core/base/graph.py +616 -0
  10. lionherd_core/base/node.py +212 -0
  11. lionherd_core/base/pile.py +811 -0
  12. lionherd_core/base/progression.py +261 -0
  13. lionherd_core/errors.py +104 -0
  14. lionherd_core/libs/__init__.py +2 -0
  15. lionherd_core/libs/concurrency/__init__.py +60 -0
  16. lionherd_core/libs/concurrency/_cancel.py +85 -0
  17. lionherd_core/libs/concurrency/_errors.py +80 -0
  18. lionherd_core/libs/concurrency/_patterns.py +238 -0
  19. lionherd_core/libs/concurrency/_primitives.py +253 -0
  20. lionherd_core/libs/concurrency/_priority_queue.py +135 -0
  21. lionherd_core/libs/concurrency/_resource_tracker.py +66 -0
  22. lionherd_core/libs/concurrency/_task.py +58 -0
  23. lionherd_core/libs/concurrency/_utils.py +61 -0
  24. lionherd_core/libs/schema_handlers/__init__.py +35 -0
  25. lionherd_core/libs/schema_handlers/_function_call_parser.py +122 -0
  26. lionherd_core/libs/schema_handlers/_minimal_yaml.py +88 -0
  27. lionherd_core/libs/schema_handlers/_schema_to_model.py +251 -0
  28. lionherd_core/libs/schema_handlers/_typescript.py +153 -0
  29. lionherd_core/libs/string_handlers/__init__.py +15 -0
  30. lionherd_core/libs/string_handlers/_extract_json.py +65 -0
  31. lionherd_core/libs/string_handlers/_fuzzy_json.py +103 -0
  32. lionherd_core/libs/string_handlers/_string_similarity.py +347 -0
  33. lionherd_core/libs/string_handlers/_to_num.py +63 -0
  34. lionherd_core/ln/__init__.py +45 -0
  35. lionherd_core/ln/_async_call.py +314 -0
  36. lionherd_core/ln/_fuzzy_match.py +166 -0
  37. lionherd_core/ln/_fuzzy_validate.py +151 -0
  38. lionherd_core/ln/_hash.py +141 -0
  39. lionherd_core/ln/_json_dump.py +347 -0
  40. lionherd_core/ln/_list_call.py +110 -0
  41. lionherd_core/ln/_to_dict.py +373 -0
  42. lionherd_core/ln/_to_list.py +190 -0
  43. lionherd_core/ln/_utils.py +156 -0
  44. lionherd_core/lndl/__init__.py +62 -0
  45. lionherd_core/lndl/errors.py +30 -0
  46. lionherd_core/lndl/fuzzy.py +321 -0
  47. lionherd_core/lndl/parser.py +427 -0
  48. lionherd_core/lndl/prompt.py +137 -0
  49. lionherd_core/lndl/resolver.py +323 -0
  50. lionherd_core/lndl/types.py +287 -0
  51. lionherd_core/protocols.py +181 -0
  52. lionherd_core/py.typed +0 -0
  53. lionherd_core/types/__init__.py +46 -0
  54. lionherd_core/types/_sentinel.py +131 -0
  55. lionherd_core/types/base.py +341 -0
  56. lionherd_core/types/operable.py +133 -0
  57. lionherd_core/types/spec.py +313 -0
  58. lionherd_core/types/spec_adapters/__init__.py +10 -0
  59. lionherd_core/types/spec_adapters/_protocol.py +125 -0
  60. lionherd_core/types/spec_adapters/pydantic_field.py +177 -0
  61. lionherd_core-1.0.0a3.dist-info/METADATA +502 -0
  62. lionherd_core-1.0.0a3.dist-info/RECORD +64 -0
  63. lionherd_core-1.0.0a3.dist-info/WHEEL +4 -0
  64. lionherd_core-1.0.0a3.dist-info/licenses/LICENSE +201 -0
lionherd_core/types/operable.py
@@ -0,0 +1,133 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, Literal
+
+ from lionherd_core.protocols import Allowable, Hashable, implements
+
+ from ._sentinel import MaybeUnset, Unset
+
+ if TYPE_CHECKING:
+     from .spec import Spec
+
+ __all__ = ("Operable",)
+
+
+ @implements(Hashable, Allowable)
+ @dataclass(frozen=True, slots=True, init=False)
+ class Operable:
+     """Ordered Spec collection for model generation. Validates uniqueness, no duplicates."""
+
+     __op_fields__: tuple[Spec, ...]
+     name: str | None
+
+     def __init__(
+         self,
+         specs: tuple[Spec, ...] | list[Spec] = (),
+         *,
+         name: str | None = None,
+     ):
+         """Init with specs. Raises: TypeError (non-Spec), ValueError (duplicate names)."""
+         # Import here to avoid circular import
+         from .spec import Spec
+
+         # Convert to tuple if list
+         if isinstance(specs, list):
+             specs = tuple(specs)
+
+         # Validate all items are Spec objects
+         for i, item in enumerate(specs):
+             if not isinstance(item, Spec):
+                 raise TypeError(
+                     f"All specs must be Spec objects, got {type(item).__name__} at index {i}"
+                 )
+
+         # Check for duplicate names
+         names = [s.name for s in specs if s.name is not None]
+         if len(names) != len(set(names)):
+             from collections import Counter
+
+             duplicates = [name for name, count in Counter(names).items() if count > 1]
+             raise ValueError(
+                 f"Duplicate field names found: {duplicates}. Each spec must have a unique name."
+             )
+
+         object.__setattr__(self, "__op_fields__", specs)
+         object.__setattr__(self, "name", name)
+
+     def allowed(self) -> set[str]:
+         """Get set of allowed field names from specs."""
+         return {i.name for i in self.__op_fields__} # type: ignore[misc]
+
+     def check_allowed(self, *args, as_boolean: bool = False):
+         """Check field names allowed. Args: field names, as_boolean. Raises ValueError if not allowed."""
+         if not set(args).issubset(self.allowed()):
+             if as_boolean:
+                 return False
+             raise ValueError(
+                 f"Some specified fields are not allowed: {set(args).difference(self.allowed())}"
+             )
+         return True
+
+     def get(self, key: str, /, default=Unset) -> MaybeUnset[Spec]:
+         """Get Spec by field name. Returns default if not found."""
+         if not self.check_allowed(key, as_boolean=True):
+             return default
+         for i in self.__op_fields__:
+             if i.name == key:
+                 return i
+
+     def get_specs(
+         self,
+         *,
+         include: set[str] | None = None,
+         exclude: set[str] | None = None,
+     ) -> tuple[Spec, ...]:
+         """Get filtered Specs. Args: include/exclude sets. Raises ValueError if both or invalid names."""
+         if include is not None and exclude is not None:
+             raise ValueError("Cannot specify both include and exclude")
+
+         if include:
+             if self.check_allowed(*include, as_boolean=True) is False:
+                 raise ValueError(
+                     "Some specified fields are not allowed: "
+                     f"{set(include).difference(self.allowed())}"
+                 )
+             return tuple(self.get(i) for i in include if self.get(i) is not Unset) # type: ignore[misc]
+
+         if exclude:
+             _discards = {self.get(i) for i in exclude if self.get(i) is not Unset}
+             return tuple(s for s in self.__op_fields__ if s not in _discards)
+
+         return self.__op_fields__
+
+     def create_model(
+         self,
+         adapter: Literal["pydantic"] = "pydantic",
+         model_name: str | None = None,
+         include: set[str] | None = None,
+         exclude: set[str] | None = None,
+         **kw,
+     ):
+         """Create framework model from specs. Args: adapter, model_name, include/exclude. Raises: ImportError, ValueError."""
+         match adapter:
+             case "pydantic":
+                 try:
+                     from .spec_adapters.pydantic_field import PydanticSpecAdapter
+                 except ImportError as e:
+                     raise ImportError(
+                         "PydanticSpecAdapter requires Pydantic. Install with: pip install pydantic"
+                     ) from e
+
+                 kws = {
+                     "model_name": model_name or self.name or "DynamicModel",
+                     "include": include,
+                     "exclude": exclude,
+                     **kw,
+                 }
+                 return PydanticSpecAdapter.create_model(self, **kws)
+             case _:
+                 raise ValueError(f"Unsupported adapter: {adapter}")
lionherd_core/types/spec.py
@@ -0,0 +1,313 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ import contextlib
+ import os
+ import threading
+ from collections import OrderedDict
+ from collections.abc import Callable
+ from dataclasses import dataclass
+ from typing import Annotated, Any, Self
+
+ from lionherd_core.libs.concurrency import is_coro_func
+ from lionherd_core.protocols import Hashable, implements
+
+ from ._sentinel import MaybeUndefined, Undefined, is_sentinel, not_sentinel
+ from .base import Enum, Meta
+
+ # Global cache for annotated types with bounded size
+ _MAX_CACHE_SIZE = int(os.environ.get("lionherd_FIELD_CACHE_SIZE", "10000"))
+ _annotated_cache: OrderedDict[tuple[type, tuple[Meta, ...]], type] = OrderedDict()
+ _cache_lock = threading.RLock() # Thread-safe access to cache
+
+
+ __all__ = ("CommonMeta", "Spec")
+
+
+ class CommonMeta(Enum):
+     """Common metadata keys: NAME, NULLABLE, LISTABLE, VALIDATOR, DEFAULT, DEFAULT_FACTORY."""
+
+     NAME = "name"
+     NULLABLE = "nullable"
+     LISTABLE = "listable"
+     VALIDATOR = "validator"
+     DEFAULT = "default"
+     DEFAULT_FACTORY = "default_factory"
+
+     @classmethod
+     def _validate_common_metas(cls, **kw):
+         """Validate metadata constraints. Uses ExceptionGroup for multiple errors."""
+         errors: list[Exception] = []
+
+         if kw.get("default") and kw.get("default_factory"):
+             errors.append(ValueError("Cannot provide both 'default' and 'default_factory'"))
+         if (_df := kw.get("default_factory")) and not callable(_df):
+             errors.append(ValueError("'default_factory' must be callable"))
+         if _val := kw.get("validator"):
+             _val = [_val] if not isinstance(_val, list) else _val
+             if not all(callable(v) for v in _val):
+                 errors.append(ValueError("Validators must be a list of functions or a function"))
+
+         if errors:
+             raise ExceptionGroup("Metadata validation failed", errors)
+
+     @classmethod
+     def prepare(
+         cls, *args: Meta, metadata: tuple[Meta, ...] | None = None, **kw: Any
+     ) -> tuple[Meta, ...]:
+         """Prepare metadata tuple from args/kw. Validates no duplicates, constraints."""
+         # Lazy import to avoid circular dependency
+         from ..ln._to_list import to_list
+
+         seen_keys = set()
+         metas = []
+
+         # Process existing metadata
+         if metadata:
+             for meta in metadata:
+                 if meta.key in seen_keys:
+                     raise ValueError(f"Duplicate metadata key: {meta.key}")
+                 seen_keys.add(meta.key)
+                 metas.append(meta)
+
+         # Process args
+         if args:
+             _args = to_list(args, flatten=True, flatten_tuple_set=True, dropna=True)
+             for meta in _args:
+                 if meta.key in seen_keys:
+                     raise ValueError(f"Duplicate metadata key: {meta.key}")
+                 seen_keys.add(meta.key)
+                 metas.append(meta)
+
+         # Process kwargs
+         for k, v in kw.items():
+             if k in seen_keys:
+                 raise ValueError(f"Duplicate metadata key: {k}")
+             seen_keys.add(k)
+             metas.append(Meta(k, v))
+
+         # Validate common metadata constraints
+         meta_dict = {m.key: m.value for m in metas}
+         cls._validate_common_metas(**meta_dict)
+
+         return tuple(metas)
+
+
+ @implements(Hashable)
+ @dataclass(frozen=True, slots=True, init=False)
+ class Spec:
+     """Framework-agnostic field spec: base_type + metadata. Build with Spec(type, name=..., nullable=...)."""
+
+     base_type: type
+     metadata: tuple[Meta, ...]
+
+     def __init__(
+         self,
+         base_type: type | None = None,
+         *args,
+         metadata: tuple[Meta, ...] | None = None,
+         **kw,
+     ) -> None:
+         """Init with type and metadata. Args: base_type, Meta objects, kw as Meta."""
+         metas = CommonMeta.prepare(*args, metadata=metadata, **kw)
+
+         if not_sentinel(base_type, True):
+             import types
+
+             is_valid_type = (
+                 isinstance(base_type, type)
+                 or hasattr(base_type, "__origin__")
+                 or isinstance(base_type, types.UnionType)
+             )
+             if not is_valid_type:
+                 raise ValueError(f"base_type must be a type or type annotation, got {base_type}")
+
+         # Check for async default factory and warn
+         if kw.get("default_factory") and is_coro_func(kw["default_factory"]):
+             import warnings
+
+             warnings.warn(
+                 "Async default factories are not yet fully supported by all adapters. "
+                 "Consider using sync factories for compatibility.",
+                 UserWarning,
+                 stacklevel=2,
+             )
+
+         object.__setattr__(self, "base_type", base_type)
+         object.__setattr__(self, "metadata", metas)
+
+     def __getitem__(self, key: str) -> Any:
+         """Get metadata by key. Raises KeyError if not found."""
+         for meta in self.metadata:
+             if meta.key == key:
+                 return meta.value
+         raise KeyError(f"Metadata key '{key}' undefined in Spec.")
+
+     def get(self, key: str, default: Any = Undefined) -> Any:
+         """Get metadata by key with default."""
+         with contextlib.suppress(KeyError):
+             return self[key]
+         return default
+
+     @property
+     def name(self) -> MaybeUndefined[str]:
+         """Get the field name from metadata."""
+         return self.get(CommonMeta.NAME.value)
+
+     @property
+     def is_nullable(self) -> bool:
+         """Check if field is nullable."""
+         return self.get(CommonMeta.NULLABLE.value) is True
+
+     @property
+     def is_listable(self) -> bool:
+         """Check if field is listable."""
+         return self.get(CommonMeta.LISTABLE.value) is True
+
+     @property
+     def default(self) -> MaybeUndefined[Any]:
+         """Get default value or factory."""
+         return self.get(
+             CommonMeta.DEFAULT.value,
+             self.get(CommonMeta.DEFAULT_FACTORY.value),
+         )
+
+     @property
+     def has_default_factory(self) -> bool:
+         """Check if this spec has a default factory."""
+         return _is_factory(self.get(CommonMeta.DEFAULT_FACTORY.value))[0]
+
+     @property
+     def has_async_default_factory(self) -> bool:
+         """Check if this spec has an async default factory."""
+         return _is_factory(self.get(CommonMeta.DEFAULT_FACTORY.value))[1]
+
+     def create_default_value(self) -> Any:
+         """Create default value (sync). Raises ValueError if no default or async factory."""
+         if self.default is Undefined:
+             raise ValueError("No default value or factory defined in Spec.")
+         if self.has_async_default_factory:
+             raise ValueError(
+                 "Default factory is asynchronous; cannot create default synchronously. "
+                 "Use 'await spec.acreate_default_value()' instead."
+             )
+         if self.has_default_factory:
+             return self.default() # type: ignore[operator]
+         return self.default
+
+     async def acreate_default_value(self) -> Any:
+         """Create default value (async). Handles both sync/async factories."""
+         if self.has_async_default_factory:
+             return await self.default() # type: ignore[operator]
+         return self.create_default_value()
+
+     def with_updates(self, **kw) -> Self:
+         """Create new Spec with updated metadata."""
+         _filtered = [meta for meta in self.metadata if meta.key not in kw]
+         for k, v in kw.items():
+             if not_sentinel(v):
+                 _filtered.append(Meta(k, v))
+         _metas = tuple(_filtered)
+         return type(self)(self.base_type, metadata=_metas)
+
+     def as_nullable(self) -> Self:
+         """Create nullable version."""
+         return self.with_updates(nullable=True)
+
+     def as_listable(self) -> Self:
+         """Create listable version."""
+         return self.with_updates(listable=True)
+
+     def with_default(self, default: Any) -> Self:
+         """Create spec with default value/factory. Callables treated as factories."""
+         if callable(default):
+             return self.with_updates(default_factory=default)
+         return self.with_updates(default=default)
+
+     def with_validator(self, validator: Callable[..., Any] | list[Callable[..., Any]]) -> Self:
+         """Create spec with validator(s)."""
+         return self.with_updates(validator=validator)
+
+     @property
+     def annotation(self) -> type[Any]:
+         """Plain type annotation: base_type + nullable/listable modifiers."""
+         if is_sentinel(self.base_type, none_as_sentinel=True):
+             return Any
+         t_ = self.base_type # type: ignore[valid-type]
+         if self.is_listable:
+             t_ = list[t_] # type: ignore[valid-type]
+         if self.is_nullable:
+             t_ = t_ | None # type: ignore[assignment]
+         return t_ # type: ignore[return-value]
+
+     def annotated(self) -> type[Any]:
+         """Create Annotated[type, metadata...] with LRU cache."""
+         # Check cache first with thread safety
+         cache_key = (self.base_type, self.metadata)
+
+         with _cache_lock:
+             if cache_key in _annotated_cache:
+                 # Move to end to mark as recently used
+                 _annotated_cache.move_to_end(cache_key)
+                 return _annotated_cache[cache_key]
+
+             # Handle nullable case - wrap in Optional-like union
+             actual_type = (
+                 Any if is_sentinel(self.base_type, none_as_sentinel=True) else self.base_type
+             )
+             current_metadata = self.metadata
+
+             if any(m.key == "nullable" and m.value for m in current_metadata):
+                 # Use union syntax for nullable
+                 actual_type = actual_type | None # type: ignore
+
+             if current_metadata:
+                 args = [actual_type, *list(current_metadata)]
+                 # Python 3.11-3.14 compatibility: try __class_getitem__ first (3.11-3.12),
+                 # fall back to direct subscripting approach for 3.13+
+                 try:
+                     result = Annotated.__class_getitem__(tuple(args)) # type: ignore
+                 except AttributeError:
+                     # Python 3.13+ removed __class_getitem__, use operator approach
+                     import operator
+
+                     result = operator.getitem(Annotated, tuple(args)) # type: ignore
+             else:
+                 result = actual_type # type: ignore[misc]
+
+             # Cache the result with LRU eviction
+             _annotated_cache[cache_key] = result # type: ignore[assignment]
+
+             # Evict oldest if cache is too large
+             while len(_annotated_cache) > _MAX_CACHE_SIZE:
+                 _annotated_cache.popitem(last=False) # Remove oldest
+
+             return result # type: ignore[return-value]
+
+     def metadict(
+         self, exclude: set[str] | None = None, exclude_common: bool = False
+     ) -> dict[str, Any]:
+         """Get metadata as dict. Args: exclude keys, exclude_common flag."""
+         if exclude is None:
+             exclude = set()
+         if exclude_common:
+             exclude = exclude | set(CommonMeta.allowed())
+         return {meta.key: meta.value for meta in self.metadata if meta.key not in exclude}
+
+
+ def _is_factory(obj: Any) -> tuple[bool, bool]:
+     """Check if object is a factory function.
+
+     Args:
+         obj: Object to check
+
+     Returns:
+         Tuple of (is_factory, is_async)
+     """
+     if not callable(obj):
+         return (False, False)
+     if is_coro_func(obj):
+         return (True, True)
+     return (True, False)
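Usage sketch (not part of the diff): chaining the Spec builders defined above. The expected results follow from the properties in this file, assuming the import path shown here.

from lionherd_core.types.spec import Spec

tags = Spec(str, name="tags").as_listable().as_nullable().with_default(list)

tags.annotation               # list[str] | None
tags.has_default_factory      # True -- callables passed to with_default become factories
tags.create_default_value()   # [] (the sync factory is called)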
lionherd_core/types/spec_adapters/__init__.py
@@ -0,0 +1,10 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from ._protocol import SpecAdapter
+ from .pydantic_field import PydanticSpecAdapter
+
+ __all__ = (
+     "PydanticSpecAdapter",
+     "SpecAdapter",
+ )
lionherd_core/types/spec_adapters/_protocol.py
@@ -0,0 +1,125 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from abc import ABC, abstractmethod
+ from typing import TYPE_CHECKING, Any, Generic, TypeAlias, TypeVar
+
+ if TYPE_CHECKING:
+     from lionherd_core.types.operable import Operable
+     from lionherd_core.types.spec import Spec
+
+ __all__ = ("SpecAdapter",)
+
+ M = TypeVar("M") # Model instance type
+ JSONLike: TypeAlias = dict[str, Any] | list[Any] | str | int | float | bool | None
+
+
+ class SpecAdapter(ABC, Generic[M]):
+     """Abstract adapter: Spec → framework-specific formats. Implement create_field, create_model, validate_model, dump_model."""
+
+     # ---- Abstract Methods ----
+
+     @classmethod
+     @abstractmethod
+     def create_field(cls, spec: "Spec") -> Any:
+         """Convert Spec to framework field (FieldInfo, Attribute, etc.)."""
+         ...
+
+     @classmethod
+     @abstractmethod
+     def create_model(
+         cls,
+         operable: "Operable",
+         model_name: str,
+         include: set[str] | None = None,
+         exclude: set[str] | None = None,
+         **kwargs: Any,
+     ) -> type[M]:
+         """Generate model class from Operable. Args: operable, model_name, include/exclude, framework kwargs."""
+         ...
+
+     @classmethod
+     @abstractmethod
+     def validate_model(cls, model_cls: type[M], data: dict) -> M:
+         """Validate dict → model instance. Framework-specific (Pydantic: model_validate, attrs/dataclasses: __init__)."""
+         ...
+
+     @classmethod
+     @abstractmethod
+     def dump_model(cls, instance: M) -> dict[str, Any]:
+         """Dump model → dict. Framework-specific (Pydantic: model_dump, attrs: asdict, dataclasses: asdict)."""
+         ...
+
+     @classmethod
+     def create_validator(cls, spec: "Spec") -> Any:
+         """Generate framework-specific validators from Spec metadata. Returns None if not supported."""
+         return None
+
+     # ---- Concrete Methods (Shared) ----
+
+     @classmethod
+     def parse_json(cls, text: str, fuzzy: bool = True) -> JSONLike:
+         """Extract/parse JSON from text. Args: text, fuzzy (markdown extraction)."""
+         from lionherd_core.libs.string_handlers import extract_json
+
+         data = extract_json(text, fuzzy_parse=fuzzy)
+
+         # Unwrap single-item lists/tuples
+         if isinstance(data, list | tuple) and len(data) == 1:
+             data = data[0]
+
+         return data
+
+     @classmethod
+     @abstractmethod
+     def fuzzy_match_fields(cls, data: dict, model_cls: type[M], strict: bool = False) -> dict:
+         """Match data keys to model fields (fuzzy). Framework-specific. Args: data, model_cls, strict."""
+         ...
+
+     @classmethod
+     def validate_response(
+         cls,
+         text: str,
+         model_cls: type[M],
+         strict: bool = False,
+         fuzzy_parse: bool = True,
+     ) -> M | None:
+         """Validate text → model. Pipeline: parse_json → fuzzy_match → validate. Returns None on fail (strict=False)."""
+         try:
+             # Step 1: Parse JSON
+             data = cls.parse_json(text, fuzzy=fuzzy_parse)
+
+             # Step 2: Fuzzy match fields
+             matched_data = cls.fuzzy_match_fields(data, model_cls, strict=strict)
+
+             # Step 3: Validate with framework-specific method
+             instance = cls.validate_model(model_cls, matched_data)
+
+             return instance
+
+         except (ValueError, TypeError, KeyError, AttributeError):
+             # Catch validation-related exceptions only
+             # ValueError: JSON/parsing errors, validation failures
+             # TypeError: Type mismatches during validation
+             # KeyError: Missing required fields
+             # AttributeError: Field access errors
+             if strict:
+                 raise
+             return None
+
+     @classmethod
+     def update_model(
+         cls,
+         instance: M,
+         updates: dict,
+         model_cls: type[M] | None = None,
+     ) -> M:
+         """Update model with new data. Merges existing + updates, returns new validated instance."""
+         model_cls = model_cls or type(instance) # type: ignore[assignment]
+
+         # Merge existing data with updates
+         current_data = cls.dump_model(instance)
+         current_data.update(updates)
+
+         # Validate merged data
+         return cls.validate_model(model_cls, current_data)
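For context (not part of the diff): a hypothetical minimal subclass showing which hooks SpecAdapter leaves abstract. This dataclass-backed adapter is illustrative only, is not the package's PydanticSpecAdapter, and its field mapping is deliberately simplified (defaults and validators are ignored).

import dataclasses

from lionherd_core.types.spec_adapters import SpecAdapter  # assumes package layout above


class DataclassSpecAdapter(SpecAdapter):
    @classmethod
    def create_field(cls, spec):
        # Map a Spec to a (name, type) pair for make_dataclass; defaults omitted for brevity.
        return (spec.name, spec.annotation)

    @classmethod
    def create_model(cls, operable, model_name, include=None, exclude=None, **kwargs):
        # Build a plain dataclass from the filtered specs.
        fields = [cls.create_field(s) for s in operable.get_specs(include=include, exclude=exclude)]
        return dataclasses.make_dataclass(model_name, fields)

    @classmethod
    def validate_model(cls, model_cls, data):
        return model_cls(**data)

    @classmethod
    def dump_model(cls, instance):
        return dataclasses.asdict(instance)

    @classmethod
    def fuzzy_match_fields(cls, data, model_cls, strict=False):
        # No real fuzzy matching in this sketch: keep only keys that are dataclass fields.
        names = {f.name for f in dataclasses.fields(model_cls)}
        return {k: v for k, v in data.items() if k in names}

With those five hooks filled in, the shared pipeline methods (parse_json, validate_response, update_model) work unchanged.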