krons 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kronos/__init__.py +0 -0
- kronos/core/__init__.py +145 -0
- kronos/core/broadcaster.py +116 -0
- kronos/core/element.py +225 -0
- kronos/core/event.py +316 -0
- kronos/core/eventbus.py +116 -0
- kronos/core/flow.py +356 -0
- kronos/core/graph.py +442 -0
- kronos/core/node.py +982 -0
- kronos/core/pile.py +575 -0
- kronos/core/processor.py +494 -0
- kronos/core/progression.py +296 -0
- kronos/enforcement/__init__.py +57 -0
- kronos/enforcement/common/__init__.py +34 -0
- kronos/enforcement/common/boolean.py +85 -0
- kronos/enforcement/common/choice.py +97 -0
- kronos/enforcement/common/mapping.py +118 -0
- kronos/enforcement/common/model.py +102 -0
- kronos/enforcement/common/number.py +98 -0
- kronos/enforcement/common/string.py +140 -0
- kronos/enforcement/context.py +129 -0
- kronos/enforcement/policy.py +80 -0
- kronos/enforcement/registry.py +153 -0
- kronos/enforcement/rule.py +312 -0
- kronos/enforcement/service.py +370 -0
- kronos/enforcement/validator.py +198 -0
- kronos/errors.py +146 -0
- kronos/operations/__init__.py +32 -0
- kronos/operations/builder.py +228 -0
- kronos/operations/flow.py +398 -0
- kronos/operations/node.py +101 -0
- kronos/operations/registry.py +92 -0
- kronos/protocols.py +414 -0
- kronos/py.typed +0 -0
- kronos/services/__init__.py +81 -0
- kronos/services/backend.py +286 -0
- kronos/services/endpoint.py +608 -0
- kronos/services/hook.py +471 -0
- kronos/services/imodel.py +465 -0
- kronos/services/registry.py +115 -0
- kronos/services/utilities/__init__.py +36 -0
- kronos/services/utilities/header_factory.py +87 -0
- kronos/services/utilities/rate_limited_executor.py +271 -0
- kronos/services/utilities/rate_limiter.py +180 -0
- kronos/services/utilities/resilience.py +414 -0
- kronos/session/__init__.py +41 -0
- kronos/session/exchange.py +258 -0
- kronos/session/message.py +60 -0
- kronos/session/session.py +411 -0
- kronos/specs/__init__.py +25 -0
- kronos/specs/adapters/__init__.py +0 -0
- kronos/specs/adapters/_utils.py +45 -0
- kronos/specs/adapters/dataclass_field.py +246 -0
- kronos/specs/adapters/factory.py +56 -0
- kronos/specs/adapters/pydantic_adapter.py +309 -0
- kronos/specs/adapters/sql_ddl.py +946 -0
- kronos/specs/catalog/__init__.py +36 -0
- kronos/specs/catalog/_audit.py +39 -0
- kronos/specs/catalog/_common.py +43 -0
- kronos/specs/catalog/_content.py +59 -0
- kronos/specs/catalog/_enforcement.py +70 -0
- kronos/specs/factory.py +120 -0
- kronos/specs/operable.py +314 -0
- kronos/specs/phrase.py +405 -0
- kronos/specs/protocol.py +140 -0
- kronos/specs/spec.py +506 -0
- kronos/types/__init__.py +60 -0
- kronos/types/_sentinel.py +311 -0
- kronos/types/base.py +369 -0
- kronos/types/db_types.py +260 -0
- kronos/types/identity.py +66 -0
- kronos/utils/__init__.py +40 -0
- kronos/utils/_hash.py +234 -0
- kronos/utils/_json_dump.py +392 -0
- kronos/utils/_lazy_init.py +63 -0
- kronos/utils/_to_list.py +165 -0
- kronos/utils/_to_num.py +85 -0
- kronos/utils/_utils.py +375 -0
- kronos/utils/concurrency/__init__.py +205 -0
- kronos/utils/concurrency/_async_call.py +333 -0
- kronos/utils/concurrency/_cancel.py +122 -0
- kronos/utils/concurrency/_errors.py +96 -0
- kronos/utils/concurrency/_patterns.py +363 -0
- kronos/utils/concurrency/_primitives.py +328 -0
- kronos/utils/concurrency/_priority_queue.py +135 -0
- kronos/utils/concurrency/_resource_tracker.py +110 -0
- kronos/utils/concurrency/_run_async.py +67 -0
- kronos/utils/concurrency/_task.py +95 -0
- kronos/utils/concurrency/_utils.py +79 -0
- kronos/utils/fuzzy/__init__.py +14 -0
- kronos/utils/fuzzy/_extract_json.py +90 -0
- kronos/utils/fuzzy/_fuzzy_json.py +288 -0
- kronos/utils/fuzzy/_fuzzy_match.py +149 -0
- kronos/utils/fuzzy/_string_similarity.py +187 -0
- kronos/utils/fuzzy/_to_dict.py +396 -0
- kronos/utils/sql/__init__.py +13 -0
- kronos/utils/sql/_sql_validation.py +142 -0
- krons-0.1.0.dist-info/METADATA +70 -0
- krons-0.1.0.dist-info/RECORD +101 -0
- krons-0.1.0.dist-info/WHEEL +4 -0
- krons-0.1.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
"""DataClass SpecAdapter: Spec <-> dataclass fields/Params/DataClass.
|
|
5
|
+
|
|
6
|
+
Supports bidirectional transformation for Python dataclasses:
|
|
7
|
+
- Frozen (Params): Immutable, slots=True, init=False
|
|
8
|
+
- Mutable (DataClass): Standard dataclass with slots=True
|
|
9
|
+
- Field validators via __field_validators__ mechanism
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
from dataclasses import MISSING as DATACLASS_MISSING
|
|
15
|
+
from dataclasses import dataclass
|
|
16
|
+
from dataclasses import field as dc_field
|
|
17
|
+
from dataclasses import fields
|
|
18
|
+
from typing import TYPE_CHECKING, Any
|
|
19
|
+
|
|
20
|
+
from kronos.types._sentinel import Unset, UnsetType, is_sentinel
|
|
21
|
+
|
|
22
|
+
from ..protocol import SpecAdapter
|
|
23
|
+
from ..spec import Spec
|
|
24
|
+
from ._utils import resolve_annotation_to_base_types
|
|
25
|
+
|
|
26
|
+
if TYPE_CHECKING:
|
|
27
|
+
from kronos.types.base import DataClass, ModelConfig, Params
|
|
28
|
+
|
|
29
|
+
from ..operable import Operable
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
__all__ = ("DataClassSpecAdapter",)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _field_to_spec(field_name: str, field_obj: Any, annotation: Any) -> Spec:
    """Build a Spec from one dataclass field, carrying over its default (or factory).

    Args:
        field_name: Name of the dataclass field.
        field_obj: The ``dataclasses.Field`` object for that field.
        annotation: The field's type annotation, resolved to Spec base metas.

    Returns:
        A Spec named after the field, with any default/default_factory attached.
    """
    resolved_metas = resolve_annotation_to_base_types(annotation)
    result = Spec(name=field_name, **resolved_metas)

    # dataclasses guarantee at most one of default / default_factory is set;
    # a plain default is checked first, mirroring the Field object's own layout.
    if field_obj.default is not DATACLASS_MISSING:
        return result.with_default(field_obj.default)
    if field_obj.default_factory is not DATACLASS_MISSING:
        return result.with_default(field_obj.default_factory)
    return result
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _make_validator_method(validators: dict[str, list[Any]], is_frozen: bool) -> Any:
|
|
51
|
+
"""Create _validate method that runs field validators. Uses object.__setattr__ for frozen classes."""
|
|
52
|
+
|
|
53
|
+
def _validate_with_field_validators(self) -> None:
|
|
54
|
+
"""Validate instance including field validators."""
|
|
55
|
+
super(type(self), self)._validate()
|
|
56
|
+
|
|
57
|
+
field_validators = getattr(type(self), "__field_validators__", {})
|
|
58
|
+
errors: list[Exception] = []
|
|
59
|
+
|
|
60
|
+
for fname, validator_list in field_validators.items():
|
|
61
|
+
value = getattr(self, fname, None)
|
|
62
|
+
for validator in validator_list:
|
|
63
|
+
try:
|
|
64
|
+
result = validator(value)
|
|
65
|
+
if result is not None and result is not value:
|
|
66
|
+
if is_frozen:
|
|
67
|
+
object.__setattr__(self, fname, result)
|
|
68
|
+
else:
|
|
69
|
+
setattr(self, fname, result)
|
|
70
|
+
except Exception as e:
|
|
71
|
+
errors.append(ValueError(f"Validation failed for '{fname}': {e}"))
|
|
72
|
+
|
|
73
|
+
if errors:
|
|
74
|
+
raise ExceptionGroup(f"Field validation failed for {type(self).__name__}", errors)
|
|
75
|
+
|
|
76
|
+
return _validate_with_field_validators
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class DataClassSpecAdapter(SpecAdapter[dict[str, Any]]):
    """DataClass/Params adapter: Spec → dataclass fields, Operable → DataClass/Params.

    Supports both frozen (Params) and mutable (DataClass) targets with
    optional field validators.

    Usage:
        op = Operable([
            Spec(str, name="name", validator=validate_name),
            Spec(int, name="age", default=0),
        ])
        Person = DataClassSpecAdapter.compose_structure(op, "Person", frozen=True)
        p = Person(name="Alice", age=30)
    """

    @classmethod
    def create_field(cls, spec: Spec) -> dict[str, Any]:
        """Convert Spec to dataclass field kwargs (default or default_factory).

        Precedence: explicit default_factory > plain default > implicit None
        for nullable fields. A spec with none of these produces an empty dict,
        i.e. a required field.
        """
        field_kwargs: dict[str, Any] = {}

        default_val = spec.get("default")
        default_factory = spec.get("default_factory")

        if not is_sentinel(default_factory, {"none"}):
            field_kwargs["default_factory"] = default_factory
        elif not is_sentinel(default_val, {"none"}):
            field_kwargs["default"] = default_val
        elif spec.is_nullable:
            # Nullable without an explicit default: fall back to None.
            field_kwargs["default"] = None

        return field_kwargs

    @classmethod
    def create_field_validator(cls, spec: Spec) -> dict[str, list[Any]] | None:
        """Extract validators from Spec. Returns {field_name: [validators]} or None.

        A single callable is normalized to a one-element list.
        """
        validator = spec.get("validator")
        if is_sentinel(validator):
            return None

        # Unnamed specs fall back to the literal key "field".
        field_name = spec.name or "field"
        validators = validator if isinstance(validator, list) else [validator]

        return {field_name: validators}

    @classmethod
    def compose_structure(
        cls,
        op: Operable,
        name: str,
        /,
        *,
        include: set[str] | UnsetType = Unset,
        exclude: set[str] | UnsetType = Unset,
        frozen: bool = True,
        base_type: type | None = None,
        doc: str | None = None,
        model_config: ModelConfig | None = None,
        **kwargs: Any,
    ) -> type[Params] | type[DataClass]:
        """Generate DataClass/Params subclass from Operable.

        Args:
            op: Operable containing Specs
            name: Class name
            include/exclude: Field name filters
            frozen: True=Params (immutable), False=DataClass (mutable)
            base_type: Custom base class (Params/DataClass subclass)
            doc: Optional docstring
            model_config: ModelConfig instance for sentinel/validation behavior
            **kwargs: Accepted but currently unused (reserved for adapters
                with extra options).

        Returns:
            Dynamically created dataclass with validators wired in
        """
        # Imported here to avoid a circular import at module load time.
        from kronos.types.base import DataClass, Params

        use_specs = op.get_specs(include=include, exclude=exclude)

        base = base_type if base_type is not None else (Params if frozen else DataClass)

        annotations: dict[str, type] = {}
        class_attrs: dict[str, Any] = {}
        validators: dict[str, list[Any]] = {}

        # Specs without any default must precede defaulted ones, because
        # dataclasses forbid a non-default field after a default field.
        required_specs = []
        optional_specs = []

        for spec in use_specs:
            if not spec.name:
                # Anonymous specs cannot become named fields; skip them.
                continue
            field_kwargs = cls.create_field(spec)
            if "default" in field_kwargs or "default_factory" in field_kwargs:
                optional_specs.append((spec, field_kwargs))
            else:
                required_specs.append((spec, field_kwargs))

        for spec, field_kwargs in required_specs + optional_specs:
            field_name = spec.name
            annotations[field_name] = spec.annotation

            if "default_factory" in field_kwargs:
                class_attrs[field_name] = dc_field(default_factory=field_kwargs["default_factory"])
            elif "default" in field_kwargs:
                class_attrs[field_name] = field_kwargs["default"]

            if v := cls.create_field_validator(spec):
                validators.update(v)

        class_dict: dict[str, Any] = {
            "__annotations__": annotations,
            # NOTE(review): __module__ is set to the Operable's name, which is
            # not a real importable module path — presumably provenance only;
            # confirm nothing relies on importing the class by __module__.
            "__module__": op.name or "__dynamic__",
            **class_attrs,
        }

        if doc:
            class_dict["__doc__"] = doc

        if model_config is not None:
            class_dict["_config"] = model_config

        if validators:
            # Validators are exposed on the class so the generated _validate
            # method can rediscover them via getattr(type(self), ...).
            class_dict["__field_validators__"] = validators
            class_dict["_validate"] = _make_validator_method(validators, frozen)

        new_cls = type(name, (base,), class_dict)

        # dataclass(slots=True) returns a NEW class object (slots cannot be
        # added in place); class_dict entries are carried over by reference.
        if frozen:
            new_cls = dataclass(frozen=True, slots=True, init=False)(new_cls)
        else:
            new_cls = dataclass(slots=True)(new_cls)

        return new_cls

    @classmethod
    def validate_instance(
        cls, structure: type[Params] | type[DataClass], data: dict, /
    ) -> Params | DataClass:
        """Create DataClass/Params instance from dict data.

        Delegates all validation to the structure's own constructor.
        """
        return structure(**data)

    @classmethod
    def dump_instance(cls, instance: Params | DataClass) -> dict[str, Any]:
        """Dump DataClass/Params instance to dict via to_dict()."""
        return instance.to_dict()

    @classmethod
    def extract_specs(cls, structure: type[Params] | type[DataClass]) -> tuple[Spec, ...]:
        """Extract Specs from DataClass/Params, preserving defaults and type modifiers.

        Underscore-prefixed fields (internal state such as ``_config``) are
        skipped.

        Raises:
            TypeError: If structure is not a DataClass or Params subclass
        """
        # Local import mirrors compose_structure (avoids circular import).
        from kronos.types.base import DataClass, Params

        if not isinstance(structure, type) or not issubclass(structure, (DataClass, Params)):
            raise TypeError(
                f"structure must be a DataClass or Params subclass, got {type(structure)}"
            )

        specs: list[Spec] = []
        for f in fields(structure):
            if f.name.startswith("_"):
                continue

            # NOTE(review): __annotations__ only holds the class's OWN
            # annotations, so fields inherited from a base class fall back to
            # Any here even though fields() includes them — confirm whether
            # inherited fields should keep their declared annotation (f.type).
            annotation = structure.__annotations__.get(f.name, Any)
            spec = _field_to_spec(f.name, f, annotation)
            specs.append(spec)

        return tuple(specs)
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import functools
|
|
4
|
+
from typing import TYPE_CHECKING, Literal
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from ..protocol import SpecAdapter
|
|
8
|
+
|
|
9
|
+
# Supported adapter types
|
|
10
|
+
AdapterType = Literal["pydantic", "sql", "dataclass"]
|
|
11
|
+
|
|
12
|
+
__all__ = (
|
|
13
|
+
"get_adapter",
|
|
14
|
+
"AdapterType",
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@functools.cache
def get_adapter(adapter_name: str) -> type[SpecAdapter]:
    """Resolve an adapter class from its registry name (memoized).

    The import of each adapter module is deferred until first request so that
    optional dependencies (e.g. Pydantic) are only required when actually used.
    ``functools.cache`` memoizes successful lookups; failed lookups raise and
    are therefore retried on the next call.

    Args:
        adapter_name: Adapter identifier ("pydantic", "sql", "dataclass",
            future: "rust")

    Returns:
        The adapter class itself (never an instance).

    Raises:
        ValueError: If adapter not supported
        ImportError: If adapter dependencies not installed
    """
    if adapter_name == "pydantic":
        try:
            from .pydantic_adapter import PydanticSpecAdapter
        except ImportError as e:
            raise ImportError(
                "PydanticSpecAdapter requires Pydantic. Install with: pip install pydantic"
            ) from e
        return PydanticSpecAdapter

    if adapter_name == "sql":
        from .sql_ddl import SQLSpecAdapter

        return SQLSpecAdapter

    if adapter_name == "dataclass":
        from .dataclass_field import DataClassSpecAdapter

        return DataClassSpecAdapter

    # Future adapters (e.g. "rust") plug in with another branch here.
    raise ValueError(f"Unsupported adapter: {adapter_name}")
|
|
@@ -0,0 +1,309 @@
|
|
|
1
|
+
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
"""Pydantic SpecAdapter: Spec <-> Pydantic FieldInfo/BaseModel.
|
|
5
|
+
|
|
6
|
+
Supports bidirectional transformation with:
|
|
7
|
+
- Field validators (field_validator decorator)
|
|
8
|
+
- Constraints (gt, ge, lt, le, min_length, max_length, pattern, etc.)
|
|
9
|
+
- Rich metadata (aliases, descriptions, examples, json_schema_extra)
|
|
10
|
+
- Type modifiers (nullable, listable)
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
import functools
|
|
16
|
+
from typing import TYPE_CHECKING, Annotated, Any
|
|
17
|
+
|
|
18
|
+
from pydantic import BaseModel, create_model, field_validator
|
|
19
|
+
from pydantic.fields import FieldInfo
|
|
20
|
+
from pydantic_core import PydanticUndefined
|
|
21
|
+
from pydantic_core._pydantic_core import PydanticUndefinedType
|
|
22
|
+
|
|
23
|
+
from kronos.specs.protocol import SpecAdapter
|
|
24
|
+
from kronos.specs.spec import Spec
|
|
25
|
+
from kronos.types._sentinel import Unset, UnsetType, is_sentinel, is_unset, not_sentinel
|
|
26
|
+
from kronos.types.db_types import FKMeta, VectorMeta
|
|
27
|
+
|
|
28
|
+
from ._utils import resolve_annotation_to_base_types
|
|
29
|
+
|
|
30
|
+
if TYPE_CHECKING:
|
|
31
|
+
from kronos.specs.operable import Operable
|
|
32
|
+
|
|
33
|
+
__all__ = ("PydanticSpecAdapter",)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# ---------------------------------------------------------------------------
|
|
37
|
+
# Constants
|
|
38
|
+
# ---------------------------------------------------------------------------
|
|
39
|
+
|
|
40
|
+
_FIELD_INFO_ATTRS = frozenset(
|
|
41
|
+
{
|
|
42
|
+
"alias",
|
|
43
|
+
"validation_alias",
|
|
44
|
+
"serialization_alias",
|
|
45
|
+
"title",
|
|
46
|
+
"description",
|
|
47
|
+
"examples",
|
|
48
|
+
"deprecated",
|
|
49
|
+
"frozen",
|
|
50
|
+
"json_schema_extra",
|
|
51
|
+
"discriminator",
|
|
52
|
+
"exclude",
|
|
53
|
+
"repr",
|
|
54
|
+
"init",
|
|
55
|
+
"init_var",
|
|
56
|
+
"kw_only",
|
|
57
|
+
"validate_default",
|
|
58
|
+
}
|
|
59
|
+
)
|
|
60
|
+
|
|
61
|
+
_CONSTRAINT_MAPPING = {
|
|
62
|
+
"Gt": "gt",
|
|
63
|
+
"Ge": "ge",
|
|
64
|
+
"Lt": "lt",
|
|
65
|
+
"Le": "le",
|
|
66
|
+
"MultipleOf": "multiple_of",
|
|
67
|
+
"MinLen": "min_length",
|
|
68
|
+
"MaxLen": "max_length",
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
@functools.lru_cache(maxsize=1)
def _get_pydantic_field_params() -> set[str]:
    """Return the keyword-parameter names accepted by ``pydantic.Field`` (cached).

    Computed once by introspecting the Field signature, so the set tracks
    whatever Pydantic version is installed.
    """
    import inspect

    from pydantic import Field as PydanticField

    signature = inspect.signature(PydanticField)
    # "kwargs" is Field's catch-all **kwargs slot, not a real option name.
    return {param for param in signature.parameters} - {"kwargs"}
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# ---------------------------------------------------------------------------
|
|
85
|
+
# FieldInfo -> Spec resolution (extract_specs direction)
|
|
86
|
+
# ---------------------------------------------------------------------------
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _is_valid_meta(v) -> bool:
    """True when *v* is a real metadata value (neither a sentinel nor PydanticUndefined)."""
    if isinstance(v, PydanticUndefinedType):
        return False
    return not_sentinel(v)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _ensure_annotation_from_field_info(fi: FieldInfo) -> Annotated | Any:
    """Re-attach the first FK/Vector DB marker from ``fi.metadata`` to the annotation.

    Pydantic strips Annotated metadata into ``FieldInfo.metadata``; this puts
    the first FKMeta/VectorMeta back so downstream type resolution sees it.
    Only the first marker is wrapped — remaining entries are treated as
    constraints elsewhere.
    """
    ann = fi.annotation
    for meta in getattr(fi, "metadata", None) or ():
        if isinstance(meta, (FKMeta, VectorMeta)):
            return Annotated[ann, meta]
    return ann
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def _resolve_defaults_from_field_info(
    fi: FieldInfo,
) -> dict[str, Any]:
    """Extract a ``default`` or ``default_factory`` entry from a FieldInfo.

    A concrete default wins over a factory; a factory is only kept when it is
    a real value (not a none/empty sentinel). Returns an empty dict when the
    field is required.
    """
    default = getattr(fi, "default", PydanticUndefined)
    if default is not PydanticUndefined:
        return {"default": default}

    factory = getattr(fi, "default_factory", PydanticUndefined)
    if factory is not PydanticUndefined and not_sentinel(factory, {"none", "empty"}):
        return {"default_factory": factory}

    return {}
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _resolve_constraint_metas(metadata: Any) -> dict[str, Any]:
    """Translate Pydantic constraint objects into Spec metadata kwargs.

    Recognizes the annotated-types constraints (Gt/Ge/Lt/Le/MultipleOf/
    MinLen/MaxLen), Strict, and Pydantic's grouped ``_PydanticGeneralMetadata``
    carrier (which may hold both ``pattern`` and ``strict``).

    Args:
        metadata: Iterable of constraint objects from ``FieldInfo.metadata``.

    Returns:
        Mapping of Spec keyword (e.g. "gt", "pattern") to its value; sentinel
        and PydanticUndefined values are filtered out.
    """
    recognized = set(_CONSTRAINT_MAPPING) | {"Strict", "_PydanticGeneralMetadata"}
    out: dict[str, Any] = {}

    def _put(key: str, value: Any) -> None:
        # Only record real values — drop sentinels / PydanticUndefined.
        if _is_valid_meta(value):
            out[key] = value

    for constraint in metadata:
        con_type = type(constraint).__name__
        if con_type not in recognized:
            continue

        if con_type in _CONSTRAINT_MAPPING:
            key = _CONSTRAINT_MAPPING[con_type]
            _put(key, getattr(constraint, key, Unset))
        elif con_type == "Strict":
            _put("strict", getattr(constraint, "strict", True))
        else:  # _PydanticGeneralMetadata — may carry pattern AND strict.
            # BUG FIX: the original used ``is_sentinel`` here (inverted),
            # which captured only sentinel values — real pattern/strict values
            # were always discarded by the final validity filter.  It also
            # funneled both keys through one (_k, _v) slot, so strict would
            # overwrite pattern when both were present.
            pattern = getattr(constraint, "pattern", Unset)
            if not_sentinel(pattern, {"none", "empty"}):
                _put("pattern", pattern)
            strict = getattr(constraint, "strict", Unset)
            if not_sentinel(strict, {"none", "empty"}):
                _put("strict", strict)

    return out
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def _create_spec_metas_from_field_info(field_name: str, field_info: FieldInfo) -> dict:
    """Assemble the Spec constructor kwargs for one Pydantic field.

    Merge order (later keys win): defaults < FieldInfo attributes and
    constraint metas < annotation-derived base metas.
    """
    annotation = _ensure_annotation_from_field_info(field_info)

    extras: dict[str, Any] = {}
    for attr in _FIELD_INFO_ATTRS:
        value = getattr(field_info, attr, Unset)
        if _is_valid_meta(value):
            extras[attr] = value

    raw_meta = getattr(field_info, "metadata", Unset)
    if not_sentinel(raw_meta, {"none", "empty"}):
        extras.update(_resolve_constraint_metas(raw_meta))

    return {
        "name": field_name,
        **_resolve_defaults_from_field_info(field_info),
        **extras,
        **resolve_annotation_to_base_types(annotation),
    }
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
# ---------------------------------------------------------------------------
|
|
167
|
+
# PydanticSpecAdapter
|
|
168
|
+
# ---------------------------------------------------------------------------
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
class PydanticSpecAdapter(SpecAdapter[FieldInfo]):
    """Adapter between kronos Specs and Pydantic (FieldInfo / BaseModel).

    Spec metadata keys that match pydantic.Field parameters pass through
    directly; unknown keys are collected into json_schema_extra.
    """

    @classmethod
    def create_field(cls, spec: Spec) -> FieldInfo:
        """Convert Spec to Pydantic FieldInfo with annotation set."""
        from pydantic import Field as PydanticField

        pydantic_field_params = _get_pydantic_field_params()
        field_kwargs: dict[str, Any] = {}

        if not is_sentinel(spec.metadata, {"none"}):
            for meta in spec.metadata:
                if meta.key == "default":
                    # Callables become default_factory so every instance gets
                    # a fresh value instead of a shared one.
                    if callable(meta.value):
                        field_kwargs["default_factory"] = meta.value
                    else:
                        field_kwargs["default"] = meta.value
                elif meta.key == "validator":
                    # Validators are wired separately via create_field_validator.
                    continue
                elif meta.key in pydantic_field_params:
                    if not_sentinel(meta.value):
                        field_kwargs[meta.key] = meta.value
                elif meta.key in {"nullable", "listable"}:
                    # Type modifiers live in the annotation, not in Field().
                    pass
                else:
                    if isinstance(meta.value, type):
                        # Bare classes are not JSON-schema material; skip.
                        continue
                    if "json_schema_extra" not in field_kwargs:
                        field_kwargs["json_schema_extra"] = {}
                    field_kwargs["json_schema_extra"][meta.key] = meta.value

        # Nullable fields default to None unless explicitly required
        # NOTE(review): this iterates spec.metadata WITHOUT the sentinel guard
        # used above — presumably Spec.metadata is always iterable here;
        # confirm, otherwise a sentinel metadata would raise.
        is_required = any(m.key == "required" and m.value for m in spec.metadata)
        if (
            spec.is_nullable
            and "default" not in field_kwargs
            and "default_factory" not in field_kwargs
            and not is_required
        ):
            field_kwargs["default"] = None

        field_info = PydanticField(**field_kwargs)
        # Field() does not take the annotation; set it directly on the result.
        field_info.annotation = spec.annotation

        return field_info

    @classmethod
    def create_field_validator(cls, spec: Spec) -> dict[str, Any] | None:
        """Create Pydantic field_validator from Spec metadata. Returns None if no validator.

        The returned mapping uses a "_<field>_validator" attribute name so it
        can be merged into a class namespace.
        """
        v = spec.get("validator")
        if is_sentinel(v):
            return None
        # check_fields=False: the validator is attached to a base class created
        # before the fields themselves exist on a model.
        _func = field_validator(spec.name, check_fields=False)(v)
        return {f"_{spec.name}_validator": _func}

    @classmethod
    def compose_structure(
        cls,
        op: Operable,
        name: str,
        /,
        *,
        include: set[str] | UnsetType = Unset,
        exclude: set[str] | UnsetType = Unset,
        base_type: type[BaseModel] | UnsetType = Unset,
        doc: str | UnsetType = Unset,
    ) -> type[BaseModel]:
        """Generate Pydantic BaseModel subclass from Operable.

        Args:
            op: Operable containing Specs
            name: Model class name
            include/exclude: Field name filters
            base_type: Base class for the model
            doc: Docstring for the model

        Returns:
            Dynamically created BaseModel subclass with validators
        """
        use_specs = op.get_specs(include=include, exclude=exclude)
        # Unnamed specs cannot become model fields; they are dropped here.
        use_fields = {i.name: cls.create_field(i) for i in use_specs if i.name}

        # create_model expects (annotation, FieldInfo) pairs.
        field_definitions = {
            field_name: (field_info.annotation, field_info)
            for field_name, field_info in use_fields.items()
        }

        validators = {}
        for spec in use_specs:
            if spec.name and (validator := cls.create_field_validator(spec)):
                validators.update(validator)

        # Validators must live on a base class: create_model only accepts
        # field definitions, not arbitrary class attributes.
        # NOTE(review): "base_type or BaseModel" relies on UnsetType being
        # falsy — confirm against the sentinel implementation.
        if validators:
            base_with_validators = type(
                f"{name}Base",
                (base_type or BaseModel,),
                validators,
            )
            actual_base = base_with_validators
        else:
            actual_base = base_type or BaseModel

        # NOTE(review): when doc is not given, __doc__ receives the Unset
        # sentinel object rather than None — confirm this is intended.
        model_cls: type[BaseModel] = create_model(
            name,
            __base__=actual_base,
            __doc__=doc,
            **field_definitions,
        )

        # Resolve any deferred/forward references before handing the model out.
        model_cls.model_rebuild()
        return model_cls

    @classmethod
    def validate_instance(cls, structure: type[BaseModel], data: dict, /) -> BaseModel:
        """Validate dict into BaseModel instance via model_validate."""
        return structure.model_validate(data)

    @classmethod
    def dump_instance(cls, instance: BaseModel) -> dict[str, Any]:
        """Dump BaseModel instance to dict via model_dump."""
        return instance.model_dump()

    @classmethod
    def extract_specs(cls, structure: type[BaseModel]) -> tuple[Spec, ...]:
        """Extract Specs from Pydantic model, preserving constraints and metadata.

        Raises:
            TypeError: If structure is not a BaseModel subclass
        """
        if not isinstance(structure, type) or not issubclass(structure, BaseModel):
            raise TypeError(
                f"structure must be a Pydantic BaseModel subclass, got {type(structure)}"
            )

        specs: list[Spec] = []
        # model_fields includes inherited fields, each with a FieldInfo.
        for field_name, field_info in structure.model_fields.items():
            metas = _create_spec_metas_from_field_info(field_name, field_info)
            specs.append(Spec(**metas))

        return tuple(specs)
|