ai-lib-python 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ai_lib_python/__init__.py +43 -0
- ai_lib_python/batch/__init__.py +15 -0
- ai_lib_python/batch/collector.py +244 -0
- ai_lib_python/batch/executor.py +224 -0
- ai_lib_python/cache/__init__.py +26 -0
- ai_lib_python/cache/backends.py +380 -0
- ai_lib_python/cache/key.py +237 -0
- ai_lib_python/cache/manager.py +332 -0
- ai_lib_python/client/__init__.py +37 -0
- ai_lib_python/client/builder.py +528 -0
- ai_lib_python/client/cancel.py +368 -0
- ai_lib_python/client/core.py +433 -0
- ai_lib_python/client/response.py +134 -0
- ai_lib_python/embeddings/__init__.py +36 -0
- ai_lib_python/embeddings/client.py +339 -0
- ai_lib_python/embeddings/types.py +234 -0
- ai_lib_python/embeddings/vectors.py +246 -0
- ai_lib_python/errors/__init__.py +41 -0
- ai_lib_python/errors/base.py +316 -0
- ai_lib_python/errors/classification.py +210 -0
- ai_lib_python/guardrails/__init__.py +35 -0
- ai_lib_python/guardrails/base.py +336 -0
- ai_lib_python/guardrails/filters.py +583 -0
- ai_lib_python/guardrails/validators.py +475 -0
- ai_lib_python/pipeline/__init__.py +55 -0
- ai_lib_python/pipeline/accumulate.py +248 -0
- ai_lib_python/pipeline/base.py +240 -0
- ai_lib_python/pipeline/decode.py +281 -0
- ai_lib_python/pipeline/event_map.py +506 -0
- ai_lib_python/pipeline/fan_out.py +284 -0
- ai_lib_python/pipeline/select.py +297 -0
- ai_lib_python/plugins/__init__.py +32 -0
- ai_lib_python/plugins/base.py +294 -0
- ai_lib_python/plugins/hooks.py +296 -0
- ai_lib_python/plugins/middleware.py +285 -0
- ai_lib_python/plugins/registry.py +294 -0
- ai_lib_python/protocol/__init__.py +71 -0
- ai_lib_python/protocol/loader.py +317 -0
- ai_lib_python/protocol/manifest.py +385 -0
- ai_lib_python/protocol/validator.py +460 -0
- ai_lib_python/py.typed +1 -0
- ai_lib_python/resilience/__init__.py +102 -0
- ai_lib_python/resilience/backpressure.py +225 -0
- ai_lib_python/resilience/circuit_breaker.py +318 -0
- ai_lib_python/resilience/executor.py +343 -0
- ai_lib_python/resilience/fallback.py +341 -0
- ai_lib_python/resilience/preflight.py +413 -0
- ai_lib_python/resilience/rate_limiter.py +291 -0
- ai_lib_python/resilience/retry.py +299 -0
- ai_lib_python/resilience/signals.py +283 -0
- ai_lib_python/routing/__init__.py +118 -0
- ai_lib_python/routing/manager.py +593 -0
- ai_lib_python/routing/strategy.py +345 -0
- ai_lib_python/routing/types.py +397 -0
- ai_lib_python/structured/__init__.py +33 -0
- ai_lib_python/structured/json_mode.py +281 -0
- ai_lib_python/structured/schema.py +316 -0
- ai_lib_python/structured/validator.py +334 -0
- ai_lib_python/telemetry/__init__.py +127 -0
- ai_lib_python/telemetry/exporters/__init__.py +9 -0
- ai_lib_python/telemetry/exporters/prometheus.py +111 -0
- ai_lib_python/telemetry/feedback.py +446 -0
- ai_lib_python/telemetry/health.py +409 -0
- ai_lib_python/telemetry/logger.py +389 -0
- ai_lib_python/telemetry/metrics.py +496 -0
- ai_lib_python/telemetry/tracer.py +473 -0
- ai_lib_python/tokens/__init__.py +25 -0
- ai_lib_python/tokens/counter.py +282 -0
- ai_lib_python/tokens/estimator.py +286 -0
- ai_lib_python/transport/__init__.py +34 -0
- ai_lib_python/transport/auth.py +141 -0
- ai_lib_python/transport/http.py +364 -0
- ai_lib_python/transport/pool.py +425 -0
- ai_lib_python/types/__init__.py +41 -0
- ai_lib_python/types/events.py +343 -0
- ai_lib_python/types/message.py +332 -0
- ai_lib_python/types/tool.py +191 -0
- ai_lib_python/utils/__init__.py +21 -0
- ai_lib_python/utils/tool_call_assembler.py +317 -0
- ai_lib_python-0.5.0.dist-info/METADATA +837 -0
- ai_lib_python-0.5.0.dist-info/RECORD +84 -0
- ai_lib_python-0.5.0.dist-info/WHEEL +4 -0
- ai_lib_python-0.5.0.dist-info/licenses/LICENSE-APACHE +201 -0
- ai_lib_python-0.5.0.dist-info/licenses/LICENSE-MIT +21 -0
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
"""
|
|
2
|
+
JSON Schema generation utilities.
|
|
3
|
+
|
|
4
|
+
Provides schema generation from Python types and Pydantic models.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
from typing import Any, get_args, get_origin
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def json_schema_from_type(python_type: type) -> dict[str, Any]:
|
|
14
|
+
"""Generate JSON schema from a Python type.
|
|
15
|
+
|
|
16
|
+
Args:
|
|
17
|
+
python_type: Python type to convert
|
|
18
|
+
|
|
19
|
+
Returns:
|
|
20
|
+
JSON schema dictionary
|
|
21
|
+
|
|
22
|
+
Example:
|
|
23
|
+
>>> schema = json_schema_from_type(str)
|
|
24
|
+
>>> print(schema)
|
|
25
|
+
{"type": "string"}
|
|
26
|
+
|
|
27
|
+
>>> from typing import List
|
|
28
|
+
>>> schema = json_schema_from_type(List[int])
|
|
29
|
+
>>> print(schema)
|
|
30
|
+
{"type": "array", "items": {"type": "integer"}}
|
|
31
|
+
"""
|
|
32
|
+
# Handle None
|
|
33
|
+
if python_type is type(None):
|
|
34
|
+
return {"type": "null"}
|
|
35
|
+
|
|
36
|
+
# Handle basic types
|
|
37
|
+
type_mapping = {
|
|
38
|
+
str: {"type": "string"},
|
|
39
|
+
int: {"type": "integer"},
|
|
40
|
+
float: {"type": "number"},
|
|
41
|
+
bool: {"type": "boolean"},
|
|
42
|
+
bytes: {"type": "string", "format": "byte"},
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
if python_type in type_mapping:
|
|
46
|
+
return type_mapping[python_type]
|
|
47
|
+
|
|
48
|
+
# Handle generic types
|
|
49
|
+
origin = get_origin(python_type)
|
|
50
|
+
args = get_args(python_type)
|
|
51
|
+
|
|
52
|
+
# Handle list/List
|
|
53
|
+
if origin is list:
|
|
54
|
+
if args:
|
|
55
|
+
return {
|
|
56
|
+
"type": "array",
|
|
57
|
+
"items": json_schema_from_type(args[0]),
|
|
58
|
+
}
|
|
59
|
+
return {"type": "array"}
|
|
60
|
+
|
|
61
|
+
# Handle dict/Dict
|
|
62
|
+
if origin is dict:
|
|
63
|
+
schema: dict[str, Any] = {"type": "object"}
|
|
64
|
+
if len(args) >= 2:
|
|
65
|
+
schema["additionalProperties"] = json_schema_from_type(args[1])
|
|
66
|
+
return schema
|
|
67
|
+
|
|
68
|
+
# Handle tuple/Tuple
|
|
69
|
+
if origin is tuple:
|
|
70
|
+
if args:
|
|
71
|
+
return {
|
|
72
|
+
"type": "array",
|
|
73
|
+
"items": [json_schema_from_type(arg) for arg in args],
|
|
74
|
+
"minItems": len(args),
|
|
75
|
+
"maxItems": len(args),
|
|
76
|
+
}
|
|
77
|
+
return {"type": "array"}
|
|
78
|
+
|
|
79
|
+
# Handle Union types (including | syntax)
|
|
80
|
+
if origin is type(int | str): # Python 3.10+ union
|
|
81
|
+
schemas = [json_schema_from_type(arg) for arg in args]
|
|
82
|
+
# Check if it's Optional (Union with None)
|
|
83
|
+
none_schemas = [s for s in schemas if s.get("type") == "null"]
|
|
84
|
+
other_schemas = [s for s in schemas if s.get("type") != "null"]
|
|
85
|
+
|
|
86
|
+
if none_schemas and len(other_schemas) == 1:
|
|
87
|
+
# Optional type
|
|
88
|
+
return {**other_schemas[0], "nullable": True}
|
|
89
|
+
return {"anyOf": schemas}
|
|
90
|
+
|
|
91
|
+
# Handle Any
|
|
92
|
+
if python_type is Any:
|
|
93
|
+
return {}
|
|
94
|
+
|
|
95
|
+
# Try to handle as Pydantic model
|
|
96
|
+
if hasattr(python_type, "model_json_schema"):
|
|
97
|
+
return json_schema_from_pydantic(python_type)
|
|
98
|
+
|
|
99
|
+
# Default to object
|
|
100
|
+
return {"type": "object"}
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def json_schema_from_pydantic(model: type) -> dict[str, Any]:
    """Generate JSON schema from a Pydantic model.

    Delegates to the model's own ``model_json_schema`` hook, which every
    Pydantic v2 ``BaseModel`` subclass provides.

    Args:
        model: Pydantic model class

    Returns:
        JSON schema dictionary

    Raises:
        ValueError: If model is not a Pydantic model

    Example:
        >>> from pydantic import BaseModel
        >>> class User(BaseModel):
        ...     name: str
        ...     age: int
        >>> schema = json_schema_from_pydantic(User)
    """
    # Duck-typed check: anything exposing model_json_schema counts.
    if not hasattr(model, "model_json_schema"):
        raise ValueError(f"{model} is not a Pydantic model")
    return model.model_json_schema()
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
class SchemaGenerator:
    """Fluent builder for JSON object schemas.

    Collects named, optionally constrained properties and assembles them
    into a single ``{"type": "object", ...}`` schema.

    Example:
        >>> generator = SchemaGenerator()
        >>> generator.add_property("name", str, description="User's name")
        >>> generator.add_property("age", int, minimum=0)
        >>> schema = generator.build()
    """

    def __init__(
        self,
        title: str | None = None,
        description: str | None = None,
    ) -> None:
        """Initialize the builder.

        Args:
            title: Schema title
            description: Schema description
        """
        self._title = title
        self._description = description
        self._properties: dict[str, dict[str, Any]] = {}
        self._required: list[str] = []
        # Extra keys are rejected by default until explicitly allowed.
        self._additional_properties: bool | dict[str, Any] = False

    def add_property(
        self,
        name: str,
        python_type: type,
        *,
        description: str | None = None,
        required: bool = True,
        default: Any = None,
        enum: list[Any] | None = None,
        minimum: float | None = None,
        maximum: float | None = None,
        min_length: int | None = None,
        max_length: int | None = None,
        pattern: str | None = None,
    ) -> SchemaGenerator:
        """Register a typed property on the schema.

        Args:
            name: Property name
            python_type: Property type
            description: Property description
            required: Whether property is required
            default: Default value
            enum: Allowed values
            minimum: Minimum value (for numbers)
            maximum: Maximum value (for numbers)
            min_length: Minimum length (for strings)
            max_length: Maximum length (for strings)
            pattern: Regex pattern (for strings)

        Returns:
            Self for chaining
        """
        spec = json_schema_from_type(python_type)

        if description:
            spec["description"] = description
        if default is not None:
            spec["default"] = default
        if enum:
            spec["enum"] = enum
        # Bound constraints are only emitted when explicitly supplied.
        for key, bound in (
            ("minimum", minimum),
            ("maximum", maximum),
            ("minLength", min_length),
            ("maxLength", max_length),
        ):
            if bound is not None:
                spec[key] = bound
        if pattern:
            spec["pattern"] = pattern

        self._properties[name] = spec
        if required:
            self._required.append(name)
        return self

    def add_object_property(
        self,
        name: str,
        nested_schema: dict[str, Any],
        *,
        description: str | None = None,
        required: bool = True,
    ) -> SchemaGenerator:
        """Attach a pre-built nested object schema as a property.

        Args:
            name: Property name
            nested_schema: Nested JSON schema
            description: Property description
            required: Whether property is required

        Returns:
            Self for chaining
        """
        # Shallow copy so the caller's dict is never mutated.
        spec = dict(nested_schema)
        if description:
            spec["description"] = description

        self._properties[name] = spec
        if required:
            self._required.append(name)
        return self

    def allow_additional_properties(
        self, allowed: bool | type = True
    ) -> SchemaGenerator:
        """Configure how extra, undeclared keys are treated.

        Args:
            allowed: True to allow any, False to disallow, or type to restrict

        Returns:
            Self for chaining
        """
        self._additional_properties = (
            allowed if isinstance(allowed, bool) else json_schema_from_type(allowed)
        )
        return self

    def build(self) -> dict[str, Any]:
        """Assemble and return the JSON schema dictionary.

        Returns:
            JSON schema dictionary
        """
        schema: dict[str, Any] = {
            "type": "object",
            "properties": self._properties,
        }

        # Optional metadata keys are emitted only when non-empty.
        for key, value in (
            ("title", self._title),
            ("description", self._description),
            ("required", self._required),
        ):
            if value:
                schema[key] = value
        # ``True`` is the JSON-schema default, so it is left implicit.
        if self._additional_properties is not True:
            schema["additionalProperties"] = self._additional_properties

        return schema

    def to_json(self, indent: int = 2) -> str:
        """Serialize the built schema as a JSON string.

        Args:
            indent: JSON indentation

        Returns:
            JSON string
        """
        return json.dumps(self.build(), indent=indent)

    @classmethod
    def from_pydantic(cls, model: type) -> SchemaGenerator:
        """Create a generator pre-populated from a Pydantic model.

        Args:
            model: Pydantic model class

        Returns:
            SchemaGenerator instance
        """
        builder = cls(
            title=getattr(model, "__name__", None),
            description=getattr(model, "__doc__", None),
        )

        # Lift the model's own schema into this builder's state.
        pydantic_schema = json_schema_from_pydantic(model)
        builder._properties = pydantic_schema.get("properties", {})
        builder._required = pydantic_schema.get("required", [])

        return builder
|
|
@@ -0,0 +1,334 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Output validation for structured responses.
|
|
3
|
+
|
|
4
|
+
Validates JSON output against schemas and Pydantic models.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
from dataclasses import dataclass, field
|
|
11
|
+
from typing import Any, TypeVar
|
|
12
|
+
|
|
13
|
+
T = TypeVar("T")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ValidationError(Exception):
    """Error raised when validation fails.

    Attributes:
        message: Error message
        path: JSON path to the error location
        value: The invalid value
    """

    def __init__(
        self,
        message: str,
        path: str | None = None,
        value: Any = None,
    ) -> None:
        """Initialize validation error.

        Args:
            message: Error message
            path: JSON path to error
            value: Invalid value
        """
        self.message = message
        self.path = path
        self.value = value
        # Exception text includes the path prefix when one is known.
        super().__init__(self._format_message())

    def _format_message(self) -> str:
        """Format the error message."""
        if self.path:
            return f"{self.path}: {self.message}"
        return self.message
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@dataclass
class ValidationResult:
    """Result of validation.

    Truthy when validation passed, so it can be used directly in an
    ``if`` test.

    Attributes:
        valid: Whether validation passed
        errors: List of validation errors
        data: Validated/parsed data
    """

    valid: bool = True
    errors: list[str] = field(default_factory=list)
    data: Any = None

    def __bool__(self) -> bool:
        """Return True if validation passed."""
        return self.valid

    def raise_if_invalid(self) -> None:
        """Raise ValidationError if validation failed."""
        if self.valid:
            return
        # Collapse all accumulated errors into a single message.
        raise ValidationError("; ".join(self.errors))
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class OutputValidator:
    """Validator for structured output.

    Validates JSON strings and dictionaries against schemas
    or Pydantic models.

    Example:
        >>> from pydantic import BaseModel
        >>> class User(BaseModel):
        ...     name: str
        ...     age: int
        >>>
        >>> validator = OutputValidator(User)
        >>> result = validator.validate('{"name": "Alice", "age": 30}')
        >>> print(result.valid)  # True
        >>> print(result.data)  # User(name='Alice', age=30)
    """

    def __init__(
        self,
        schema: dict[str, Any] | type | None = None,
        strict: bool = True,
    ) -> None:
        """Initialize validator.

        Args:
            schema: JSON schema dict or Pydantic model class
            strict: Whether to use strict validation
        """
        self._pydantic_model: type | None = None
        self._json_schema: dict[str, Any] | None = None
        self._strict = strict

        if schema is None:
            return
        if isinstance(schema, dict):
            self._json_schema = schema
        elif hasattr(schema, "model_validate"):
            # A Pydantic model also yields a JSON schema for introspection.
            self._pydantic_model = schema
            self._json_schema = schema.model_json_schema()
        else:
            raise ValueError(
                "Schema must be a JSON schema dict or Pydantic model class"
            )

    def validate(self, data: str | dict[str, Any]) -> ValidationResult:
        """Validate data against the schema.

        Args:
            data: JSON string or dictionary to validate

        Returns:
            ValidationResult with validation status and parsed data
        """
        # Strings are decoded first; a decode failure short-circuits.
        if isinstance(data, str):
            try:
                payload = json.loads(data)
            except json.JSONDecodeError as exc:
                return ValidationResult(
                    valid=False,
                    errors=[f"Invalid JSON: {exc}"],
                )
        else:
            payload = data

        # Pydantic validation takes precedence over plain JSON schema.
        if self._pydantic_model is not None:
            return self._validate_pydantic(payload)
        if self._json_schema is not None:
            return self._validate_json_schema(payload)

        # No schema configured: pass the parsed payload through untouched.
        return ValidationResult(valid=True, data=payload)

    def validate_or_raise(self, data: str | dict[str, Any]) -> Any:
        """Validate data and raise if invalid.

        Args:
            data: Data to validate

        Returns:
            Validated/parsed data

        Raises:
            ValidationError: If validation fails
        """
        outcome = self.validate(data)
        outcome.raise_if_invalid()
        return outcome.data

    def parse(self, data: str | dict[str, Any], model: type[T]) -> T:
        """Parse and validate data into a Pydantic model.

        Args:
            data: Data to parse
            model: Pydantic model class

        Returns:
            Model instance

        Raises:
            ValidationError: If validation fails
        """
        if isinstance(data, str):
            try:
                payload = json.loads(data)
            except json.JSONDecodeError as exc:
                raise ValidationError(f"Invalid JSON: {exc}") from exc
        else:
            payload = data

        # Any model-side failure is re-raised under our own error type.
        try:
            return model.model_validate(payload)
        except Exception as exc:
            raise ValidationError(str(exc)) from exc

    def _validate_pydantic(self, data: dict[str, Any]) -> ValidationResult:
        """Validate against Pydantic model.

        Args:
            data: Data to validate

        Returns:
            ValidationResult
        """
        try:
            instance = self._pydantic_model.model_validate(data)
        except Exception as exc:
            return ValidationResult(
                valid=False,
                errors=[str(exc)],
            )
        return ValidationResult(valid=True, data=instance)

    def _validate_json_schema(self, data: dict[str, Any]) -> ValidationResult:
        """Validate against JSON schema.

        Args:
            data: Data to validate

        Returns:
            ValidationResult
        """
        problems: list[str] = []

        # Top-level type check: only object schemas are enforced here.
        if self._json_schema.get("type") == "object" and not isinstance(data, dict):
            return ValidationResult(
                valid=False,
                errors=[f"Expected object, got {type(data).__name__}"],
            )

        # Every declared-required property must be present.
        for prop in self._json_schema.get("required", []):
            if prop not in data:
                problems.append(f"Missing required property: {prop}")

        # Recurse into each declared property that actually appears.
        declared = self._json_schema.get("properties", {})
        for prop_name, prop_schema in declared.items():
            if prop_name in data:
                problems.extend(
                    self._validate_property(data[prop_name], prop_schema, prop_name)
                )

        # Reject undeclared keys when additionalProperties is False.
        if self._json_schema.get("additionalProperties", True) is False:
            for prop in set(data.keys()) - set(declared.keys()):
                problems.append(f"Additional property not allowed: {prop}")

        return ValidationResult(
            valid=not problems,
            errors=problems,
            data=data if not problems else None,
        )

    def _validate_property(
        self,
        value: Any,
        schema: dict[str, Any],
        path: str,
    ) -> list[str]:
        """Validate a single property.

        Args:
            value: Property value
            schema: Property schema
            path: Property path

        Returns:
            List of error messages
        """
        problems: list[str] = []
        expected = schema.get("type")

        # JSON-type -> Python predicate table.  bool is excluded from the
        # integer/number checks because bool subclasses int.
        checkers = {
            "string": lambda v: isinstance(v, str),
            "integer": lambda v: isinstance(v, int) and not isinstance(v, bool),
            "number": lambda v: isinstance(v, (int, float)) and not isinstance(v, bool),
            "boolean": lambda v: isinstance(v, bool),
            "array": lambda v: isinstance(v, list),
            "object": lambda v: isinstance(v, dict),
            "null": lambda v: v is None,
        }

        if expected in checkers:
            # A nullable property accepts None regardless of its base type.
            if schema.get("nullable") and value is None:
                return []
            if not checkers[expected](value):
                problems.append(
                    f"{path}: Expected {expected}, got {type(value).__name__}"
                )
                # No point checking constraints against the wrong type.
                return problems

        # String constraints
        if expected == "string" and isinstance(value, str):
            if "minLength" in schema and len(value) < schema["minLength"]:
                problems.append(f"{path}: String too short (min {schema['minLength']})")
            if "maxLength" in schema and len(value) > schema["maxLength"]:
                problems.append(f"{path}: String too long (max {schema['maxLength']})")
            if "pattern" in schema:
                import re

                if not re.match(schema["pattern"], value):
                    problems.append(f"{path}: String does not match pattern")

        # Number constraints
        if expected in ("integer", "number") and isinstance(value, (int, float)):
            if "minimum" in schema and value < schema["minimum"]:
                problems.append(f"{path}: Value below minimum ({schema['minimum']})")
            if "maximum" in schema and value > schema["maximum"]:
                problems.append(f"{path}: Value above maximum ({schema['maximum']})")

        # Enum constraint
        if "enum" in schema and value not in schema["enum"]:
            problems.append(f"{path}: Value not in allowed enum values")

        # Array constraints and per-item recursion
        if expected == "array" and isinstance(value, list):
            if "minItems" in schema and len(value) < schema["minItems"]:
                problems.append(f"{path}: Array too short (min {schema['minItems']})")
            if "maxItems" in schema and len(value) > schema["maxItems"]:
                problems.append(f"{path}: Array too long (max {schema['maxItems']})")
            if "items" in schema:
                for index, item in enumerate(value):
                    problems.extend(
                        self._validate_property(
                            item, schema["items"], f"{path}[{index}]"
                        )
                    )

        return problems
|