ostruct-cli 0.5.0__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ostruct/cli/cli.py +119 -374
- ostruct/cli/errors.py +63 -18
- ostruct/cli/model_creation.py +507 -0
- ostruct/cli/schema_validation.py +213 -0
- {ostruct_cli-0.5.0.dist-info → ostruct_cli-0.6.1.dist-info}/METADATA +211 -32
- {ostruct_cli-0.5.0.dist-info → ostruct_cli-0.6.1.dist-info}/RECORD +9 -7
- {ostruct_cli-0.5.0.dist-info → ostruct_cli-0.6.1.dist-info}/WHEEL +1 -1
- {ostruct_cli-0.5.0.dist-info → ostruct_cli-0.6.1.dist-info}/LICENSE +0 -0
- {ostruct_cli-0.5.0.dist-info → ostruct_cli-0.6.1.dist-info}/entry_points.txt +0 -0
ostruct/cli/errors.py
CHANGED
```diff
@@ -1,5 +1,6 @@
 """Custom error classes for CLI error handling."""
 
+import json
 import logging
 from typing import Any, Dict, List, Optional
 
@@ -323,31 +324,54 @@ class SchemaFileError(CLIError):
 
 
 class SchemaValidationError(CLIError):
-    """
+    """Error raised when a schema fails validation."""
 
     def __init__(
         self,
         message: str,
-        schema_path: Optional[str] = None,
         context: Optional[Dict[str, Any]] = None,
     ):
        context = context or {}
-
-
-
-
-
-
-
-
-
-
-
-
+
+        # Format error message with tips
+        formatted_message = [message]
+
+        if "path" in context:
+            formatted_message.append(f"\nLocation: {context['path']}")
+
+        if "found" in context:
+            formatted_message.append(f"Found: {context['found']}")
+
+        if "count" in context:
+            formatted_message.append(f"Count: {context['count']}")
+
+        if "missing_required" in context:
+            formatted_message.append(
+                f"Missing required: {context['missing_required']}"
+            )
+
+        if "extra_required" in context:
+            formatted_message.append(
+                f"Extra required: {context['extra_required']}"
+            )
+
+        if "prohibited_used" in context:
+            formatted_message.append(
+                f"Prohibited keywords used: {context['prohibited_used']}"
             )
 
+        if "tips" in context:
+            formatted_message.append("\nHow to fix:")
+            for tip in context["tips"]:
+                if isinstance(tip, dict):
+                    # Format JSON example
+                    formatted_message.append("Example schema:")
+                    formatted_message.append(json.dumps(tip, indent=2))
+                else:
+                    formatted_message.append(f"- {tip}")
+
         super().__init__(
-
+            "\n".join(formatted_message),
             context=context,
             exit_code=ExitCode.SCHEMA_ERROR,
         )
```
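The new `SchemaValidationError.__init__` above assembles its message from whatever hints the caller places in `context`. As a minimal sketch of how that reads in practice (the context values below are invented for illustration and are not taken from the package):

```python
# Hypothetical example of the enriched SchemaValidationError message.
# The context keys ("path", "missing_required", "tips") are the ones the
# __init__ above checks for; the concrete values here are made up.
from ostruct.cli.errors import SchemaValidationError

err = SchemaValidationError(
    "Schema validation failed",
    context={
        "path": "$.properties",
        "missing_required": ["name"],
        "tips": [
            "Add 'name' to the schema's 'required' list",
            {"type": "object", "properties": {"name": {"type": "string"}}},
        ],
    },
)

# The formatted message now carries the location, the missing required
# fields, and a "How to fix" section in which the dict tip is rendered as
# an indented JSON example (exact rendering depends on CLIError.__str__).
print(err)
```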
```diff
@@ -430,9 +454,30 @@ class EmptyResponseError(CLIError):
 
 
 class InvalidResponseFormatError(CLIError):
-    """
-
-
+    """Raised when the response format is invalid."""
+
+    def __init__(self, message: str, context: Optional[Dict[str, Any]] = None):
+        if "schema must be a JSON Schema of 'type: \"object\"'" in message:
+            message = (
+                "The schema must have a root type of 'object', but got 'array'. "
+                "To fix this, wrap your array in an object. For example:\n\n"
+                "{\n"
+                '  "type": "object",\n'
+                '  "properties": {\n'
+                '    "items": {\n'
+                '      "type": "array",\n'
+                '      "items": { ... your array items schema ... }\n'
+                "    }\n"
+                "  },\n"
+                '  "required": ["items"]\n'
+                "}\n\n"
+                "Then update your template to handle the wrapper object."
+            )
+        super().__init__(
+            message,
+            exit_code=ExitCode.API_ERROR,
+            context=context,
+        )
 
 
 class OpenAIClientError(CLIError):
```
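The rewritten `InvalidResponseFormatError` message walks users through wrapping a root-level array schema in an object. A sketch of the before/after shapes it describes (the `items` property name follows the example embedded in the message; the string item type is illustrative):

```python
# Rejected: the error above says the root schema type must be "object".
array_root = {
    "type": "array",
    "items": {"type": "string"},
}

# Accepted: the array is moved under an object property and marked required,
# as the message suggests. additionalProperties: false mirrors the
# "OpenAI requires additionalProperties: false" note in model_creation.py.
object_root = {
    "type": "object",
    "properties": {
        "items": {
            "type": "array",
            "items": {"type": "string"},
        }
    },
    "required": ["items"],
    "additionalProperties": False,
}
```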
ostruct/cli/model_creation.py
ADDED
@@ -0,0 +1,507 @@
```python
"""Model creation utilities for the CLI."""

import json
import logging
import sys
from datetime import date, datetime, time
from enum import Enum, IntEnum
from typing import (
    Any,
    Dict,
    List,
    Optional,
    Tuple,
    Type,
    Union,
    cast,
    get_origin,
)

if sys.version_info >= (3, 11):
    from enum import StrEnum

from pydantic import (
    AnyUrl,
    BaseModel,
    ConfigDict,
    EmailStr,
    Field,
    ValidationError,
    create_model,
)
from pydantic.fields import FieldInfo
from pydantic.functional_validators import BeforeValidator
from pydantic.types import constr

from .errors import (
    FieldDefinitionError,
    ModelCreationError,
    ModelValidationError,
    NestedModelError,
    SchemaValidationError,
)

logger = logging.getLogger(__name__)

# Type aliases
FieldType = Type[
    Any
]  # Changed from Type[Any] to allow both concrete types and generics
FieldDefinition = Tuple[Any, FieldInfo]  # Changed to Any to handle generics


def _create_enum_type(values: List[Any], field_name: str) -> Type[Enum]:
    """Create an enum type from a list of values.

    Args:
        values: List of enum values
        field_name: Name of the field for enum type name

    Returns:
        Created enum type
    """
    # Determine the value type
    value_types = {type(v) for v in values}

    if len(value_types) > 1:
        # Mixed types, use string representation
        enum_dict = {f"VALUE_{i}": str(v) for i, v in enumerate(values)}
        return type(f"{field_name.title()}Enum", (str, Enum), enum_dict)
    elif value_types == {int}:
        # All integer values
        enum_dict = {f"VALUE_{v}": v for v in values}
        return type(f"{field_name.title()}Enum", (IntEnum,), enum_dict)
    elif value_types == {str}:
        # All string values
        enum_dict = {v.upper().replace(" ", "_"): v for v in values}
        if sys.version_info >= (3, 11):
            return type(f"{field_name.title()}Enum", (StrEnum,), enum_dict)
        else:
            # Other types, use string representation
            return type(f"{field_name.title()}Enum", (str, Enum), enum_dict)

    # Default case: treat as string enum
    enum_dict = {f"VALUE_{i}": str(v) for i, v in enumerate(values)}
    return type(f"{field_name.title()}Enum", (str, Enum), enum_dict)


def is_container_type(tp: Type[Any]) -> bool:
    """Check if a type is a container type (List, Dict, etc).

    Args:
        tp: Type to check

    Returns:
        bool: True if type is a container type
    """
    origin = get_origin(tp)
    return origin is not None and origin in (list, dict, List, Dict)


# Validation functions
def pattern(regex: str) -> Any:
    return constr(pattern=regex)


def min_length(length: int) -> Any:
    return BeforeValidator(lambda v: v if len(str(v)) >= length else None)


def max_length(length: int) -> Any:
    return BeforeValidator(lambda v: v if len(str(v)) <= length else None)


def ge(value: Union[int, float]) -> Any:
    return BeforeValidator(lambda v: v if float(v) >= value else None)


def le(value: Union[int, float]) -> Any:
    return BeforeValidator(lambda v: v if float(v) <= value else None)


def gt(value: Union[int, float]) -> Any:
    return BeforeValidator(lambda v: v if float(v) > value else None)


def lt(value: Union[int, float]) -> Any:
    return BeforeValidator(lambda v: v if float(v) < value else None)


def multiple_of(value: Union[int, float]) -> Any:
    return BeforeValidator(lambda v: v if float(v) % value == 0 else None)


def _get_type_with_constraints(
    field_schema: Dict[str, Any], field_name: str, base_name: str
) -> FieldDefinition:
    """Get type with constraints from field schema.

    Args:
        field_schema: Field schema dict
        field_name: Name of the field
        base_name: Base name for nested models

    Returns:
        Tuple of (type, field)
    """
    field_kwargs: Dict[str, Any] = {}

    # Add common field metadata
    if "title" in field_schema:
        field_kwargs["title"] = field_schema["title"]
    if "description" in field_schema:
        field_kwargs["description"] = field_schema["description"]
    if "default" in field_schema:
        field_kwargs["default"] = field_schema["default"]
    if "readOnly" in field_schema:
        field_kwargs["frozen"] = field_schema["readOnly"]

    field_type = field_schema.get("type")

    # Handle array type
    if field_type == "array":
        items_schema = field_schema.get("items", {})
        if not items_schema:
            return (List[Any], Field(**field_kwargs))  # Direct generic type

        # Create nested model for object items
        if (
            isinstance(items_schema, dict)
            and items_schema.get("type") == "object"
        ):
            array_item_model = create_dynamic_model(
                items_schema,
                base_name=f"{base_name}_{field_name}_Item",
                show_schema=False,
                debug_validation=False,
            )
            return (List[array_item_model], Field(**field_kwargs))  # type: ignore[valid-type]

        # For non-object items, use the type directly
        item_type = items_schema.get("type", "string")
        if item_type == "string":
            return (List[str], Field(**field_kwargs))
        elif item_type == "integer":
            return (List[int], Field(**field_kwargs))
        elif item_type == "number":
            return (List[float], Field(**field_kwargs))
        elif item_type == "boolean":
            return (List[bool], Field(**field_kwargs))
        else:
            return (List[Any], Field(**field_kwargs))

    # Handle object type
    if field_type == "object":
        # Create nested model with explicit type annotation
        object_model = create_dynamic_model(
            field_schema,
            base_name=f"{base_name}_{field_name}",
            show_schema=False,
            debug_validation=False,
        )
        return (object_model, Field(**field_kwargs))

    # Handle additionalProperties
    if "additionalProperties" in field_schema and isinstance(
        field_schema["additionalProperties"], dict
    ):
        # Create nested model with explicit type annotation
        dict_value_model = create_dynamic_model(
            field_schema["additionalProperties"],
            base_name=f"{base_name}_{field_name}_Value",
            show_schema=False,
            debug_validation=False,
        )
        dict_type: Type[Dict[str, Any]] = Dict[str, dict_value_model]  # type: ignore[valid-type]
        return (dict_type, Field(**field_kwargs))

    # Handle other types
    if field_type == "string":
        field_type_cls: Type[Any] = str

        # Add string-specific constraints to field_kwargs
        if "pattern" in field_schema:
            field_kwargs["pattern"] = field_schema["pattern"]
        if "minLength" in field_schema:
            field_kwargs["min_length"] = field_schema["minLength"]
        if "maxLength" in field_schema:
            field_kwargs["max_length"] = field_schema["maxLength"]

        # Handle special string formats
        if "format" in field_schema:
            if field_schema["format"] == "date-time":
                field_type_cls = datetime
            elif field_schema["format"] == "date":
                field_type_cls = date
            elif field_schema["format"] == "time":
                field_type_cls = time
            elif field_schema["format"] == "email":
                field_type_cls = EmailStr
            elif field_schema["format"] == "uri":
                field_type_cls = AnyUrl

        return (field_type_cls, Field(**field_kwargs))

    if field_type == "number":
        field_type_cls = float

        # Add number-specific constraints to field_kwargs
        if "minimum" in field_schema:
            field_kwargs["ge"] = field_schema["minimum"]
        if "maximum" in field_schema:
            field_kwargs["le"] = field_schema["maximum"]
        if "exclusiveMinimum" in field_schema:
            field_kwargs["gt"] = field_schema["exclusiveMinimum"]
        if "exclusiveMaximum" in field_schema:
            field_kwargs["lt"] = field_schema["exclusiveMaximum"]
        if "multipleOf" in field_schema:
            field_kwargs["multiple_of"] = field_schema["multipleOf"]

        return (field_type_cls, Field(**field_kwargs))

    if field_type == "integer":
        field_type_cls = int

        # Add integer-specific constraints to field_kwargs
        if "minimum" in field_schema:
            field_kwargs["ge"] = field_schema["minimum"]
        if "maximum" in field_schema:
            field_kwargs["le"] = field_schema["maximum"]
        if "exclusiveMinimum" in field_schema:
            field_kwargs["gt"] = field_schema["exclusiveMinimum"]
        if "exclusiveMaximum" in field_schema:
            field_kwargs["lt"] = field_schema["exclusiveMaximum"]
        if "multipleOf" in field_schema:
            field_kwargs["multiple_of"] = field_schema["multipleOf"]

        return (field_type_cls, Field(**field_kwargs))

    if field_type == "boolean":
        return (bool, Field(**field_kwargs))

    if field_type == "null":
        return (type(None), Field(**field_kwargs))

    # Handle enum
    if "enum" in field_schema:
        enum_type = _create_enum_type(field_schema["enum"], field_name)
        return (cast(Type[Any], enum_type), Field(**field_kwargs))

    # Default to Any for unknown types
    return (Any, Field(**field_kwargs))


def create_dynamic_model(
    schema: Dict[str, Any],
    base_name: str = "DynamicModel",
    show_schema: bool = False,
    debug_validation: bool = False,
) -> Type[BaseModel]:
    """Create a Pydantic model from a JSON schema.

    Args:
        schema: JSON schema to create model from
        base_name: Name for the model class
        show_schema: Whether to show the generated model schema
        debug_validation: Whether to show detailed validation errors

    Returns:
        Type[BaseModel]: The generated Pydantic model class

    Raises:
        ModelValidationError: If the schema is invalid
        SchemaValidationError: If the schema violates OpenAI requirements
    """
    if debug_validation:
        logger.info("Creating dynamic model from schema:")
        logger.info(json.dumps(schema, indent=2))

    try:
        # Handle our wrapper format if present
        if "schema" in schema:
            if debug_validation:
                logger.info("Found schema wrapper, extracting inner schema")
                logger.info(
                    "Original schema: %s", json.dumps(schema, indent=2)
                )
            inner_schema = schema["schema"]
            if not isinstance(inner_schema, dict):
                if debug_validation:
                    logger.info(
                        "Inner schema must be a dictionary, got %s",
                        type(inner_schema),
                    )
                raise SchemaValidationError(
                    "Inner schema must be a dictionary"
                )
            if debug_validation:
                logger.info("Using inner schema:")
                logger.info(json.dumps(inner_schema, indent=2))
            schema = inner_schema

        # Validate against OpenAI requirements
        from .schema_validation import validate_openai_schema

        validate_openai_schema(schema)

        # Create model configuration
        config = ConfigDict(
            title=schema.get("title", base_name),
            extra="forbid",  # OpenAI requires additionalProperties: false
            validate_default=True,
            use_enum_values=True,
            arbitrary_types_allowed=True,
            json_schema_extra={
                k: v
                for k, v in schema.items()
                if k
                not in {
                    "type",
                    "properties",
                    "required",
                    "title",
                    "description",
                    "additionalProperties",
                    "readOnly",
                }
            },
        )

        if debug_validation:
            logger.info("Created model configuration:")
            logger.info("  Title: %s", config.get("title"))
            logger.info("  Extra: %s", config.get("extra"))
            logger.info(
                "  Validate Default: %s", config.get("validate_default")
            )
            logger.info("  Use Enum Values: %s", config.get("use_enum_values"))
            logger.info(
                "  Arbitrary Types: %s", config.get("arbitrary_types_allowed")
            )
            logger.info(
                "  JSON Schema Extra: %s", config.get("json_schema_extra")
            )

        # Process schema properties into fields
        properties = schema.get("properties", {})
        required = schema.get("required", [])

        field_definitions: Dict[str, Tuple[Type[Any], FieldInfo]] = {}
        for field_name, field_schema in properties.items():
            if debug_validation:
                logger.info("Processing field %s:", field_name)
                logger.info("  Schema: %s", json.dumps(field_schema, indent=2))

            try:
                python_type, field = _get_type_with_constraints(
                    field_schema, field_name, base_name
                )

                # Handle optional fields
                if field_name not in required:
                    if debug_validation:
                        logger.info(
                            "Field %s is optional, wrapping in Optional",
                            field_name,
                        )
                    field_type = cast(Type[Any], Optional[python_type])
                else:
                    field_type = python_type
                    if debug_validation:
                        logger.info("Field %s is required", field_name)

                # Create field definition
                field_definitions[field_name] = (field_type, field)

                if debug_validation:
                    logger.info("Successfully created field definition:")
                    logger.info("  Name: %s", field_name)
                    logger.info("  Type: %s", str(field_type))
                    logger.info("  Required: %s", field_name in required)

            except (FieldDefinitionError, NestedModelError) as e:
                if debug_validation:
                    logger.error("Error creating field %s:", field_name)
                    logger.error("  Error type: %s", type(e).__name__)
                    logger.error("  Error message: %s", str(e))
                raise ModelValidationError(base_name, [str(e)])

        # Create the model with the fields
        field_defs: Dict[str, Any] = {
            name: (
                (
                    cast(Type[Any], field_type)
                    if is_container_type(field_type)
                    else field_type
                ),
                field,
            )
            for name, (field_type, field) in field_definitions.items()
        }
        model: Type[BaseModel] = create_model(
            base_name, __config__=config, **field_defs
        )

        # Set the model config after creation
        model.model_config = config

        if debug_validation:
            logger.info("Successfully created model: %s", model.__name__)
            logger.info("Model config: %s", dict(model.model_config))
            logger.info(
                "Model schema: %s",
                json.dumps(model.model_json_schema(), indent=2),
            )

        # Validate the model's JSON schema
        try:
            model.model_json_schema()
        except ValidationError as e:
            validation_errors = (
                [str(err) for err in e.errors()]
                if hasattr(e, "errors")
                else [str(e)]
            )
            if debug_validation:
                logger.error("Schema validation failed:")
                logger.error("  Error type: %s", type(e).__name__)
                logger.error("  Error message: %s", str(e))
            raise ModelValidationError(base_name, validation_errors)

        return model

    except SchemaValidationError as e:
        # Always log basic error info
        logger.error("Schema validation error: %s", str(e))

        # Log additional debug info if requested
        if debug_validation:
            logger.error("  Error type: %s", type(e).__name__)
            logger.error("  Error details: %s", str(e))
        # Always raise schema validation errors directly
        raise

    except Exception as e:
        # Always log basic error info
        logger.error("Model creation error: %s", str(e))

        # Log additional debug info if requested
        if debug_validation:
            logger.error("  Error type: %s", type(e).__name__)
            logger.error("  Error details: %s", str(e))
            if hasattr(e, "__cause__"):
                logger.error("  Caused by: %s", str(e.__cause__))
            if hasattr(e, "__context__"):
                logger.error("  Context: %s", str(e.__context__))
            if hasattr(e, "__traceback__"):
                import traceback

                logger.error(
                    "  Traceback:\n%s",
                    "".join(traceback.format_tb(e.__traceback__)),
                )
        # Always wrap other errors as ModelCreationError
        raise ModelCreationError(
            f"Failed to create model {base_name}",
            context={"error": str(e)},
        ) from e
```
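For orientation, a hypothetical usage sketch of the new module's entry point. The `Ticket` schema, field names, and sample data below are invented, and whether a particular schema is accepted also depends on `validate_openai_schema` in the new `schema_validation.py` (added in this release but not shown in this diff):

```python
# Hypothetical usage of create_dynamic_model; the schema and data are made up.
from ostruct.cli.model_creation import create_dynamic_model

schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string", "minLength": 1},
        "priority": {"type": "integer", "minimum": 1, "maximum": 5},
    },
    "required": ["name", "priority"],
    "additionalProperties": False,
}

# debug_validation=True logs the schema, the model configuration, and each
# generated field definition via the module logger.
Ticket = create_dynamic_model(schema, base_name="Ticket", debug_validation=True)

# The result is an ordinary Pydantic model class.
ticket = Ticket.model_validate({"name": "Add retry logic", "priority": 2})
print(ticket.model_dump())
print(Ticket.model_json_schema())
```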