ostruct-cli 0.6.0-py3-none-any.whl → 0.6.2-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- ostruct/cli/cli.py +195 -412
- ostruct/cli/errors.py +61 -54
- ostruct/cli/model_creation.py +480 -0
- ostruct/cli/security/errors.py +1 -1
- ostruct/cli/security/normalization.py +1 -1
- ostruct/cli/security/security_manager.py +48 -7
- ostruct/cli/template_extensions.py +32 -1
- ostruct/cli/template_utils.py +175 -16
- ostruct/cli/utils.py +3 -1
- ostruct/cli/validators.py +6 -2
- {ostruct_cli-0.6.0.dist-info → ostruct_cli-0.6.2.dist-info}/METADATA +39 -177
- {ostruct_cli-0.6.0.dist-info → ostruct_cli-0.6.2.dist-info}/RECORD +15 -14
- {ostruct_cli-0.6.0.dist-info → ostruct_cli-0.6.2.dist-info}/LICENSE +0 -0
- {ostruct_cli-0.6.0.dist-info → ostruct_cli-0.6.2.dist-info}/WHEEL +0 -0
- {ostruct_cli-0.6.0.dist-info → ostruct_cli-0.6.2.dist-info}/entry_points.txt +0 -0
ostruct/cli/errors.py
CHANGED
@@ -323,60 +323,6 @@ class SchemaFileError(CLIError):
         return self.context.get("schema_path")


-class SchemaValidationError(CLIError):
-    """Error raised when a schema fails validation."""
-
-    def __init__(
-        self,
-        message: str,
-        context: Optional[Dict[str, Any]] = None,
-    ):
-        context = context or {}
-
-        # Format error message with tips
-        formatted_message = [message]
-
-        if "path" in context:
-            formatted_message.append(f"\nLocation: {context['path']}")
-
-        if "found" in context:
-            formatted_message.append(f"Found: {context['found']}")
-
-        if "count" in context:
-            formatted_message.append(f"Count: {context['count']}")
-
-        if "missing_required" in context:
-            formatted_message.append(
-                f"Missing required: {context['missing_required']}"
-            )
-
-        if "extra_required" in context:
-            formatted_message.append(
-                f"Extra required: {context['extra_required']}"
-            )
-
-        if "prohibited_used" in context:
-            formatted_message.append(
-                f"Prohibited keywords used: {context['prohibited_used']}"
-            )
-
-        if "tips" in context:
-            formatted_message.append("\nHow to fix:")
-            for tip in context["tips"]:
-                if isinstance(tip, dict):
-                    # Format JSON example
-                    formatted_message.append("Example schema:")
-                    formatted_message.append(json.dumps(tip, indent=2))
-                else:
-                    formatted_message.append(f"- {tip}")
-
-        super().__init__(
-            "\n".join(formatted_message),
-            context=context,
-            exit_code=ExitCode.SCHEMA_ERROR,
-        )
-
-
 class ModelCreationError(CLIError):
     """Base class for model creation errors."""

@@ -496,6 +442,67 @@ class OpenAIClientError(CLIError):
         super().__init__(message, exit_code=exit_code, context=context)


+class SchemaValidationError(ModelCreationError):
+    """Raised when schema validation fails."""
+
+    def __init__(
+        self,
+        message: str,
+        context: Optional[Dict[str, Any]] = None,
+        exit_code: ExitCode = ExitCode.SCHEMA_ERROR,
+    ):
+        context = context or {}
+        # Preserve validation type for error handling
+        context.setdefault("validation_type", "schema")
+
+        # Format error message with tips
+        formatted_message = []
+
+        if "path" in context:
+            formatted_message.append(f"\nLocation: {context['path']}")
+
+        if "found" in context:
+            formatted_message.append(f"Found: {context['found']}")
+
+        if "reference" in context:
+            formatted_message.append(f"Reference: {context['reference']}")
+
+        if "count" in context:
+            formatted_message.append(f"Count: {context['count']}")
+
+        if "missing_required" in context:
+            formatted_message.append(
+                f"Missing required: {context['missing_required']}"
+            )
+
+        if "extra_required" in context:
+            formatted_message.append(
+                f"Extra required: {context['extra_required']}"
+            )
+
+        if "prohibited_used" in context:
+            formatted_message.append(
+                f"Prohibited keywords used: {context['prohibited_used']}"
+            )
+
+        if "tips" in context:
+            formatted_message.append("\nHow to fix:")
+            for tip in context["tips"]:
+                if isinstance(tip, dict):
+                    # Format JSON example
+                    formatted_message.append("Example schema:")
+                    formatted_message.append(json.dumps(tip, indent=2))
+                else:
+                    formatted_message.append(f"- {tip}")
+
+        # Combine message with details
+        final_message = message
+        if formatted_message:
+            final_message += "\n" + "\n".join(formatted_message)
+
+        super().__init__(final_message, context=context, exit_code=exit_code)
+
+
 # Export public API
 __all__ = [
     "VariableError",
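For orientation, here is a minimal usage sketch (not part of the package diff) of the relocated SchemaValidationError, based only on the constructor shown above; the schema path, message, and tips are invented for illustration.

# Hypothetical example; the schema path and messages below are made up.
from ostruct.cli.errors import SchemaValidationError

try:
    raise SchemaValidationError(
        "Schema root must be an object",
        context={
            "path": "schemas/task.json",
            "found": "array",
            "tips": ["Wrap the top-level array in an object property"],
        },
    )
except SchemaValidationError as exc:
    # The constructor records validation_type="schema" via setdefault, and the
    # message it builds carries the "Location:", "Found:" and "How to fix:" lines.
    print(exc.context.get("validation_type"))
    print(exc)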
ostruct/cli/model_creation.py
ADDED
@@ -0,0 +1,480 @@
+"""Model creation utilities for the CLI."""
+
+import json
+import logging
+import sys
+from datetime import date, datetime, time
+from enum import Enum, IntEnum
+from typing import (
+    Any,
+    Dict,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+    cast,
+    get_origin,
+)
+
+if sys.version_info >= (3, 11):
+    from enum import StrEnum
+
+from pydantic import (
+    AnyUrl,
+    BaseModel,
+    ConfigDict,
+    EmailStr,
+    Field,
+    ValidationError,
+    create_model,
+)
+from pydantic.fields import FieldInfo
+from pydantic.functional_validators import BeforeValidator
+from pydantic.types import constr
+
+from .errors import (
+    FieldDefinitionError,
+    ModelCreationError,
+    ModelValidationError,
+    NestedModelError,
+    SchemaValidationError,
+)
+from .exit_codes import ExitCode
+
+logger = logging.getLogger(__name__)
+
+# Type aliases
+FieldType = Type[
+    Any
+]  # Changed from Type[Any] to allow both concrete types and generics
+FieldDefinition = Tuple[Any, FieldInfo]  # Changed to Any to handle generics
+
+
+def _create_enum_type(values: List[Any], field_name: str) -> Type[Enum]:
+    """Create an enum type from a list of values.
+
+    Args:
+        values: List of enum values
+        field_name: Name of the field for enum type name
+
+    Returns:
+        Created enum type
+    """
+    # Determine the value type
+    value_types = {type(v) for v in values}
+
+    if len(value_types) > 1:
+        # Mixed types, use string representation
+        enum_dict = {f"VALUE_{i}": str(v) for i, v in enumerate(values)}
+        return type(f"{field_name.title()}Enum", (str, Enum), enum_dict)
+    elif value_types == {int}:
+        # All integer values
+        enum_dict = {f"VALUE_{v}": v for v in values}
+        return type(f"{field_name.title()}Enum", (IntEnum,), enum_dict)
+    elif value_types == {str}:
+        # All string values
+        enum_dict = {v.upper().replace(" ", "_"): v for v in values}
+        if sys.version_info >= (3, 11):
+            return type(f"{field_name.title()}Enum", (StrEnum,), enum_dict)
+        else:
+            # Other types, use string representation
+            return type(f"{field_name.title()}Enum", (str, Enum), enum_dict)
+
+    # Default case: treat as string enum
+    enum_dict = {f"VALUE_{i}": str(v) for i, v in enumerate(values)}
+    return type(f"{field_name.title()}Enum", (str, Enum), enum_dict)
+
+
+def is_container_type(tp: Type[Any]) -> bool:
+    """Check if a type is a container type (List, Dict, etc).
+
+    Args:
+        tp: Type to check
+
+    Returns:
+        bool: True if type is a container type
+    """
+    origin = get_origin(tp)
+    return origin is not None and origin in (list, dict, List, Dict)
+
+
+# Validation functions
+def pattern(regex: str) -> Any:
+    return constr(pattern=regex)
+
+
+def min_length(length: int) -> Any:
+    return BeforeValidator(lambda v: v if len(str(v)) >= length else None)
+
+
+def max_length(length: int) -> Any:
+    return BeforeValidator(lambda v: v if len(str(v)) <= length else None)
+
+
+def ge(value: Union[int, float]) -> Any:
+    return BeforeValidator(lambda v: v if float(v) >= value else None)
+
+
+def le(value: Union[int, float]) -> Any:
+    return BeforeValidator(lambda v: v if float(v) <= value else None)
+
+
+def gt(value: Union[int, float]) -> Any:
+    return BeforeValidator(lambda v: v if float(v) > value else None)
+
+
+def lt(value: Union[int, float]) -> Any:
+    return BeforeValidator(lambda v: v if float(v) < value else None)
+
+
+def multiple_of(value: Union[int, float]) -> Any:
+    return BeforeValidator(lambda v: v if float(v) % value == 0 else None)
+
+
+def _get_type_with_constraints(
+    field_schema: Dict[str, Any], field_name: str, base_name: str
+) -> FieldDefinition:
+    """Get type with constraints from field schema.
+
+    Args:
+        field_schema: Field schema dict
+        field_name: Name of the field
+        base_name: Base name for nested models
+
+    Returns:
+        Tuple of (type, field)
+    """
+    field_kwargs: Dict[str, Any] = {}
+
+    # Add common field metadata
+    if "title" in field_schema:
+        field_kwargs["title"] = field_schema["title"]
+    if "description" in field_schema:
+        field_kwargs["description"] = field_schema["description"]
+    if "default" in field_schema:
+        field_kwargs["default"] = field_schema["default"]
+    if "readOnly" in field_schema:
+        field_kwargs["frozen"] = field_schema["readOnly"]
+
+    field_type = field_schema.get("type")
+
+    # Handle array type
+    if field_type == "array":
+        items_schema = field_schema.get("items", {})
+        if not items_schema:
+            return (List[Any], Field(**field_kwargs))  # Direct generic type
+
+        # Create nested model for object items
+        if (
+            isinstance(items_schema, dict)
+            and items_schema.get("type") == "object"
+        ):
+            array_item_model = create_dynamic_model(
+                items_schema,
+                base_name=f"{base_name}_{field_name}_Item",
+                show_schema=False,
+                debug_validation=False,
+            )
+            return (List[array_item_model], Field(**field_kwargs))  # type: ignore[valid-type]
+
+        # For non-object items, use the type directly
+        item_type = items_schema.get("type", "string")
+        if item_type == "string":
+            return (List[str], Field(**field_kwargs))
+        elif item_type == "integer":
+            return (List[int], Field(**field_kwargs))
+        elif item_type == "number":
+            return (List[float], Field(**field_kwargs))
+        elif item_type == "boolean":
+            return (List[bool], Field(**field_kwargs))
+        else:
+            return (List[Any], Field(**field_kwargs))
+
+    # Handle object type
+    if field_type == "object":
+        # Create nested model with explicit type annotation
+        object_model = create_dynamic_model(
+            field_schema,
+            base_name=f"{base_name}_{field_name}",
+            show_schema=False,
+            debug_validation=False,
+        )
+        return (object_model, Field(**field_kwargs))
+
+    # Handle additionalProperties
+    if "additionalProperties" in field_schema and isinstance(
+        field_schema["additionalProperties"], dict
+    ):
+        # Create nested model with explicit type annotation
+        dict_value_model = create_dynamic_model(
+            field_schema["additionalProperties"],
+            base_name=f"{base_name}_{field_name}_Value",
+            show_schema=False,
+            debug_validation=False,
+        )
+        dict_type: Type[Dict[str, Any]] = Dict[str, dict_value_model]  # type: ignore[valid-type]
+        return (dict_type, Field(**field_kwargs))
+
+    # Handle other types
+    if field_type == "string":
+        field_type_cls: Type[Any] = str
+
+        # Add string-specific constraints to field_kwargs
+        if "pattern" in field_schema:
+            field_kwargs["pattern"] = field_schema["pattern"]
+        if "minLength" in field_schema:
+            field_kwargs["min_length"] = field_schema["minLength"]
+        if "maxLength" in field_schema:
+            field_kwargs["max_length"] = field_schema["maxLength"]
+
+        # Handle special string formats
+        if "format" in field_schema:
+            if field_schema["format"] == "date-time":
+                field_type_cls = datetime
+            elif field_schema["format"] == "date":
+                field_type_cls = date
+            elif field_schema["format"] == "time":
+                field_type_cls = time
+            elif field_schema["format"] == "email":
+                field_type_cls = EmailStr
+            elif field_schema["format"] == "uri":
+                field_type_cls = AnyUrl
+
+        return (field_type_cls, Field(**field_kwargs))
+
+    if field_type == "number":
+        field_type_cls = float
+
+        # Add number-specific constraints to field_kwargs
+        if "minimum" in field_schema:
+            field_kwargs["ge"] = field_schema["minimum"]
+        if "maximum" in field_schema:
+            field_kwargs["le"] = field_schema["maximum"]
+        if "exclusiveMinimum" in field_schema:
+            field_kwargs["gt"] = field_schema["exclusiveMinimum"]
+        if "exclusiveMaximum" in field_schema:
+            field_kwargs["lt"] = field_schema["exclusiveMaximum"]
+        if "multipleOf" in field_schema:
+            field_kwargs["multiple_of"] = field_schema["multipleOf"]
+
+        return (field_type_cls, Field(**field_kwargs))
+
+    if field_type == "integer":
+        field_type_cls = int
+
+        # Add integer-specific constraints to field_kwargs
+        if "minimum" in field_schema:
+            field_kwargs["ge"] = field_schema["minimum"]
+        if "maximum" in field_schema:
+            field_kwargs["le"] = field_schema["maximum"]
+        if "exclusiveMinimum" in field_schema:
+            field_kwargs["gt"] = field_schema["exclusiveMinimum"]
+        if "exclusiveMaximum" in field_schema:
+            field_kwargs["lt"] = field_schema["exclusiveMaximum"]
+        if "multipleOf" in field_schema:
+            field_kwargs["multiple_of"] = field_schema["multipleOf"]
+
+        return (field_type_cls, Field(**field_kwargs))
+
+    if field_type == "boolean":
+        return (bool, Field(**field_kwargs))
+
+    if field_type == "null":
+        return (type(None), Field(**field_kwargs))
+
+    # Handle enum
+    if "enum" in field_schema:
+        enum_type = _create_enum_type(field_schema["enum"], field_name)
+        return (cast(Type[Any], enum_type), Field(**field_kwargs))
+
+    # Default to Any for unknown types
+    return (Any, Field(**field_kwargs))
+
+
+def create_dynamic_model(
+    schema: Dict[str, Any],
+    base_name: str = "DynamicModel",
+    show_schema: bool = False,
+    debug_validation: bool = False,
+) -> Type[BaseModel]:
+    """Create a Pydantic model from a JSON Schema.
+
+    Args:
+        schema: JSON Schema to create model from
+        base_name: Base name for the model class
+        show_schema: Whether to show the generated schema
+        debug_validation: Whether to show debug validation info
+
+    Returns:
+        Generated Pydantic model class
+
+    Raises:
+        SchemaValidationError: If schema validation fails
+        ModelCreationError: If model creation fails
+    """
+    try:
+        # Validate schema structure before model creation
+        from .template_utils import validate_json_schema
+
+        validate_json_schema(schema)
+
+        # Process schema properties into fields
+        properties = schema.get("properties", {})
+        required = schema.get("required", [])
+
+        field_definitions: Dict[str, Tuple[Type[Any], FieldInfo]] = {}
+        for field_name, field_schema in properties.items():
+            if debug_validation:
+                logger.info("Processing field %s:", field_name)
+                logger.info("  Schema: %s", json.dumps(field_schema, indent=2))
+
+            try:
+                python_type, field = _get_type_with_constraints(
+                    field_schema, field_name, base_name
+                )
+
+                # Handle optional fields
+                if field_name not in required:
+                    if debug_validation:
+                        logger.info(
+                            "Field %s is optional, wrapping in Optional",
+                            field_name,
+                        )
+                    field_type = cast(Type[Any], Optional[python_type])
+                else:
+                    field_type = python_type
+                    if debug_validation:
+                        logger.info("Field %s is required", field_name)
+
+                # Create field definition
+                field_definitions[field_name] = (field_type, field)
+
+                if debug_validation:
+                    logger.info("Successfully created field definition:")
+                    logger.info("  Name: %s", field_name)
+                    logger.info("  Type: %s", str(field_type))
+                    logger.info("  Required: %s", field_name in required)
+
+            except (FieldDefinitionError, NestedModelError) as e:
+                if debug_validation:
+                    logger.error("Error creating field %s:", field_name)
+                    logger.error("  Error type: %s", type(e).__name__)
+                    logger.error("  Error message: %s", str(e))
+                raise ModelValidationError(base_name, [str(e)])
+
+        # Create the model with the fields
+        field_defs: Dict[str, Any] = {
+            name: (
+                (
+                    cast(Type[Any], field_type)
+                    if is_container_type(field_type)
+                    else field_type
+                ),
+                field,
+            )
+            for name, (field_type, field) in field_definitions.items()
+        }
+
+        # Create model class
+        model = create_model(base_name, __base__=BaseModel, **field_defs)
+
+        # Set model config
+        model.model_config = ConfigDict(
+            title=schema.get("title", base_name),
+            extra="forbid",
+        )
+
+        if show_schema:
+            logger.info(
+                "Generated schema for %s:\n%s",
+                base_name,
+                json.dumps(model.model_json_schema(), indent=2),
+            )
+
+        try:
+            # Validate model schema
+            model.model_json_schema()
+        except ValidationError as e:
+            validation_errors = (
+                [str(err) for err in e.errors()]
+                if hasattr(e, "errors")
+                else [str(e)]
+            )
+            if debug_validation:
+                logger.error("Schema validation failed:")
+                logger.error("  Error type: %s", type(e).__name__)
+                logger.error("  Error message: %s", str(e))
+            raise ModelValidationError(base_name, validation_errors)
+        except KeyError as e:
+            # Handle Pydantic schema generation errors, particularly for recursive references
+            error_msg = str(e).strip(
+                "'\""
+            )  # Strip quotes from KeyError message
+            if error_msg.startswith("#/definitions/"):
+                context = {
+                    "schema_path": schema.get("$id", "unknown"),
+                    "reference": error_msg,
+                    "found": "circular reference or missing definition",
+                    "tips": [
+                        "Add explicit $ref definitions for recursive structures",
+                        "Use Pydantic's deferred annotations with typing.Self",
+                        "Limit recursion depth with max_depth validator",
+                        "Flatten nested structures using reference IDs",
+                    ],
+                }
+
+                error_msg = (
+                    f"Invalid schema reference: {error_msg}\n"
+                    "Detected circular reference or missing definition.\n"
+                    "Solutions:\n"
+                    "1. Add missing $ref definitions to your schema\n"
+                    "2. Use explicit ID references instead of nested objects\n"
+                    "3. Implement depth limits for recursive structures"
+                )
+
+                if debug_validation:
+                    logger.error("Schema reference error:")
+                    logger.error("  Error type: %s", type(e).__name__)
+                    logger.error("  Error message: %s", error_msg)
+
+                raise SchemaValidationError(
+                    error_msg, context=context, exit_code=ExitCode.SCHEMA_ERROR
+                ) from e
+
+            # For other KeyErrors, preserve the original error
+            raise ModelCreationError(
+                f"Failed to create model {base_name}",
+                context={"error": str(e)},
+            ) from e
+
+        return model
+
+    except SchemaValidationError:
+        # Re-raise schema validation errors without wrapping
+        raise
+
+    except Exception as e:
+        # Always log basic error info
+        logger.error("Model creation error: %s", str(e))
+
+        # Log additional debug info if requested
+        if debug_validation:
+            logger.error("  Error type: %s", type(e).__name__)
+            logger.error("  Error details: %s", str(e))
+            if hasattr(e, "__cause__"):
+                logger.error("  Caused by: %s", str(e.__cause__))
+            if hasattr(e, "__context__"):
+                logger.error("  Context: %s", str(e.__context__))
+            if hasattr(e, "__traceback__"):
+                import traceback
+
+                logger.error(
+                    "  Traceback:\n%s",
+                    "".join(traceback.format_tb(e.__traceback__)),
+                )
+        # Always wrap other errors as ModelCreationError
+        raise ModelCreationError(
+            f"Failed to create model {base_name}",
+            context={"error": str(e)},
+        ) from e
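As a quick illustration of the new module's entry point, a hypothetical call to create_dynamic_model is sketched below. The schema and field names are invented, and whether validate_json_schema accepts this exact shape depends on rules defined in template_utils.py that are not shown in this diff.

# Illustrative sketch only; the field names and values are made up.
from ostruct.cli.model_creation import create_dynamic_model

schema = {
    "type": "object",
    "title": "TicketSummary",
    "properties": {
        "title": {"type": "string", "maxLength": 120},
        "priority": {"type": "string", "enum": ["low", "medium", "high"]},
        "estimate_hours": {"type": "number", "minimum": 0},
    },
    "required": ["title", "priority", "estimate_hours"],
    "additionalProperties": False,
}

# Builds a Pydantic model class with constrained fields derived from the schema.
TicketSummary = create_dynamic_model(schema, base_name="TicketSummary")
ticket = TicketSummary(title="Fix login redirect", priority="high", estimate_hours=3)
print(ticket.model_dump())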
ostruct/cli/security/normalization.py
CHANGED
@@ -61,7 +61,7 @@ from .errors import PathSecurityError, SecurityErrorReasons
 # Patterns for path normalization and validation
 _UNICODE_SAFETY_PATTERN = re.compile(
     r"[\u0000-\u001F\u007F-\u009F\u2028-\u2029\u0085]"  # Control chars and line separators
-    r"
+    r"|(?:^|/)\.\.(?:/|$)"  # Directory traversal attempts (only ".." as a path component)
     r"|[\u2024\u2025\uFE52\u2024\u2025\u2026\uFE19\uFE30\uFE52\uFF0E\uFF61]"  # Alternative dots and separators
 )
 _BACKSLASH_PATTERN = re.compile(r"\\")
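To make the effect of the tightened pattern concrete, here is a hypothetical check that reproduces the regex locally (since _UNICODE_SAFETY_PATTERN is a private module constant): ".." is now flagged only when it forms a complete path component.

import re

# Local reproduction of the pattern from the hunk above, for illustration only.
unicode_safety_pattern = re.compile(
    r"[\u0000-\u001F\u007F-\u009F\u2028-\u2029\u0085]"  # Control chars and line separators
    r"|(?:^|/)\.\.(?:/|$)"  # Directory traversal attempts (only ".." as a path component)
    r"|[\u2024\u2025\uFE52\u2024\u2025\u2026\uFE19\uFE30\uFE52\uFF0E\uFF61]"  # Alternative dots and separators
)

print(bool(unicode_safety_pattern.search("../etc/passwd")))    # True: ".." is a path component
print(bool(unicode_safety_pattern.search("logs/..cache/x")))   # False: ".." embedded in a name
print(bool(unicode_safety_pattern.search("a/b/..")))           # True: trailing ".." component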