fastmcp 2.9.1__py3-none-any.whl → 2.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. fastmcp/cli/cli.py +16 -1
  2. fastmcp/cli/run.py +4 -0
  3. fastmcp/client/auth/oauth.py +5 -82
  4. fastmcp/client/client.py +114 -24
  5. fastmcp/client/elicitation.py +63 -0
  6. fastmcp/client/transports.py +50 -36
  7. fastmcp/contrib/component_manager/README.md +170 -0
  8. fastmcp/contrib/component_manager/__init__.py +4 -0
  9. fastmcp/contrib/component_manager/component_manager.py +186 -0
  10. fastmcp/contrib/component_manager/component_service.py +225 -0
  11. fastmcp/contrib/component_manager/example.py +59 -0
  12. fastmcp/prompts/prompt.py +12 -4
  13. fastmcp/resources/resource.py +8 -3
  14. fastmcp/resources/template.py +5 -0
  15. fastmcp/server/auth/auth.py +15 -0
  16. fastmcp/server/auth/providers/bearer.py +41 -3
  17. fastmcp/server/auth/providers/bearer_env.py +4 -0
  18. fastmcp/server/auth/providers/in_memory.py +15 -0
  19. fastmcp/server/context.py +144 -4
  20. fastmcp/server/elicitation.py +160 -0
  21. fastmcp/server/http.py +1 -9
  22. fastmcp/server/low_level.py +4 -2
  23. fastmcp/server/middleware/__init__.py +14 -1
  24. fastmcp/server/middleware/logging.py +11 -0
  25. fastmcp/server/middleware/middleware.py +10 -6
  26. fastmcp/server/openapi.py +19 -77
  27. fastmcp/server/proxy.py +13 -6
  28. fastmcp/server/server.py +76 -11
  29. fastmcp/settings.py +0 -17
  30. fastmcp/tools/tool.py +209 -57
  31. fastmcp/tools/tool_manager.py +2 -3
  32. fastmcp/tools/tool_transform.py +125 -26
  33. fastmcp/utilities/cli.py +106 -0
  34. fastmcp/utilities/components.py +5 -1
  35. fastmcp/utilities/json_schema_type.py +648 -0
  36. fastmcp/utilities/openapi.py +69 -0
  37. fastmcp/utilities/types.py +50 -19
  38. {fastmcp-2.9.1.dist-info → fastmcp-2.10.0.dist-info}/METADATA +3 -2
  39. {fastmcp-2.9.1.dist-info → fastmcp-2.10.0.dist-info}/RECORD +42 -33
  40. {fastmcp-2.9.1.dist-info → fastmcp-2.10.0.dist-info}/WHEEL +0 -0
  41. {fastmcp-2.9.1.dist-info → fastmcp-2.10.0.dist-info}/entry_points.txt +0 -0
  42. {fastmcp-2.9.1.dist-info → fastmcp-2.10.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,648 @@
1
+ """Convert JSON Schema to Python types with validation.
2
+
3
+ The json_schema_to_type function converts a JSON Schema into a Python type that can be used
4
+ for validation with Pydantic. It supports:
5
+
6
+ - Basic types (string, number, integer, boolean, null)
7
+ - Complex types (arrays, objects)
8
+ - Format constraints (date-time, email, uri)
9
+ - Numeric constraints (minimum, maximum, multipleOf)
10
+ - String constraints (minLength, maxLength, pattern)
11
+ - Array constraints (minItems, maxItems, uniqueItems)
12
+ - Object properties with defaults
13
+ - References and recursive schemas
14
+ - Enums and constants
15
+ - Union types
16
+
17
+ Example:
18
+ ```python
19
+ schema = {
20
+ "type": "object",
21
+ "properties": {
22
+ "name": {"type": "string", "minLength": 1},
23
+ "age": {"type": "integer", "minimum": 0},
24
+ "email": {"type": "string", "format": "email"}
25
+ },
26
+ "required": ["name", "age"]
27
+ }
28
+
29
+ # Name is optional and will be inferred from schema's "title" property if not provided
30
+ Person = json_schema_to_type(schema)
31
+ # Creates a validated dataclass with name, age, and optional email fields
32
+ ```
33
+ """
34
+
35
+ from __future__ import annotations
36
+
37
+ import hashlib
38
+ import json
39
+ import re
40
+ from collections.abc import Callable, Mapping
41
+ from copy import deepcopy
42
+ from dataclasses import MISSING, field, make_dataclass
43
+ from datetime import datetime
44
+ from typing import (
45
+ Annotated,
46
+ Any,
47
+ ForwardRef,
48
+ Literal,
49
+ Union,
50
+ )
51
+
52
+ from pydantic import (
53
+ AnyUrl,
54
+ BaseModel,
55
+ ConfigDict,
56
+ EmailStr,
57
+ Field,
58
+ Json,
59
+ StringConstraints,
60
+ model_validator,
61
+ )
62
+ from typing_extensions import NotRequired, TypedDict
63
+
64
# Public API of this module.
__all__ = ["json_schema_to_type", "JSONSchema"]


# Maps JSON Schema "format" values to the Python/Pydantic types used to
# validate them. Looked up by _create_string_type for string schemas.
FORMAT_TYPES: dict[str, Any] = {
    "date-time": datetime,
    "email": EmailStr,
    "uri": AnyUrl,
    "json": Json,
}

# Module-level cache of generated classes keyed by (schema hash, class name).
# A value of None acts as an "under construction" placeholder so that
# recursive schemas resolve to a ForwardRef instead of recursing forever.
_classes: dict[tuple[str, Any], type | None] = {}
75
+
76
+
77
class JSONSchema(TypedDict):
    """Typed description of the JSON Schema keywords supported by this module.

    All keys are optional (``NotRequired``); unrecognized keywords are simply
    ignored by the converter. ``not_`` presumably maps to the JSON Schema
    ``not`` keyword (a reserved word in Python) — TODO confirm how callers
    populate it.
    """

    # Core structural keywords
    type: NotRequired[str | list[str]]
    properties: NotRequired[dict[str, JSONSchema]]
    required: NotRequired[list[str]]
    additionalProperties: NotRequired[bool | JSONSchema]
    items: NotRequired[JSONSchema | list[JSONSchema]]
    # Value restrictions
    enum: NotRequired[list[Any]]
    const: NotRequired[Any]
    default: NotRequired[Any]
    # Annotations / metadata
    description: NotRequired[str]
    title: NotRequired[str]
    examples: NotRequired[list[Any]]
    format: NotRequired[str]
    # Combinators and definitions
    allOf: NotRequired[list[JSONSchema]]
    anyOf: NotRequired[list[JSONSchema]]
    oneOf: NotRequired[list[JSONSchema]]
    not_: NotRequired[JSONSchema]
    definitions: NotRequired[dict[str, JSONSchema]]
    dependencies: NotRequired[dict[str, JSONSchema | list[str]]]
    # String constraints
    pattern: NotRequired[str]
    minLength: NotRequired[int]
    maxLength: NotRequired[int]
    # Numeric constraints
    minimum: NotRequired[int | float]
    maximum: NotRequired[int | float]
    exclusiveMinimum: NotRequired[int | float]
    exclusiveMaximum: NotRequired[int | float]
    multipleOf: NotRequired[int | float]
    # Array constraints
    uniqueItems: NotRequired[bool]
    minItems: NotRequired[int]
    maxItems: NotRequired[int]
    additionalItems: NotRequired[bool | JSONSchema]
108
+
109
+
110
def json_schema_to_type(
    schema: Mapping[str, Any],
    name: str | None = None,
) -> type:
    """Convert JSON schema to appropriate Python type with validation.

    Object schemas become dataclasses (or Pydantic models when they combine
    ``properties`` with ``additionalProperties: true``); property-less object
    schemas become ``dict`` types; all other schemas are delegated to
    ``_schema_to_type``.

    Args:
        schema: A JSON Schema dictionary defining the type structure and validation rules
        name: Optional name for object schemas. Only allowed when schema type is "object".
            If not provided for objects, name will be inferred from schema's "title"
            property or default to "Root".

    Returns:
        A Python type (typically a dataclass for objects) with Pydantic validation

    Raises:
        ValueError: If a name is provided for a non-object schema

    Examples:
        Create a dataclass from an object schema:
        ```python
        schema = {
            "type": "object",
            "title": "Person",
            "properties": {
                "name": {"type": "string", "minLength": 1},
                "age": {"type": "integer", "minimum": 0},
                "email": {"type": "string", "format": "email"}
            },
            "required": ["name", "age"]
        }

        Person = json_schema_to_type(schema)
        # Creates a dataclass with name, age, and optional email fields:
        # @dataclass
        # class Person:
        #     name: str
        #     age: int
        #     email: str | None = None
        Person(name="John", age=30)
        ```

        Create a scalar type with constraints:
        ```python
        schema = {
            "type": "string",
            "minLength": 3,
            "pattern": "^[A-Z][a-z]+$"
        }

        NameType = json_schema_to_type(schema)
        # Creates Annotated[str, StringConstraints(min_length=3, pattern="^[A-Z][a-z]+$")]

        @dataclass
        class Name:
            name: NameType
        ```
    """
    # Always use the top-level schema for references
    if schema.get("type") == "object":
        # If no properties defined but has additionalProperties, return typed dict
        if not schema.get("properties") and schema.get("additionalProperties"):
            additional_props = schema["additionalProperties"]
            if additional_props is True:
                return dict[str, Any]  # type: ignore - additionalProperties: true means dict[str, Any]
            else:
                # Handle typed dictionaries like dict[str, str]
                value_type = _schema_to_type(additional_props, schemas=schema)
                return dict[str, value_type]  # type: ignore
        # If no properties and no additionalProperties, default to dict[str, Any] for safety
        elif not schema.get("properties") and not schema.get("additionalProperties"):
            return dict[str, Any]  # type: ignore
        # If has properties AND additionalProperties is True, use Pydantic BaseModel
        # (Pydantic's extra="allow" is needed to accept the extra keys).
        elif schema.get("properties") and schema.get("additionalProperties") is True:
            return _create_pydantic_model(schema, name, schemas=schema)
        # Otherwise use fast dataclass
        return _create_dataclass(schema, name, schemas=schema)
    elif name:
        # A caller-supplied name only makes sense for generated classes.
        raise ValueError(f"Can not apply name to non-object schema: {name}")
    result = _schema_to_type(schema, schemas=schema)
    return result  # type: ignore[return-value]
191
+
192
+
193
+ def _hash_schema(schema: Mapping[str, Any]) -> str:
194
+ """Generate a deterministic hash for schema caching."""
195
+ return hashlib.sha256(json.dumps(schema, sort_keys=True).encode()).hexdigest()
196
+
197
+
198
+ def _resolve_ref(ref: str, schemas: Mapping[str, Any]) -> Mapping[str, Any]:
199
+ """Resolve JSON Schema reference to target schema."""
200
+ path = ref.replace("#/", "").split("/")
201
+ current = schemas
202
+ for part in path:
203
+ current = current.get(part, {})
204
+ return current
205
+
206
+
207
+ def _create_string_type(schema: Mapping[str, Any]) -> type | Annotated[Any, ...]:
208
+ """Create string type with optional constraints."""
209
+ if "const" in schema:
210
+ return Literal[schema["const"]] # type: ignore
211
+
212
+ if fmt := schema.get("format"):
213
+ if fmt == "uri":
214
+ return AnyUrl
215
+ elif fmt == "uri-reference":
216
+ return str
217
+ return FORMAT_TYPES.get(fmt, str)
218
+
219
+ constraints = {
220
+ k: v
221
+ for k, v in {
222
+ "min_length": schema.get("minLength"),
223
+ "max_length": schema.get("maxLength"),
224
+ "pattern": schema.get("pattern"),
225
+ }.items()
226
+ if v is not None
227
+ }
228
+
229
+ return Annotated[str, StringConstraints(**constraints)] if constraints else str
230
+
231
+
232
+ def _create_numeric_type(
233
+ base: type[int | float], schema: Mapping[str, Any]
234
+ ) -> type | Annotated[Any, ...]:
235
+ """Create numeric type with optional constraints."""
236
+ if "const" in schema:
237
+ return Literal[schema["const"]] # type: ignore
238
+
239
+ constraints = {
240
+ k: v
241
+ for k, v in {
242
+ "gt": schema.get("exclusiveMinimum"),
243
+ "ge": schema.get("minimum"),
244
+ "lt": schema.get("exclusiveMaximum"),
245
+ "le": schema.get("maximum"),
246
+ "multiple_of": schema.get("multipleOf"),
247
+ }.items()
248
+ if v is not None
249
+ }
250
+
251
+ return Annotated[base, Field(**constraints)] if constraints else base
252
+
253
+
254
+ def _create_enum(name: str, values: list[Any]) -> type:
255
+ """Create enum type from list of values."""
256
+ # Always return Literal for enum fields to preserve the literal nature
257
+ return Literal[tuple(values)] # type: ignore[return-value]
258
+
259
+
260
def _create_array_type(
    schema: Mapping[str, Any], schemas: Mapping[str, Any]
) -> type | Annotated[Any, ...]:
    """Build a list (or, for ``uniqueItems``, a set) type for an array schema.

    ``minItems``/``maxItems`` become a Pydantic ``Field`` length annotation.
    """
    items = schema.get("items", {})

    if isinstance(items, list):
        # Positional ("tuple-style") item schemas: model as a list whose
        # elements may be any of the declared item types.
        element = Union[tuple(_schema_to_type(s, schemas) for s in items)]  # type: ignore # noqa: UP007
        container = list[element]  # type: ignore
    else:
        # Single item schema; uniqueItems switches the container to a set.
        element = _schema_to_type(items, schemas)
        outer = set if schema.get("uniqueItems") else list
        container = outer[element]  # type: ignore[misc]

    size_kwargs: dict[str, Any] = {}
    if schema.get("minItems") is not None:
        size_kwargs["min_length"] = schema["minItems"]
    if schema.get("maxItems") is not None:
        size_kwargs["max_length"] = schema["maxItems"]

    if size_kwargs:
        return Annotated[container, Field(**size_kwargs)]
    return container
286
+
287
+
288
+ def _return_Any() -> Any:
289
+ return Any
290
+
291
+
292
+ def _get_from_type_handler(
293
+ schema: Mapping[str, Any], schemas: Mapping[str, Any]
294
+ ) -> Callable[..., Any]:
295
+ """Get the appropriate type handler for the schema."""
296
+
297
+ type_handlers: dict[str, Callable[..., Any]] = { # TODO
298
+ "string": lambda s: _create_string_type(s), # type: ignore
299
+ "integer": lambda s: _create_numeric_type(int, s), # type: ignore
300
+ "number": lambda s: _create_numeric_type(float, s), # type: ignore
301
+ "boolean": lambda _: bool, # type: ignore
302
+ "null": lambda _: type(None), # type: ignore
303
+ "array": lambda s: _create_array_type(s, schemas), # type: ignore
304
+ "object": lambda s: (
305
+ _create_pydantic_model(s, s.get("title"), schemas)
306
+ if s.get("properties") and s.get("additionalProperties") is True
307
+ else _create_dataclass(s, s.get("title"), schemas)
308
+ ), # type: ignore
309
+ }
310
+ return type_handlers.get(schema.get("type", None), _return_Any)
311
+
312
+
313
def _schema_to_type(
    schema: Mapping[str, Any],
    schemas: Mapping[str, Any],
) -> type | ForwardRef:
    """Convert schema to appropriate Python type.

    Dispatch order is significant: empty schema, implicit object
    (``properties`` without ``type``), ``$ref``, ``const``, ``enum``,
    ``anyOf``, missing ``type``, list-valued ``type``, and finally the
    per-type handler table. *schemas* is the document root used to resolve
    references.
    """
    # An empty schema places no constraints at all.
    if not schema:
        return object

    # "properties" without an explicit "type" is treated as an object.
    if "type" not in schema and "properties" in schema:
        return _create_dataclass(schema, schema.get("title", "<unknown>"), schemas)

    # Handle references first
    if "$ref" in schema:
        ref = schema["$ref"]
        # Handle self-reference
        if ref == "#":
            return ForwardRef(schema.get("title", "Root"))  # type: ignore[return-value]
        return _schema_to_type(_resolve_ref(ref, schemas), schemas)

    if "const" in schema:
        return Literal[schema["const"]]  # type: ignore

    if "enum" in schema:
        # Name is synthetic; _create_enum currently returns a Literal anyway.
        return _create_enum(f"Enum_{len(_classes)}", schema["enum"])

    # Handle anyOf unions
    if "anyOf" in schema:
        types: list[type | Any] = []
        for subschema in schema["anyOf"]:
            # Special handling for dict-like objects in unions
            if (
                subschema.get("type") == "object"
                and not subschema.get("properties")
                and subschema.get("additionalProperties")
            ):
                # This is a dict type, handle it directly
                additional_props = subschema["additionalProperties"]
                if additional_props is True:
                    types.append(dict[str, Any])  # type: ignore
                else:
                    value_type = _schema_to_type(additional_props, schemas)
                    types.append(dict[str, value_type])  # type: ignore
            else:
                types.append(_schema_to_type(subschema, schemas))

        # Check if one of the types is None (null); pull it out so it can be
        # re-attached as `X | None` rather than Union[..., NoneType].
        has_null = type(None) in types
        types = [t for t in types if t is not type(None)]

        if len(types) == 0:
            return type(None)
        elif len(types) == 1:
            if has_null:
                return types[0] | None  # type: ignore
            else:
                return types[0]
        else:
            if has_null:
                return Union[tuple(types + [type(None)])]  # type: ignore # noqa: UP007
            else:
                return Union[tuple(types)]  # type: ignore # noqa: UP007

    schema_type = schema.get("type")
    if not schema_type:
        return Any  # type: ignore[return-value]

    if isinstance(schema_type, list):
        # Create a copy of the schema for each type, but keep all constraints
        types: list[type | Any] = []
        for t in schema_type:
            type_schema = dict(schema)
            type_schema["type"] = t
            types.append(_schema_to_type(type_schema, schemas))
        has_null = type(None) in types
        types = [t for t in types if t is not type(None)]
        if has_null:
            if len(types) == 1:
                return types[0] | None  # type: ignore
            else:
                return Union[tuple(types + [type(None)])]  # type: ignore # noqa: UP007
        return Union[tuple(types)]  # type: ignore # noqa: UP007

    # Single scalar/object/array type: delegate to the handler table.
    return _get_from_type_handler(schema, schemas)(schema)
396
+
397
+
398
+ def _sanitize_name(name: str) -> str:
399
+ """Convert string to valid Python identifier."""
400
+ original_name = name
401
+ # Step 1: replace everything except [0-9a-zA-Z_] with underscores
402
+ cleaned = re.sub(r"[^0-9a-zA-Z_]", "_", name)
403
+ # Step 2: deduplicate underscores
404
+ cleaned = re.sub(r"__+", "_", cleaned)
405
+ # Step 3: if the first char of original name isn't a letter or underscore, prepend field_
406
+ if not name or not re.match(r"[a-zA-Z_]", name[0]):
407
+ cleaned = f"field_{cleaned}"
408
+ # Step 4: deduplicate again
409
+ cleaned = re.sub(r"__+", "_", cleaned)
410
+ # Step 5: only strip trailing underscores if they weren't in the original name
411
+ if not original_name.endswith("_"):
412
+ cleaned = cleaned.rstrip("_")
413
+ return cleaned
414
+
415
+
416
+ def _get_default_value(
417
+ schema: dict[str, Any],
418
+ prop_name: str,
419
+ parent_default: dict[str, Any] | None = None,
420
+ ) -> Any:
421
+ """Get default value with proper priority ordering.
422
+ 1. Value from parent's default if it exists
423
+ 2. Property's own default if it exists
424
+ 3. None
425
+ """
426
+ if parent_default is not None and prop_name in parent_default:
427
+ return parent_default[prop_name]
428
+ return schema.get("default")
429
+
430
+
431
+ def _create_field_with_default(
432
+ field_type: type,
433
+ default_value: Any,
434
+ schema: dict[str, Any],
435
+ ) -> Any:
436
+ """Create a field with simplified default handling."""
437
+ # Always use None as default for complex types
438
+ if isinstance(default_value, dict | list) or default_value is None:
439
+ return field(default=None)
440
+
441
+ # For simple types, use the value directly
442
+ return field(default=default_value)
443
+
444
+
445
def _create_pydantic_model(
    schema: Mapping[str, Any],
    name: str | None = None,
    schemas: Mapping[str, Any] | None = None,
) -> type:
    """Create Pydantic BaseModel from object schema with additionalProperties.

    Used (instead of a dataclass) when the schema declares both
    ``properties`` and ``additionalProperties: true`` — the model is built
    with ``extra="allow"`` so undeclared keys are accepted. Results are
    cached in the module-level ``_classes`` dict; a ``None`` placeholder is
    stored while building so recursive references resolve to a ForwardRef.
    """
    name = name or schema.get("title", "Root")
    assert name is not None  # Should not be None after the or operation
    sanitized_name = _sanitize_name(name)
    schema_hash = _hash_schema(schema)
    cache_key = (schema_hash, sanitized_name)

    # Return existing class if already built
    if cache_key in _classes:
        existing = _classes[cache_key]
        if existing is None:
            # Still under construction: defer resolution to a forward ref.
            return ForwardRef(sanitized_name)  # type: ignore[return-value]
        return existing

    # Place placeholder for recursive references
    _classes[cache_key] = None

    properties = schema.get("properties", {})
    required = schema.get("required", [])

    # Build field annotations and defaults
    annotations = {}
    defaults = {}

    for prop_name, prop_schema in properties.items():
        field_type = _schema_to_type(prop_schema, schemas or {})

        # Handle defaults: explicit default > required (no default) >
        # optional (nullable with None default).
        default_value = prop_schema.get("default", MISSING)
        if default_value is not MISSING:
            defaults[prop_name] = default_value
            annotations[prop_name] = field_type
        elif prop_name in required:
            annotations[prop_name] = field_type
        else:
            annotations[prop_name] = Union[field_type, type(None)]  # type: ignore[misc] # noqa: UP007
            defaults[prop_name] = None

    # Create Pydantic model class dynamically via type(); extra="allow"
    # implements additionalProperties: true.
    cls_dict = {
        "__annotations__": annotations,
        "model_config": ConfigDict(extra="allow"),
        **defaults,
    }

    cls = type(sanitized_name, (BaseModel,), cls_dict)

    # Store completed class
    _classes[cache_key] = cls
    return cls
500
+
501
+
502
def _create_dataclass(
    schema: Mapping[str, Any],
    name: str | None = None,
    schemas: Mapping[str, Any] | None = None,
) -> type:
    """Create dataclass from object schema.

    Builds a keyword-only dataclass whose fields mirror the schema's
    properties (optional properties become nullable with a ``None``
    default), attaches a Pydantic ``model_validator`` that merges schema
    defaults into incoming data, and caches the result in ``_classes``
    with a ``None`` placeholder while building so recursive references
    resolve to a ForwardRef.
    """
    name = name or schema.get("title", "Root")
    # Sanitize name for class creation
    assert name is not None  # Should not be None after the or operation
    sanitized_name = _sanitize_name(name)
    schema_hash = _hash_schema(schema)
    cache_key = (schema_hash, sanitized_name)
    original_schema = dict(schema)  # Store copy for validator

    # Return existing class if already built
    if cache_key in _classes:
        existing = _classes[cache_key]
        if existing is None:
            # Still under construction: defer resolution to a forward ref.
            return ForwardRef(sanitized_name)  # type: ignore[return-value]
        return existing

    # Place placeholder for recursive references
    _classes[cache_key] = None

    if "$ref" in schema:
        ref = schema["$ref"]
        if ref == "#":
            return ForwardRef(sanitized_name)  # type: ignore[return-value]
        schema = _resolve_ref(ref, schemas or {})

    properties = schema.get("properties", {})
    required = schema.get("required", [])

    fields: list[tuple[Any, ...]] = []
    for prop_name, prop_schema in properties.items():
        # Field names must be valid identifiers; the original property name
        # is preserved in the field metadata alias below.
        field_name = _sanitize_name(prop_name)

        # Check for self-reference in property
        if prop_schema.get("$ref") == "#":
            field_type = ForwardRef(sanitized_name)
        else:
            field_type = _schema_to_type(prop_schema, schemas or {})

        default_val = prop_schema.get("default", MISSING)
        is_required = prop_name in required

        # Include alias in field metadata
        meta = {"alias": prop_name}

        if default_val is not MISSING:
            if isinstance(default_val, dict | list):
                # Mutable default: deep-copy per instance via a factory.
                field_def = field(
                    default_factory=lambda d=default_val: deepcopy(d), metadata=meta
                )
            else:
                field_def = field(default=default_val, metadata=meta)
        else:
            if is_required:
                field_def = field(metadata=meta)
            else:
                field_def = field(default=None, metadata=meta)

        # Required fields keep the bare type; optional fields are nullable.
        if is_required and default_val is not MISSING:
            fields.append((field_name, field_type, field_def))
        elif is_required:
            fields.append((field_name, field_type, field_def))
        else:
            fields.append((field_name, Union[field_type, type(None)], field_def))  # type: ignore[misc] # noqa: UP007

    # kw_only avoids ordering issues between defaulted and bare fields.
    cls = make_dataclass(sanitized_name, fields, kw_only=True)

    # Add model validator for defaults: merges schema defaults into dict
    # input before Pydantic-driven validation runs.
    @model_validator(mode="before")
    @classmethod
    def _apply_defaults(cls, data: Mapping[str, Any]):
        if isinstance(data, dict):
            return _merge_defaults(data, original_schema)
        return data

    setattr(cls, "_apply_defaults", _apply_defaults)

    # Store completed class
    _classes[cache_key] = cls
    return cls
586
+
587
+
588
+ def _merge_defaults(
589
+ data: Mapping[str, Any],
590
+ schema: Mapping[str, Any],
591
+ parent_default: Mapping[str, Any] | None = None,
592
+ ) -> dict[str, Any]:
593
+ """Merge defaults with provided data at all levels."""
594
+ # If we have no data
595
+ if not data:
596
+ # Start with parent default if available
597
+ if parent_default:
598
+ result = dict(parent_default)
599
+ # Otherwise use schema default if available
600
+ elif "default" in schema:
601
+ result = dict(schema["default"])
602
+ # Otherwise start empty
603
+ else:
604
+ result = {}
605
+ # If we have data and a parent default, merge them
606
+ elif parent_default:
607
+ result = dict(parent_default)
608
+ for key, value in data.items():
609
+ if (
610
+ isinstance(value, dict)
611
+ and key in result
612
+ and isinstance(result[key], dict)
613
+ ):
614
+ # recursively merge nested dicts
615
+ result[key] = _merge_defaults(value, {"properties": {}}, result[key])
616
+ else:
617
+ result[key] = value
618
+ # Otherwise just use the data
619
+ else:
620
+ result = dict(data)
621
+
622
+ # For each property in the schema
623
+ for prop_name, prop_schema in schema.get("properties", {}).items():
624
+ # If property is missing, apply defaults in priority order
625
+ if prop_name not in result:
626
+ if parent_default and prop_name in parent_default:
627
+ result[prop_name] = parent_default[prop_name]
628
+ elif "default" in prop_schema:
629
+ result[prop_name] = prop_schema["default"]
630
+
631
+ # If property exists and is an object, recursively merge
632
+ if (
633
+ prop_name in result
634
+ and isinstance(result[prop_name], dict)
635
+ and prop_schema.get("type") == "object"
636
+ ):
637
+ # Get the appropriate default for this nested object
638
+ nested_default = None
639
+ if parent_default and prop_name in parent_default:
640
+ nested_default = parent_default[prop_name]
641
+ elif "default" in prop_schema:
642
+ nested_default = prop_schema["default"]
643
+
644
+ result[prop_name] = _merge_defaults(
645
+ result[prop_name], prop_schema, nested_default
646
+ )
647
+
648
+ return result