datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. datamodel_code_generator/__init__.py +654 -185
  2. datamodel_code_generator/__main__.py +872 -388
  3. datamodel_code_generator/arguments.py +798 -0
  4. datamodel_code_generator/cli_options.py +295 -0
  5. datamodel_code_generator/format.py +292 -54
  6. datamodel_code_generator/http.py +85 -10
  7. datamodel_code_generator/imports.py +152 -43
  8. datamodel_code_generator/model/__init__.py +138 -1
  9. datamodel_code_generator/model/base.py +531 -120
  10. datamodel_code_generator/model/dataclass.py +211 -0
  11. datamodel_code_generator/model/enum.py +133 -12
  12. datamodel_code_generator/model/imports.py +22 -0
  13. datamodel_code_generator/model/msgspec.py +462 -0
  14. datamodel_code_generator/model/pydantic/__init__.py +30 -25
  15. datamodel_code_generator/model/pydantic/base_model.py +304 -100
  16. datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
  17. datamodel_code_generator/model/pydantic/dataclass.py +15 -4
  18. datamodel_code_generator/model/pydantic/imports.py +40 -27
  19. datamodel_code_generator/model/pydantic/types.py +188 -96
  20. datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
  21. datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
  22. datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
  23. datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
  24. datamodel_code_generator/model/pydantic_v2/types.py +143 -0
  25. datamodel_code_generator/model/scalar.py +124 -0
  26. datamodel_code_generator/model/template/Enum.jinja2 +15 -2
  27. datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
  28. datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
  29. datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
  30. datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
  31. datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
  32. datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
  33. datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
  34. datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
  35. datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
  36. datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
  37. datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
  38. datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
  39. datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
  40. datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
  41. datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
  42. datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
  43. datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
  44. datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
  45. datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
  46. datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
  47. datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
  48. datamodel_code_generator/model/type_alias.py +70 -0
  49. datamodel_code_generator/model/typed_dict.py +161 -0
  50. datamodel_code_generator/model/types.py +106 -0
  51. datamodel_code_generator/model/union.py +105 -0
  52. datamodel_code_generator/parser/__init__.py +30 -12
  53. datamodel_code_generator/parser/_graph.py +67 -0
  54. datamodel_code_generator/parser/_scc.py +171 -0
  55. datamodel_code_generator/parser/base.py +2426 -380
  56. datamodel_code_generator/parser/graphql.py +652 -0
  57. datamodel_code_generator/parser/jsonschema.py +2518 -647
  58. datamodel_code_generator/parser/openapi.py +631 -222
  59. datamodel_code_generator/py.typed +0 -0
  60. datamodel_code_generator/pydantic_patch.py +28 -0
  61. datamodel_code_generator/reference.py +672 -290
  62. datamodel_code_generator/types.py +521 -145
  63. datamodel_code_generator/util.py +155 -0
  64. datamodel_code_generator/watch.py +65 -0
  65. datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
  66. datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
  67. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
  68. datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
  69. datamodel_code_generator/version.py +0 -1
  70. datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
  71. datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
  72. datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
  73. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
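
The file list above shows the headline change across this range: new output model families (model/pydantic_v2/, model/dataclass.py, model/typed_dict.py, model/msgspec.py) and a GraphQL input parser alongside the original pydantic v1 target. As orientation only, a minimal sketch of driving the 0.45.0 generator programmatically — assuming the public generate() API and the DataModelType/InputFileType enums exported from datamodel_code_generator/__init__.py:

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    # Emit pydantic v2 models from a JSON Schema document; swapping
    # output_model_type selects the other families added in this range,
    # e.g. DataModelType.MsgspecStruct or DataModelType.DataclassesDataclass.
    generate(
        Path("schema.json"),
        input_file_type=InputFileType.JsonSchema,
        output=Path("models.py"),
        output_model_type=DataModelType.PydanticV2BaseModel,
    )

The diff below covers datamodel_code_generator/parser/jsonschema.py (entry 57 in the list above).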
@@ -1,317 +1,607 @@
1
+ """JSON Schema parser implementation.
2
+
3
+ Handles parsing of JSON Schema, JSON, YAML, Dict, and CSV inputs to generate
4
+ Python data models. Supports draft-04 through draft-2020-12 schemas.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
1
9
  import enum as _enum
10
+ import json
2
11
  from collections import defaultdict
3
- from contextlib import contextmanager
4
- from functools import lru_cache
12
+ from collections.abc import Iterable
13
+ from contextlib import contextmanager, suppress
14
+ from functools import cached_property, lru_cache
5
15
  from pathlib import Path
6
- from typing import (
7
- Any,
8
- Callable,
9
- DefaultDict,
10
- Dict,
11
- Generator,
12
- Iterable,
13
- List,
14
- Mapping,
15
- Optional,
16
- Sequence,
17
- Set,
18
- Tuple,
19
- Type,
20
- Union,
21
- )
22
- from urllib.parse import ParseResult
16
+ from typing import TYPE_CHECKING, Any, Callable, ClassVar, Literal, Optional, Union
17
+ from urllib.parse import ParseResult, unquote
23
18
  from warnings import warn
24
19
 
25
- from pydantic import BaseModel, Field, root_validator, validator
20
+ from pydantic import (
21
+ Field,
22
+ )
26
23
 
27
24
  from datamodel_code_generator import (
25
+ DEFAULT_SHARED_MODULE_NAME,
26
+ AllOfMergeMode,
27
+ DataclassArguments,
28
28
  InvalidClassNameError,
29
- cached_property,
29
+ ReadOnlyWriteOnlyModelType,
30
+ ReuseScope,
31
+ YamlValue,
30
32
  load_yaml,
31
- load_yaml_from_path,
33
+ load_yaml_dict,
34
+ load_yaml_dict_from_path,
32
35
  snooper_to_methods,
33
36
  )
34
- from datamodel_code_generator.format import PythonVersion
37
+ from datamodel_code_generator.format import (
38
+ DEFAULT_FORMATTERS,
39
+ DatetimeClassType,
40
+ Formatter,
41
+ PythonVersion,
42
+ PythonVersionMin,
43
+ )
44
+ from datamodel_code_generator.imports import IMPORT_ANY
35
45
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
36
46
  from datamodel_code_generator.model import pydantic as pydantic_model
37
- from datamodel_code_generator.model.base import get_module_name
38
- from datamodel_code_generator.model.enum import Enum
47
+ from datamodel_code_generator.model.base import UNDEFINED, get_module_name, sanitize_module_name
48
+ from datamodel_code_generator.model.dataclass import DataClass
49
+ from datamodel_code_generator.model.enum import (
50
+ SPECIALIZED_ENUM_TYPE_MATCH,
51
+ Enum,
52
+ StrEnum,
53
+ )
39
54
  from datamodel_code_generator.parser import DefaultPutDict, LiteralType
40
55
  from datamodel_code_generator.parser.base import (
56
+ SPECIAL_PATH_FORMAT,
41
57
  Parser,
58
+ Source,
42
59
  escape_characters,
60
+ get_special_path,
43
61
  title_to_class_name,
44
62
  )
45
- from datamodel_code_generator.reference import ModelType, Reference, is_url
46
- from datamodel_code_generator.types import DataType, DataTypeManager, StrictTypes, Types
47
-
48
-
49
- def get_model_by_path(schema: Dict[str, Any], keys: List[str]) -> Dict[str, Any]:
50
- if not keys:
51
- return schema
52
- elif len(keys) == 1:
53
- return schema.get(keys[0], {})
54
- return get_model_by_path(schema[keys[0]], keys[1:])
63
+ from datamodel_code_generator.reference import SPECIAL_PATH_MARKER, ModelType, Reference, is_url
64
+ from datamodel_code_generator.types import (
65
+ ANY,
66
+ DataType,
67
+ DataTypeManager,
68
+ EmptyDataType,
69
+ StrictTypes,
70
+ Types,
71
+ UnionIntFloat,
72
+ )
73
+ from datamodel_code_generator.util import (
74
+ PYDANTIC_V2,
75
+ BaseModel,
76
+ field_validator,
77
+ model_validator,
78
+ )
55
79
 
80
+ if PYDANTIC_V2:
81
+ from pydantic import ConfigDict
56
82
 
57
- SPECIAL_PATH_FORMAT: str = '#-datamodel-code-generator-#-{}-#-special-#'
83
+ if TYPE_CHECKING:
84
+ from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
58
85
 
59
86
 
60
- def get_special_path(keyword: str, path: List[str]) -> List[str]:
61
- return [*path, SPECIAL_PATH_FORMAT.format(keyword)]
87
+ def unescape_json_pointer_segment(segment: str) -> str:
88
+ """Unescape JSON pointer segment by converting escape sequences and percent-encoding."""
89
+ # Unescape ~1, ~0, and percent-encoding
90
+ return unquote(segment.replace("~1", "/").replace("~0", "~"))
62
91
 
63
92
 
64
- json_schema_data_formats: Dict[str, Dict[str, Types]] = {
65
- 'integer': {
66
- 'int32': Types.int32,
67
- 'int64': Types.int64,
68
- 'default': Types.integer,
69
- 'unix-time': Types.int64,
93
+ def get_model_by_path(
94
+ schema: dict[str, YamlValue] | list[YamlValue], keys: list[str] | list[int]
95
+ ) -> dict[str, YamlValue]:
96
+ """Retrieve a model from schema by traversing the given path keys."""
97
+ if not keys:
98
+ if isinstance(schema, dict):
99
+ return schema
100
+ msg = f"Does not support json pointer to array. schema={schema}, key={keys}" # pragma: no cover
101
+ raise NotImplementedError(msg) # pragma: no cover
102
+ # Unescape the key if it's a string (JSON pointer segment)
103
+ key = keys[0]
104
+ if isinstance(key, str): # pragma: no branch
105
+ key = unescape_json_pointer_segment(key)
106
+ value = schema.get(str(key), {}) if isinstance(schema, dict) else schema[int(key)]
107
+ if len(keys) == 1:
108
+ if isinstance(value, dict):
109
+ return value
110
+ msg = f"Does not support json pointer to array. schema={schema}, key={keys}" # pragma: no cover
111
+ raise NotImplementedError(msg) # pragma: no cover
112
+ if isinstance(value, (dict, list)):
113
+ return get_model_by_path(value, keys[1:])
114
+ msg = f"Cannot traverse non-container value. schema={schema}, key={keys}" # pragma: no cover
115
+ raise NotImplementedError(msg) # pragma: no cover
116
+
117
+
118
+ # TODO: This dictionary contains formats valid only for OpenAPI and not for
119
+ # jsonschema and vice versa. They should be separated.
120
+ json_schema_data_formats: dict[str, dict[str, Types]] = {
121
+ "integer": {
122
+ "int32": Types.int32,
123
+ "int64": Types.int64,
124
+ "default": Types.integer,
125
+ "date-time": Types.date_time,
126
+ "unix-time": Types.int64,
70
127
  },
71
- 'number': {
72
- 'float': Types.float,
73
- 'double': Types.double,
74
- 'decimal': Types.decimal,
75
- 'time': Types.time,
76
- 'default': Types.number,
128
+ "number": {
129
+ "float": Types.float,
130
+ "double": Types.double,
131
+ "decimal": Types.decimal,
132
+ "date-time": Types.date_time,
133
+ "time": Types.time,
134
+ "default": Types.number,
77
135
  },
78
- 'string': {
79
- 'default': Types.string,
80
- 'byte': Types.byte, # base64 encoded string
81
- 'binary': Types.binary,
82
- 'date': Types.date,
83
- 'date-time': Types.date_time,
84
- 'time': Types.time,
85
- 'password': Types.password,
86
- 'email': Types.email,
87
- 'idn-email': Types.email,
88
- 'uuid': Types.uuid,
89
- 'uuid1': Types.uuid1,
90
- 'uuid2': Types.uuid2,
91
- 'uuid3': Types.uuid3,
92
- 'uuid4': Types.uuid4,
93
- 'uuid5': Types.uuid5,
94
- 'uri': Types.uri,
95
- 'uri-reference': Types.string,
96
- 'hostname': Types.hostname,
97
- 'ipv4': Types.ipv4,
98
- 'ipv6': Types.ipv6,
99
- 'decimal': Types.decimal,
100
- 'integer': Types.integer,
136
+ "string": {
137
+ "default": Types.string,
138
+ "byte": Types.byte, # base64 encoded string
139
+ "binary": Types.binary,
140
+ "date": Types.date,
141
+ "date-time": Types.date_time,
142
+ "duration": Types.timedelta,
143
+ "time": Types.time,
144
+ "password": Types.password,
145
+ "path": Types.path,
146
+ "email": Types.email,
147
+ "idn-email": Types.email,
148
+ "uuid": Types.uuid,
149
+ "uuid1": Types.uuid1,
150
+ "uuid2": Types.uuid2,
151
+ "uuid3": Types.uuid3,
152
+ "uuid4": Types.uuid4,
153
+ "uuid5": Types.uuid5,
154
+ "uri": Types.uri,
155
+ "uri-reference": Types.string,
156
+ "hostname": Types.hostname,
157
+ "ipv4": Types.ipv4,
158
+ "ipv4-network": Types.ipv4_network,
159
+ "ipv6": Types.ipv6,
160
+ "ipv6-network": Types.ipv6_network,
161
+ "decimal": Types.decimal,
162
+ "integer": Types.integer,
101
163
  },
102
- 'boolean': {'default': Types.boolean},
103
- 'object': {'default': Types.object},
104
- 'null': {'default': Types.null},
105
- 'array': {'default': Types.array},
164
+ "boolean": {"default": Types.boolean},
165
+ "object": {"default": Types.object},
166
+ "null": {"default": Types.null},
167
+ "array": {"default": Types.array},
106
168
  }
107
169
 
108
170
 
109
171
  class JSONReference(_enum.Enum):
110
- LOCAL = 'LOCAL'
111
- REMOTE = 'REMOTE'
112
- URL = 'URL'
172
+ """Define types of JSON references."""
173
+
174
+ LOCAL = "LOCAL"
175
+ REMOTE = "REMOTE"
176
+ URL = "URL"
177
+
178
+
179
+ class Discriminator(BaseModel):
180
+ """Represent OpenAPI discriminator object."""
181
+
182
+ propertyName: str # noqa: N815
183
+ mapping: Optional[dict[str, str]] = None # noqa: UP045
113
184
 
114
185
 
115
186
  class JsonSchemaObject(BaseModel):
116
- __constraint_fields__: Set[str] = {
117
- 'exclusiveMinimum',
118
- 'minimum',
119
- 'exclusiveMaximum',
120
- 'maximum',
121
- 'multipleOf',
122
- 'minItems',
123
- 'maxItems',
124
- 'minLength',
125
- 'maxLength',
126
- 'pattern',
187
+ """Represent a JSON Schema object with validation and parsing capabilities."""
188
+
189
+ if not TYPE_CHECKING:
190
+ if PYDANTIC_V2:
191
+
192
+ @classmethod
193
+ def get_fields(cls) -> dict[str, Any]:
194
+ """Get fields for Pydantic v2 models."""
195
+ return cls.model_fields
196
+
197
+ else:
198
+
199
+ @classmethod
200
+ def get_fields(cls) -> dict[str, Any]:
201
+ """Get fields for Pydantic v1 models."""
202
+ return cls.__fields__
203
+
204
+ @classmethod
205
+ def model_rebuild(cls) -> None:
206
+ """Rebuild model by updating forward references."""
207
+ cls.update_forward_refs()
208
+
209
+ __constraint_fields__: set[str] = { # noqa: RUF012
210
+ "exclusiveMinimum",
211
+ "minimum",
212
+ "exclusiveMaximum",
213
+ "maximum",
214
+ "multipleOf",
215
+ "minItems",
216
+ "maxItems",
217
+ "minLength",
218
+ "maxLength",
219
+ "pattern",
220
+ "uniqueItems",
221
+ }
222
+ __extra_key__: str = SPECIAL_PATH_FORMAT.format("extras")
223
+ __metadata_only_fields__: set[str] = { # noqa: RUF012
224
+ "title",
225
+ "description",
226
+ "id",
227
+ "$id",
228
+ "$schema",
229
+ "$comment",
230
+ "examples",
231
+ "example",
232
+ "x_enum_varnames",
233
+ "definitions",
234
+ "$defs",
235
+ "default",
236
+ "readOnly",
237
+ "writeOnly",
238
+ "deprecated",
127
239
  }
128
240
 
129
- @root_validator(pre=True)
130
- def validate_exclusive_maximum_and_exclusive_minimum(
131
- cls, values: Dict[str, Any]
132
- ) -> Any:
133
- exclusive_maximum: Union[float, bool, None] = values.get('exclusiveMaximum')
134
- exclusive_minimum: Union[float, bool, None] = values.get('exclusiveMinimum')
241
+ @model_validator(mode="before")
242
+ def validate_exclusive_maximum_and_exclusive_minimum(cls, values: Any) -> Any: # noqa: N805
243
+ """Validate and convert boolean exclusive maximum and minimum to numeric values."""
244
+ if not isinstance(values, dict):
245
+ return values
246
+ exclusive_maximum: float | bool | None = values.get("exclusiveMaximum")
247
+ exclusive_minimum: float | bool | None = values.get("exclusiveMinimum")
135
248
 
136
249
  if exclusive_maximum is True:
137
- values['exclusiveMaximum'] = values['maximum']
138
- del values['maximum']
250
+ values["exclusiveMaximum"] = values["maximum"]
251
+ del values["maximum"]
139
252
  elif exclusive_maximum is False:
140
- del values['exclusiveMaximum']
253
+ del values["exclusiveMaximum"]
141
254
  if exclusive_minimum is True:
142
- values['exclusiveMinimum'] = values['minimum']
143
- del values['minimum']
255
+ values["exclusiveMinimum"] = values["minimum"]
256
+ del values["minimum"]
144
257
  elif exclusive_minimum is False:
145
- del values['exclusiveMinimum']
258
+ del values["exclusiveMinimum"]
146
259
  return values
147
260
 
148
- @validator('ref')
149
- def validate_ref(cls, value: Any) -> Any:
150
- if isinstance(value, str) and '#' in value:
151
- if value.endswith('#/'):
261
+ @field_validator("ref")
262
+ def validate_ref(cls, value: Any) -> Any: # noqa: N805
263
+ """Validate and normalize $ref values."""
264
+ if isinstance(value, str) and "#" in value:
265
+ if value.endswith("#/"):
152
266
  return value[:-1]
153
- elif '#/' in value or value[0] == '#' or value[-1] == '#':
267
+ if "#/" in value or value[0] == "#" or value[-1] == "#":
154
268
  return value
155
- return value.replace('#', '#/')
269
+ return value.replace("#", "#/")
156
270
  return value
157
271
 
158
- items: Union[List['JsonSchemaObject'], 'JsonSchemaObject', None]
159
- uniqueItem: Optional[bool]
160
- type: Union[str, List[str], None]
161
- format: Optional[str]
162
- pattern: Optional[str]
163
- minLength: Optional[int]
164
- maxLength: Optional[int]
165
- minimum: Optional[float]
166
- maximum: Optional[float]
167
- minItems: Optional[int]
168
- maxItems: Optional[int]
169
- multipleOf: Optional[float]
170
- exclusiveMaximum: Union[float, bool, None]
171
- exclusiveMinimum: Union[float, bool, None]
172
- additionalProperties: Union['JsonSchemaObject', bool, None]
173
- patternProperties: Optional[Dict[str, 'JsonSchemaObject']]
174
- oneOf: List['JsonSchemaObject'] = []
175
- anyOf: List['JsonSchemaObject'] = []
176
- allOf: List['JsonSchemaObject'] = []
177
- enum: List[Any] = []
178
- writeOnly: Optional[bool]
179
- properties: Optional[Dict[str, 'JsonSchemaObject']]
180
- required: List[str] = []
181
- ref: Optional[str] = Field(default=None, alias='$ref')
182
- nullable: Optional[bool] = False
183
- x_enum_varnames: List[str] = Field(default=[], alias='x-enum-varnames')
184
- description: Optional[str]
185
- title: Optional[str]
186
- example: Any
187
- examples: Any
188
- default: Any
189
- id: Optional[str] = Field(default=None, alias='$id')
190
- custom_type_path: Optional[str] = Field(default=None, alias='customTypePath')
191
- _raw: Dict[str, Any]
192
-
193
- class Config:
194
- arbitrary_types_allowed = True
195
- keep_untouched = (cached_property,)
196
- underscore_attrs_are_private = True
197
-
198
- def __init__(self, **data: Any) -> None: # type: ignore
199
- super().__init__(**data)
200
- self._raw = data
272
+ @field_validator("required", mode="before")
273
+ def validate_required(cls, value: Any) -> Any: # noqa: N805
274
+ """Validate and normalize required field values."""
275
+ if value is None:
276
+ return []
277
+ if isinstance(value, list): # noqa: PLR1702
278
+ # Filter to only include valid strings, excluding invalid objects
279
+ required_fields: list[str] = []
280
+ for item in value:
281
+ if isinstance(item, str):
282
+ required_fields.append(item)
283
+
284
+ # In some cases, the required field can include "anyOf", "oneOf", or "allOf" as a dict (#2297)
285
+ elif isinstance(item, dict):
286
+ for key, val in item.items():
287
+ if isinstance(val, list):
288
+ # If 'anyOf' or "oneOf" is present, we won't include it in required fields
289
+ if key in {"anyOf", "oneOf"}:
290
+ continue
291
+
292
+ if key == "allOf":
293
+ # If 'allOf' is present, we include them as required fields
294
+ required_fields.extend(sub_item for sub_item in val if isinstance(sub_item, str))
295
+
296
+ value = required_fields
201
297
 
202
- @cached_property
203
- def extras(self) -> Dict[str, Any]:
204
- return {k: v for k, v in self._raw.items() if k not in EXCLUDE_FIELD_KEYS}
298
+ return value
299
+
300
+ @field_validator("type", mode="before")
301
+ def validate_null_type(cls, value: Any) -> Any: # noqa: N805
302
+ """Validate and convert unquoted null type to string "null"."""
303
+ # TODO[openapi]: This should be supported only for OpenAPI 3.1+
304
+ # See: https://github.com/koxudaxi/datamodel-code-generator/issues/2477#issuecomment-3192480591
305
+ if value is None:
306
+ value = "null"
307
+ if isinstance(value, list) and None in value:
308
+ value = [v if v is not None else "null" for v in value]
309
+ return value
310
+
311
+ items: Optional[Union[list[JsonSchemaObject], JsonSchemaObject, bool]] = None # noqa: UP007, UP045
312
+ uniqueItems: Optional[bool] = None # noqa: N815, UP045
313
+ type: Optional[Union[str, list[str]]] = None # noqa: UP007, UP045
314
+ format: Optional[str] = None # noqa: UP045
315
+ pattern: Optional[str] = None # noqa: UP045
316
+ minLength: Optional[int] = None # noqa: N815,UP045
317
+ maxLength: Optional[int] = None # noqa: N815,UP045
318
+ minimum: Optional[UnionIntFloat] = None # noqa: UP045
319
+ maximum: Optional[UnionIntFloat] = None # noqa: UP045
320
+ minItems: Optional[int] = None # noqa: N815,UP045
321
+ maxItems: Optional[int] = None # noqa: N815,UP045
322
+ multipleOf: Optional[float] = None # noqa: N815, UP045
323
+ exclusiveMaximum: Optional[Union[float, bool]] = None # noqa: N815, UP007, UP045
324
+ exclusiveMinimum: Optional[Union[float, bool]] = None # noqa: N815, UP007, UP045
325
+ additionalProperties: Optional[Union[JsonSchemaObject, bool]] = None # noqa: N815, UP007, UP045
326
+ patternProperties: Optional[dict[str, JsonSchemaObject]] = None # noqa: N815, UP045
327
+ oneOf: list[JsonSchemaObject] = [] # noqa: N815, RUF012
328
+ anyOf: list[JsonSchemaObject] = [] # noqa: N815, RUF012
329
+ allOf: list[JsonSchemaObject] = [] # noqa: N815, RUF012
330
+ enum: list[Any] = [] # noqa: RUF012
331
+ writeOnly: Optional[bool] = None # noqa: N815, UP045
332
+ readOnly: Optional[bool] = None # noqa: N815, UP045
333
+ properties: Optional[dict[str, Union[JsonSchemaObject, bool]]] = None # noqa: UP007, UP045
334
+ required: list[str] = [] # noqa: RUF012
335
+ ref: Optional[str] = Field(default=None, alias="$ref") # noqa: UP045
336
+ nullable: Optional[bool] = False # noqa: UP045
337
+ x_enum_varnames: list[str] = Field(default_factory=list, alias="x-enum-varnames")
338
+ x_enum_names: list[str] = Field(default_factory=list, alias="x-enumNames")
339
+ description: Optional[str] = None # noqa: UP045
340
+ title: Optional[str] = None # noqa: UP045
341
+ example: Any = None
342
+ examples: Any = None
343
+ default: Any = None
344
+ id: Optional[str] = Field(default=None, alias="$id") # noqa: UP045
345
+ custom_type_path: Optional[str] = Field(default=None, alias="customTypePath") # noqa: UP045
346
+ custom_base_path: Optional[str] = Field(default=None, alias="customBasePath") # noqa: UP045
347
+ extras: dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
348
+ discriminator: Optional[Union[Discriminator, str]] = None # noqa: UP007, UP045
349
+ if PYDANTIC_V2:
350
+ model_config = ConfigDict( # pyright: ignore[reportPossiblyUnboundVariable]
351
+ arbitrary_types_allowed=True,
352
+ ignored_types=(cached_property,),
353
+ )
354
+ else:
355
+
356
+ class Config:
357
+ """Pydantic v1 configuration for JsonSchemaObject."""
358
+
359
+ arbitrary_types_allowed = True
360
+ keep_untouched = (cached_property,)
361
+ smart_casts = True
362
+
363
+ def __init__(self, **data: Any) -> None:
364
+ """Initialize JsonSchemaObject with extra fields handling."""
365
+ super().__init__(**data)
366
+ # Restore extras from alias key (for dict -> parse_obj round-trip)
367
+ alias_extras = data.get(self.__extra_key__, {})
368
+ # Collect custom keys from raw data
369
+ raw_extras = {k: v for k, v in data.items() if k not in EXCLUDE_FIELD_KEYS}
370
+ # Merge: raw_extras takes precedence (original data is the source of truth)
371
+ self.extras = {**alias_extras, **raw_extras}
372
+ if "const" in alias_extras: # pragma: no cover
373
+ self.extras["const"] = alias_extras["const"]
205
374
 
206
375
  @cached_property
207
376
  def is_object(self) -> bool:
208
- return (
209
- self.properties is not None
210
- or self.type == 'object'
211
- and not self.allOf
212
- and not self.oneOf
213
- and not self.anyOf
214
- and not self.ref
377
+ """Check if the schema represents an object type."""
378
+ return self.properties is not None or (
379
+ self.type == "object" and not self.allOf and not self.oneOf and not self.anyOf and not self.ref
215
380
  )
216
381
 
217
382
  @cached_property
218
383
  def is_array(self) -> bool:
219
- return self.items is not None or self.type == 'array'
384
+ """Check if the schema represents an array type."""
385
+ return self.items is not None or self.type == "array"
220
386
 
221
387
  @cached_property
222
388
  def ref_object_name(self) -> str: # pragma: no cover
223
- return self.ref.rsplit('/', 1)[-1] # type: ignore
389
+ """Extract the object name from the reference path."""
390
+ return (self.ref or "").rsplit("/", 1)[-1]
224
391
 
225
- @validator('items', pre=True)
226
- def validate_items(cls, values: Any) -> Any:
392
+ @field_validator("items", mode="before")
393
+ def validate_items(cls, values: Any) -> Any: # noqa: N805
394
+ """Validate items field, converting empty dicts to None."""
227
395
  # this condition expects empty dict
228
396
  return values or None
229
397
 
230
398
  @cached_property
231
399
  def has_default(self) -> bool:
232
- return 'default' in self.__fields_set__
400
+ """Check if the schema has a default value or default factory."""
401
+ return "default" in self.__fields_set__ or "default_factory" in self.extras
233
402
 
234
403
  @cached_property
235
404
  def has_constraint(self) -> bool:
405
+ """Check if the schema has any constraint fields set."""
236
406
  return bool(self.__constraint_fields__ & self.__fields_set__)
237
407
 
238
408
  @cached_property
239
- def ref_type(self) -> Optional[JSONReference]:
409
+ def ref_type(self) -> JSONReference | None:
410
+ """Get the reference type (LOCAL, REMOTE, or URL)."""
240
411
  if self.ref:
241
412
  return get_ref_type(self.ref)
242
413
  return None # pragma: no cover
243
414
 
415
+ @cached_property
416
+ def type_has_null(self) -> bool:
417
+ """Check if the type list or oneOf/anyOf contains null."""
418
+ if isinstance(self.type, list) and "null" in self.type:
419
+ return True
420
+ for item in self.oneOf + self.anyOf:
421
+ if item.type == "null":
422
+ return True
423
+ if isinstance(item.type, list) and "null" in item.type:
424
+ return True
425
+ return False
426
+
427
+ @cached_property
428
+ def has_multiple_types(self) -> bool:
429
+ """Check if the type is a list with multiple non-null types."""
430
+ if not isinstance(self.type, list):
431
+ return False
432
+ non_null_types = [t for t in self.type if t != "null"]
433
+ return len(non_null_types) > 1
244
434
 
245
- @lru_cache()
435
+ @cached_property
436
+ def has_ref_with_schema_keywords(self) -> bool:
437
+ """Check if schema has $ref combined with schema-affecting keywords.
438
+
439
+ Metadata-only keywords (title, description, etc.) are excluded
440
+ as they don't affect the schema structure.
441
+ """
442
+ if not self.ref:
443
+ return False
444
+ other_fields = self.__fields_set__ - {"ref"}
445
+ schema_affecting_fields = other_fields - self.__metadata_only_fields__ - {"extras"}
446
+ if self.extras:
447
+ schema_affecting_extras = {k for k in self.extras if k not in self.__metadata_only_fields__}
448
+ if schema_affecting_extras:
449
+ schema_affecting_fields |= {"extras"}
450
+ return bool(schema_affecting_fields)
451
+
452
+
453
+ @lru_cache
246
454
  def get_ref_type(ref: str) -> JSONReference:
247
- if ref[0] == '#':
455
+ """Determine the type of reference (LOCAL, REMOTE, or URL)."""
456
+ if ref[0] == "#":
248
457
  return JSONReference.LOCAL
249
- elif is_url(ref):
458
+ if is_url(ref):
250
459
  return JSONReference.URL
251
460
  return JSONReference.REMOTE
252
461
 
253
462
 
254
- JsonSchemaObject.update_forward_refs()
463
+ def _get_type(type_: str, format__: str | None = None) -> Types:
464
+ """Get the appropriate Types enum for a given JSON Schema type and format."""
465
+ if type_ not in json_schema_data_formats:
466
+ return Types.any
467
+ if (data_formats := json_schema_data_formats[type_].get("default" if format__ is None else format__)) is not None:
468
+ return data_formats
255
469
 
256
- DEFAULT_FIELD_KEYS: Set[str] = {
257
- 'example',
258
- 'examples',
259
- 'description',
260
- 'title',
470
+ warn(f"format of {format__!r} not understood for {type_!r} - using default", stacklevel=2)
471
+ return json_schema_data_formats[type_]["default"]
472
+
473
+
474
+ JsonSchemaObject.model_rebuild()
475
+
476
+ DEFAULT_FIELD_KEYS: set[str] = {
477
+ "example",
478
+ "examples",
479
+ "description",
480
+ "discriminator",
481
+ "title",
482
+ "const",
483
+ "default_factory",
261
484
  }
262
485
 
263
- EXCLUDE_FIELD_KEYS = (set(JsonSchemaObject.__fields__) - DEFAULT_FIELD_KEYS) | {
264
- '$id',
265
- '$ref',
486
+ EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA: set[str] = {
487
+ "readOnly",
488
+ "writeOnly",
266
489
  }
267
490
 
491
+ EXCLUDE_FIELD_KEYS = (
492
+ set(JsonSchemaObject.get_fields()) # pyright: ignore[reportAttributeAccessIssue]
493
+ - DEFAULT_FIELD_KEYS
494
+ - EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA
495
+ ) | {
496
+ "$id",
497
+ "$ref",
498
+ JsonSchemaObject.__extra_key__,
499
+ }
268
500
 
269
- @snooper_to_methods(max_variable_length=None)
501
+
502
+ @snooper_to_methods() # noqa: PLR0904
270
503
  class JsonSchemaParser(Parser):
271
- def __init__(
504
+ """Parser for JSON Schema, JSON, YAML, Dict, and CSV formats."""
505
+
506
+ SCHEMA_PATHS: ClassVar[list[str]] = ["#/definitions", "#/$defs"]
507
+ SCHEMA_OBJECT_TYPE: ClassVar[type[JsonSchemaObject]] = JsonSchemaObject
508
+
509
+ def __init__( # noqa: PLR0913
272
510
  self,
273
- source: Union[str, Path, List[Path], ParseResult],
511
+ source: str | Path | list[Path] | ParseResult,
274
512
  *,
275
- data_model_type: Type[DataModel] = pydantic_model.BaseModel,
276
- data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
277
- data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
278
- data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
279
- base_class: Optional[str] = None,
280
- custom_template_dir: Optional[Path] = None,
281
- extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
282
- target_python_version: PythonVersion = PythonVersion.PY_37,
283
- dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
513
+ data_model_type: type[DataModel] = pydantic_model.BaseModel,
514
+ data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
515
+ data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
516
+ data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
517
+ base_class: str | None = None,
518
+ additional_imports: list[str] | None = None,
519
+ custom_template_dir: Path | None = None,
520
+ extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
521
+ target_python_version: PythonVersion = PythonVersionMin,
522
+ dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
284
523
  validation: bool = False,
285
524
  field_constraints: bool = False,
286
525
  snake_case_field: bool = False,
287
526
  strip_default_none: bool = False,
288
- aliases: Optional[Mapping[str, str]] = None,
527
+ aliases: Mapping[str, str] | None = None,
289
528
  allow_population_by_field_name: bool = False,
290
529
  apply_default_values_for_required_fields: bool = False,
530
+ allow_extra_fields: bool = False,
531
+ extra_fields: str | None = None,
291
532
  force_optional_for_required_fields: bool = False,
292
- class_name: Optional[str] = None,
533
+ class_name: str | None = None,
293
534
  use_standard_collections: bool = False,
294
- base_path: Optional[Path] = None,
535
+ base_path: Path | None = None,
295
536
  use_schema_description: bool = False,
537
+ use_field_description: bool = False,
538
+ use_attribute_docstrings: bool = False,
539
+ use_inline_field_description: bool = False,
540
+ use_default_kwarg: bool = False,
296
541
  reuse_model: bool = False,
297
- encoding: str = 'utf-8',
298
- enum_field_as_literal: Optional[LiteralType] = None,
542
+ reuse_scope: ReuseScope | None = None,
543
+ shared_module_name: str = DEFAULT_SHARED_MODULE_NAME,
544
+ encoding: str = "utf-8",
545
+ enum_field_as_literal: LiteralType | None = None,
546
+ use_one_literal_as_default: bool = False,
547
+ use_enum_values_in_discriminator: bool = False,
299
548
  set_default_enum_member: bool = False,
549
+ use_subclass_enum: bool = False,
550
+ use_specialized_enum: bool = True,
300
551
  strict_nullable: bool = False,
301
552
  use_generic_container_types: bool = False,
302
553
  enable_faux_immutability: bool = False,
303
- remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
554
+ remote_text_cache: DefaultPutDict[str, str] | None = None,
304
555
  disable_appending_item_suffix: bool = False,
305
- strict_types: Optional[Sequence[StrictTypes]] = None,
306
- empty_enum_field_name: Optional[str] = None,
307
- custom_class_name_generator: Optional[Callable[[str], str]] = None,
308
- field_extra_keys: Optional[Set[str]] = None,
556
+ strict_types: Sequence[StrictTypes] | None = None,
557
+ empty_enum_field_name: str | None = None,
558
+ custom_class_name_generator: Callable[[str], str] | None = None,
559
+ field_extra_keys: set[str] | None = None,
309
560
  field_include_all_keys: bool = False,
310
- wrap_string_literal: Optional[bool] = None,
561
+ field_extra_keys_without_x_prefix: set[str] | None = None,
562
+ wrap_string_literal: bool | None = None,
311
563
  use_title_as_name: bool = False,
312
- http_headers: Optional[Sequence[Tuple[str, str]]] = None,
564
+ use_operation_id_as_name: bool = False,
565
+ use_unique_items_as_set: bool = False,
566
+ allof_merge_mode: AllOfMergeMode = AllOfMergeMode.Constraints,
567
+ http_headers: Sequence[tuple[str, str]] | None = None,
568
+ http_ignore_tls: bool = False,
313
569
  use_annotated: bool = False,
314
- ):
570
+ use_serialize_as_any: bool = False,
571
+ use_non_positive_negative_number_constrained_types: bool = False,
572
+ use_decimal_for_multiple_of: bool = False,
573
+ original_field_name_delimiter: str | None = None,
574
+ use_double_quotes: bool = False,
575
+ use_union_operator: bool = False,
576
+ allow_responses_without_content: bool = False,
577
+ collapse_root_models: bool = False,
578
+ skip_root_model: bool = False,
579
+ use_type_alias: bool = False,
580
+ special_field_name_prefix: str | None = None,
581
+ remove_special_field_name_prefix: bool = False,
582
+ capitalise_enum_members: bool = False,
583
+ keep_model_order: bool = False,
584
+ known_third_party: list[str] | None = None,
585
+ custom_formatters: list[str] | None = None,
586
+ custom_formatters_kwargs: dict[str, Any] | None = None,
587
+ use_pendulum: bool = False,
588
+ http_query_parameters: Sequence[tuple[str, str]] | None = None,
589
+ treat_dot_as_module: bool = False,
590
+ use_exact_imports: bool = False,
591
+ default_field_extras: dict[str, Any] | None = None,
592
+ target_datetime_class: DatetimeClassType | None = None,
593
+ keyword_only: bool = False,
594
+ frozen_dataclasses: bool = False,
595
+ no_alias: bool = False,
596
+ use_frozen_field: bool = False,
597
+ formatters: list[Formatter] = DEFAULT_FORMATTERS,
598
+ parent_scoped_naming: bool = False,
599
+ dataclass_arguments: DataclassArguments | None = None,
600
+ type_mappings: list[str] | None = None,
601
+ read_only_write_only_model_type: ReadOnlyWriteOnlyModelType | None = None,
602
+ ) -> None:
603
+ """Initialize the JSON Schema parser with configuration options."""
604
+ target_datetime_class = target_datetime_class or DatetimeClassType.Awaredatetime
315
605
  super().__init__(
316
606
  source=source,
317
607
  data_model_type=data_model_type,
@@ -319,6 +609,7 @@ class JsonSchemaParser(Parser):
319
609
  data_type_manager_type=data_type_manager_type,
320
610
  data_model_field_type=data_model_field_type,
321
611
  base_class=base_class,
612
+ additional_imports=additional_imports,
322
613
  custom_template_dir=custom_template_dir,
323
614
  extra_template_data=extra_template_data,
324
615
  target_python_version=target_python_version,
@@ -329,16 +620,28 @@ class JsonSchemaParser(Parser):
329
620
  strip_default_none=strip_default_none,
330
621
  aliases=aliases,
331
622
  allow_population_by_field_name=allow_population_by_field_name,
623
+ allow_extra_fields=allow_extra_fields,
624
+ extra_fields=extra_fields,
332
625
  apply_default_values_for_required_fields=apply_default_values_for_required_fields,
333
626
  force_optional_for_required_fields=force_optional_for_required_fields,
334
627
  class_name=class_name,
335
628
  use_standard_collections=use_standard_collections,
336
629
  base_path=base_path,
337
630
  use_schema_description=use_schema_description,
631
+ use_field_description=use_field_description,
632
+ use_attribute_docstrings=use_attribute_docstrings,
633
+ use_inline_field_description=use_inline_field_description,
634
+ use_default_kwarg=use_default_kwarg,
338
635
  reuse_model=reuse_model,
636
+ reuse_scope=reuse_scope,
637
+ shared_module_name=shared_module_name,
339
638
  encoding=encoding,
340
639
  enum_field_as_literal=enum_field_as_literal,
640
+ use_one_literal_as_default=use_one_literal_as_default,
641
+ use_enum_values_in_discriminator=use_enum_values_in_discriminator,
341
642
  set_default_enum_member=set_default_enum_member,
643
+ use_subclass_enum=use_subclass_enum,
644
+ use_specialized_enum=use_specialized_enum,
342
645
  strict_nullable=strict_nullable,
343
646
  use_generic_container_types=use_generic_container_types,
344
647
  enable_faux_immutability=enable_faux_immutability,
@@ -349,184 +652,1452 @@ class JsonSchemaParser(Parser):
349
652
  custom_class_name_generator=custom_class_name_generator,
350
653
  field_extra_keys=field_extra_keys,
351
654
  field_include_all_keys=field_include_all_keys,
655
+ field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
352
656
  wrap_string_literal=wrap_string_literal,
353
657
  use_title_as_name=use_title_as_name,
658
+ use_operation_id_as_name=use_operation_id_as_name,
659
+ use_unique_items_as_set=use_unique_items_as_set,
660
+ allof_merge_mode=allof_merge_mode,
354
661
  http_headers=http_headers,
662
+ http_ignore_tls=http_ignore_tls,
355
663
  use_annotated=use_annotated,
664
+ use_serialize_as_any=use_serialize_as_any,
665
+ use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
666
+ use_decimal_for_multiple_of=use_decimal_for_multiple_of,
667
+ original_field_name_delimiter=original_field_name_delimiter,
668
+ use_double_quotes=use_double_quotes,
669
+ use_union_operator=use_union_operator,
670
+ allow_responses_without_content=allow_responses_without_content,
671
+ collapse_root_models=collapse_root_models,
672
+ skip_root_model=skip_root_model,
673
+ use_type_alias=use_type_alias,
674
+ special_field_name_prefix=special_field_name_prefix,
675
+ remove_special_field_name_prefix=remove_special_field_name_prefix,
676
+ capitalise_enum_members=capitalise_enum_members,
677
+ keep_model_order=keep_model_order,
678
+ known_third_party=known_third_party,
679
+ custom_formatters=custom_formatters,
680
+ custom_formatters_kwargs=custom_formatters_kwargs,
681
+ use_pendulum=use_pendulum,
682
+ http_query_parameters=http_query_parameters,
683
+ treat_dot_as_module=treat_dot_as_module,
684
+ use_exact_imports=use_exact_imports,
685
+ default_field_extras=default_field_extras,
686
+ target_datetime_class=target_datetime_class,
687
+ keyword_only=keyword_only,
688
+ frozen_dataclasses=frozen_dataclasses,
689
+ no_alias=no_alias,
690
+ use_frozen_field=use_frozen_field,
691
+ formatters=formatters,
692
+ parent_scoped_naming=parent_scoped_naming,
693
+ dataclass_arguments=dataclass_arguments,
694
+ type_mappings=type_mappings,
695
+ read_only_write_only_model_type=read_only_write_only_model_type,
356
696
  )
357
697
 
358
- self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
359
- self.raw_obj: Dict[Any, Any] = {}
360
- self._root_id: Optional[str] = None
361
- self._root_id_base_path: Optional[str] = None
362
- self.reserved_refs: DefaultDict[Tuple[str], Set[str]] = defaultdict(set)
363
- self.field_keys: Set[str] = {*DEFAULT_FIELD_KEYS, *self.field_extra_keys}
698
+ self.remote_object_cache: DefaultPutDict[str, dict[str, YamlValue]] = DefaultPutDict()
699
+ self.raw_obj: dict[str, YamlValue] = {}
700
+ self._root_id: Optional[str] = None # noqa: UP045
701
+ self._root_id_base_path: Optional[str] = None # noqa: UP045
702
+ self.reserved_refs: defaultdict[tuple[str, ...], set[str]] = defaultdict(set)
703
+ self.field_keys: set[str] = {
704
+ *DEFAULT_FIELD_KEYS,
705
+ *self.field_extra_keys,
706
+ *self.field_extra_keys_without_x_prefix,
707
+ }
708
+
709
+ if self.data_model_field_type.can_have_extra_keys:
710
+ self.get_field_extra_key: Callable[[str], str] = (
711
+ lambda key: self.model_resolver.get_valid_field_name_and_alias(
712
+ key, model_type=self.field_name_model_type
713
+ )[0]
714
+ )
715
+
716
+ else:
717
+ self.get_field_extra_key = lambda key: key
364
718
 
365
- def get_field_extras(self, obj: JsonSchemaObject) -> Dict[str, Any]:
719
+ def get_field_extras(self, obj: JsonSchemaObject) -> dict[str, Any]:
720
+ """Extract extra field metadata from a JSON Schema object."""
366
721
  if self.field_include_all_keys:
367
- return {
368
- self.model_resolver.get_valid_field_name_and_alias(k)[0]: v
722
+ extras = {
723
+ self.get_field_extra_key(k.lstrip("x-") if k in self.field_extra_keys_without_x_prefix else k): v
369
724
  for k, v in obj.extras.items()
370
725
  }
371
726
  else:
372
- return {
373
- self.model_resolver.get_valid_field_name_and_alias(k)[0]: v
727
+ extras = {
728
+ self.get_field_extra_key(k.lstrip("x-") if k in self.field_extra_keys_without_x_prefix else k): v
374
729
  for k, v in obj.extras.items()
375
730
  if k in self.field_keys
376
731
  }
732
+ if self.default_field_extras:
733
+ extras.update(self.default_field_extras)
734
+ return extras
735
+
736
+ def _get_type_with_mappings(self, type_: str, format_: str | None = None) -> Types:
737
+ """Get the Types enum for a given type and format, applying custom type mappings.
738
+
739
+ Custom mappings from --type-mappings are checked first, then falls back to
740
+ the default json_schema_data_formats mappings.
741
+ """
742
+ if self.type_mappings and format_ is not None and (type_, format_) in self.type_mappings:
743
+ target_format = self.type_mappings[type_, format_]
744
+ for type_formats in json_schema_data_formats.values():
745
+ if target_format in type_formats:
746
+ return type_formats[target_format]
747
+ if target_format in json_schema_data_formats:
748
+ return json_schema_data_formats[target_format]["default"]
749
+
750
+ return _get_type(type_, format_)
751
+
752
+ @cached_property
753
+ def schema_paths(self) -> list[tuple[str, list[str]]]:
754
+ """Get schema paths for definitions and defs."""
755
+ return [(s, s.lstrip("#/").split("/")) for s in self.SCHEMA_PATHS]
377
756
 
378
757
  @property
379
- def root_id(self) -> Optional[str]:
758
+ def root_id(self) -> str | None:
759
+ """Get the root $id from the model resolver."""
380
760
  return self.model_resolver.root_id
381
761
 
382
762
  @root_id.setter
383
- def root_id(self, value: Optional[str]) -> None:
763
+ def root_id(self, value: str | None) -> None:
764
+ """Set the root $id in the model resolver."""
384
765
  self.model_resolver.set_root_id(value)
385
766
 
386
767
  def should_parse_enum_as_literal(self, obj: JsonSchemaObject) -> bool:
768
+ """Determine if an enum should be parsed as a literal type."""
387
769
  return self.enum_field_as_literal == LiteralType.All or (
388
770
  self.enum_field_as_literal == LiteralType.One and len(obj.enum) == 1
389
771
  )
390
772
 
773
+ @classmethod
774
+ def _extract_const_enum_from_combined( # noqa: PLR0912
775
+ cls, items: list[JsonSchemaObject], parent_type: str | list[str] | None
776
+ ) -> tuple[list[Any], list[str], str | None, bool] | None:
777
+ """Extract enum values from oneOf/anyOf const pattern."""
778
+ enum_values: list[Any] = []
779
+ varnames: list[str] = []
780
+ nullable = False
781
+ inferred_type: str | None = None
782
+
783
+ for item in items:
784
+ if item.type == "null" and "const" not in item.extras:
785
+ nullable = True
786
+ continue
787
+
788
+ if "const" not in item.extras:
789
+ return None
790
+
791
+ if item.ref or item.properties or item.oneOf or item.anyOf or item.allOf:
792
+ return None
793
+
794
+ const_value = item.extras["const"]
795
+ enum_values.append(const_value)
796
+
797
+ if item.title:
798
+ varnames.append(item.title)
799
+ else:
800
+ varnames.append(str(const_value))
801
+
802
+ if inferred_type is None and const_value is not None:
803
+ if isinstance(const_value, str):
804
+ inferred_type = "string"
805
+ elif isinstance(const_value, bool):
806
+ inferred_type = "boolean"
807
+ elif isinstance(const_value, int):
808
+ inferred_type = "integer"
809
+ elif isinstance(const_value, float):
810
+ inferred_type = "number"
811
+
812
+ if not enum_values: # pragma: no cover
813
+ return None
814
+
815
+ final_type: str | None
816
+ if isinstance(parent_type, str):
817
+ final_type = parent_type
818
+ elif isinstance(parent_type, list):
819
+ non_null_types = [t for t in parent_type if t != "null"]
820
+ final_type = non_null_types[0] if non_null_types else inferred_type
821
+ if "null" in parent_type:
822
+ nullable = True
823
+ else:
824
+ final_type = inferred_type
825
+
826
+ return (enum_values, varnames, final_type, nullable)
827
+
828
+ def _create_synthetic_enum_obj(
829
+ self,
830
+ original: JsonSchemaObject,
831
+ enum_values: list[Any],
832
+ varnames: list[str],
833
+ enum_type: str | None,
834
+ nullable: bool, # noqa: FBT001
835
+ ) -> JsonSchemaObject:
836
+ """Create a synthetic JsonSchemaObject for enum parsing."""
837
+ final_enum = [*enum_values, None] if nullable else enum_values
838
+ final_varnames = varnames if len(varnames) == len(enum_values) else []
839
+
840
+ return self.SCHEMA_OBJECT_TYPE(
841
+ type=enum_type,
842
+ enum=final_enum,
843
+ title=original.title,
844
+ description=original.description,
845
+ x_enum_varnames=final_varnames,
846
+ default=original.default if original.has_default else None,
847
+ )
848
+
849
+ def is_constraints_field(self, obj: JsonSchemaObject) -> bool:
850
+ """Check if a field should include constraints."""
851
+ return obj.is_array or (
852
+ self.field_constraints and not (obj.ref or obj.anyOf or obj.oneOf or obj.allOf or obj.is_object or obj.enum)
853
+ )
854
+
855
+ def _resolve_field_flag(self, obj: JsonSchemaObject, flag: Literal["readOnly", "writeOnly"]) -> bool:
856
+ """Resolve a field flag (readOnly/writeOnly) from direct value, $ref, and compositions."""
857
+ if getattr(obj, flag) is True:
858
+ return True
859
+ if (
860
+ self.read_only_write_only_model_type
861
+ and obj.ref
862
+ and self._resolve_field_flag(self._load_ref_schema_object(obj.ref), flag)
863
+ ):
864
+ return True
865
+ return any(self._resolve_field_flag(sub, flag) for sub in obj.allOf + obj.anyOf + obj.oneOf)
866
+
867
+ def _collect_all_fields_for_request_response(
868
+ self,
869
+ fields: list[DataModelFieldBase],
870
+ base_classes: list[Reference] | None,
871
+ ) -> list[DataModelFieldBase]:
872
+ """Collect all fields including those from base classes for Request/Response models.
873
+
874
+ Order: parent → child, with child fields overriding parent fields of the same name.
875
+ """
876
+ all_fields: list[DataModelFieldBase] = []
877
+ visited: set[str] = set()
878
+
879
+ def iter_from_schema(obj: JsonSchemaObject, path: list[str]) -> Iterable[DataModelFieldBase]:
880
+ module_name = get_module_name(path[-1] if path else "", None, treat_dot_as_module=self.treat_dot_as_module)
881
+ if obj.properties:
882
+ yield from self.parse_object_fields(obj, path, module_name)
883
+ for item in obj.allOf:
884
+ if item.ref:
885
+ if item.ref in visited: # pragma: no cover
886
+ continue
887
+ visited.add(item.ref)
888
+ yield from iter_from_schema(self._load_ref_schema_object(item.ref), path)
889
+ elif item.properties:
890
+ yield from self.parse_object_fields(item, path, module_name)
891
+
892
+ for base_ref in base_classes or []:
893
+ if isinstance(base_ref.source, DataModel):
894
+ all_fields.extend(base_ref.source.iter_all_fields(visited))
895
+ elif base_ref.path not in visited: # pragma: no cover
896
+ visited.add(base_ref.path)
897
+ all_fields.extend(iter_from_schema(self._load_ref_schema_object(base_ref.path), []))
898
+ all_fields.extend(fields)
899
+
900
+ deduplicated: dict[str, DataModelFieldBase] = {}
901
+ for field in all_fields:
902
+ key = field.original_name or field.name
903
+ if key: # pragma: no cover
904
+ deduplicated[key] = field.copy_deep()
905
+ return list(deduplicated.values())
906
+
907
+ def _should_generate_separate_models(
908
+ self,
909
+ fields: list[DataModelFieldBase],
910
+ base_classes: list[Reference] | None,
911
+ ) -> bool:
912
+ """Determine if Request/Response models should be generated."""
913
+ if self.read_only_write_only_model_type is None:
914
+ return False
915
+ all_fields = self._collect_all_fields_for_request_response(fields, base_classes)
916
+ return any(field.read_only or field.write_only for field in all_fields)
917
+
918
+ def _should_generate_base_model(self, *, generates_separate_models: bool = False) -> bool:
919
+ """Determine if Base model should be generated."""
920
+ if self.read_only_write_only_model_type is None:
921
+ return True
922
+ if self.read_only_write_only_model_type == ReadOnlyWriteOnlyModelType.All:
923
+ return True
924
+ return not generates_separate_models
925
+
926
+ def _create_variant_model( # noqa: PLR0913, PLR0917
927
+ self,
928
+ path: list[str],
929
+ base_name: str,
930
+ suffix: str,
931
+ model_fields: list[DataModelFieldBase],
932
+ obj: JsonSchemaObject,
933
+ data_model_type_class: type[DataModel],
934
+ ) -> None:
935
+ """Create a Request or Response model variant."""
936
+ if not model_fields:
937
+ return
938
+ variant_name = f"{base_name}{suffix}"
939
+ unique_name = self.model_resolver.get_class_name(variant_name, unique=True).name
940
+ model_path = [*path[:-1], unique_name]
941
+ reference = self.model_resolver.add(model_path, unique_name, class_name=True, unique=False, loaded=True)
942
+ model = self._create_data_model(
943
+ model_type=data_model_type_class,
944
+ reference=reference,
945
+ fields=model_fields,
946
+ custom_base_class=obj.custom_base_path or self.base_class,
947
+ custom_template_dir=self.custom_template_dir,
948
+ extra_template_data=self.extra_template_data,
949
+ path=self.current_source_path,
950
+ description=obj.description if self.use_schema_description else None,
951
+ nullable=obj.type_has_null,
952
+ keyword_only=self.keyword_only,
953
+ treat_dot_as_module=self.treat_dot_as_module,
954
+ dataclass_arguments=self.dataclass_arguments,
955
+ )
956
+ self.results.append(model)
957
+
958
+ def _create_request_response_models( # noqa: PLR0913, PLR0917
959
+ self,
960
+ name: str,
961
+ obj: JsonSchemaObject,
962
+ path: list[str],
963
+ fields: list[DataModelFieldBase],
964
+ data_model_type_class: type[DataModel],
965
+ base_classes: list[Reference] | None = None,
966
+ ) -> None:
967
+ """Generate Request and Response model variants."""
968
+ all_fields = self._collect_all_fields_for_request_response(fields, base_classes)
969
+
970
+ # Request model: exclude readOnly fields
971
+ if any(field.read_only for field in all_fields):
972
+ self._create_variant_model(
973
+ path,
974
+ name,
975
+ "Request",
976
+ [field for field in all_fields if not field.read_only],
977
+ obj,
978
+ data_model_type_class,
979
+ )
980
+ # Response model: exclude writeOnly fields
981
+ if any(field.write_only for field in all_fields):
982
+ self._create_variant_model(
983
+ path,
984
+ name,
985
+ "Response",
986
+ [field for field in all_fields if not field.write_only],
987
+ obj,
988
+ data_model_type_class,
989
+ )
990
+
991
+ def get_object_field( # noqa: PLR0913
992
+ self,
993
+ *,
994
+ field_name: str | None,
995
+ field: JsonSchemaObject,
996
+ required: bool,
997
+ field_type: DataType,
998
+ alias: str | None,
999
+ original_field_name: str | None,
1000
+ ) -> DataModelFieldBase:
1001
+ """Create a data model field from a JSON Schema object field."""
1002
+ return self.data_model_field_type(
1003
+ name=field_name,
1004
+ default=field.default,
1005
+ data_type=field_type,
1006
+ required=required,
1007
+ alias=alias,
1008
+ constraints=field.dict() if self.is_constraints_field(field) else None,
1009
+ nullable=field.nullable if self.strict_nullable and (field.has_default or required) else None,
1010
+ strip_default_none=self.strip_default_none,
1011
+ extras=self.get_field_extras(field),
1012
+ use_annotated=self.use_annotated,
1013
+ use_serialize_as_any=self.use_serialize_as_any,
1014
+ use_field_description=self.use_field_description,
1015
+ use_inline_field_description=self.use_inline_field_description,
1016
+ use_default_kwarg=self.use_default_kwarg,
1017
+ original_name=original_field_name,
1018
+ has_default=field.has_default,
1019
+ type_has_null=field.type_has_null,
1020
+ read_only=self._resolve_field_flag(field, "readOnly"),
1021
+ write_only=self._resolve_field_flag(field, "writeOnly"),
1022
+ use_frozen_field=self.use_frozen_field,
1023
+ )
1024
+
391
1025
  def get_data_type(self, obj: JsonSchemaObject) -> DataType:
1026
+ """Get the data type for a JSON Schema object."""
392
1027
  if obj.type is None:
393
- return self.data_type_manager.get_data_type(Types.any)
1028
+ if "const" in obj.extras:
1029
+ return self.data_type_manager.get_data_type_from_value(obj.extras["const"])
1030
+ return self.data_type_manager.get_data_type(
1031
+ Types.any,
1032
+ )
394
1033
 
395
1034
  def _get_data_type(type_: str, format__: str) -> DataType:
396
- data_formats: Optional[Types] = json_schema_data_formats[type_].get(
397
- format__
398
- )
399
- if data_formats is None:
400
- warn(
401
- "format of {!r} not understood for {!r} - using default"
402
- "".format(format__, type_)
403
- )
404
- data_formats = json_schema_data_formats[type_]['default']
405
1035
  return self.data_type_manager.get_data_type(
406
- data_formats,
1036
+ self._get_type_with_mappings(type_, format__),
407
1037
  **obj.dict() if not self.field_constraints else {},
408
1038
  )
409
1039
 
410
1040
  if isinstance(obj.type, list):
411
1041
  return self.data_type(
412
- data_types=[
413
- _get_data_type(t, 'default') for t in obj.type if t != 'null'
414
- ],
415
- is_optional='null' in obj.type,
1042
+ data_types=[_get_data_type(t, obj.format or "default") for t in obj.type if t != "null"],
1043
+ is_optional="null" in obj.type,
416
1044
  )
417
- return _get_data_type(obj.type, obj.format or 'default')
1045
+ return _get_data_type(obj.type, obj.format or "default")
418
1046
 
419
1047
  def get_ref_data_type(self, ref: str) -> DataType:
1048
+ """Get a data type from a reference string."""
420
1049
  reference = self.model_resolver.add_ref(ref)
421
- return self.data_type(reference=reference)
1050
+ ref_schema = self._load_ref_schema_object(ref)
1051
+ is_optional = (
1052
+ ref_schema.type_has_null or ref_schema.type == "null" or (self.strict_nullable and ref_schema.nullable)
1053
+ )
1054
+ return self.data_type(reference=reference, is_optional=is_optional)
422
1055
 
423
- def set_additional_properties(self, name: str, obj: JsonSchemaObject) -> None:
- if obj.additionalProperties is not None:
- # TODO check additional property types.
- self.extra_template_data[name][
- 'additionalProperties'
- ] = obj.additionalProperties
+ def set_additional_properties(self, path: str, obj: JsonSchemaObject) -> None:
+ """Set additional properties flag in extra template data."""
+ if isinstance(obj.additionalProperties, bool):
+ self.extra_template_data[path]["additionalProperties"] = obj.additionalProperties

- def set_title(self, name: str, obj: JsonSchemaObject) -> None:
+ def set_title(self, path: str, obj: JsonSchemaObject) -> None:
+ """Set title in extra template data."""
  if obj.title:
- self.extra_template_data[name]['title'] = obj.title
+ self.extra_template_data[path]["title"] = obj.title

- def parse_any_of(
- self, name: str, obj: JsonSchemaObject, path: List[str]
- ) -> List[DataType]:
- return self.parse_list_item(name, obj.anyOf, path, obj)
+ def _set_schema_metadata(self, path: str, obj: JsonSchemaObject) -> None:
+ """Set title and additionalProperties in extra template data."""
+ if obj.title:
+ self.extra_template_data[path]["title"] = obj.title
+ if isinstance(obj.additionalProperties, bool):
+ self.extra_template_data[path]["additionalProperties"] = obj.additionalProperties

- def parse_one_of(
- self, name: str, obj: JsonSchemaObject, path: List[str]
- ) -> List[DataType]:
- return self.parse_list_item(name, obj.oneOf, path, obj)
+ def _apply_title_as_name(self, name: str, obj: JsonSchemaObject) -> str:
+ """Apply title as name if use_title_as_name is enabled."""
+ if self.use_title_as_name and obj.title:
+ return sanitize_module_name(obj.title, treat_dot_as_module=self.treat_dot_as_module)
+ return name

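Note that `_apply_title_as_name` now routes titles through `sanitize_module_name` instead of using them verbatim, since a schema title is often not a valid Python identifier. A hypothetical, simplified sanitizer to illustrate why (the library's actual rules live in `sanitize_module_name`):

    import re

    def sanitize(title: str, treat_dot_as_module: bool = False) -> str:
        # Hypothetical stand-in: dots either delimit modules or become
        # underscores, and remaining non-word characters are normalized.
        base = title if treat_dot_as_module else title.replace(".", "_")
        return re.sub(r"\W", "_", base)

    assert sanitize("My Model v1.2") == "My_Model_v1_2"
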
- def parse_all_of(
+ def _should_field_be_required(
+ self,
+ *,
+ in_required_list: bool = True,
+ has_default: bool = False,
+ is_nullable: bool = False,
+ ) -> bool:
+ """Determine if a field should be marked as required."""
+ if self.force_optional_for_required_fields:
+ return False
+ if self.apply_default_values_for_required_fields and has_default: # pragma: no cover
+ return False
+ if is_nullable:
+ return False
+ return in_required_list
+
+ def _deep_merge(self, dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]:
+ """Deep merge two dictionaries, combining nested dicts and lists."""
+ result = dict1.copy()
+ for key, value in dict2.items():
+ if key in result:
+ if isinstance(result[key], dict) and isinstance(value, dict):
+ result[key] = self._deep_merge(result[key], value)
+ continue
+ if isinstance(result[key], list) and isinstance(value, list):
+ result[key] = result[key] + value # noqa: PLR6104
+ continue
+ result[key] = value
+ return result
+
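A self-contained rerun of the `_deep_merge` semantics: nested dicts merge recursively, lists concatenate, and the second argument wins on scalar conflicts:

    def deep_merge(d1: dict, d2: dict) -> dict:
        # Standalone copy of the merge rules shown above.
        result = d1.copy()
        for key, value in d2.items():
            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
                result[key] = deep_merge(result[key], value)
            elif key in result and isinstance(result[key], list) and isinstance(value, list):
                result[key] = result[key] + value
            else:
                result[key] = value
        return result

    merged = deep_merge(
        {"properties": {"a": {"type": "string"}}, "required": ["a"]},
        {"properties": {"b": {"type": "integer"}}, "required": ["b"]},
    )
    assert merged == {
        "properties": {"a": {"type": "string"}, "b": {"type": "integer"}},
        "required": ["a", "b"],
    }
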
+ def _load_ref_schema_object(self, ref: str) -> JsonSchemaObject:
+ """Load a JsonSchemaObject from a $ref using standard resolve/load pipeline."""
+ resolved_ref = self.model_resolver.resolve_ref(ref)
+ file_part, fragment = ([*resolved_ref.split("#", 1), ""])[:2]
+ raw_doc = self._get_ref_body(file_part) if file_part else self.raw_obj
+
+ target_schema: dict[str, YamlValue] | YamlValue = raw_doc
+ if fragment:
+ pointer = [p for p in fragment.split("/") if p]
+ target_schema = get_model_by_path(raw_doc, pointer)
+
+ return self.SCHEMA_OBJECT_TYPE.parse_obj(target_schema)
+
+ def _merge_ref_with_schema(self, obj: JsonSchemaObject) -> JsonSchemaObject:
+ """Merge $ref schema with current schema's additional keywords.
+
+ JSON Schema 2020-12 allows $ref alongside other keywords,
+ which should be merged together.
+
+ The local keywords take precedence over referenced schema.
+ """
+ if not obj.ref:
+ return obj
+
+ ref_schema = self._load_ref_schema_object(obj.ref)
+ ref_dict = ref_schema.dict(exclude_unset=True, by_alias=True)
+ current_dict = obj.dict(exclude={"ref"}, exclude_unset=True, by_alias=True)
+ merged = self._deep_merge(ref_dict, current_dict)
+ merged.pop("$ref", None)
+
+ return self.SCHEMA_OBJECT_TYPE.parse_obj(merged)
+
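Concretely: under JSON Schema 2020-12 a `$ref` may carry sibling keywords, and `_merge_ref_with_schema` lets the local siblings win. For flat schemas the effect reduces to a plain dict merge (a sketch of the outcome, not the library call):

    referenced = {"type": "string", "maxLength": 100, "description": "base"}
    local_siblings = {"description": "overridden", "minLength": 1}  # next to $ref

    merged = {**referenced, **local_siblings}  # local keywords take precedence
    assert merged == {
        "type": "string",
        "maxLength": 100,
        "minLength": 1,
        "description": "overridden",
    }
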
+ def _merge_primitive_schemas(self, items: list[JsonSchemaObject]) -> JsonSchemaObject:
+ """Merge multiple primitive schemas by computing the intersection of their constraints."""
+ if len(items) == 1:
+ return items[0]
+
+ base_dict: dict[str, Any] = {}
+ for item in items: # pragma: no branch
+ if item.type: # pragma: no branch
+ base_dict = item.dict(exclude_unset=True, by_alias=True)
+ break
+
+ for item in items:
+ for field in JsonSchemaObject.__constraint_fields__:
+ value = getattr(item, field, None)
+ if value is None:
+ value = item.extras.get(field)
+ if value is not None:
+ if field not in base_dict or base_dict[field] is None:
+ base_dict[field] = value
+ else:
+ base_dict[field] = JsonSchemaParser._intersect_constraint(field, base_dict[field], value)
+
+ return self.SCHEMA_OBJECT_TYPE.parse_obj(base_dict)
+
+ def _merge_primitive_schemas_for_allof(self, items: list[JsonSchemaObject]) -> JsonSchemaObject | None:
+ """Merge primitive schemas for allOf, respecting allof_merge_mode setting."""
+ if len(items) == 1:
+ return items[0] # pragma: no cover
+
+ formats = {item.format for item in items if item.format}
+ if len(formats) > 1:
+ return None
+
+ merged_format = formats.pop() if formats else None
+
+ if self.allof_merge_mode != AllOfMergeMode.NoMerge:
+ merged = self._merge_primitive_schemas(items)
+ merged_dict = merged.dict(exclude_unset=True, by_alias=True)
+ if merged_format:
+ merged_dict["format"] = merged_format
+ return self.SCHEMA_OBJECT_TYPE.parse_obj(merged_dict)
+
+ base_dict: dict[str, Any] = {}
+ for item in items:
+ if item.type:
+ base_dict = item.dict(exclude_unset=True, by_alias=True)
+ break
+
+ for item in items:
+ for constraint_field in JsonSchemaObject.__constraint_fields__:
+ value = getattr(item, constraint_field, None)
+ if value is None:
+ value = item.extras.get(constraint_field)
+ if value is not None:
+ base_dict[constraint_field] = value
+
+ if merged_format:
+ base_dict["format"] = merged_format
+
+ return self.SCHEMA_OBJECT_TYPE.parse_obj(base_dict)
+
+ @staticmethod
+ def _intersect_constraint(field: str, val1: Any, val2: Any) -> Any: # noqa: PLR0911
+ """Compute the intersection of two constraint values."""
+ v1: float | None = None
+ v2: float | None = None
+ with suppress(TypeError, ValueError):
+ v1 = float(val1) if val1 is not None else None
+ v2 = float(val2) if val2 is not None else None
+
+ if field in {"minLength", "minimum", "exclusiveMinimum", "minItems"}:
+ if v1 is not None and v2 is not None:
+ return val1 if v1 >= v2 else val2
+ return val1 # pragma: no cover
+ if field in {"maxLength", "maximum", "exclusiveMaximum", "maxItems"}:
+ if v1 is not None and v2 is not None:
+ return val1 if v1 <= v2 else val2
+ return val1 # pragma: no cover
+ if field == "pattern":
+ return f"(?={val1})(?={val2})" if val1 != val2 else val1
+ if field == "uniqueItems":
+ return val1 or val2
+ return val1
+
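The intersection keeps whichever bound is stricter on each side, and combines two differing patterns with regex lookaheads so both must match. The rules in isolation (a simplified sketch of the static method above):

    def intersect(field: str, v1, v2):
        if field in {"minLength", "minimum", "exclusiveMinimum", "minItems"}:
            return max(v1, v2)  # stricter lower bound wins
        if field in {"maxLength", "maximum", "exclusiveMaximum", "maxItems"}:
            return min(v1, v2)  # stricter upper bound wins
        if field == "pattern":
            # Two distinct patterns must both hold: express as lookaheads.
            return v1 if v1 == v2 else f"(?={v1})(?={v2})"
        if field == "uniqueItems":
            return v1 or v2
        return v1

    assert intersect("minLength", 3, 5) == 5
    assert intersect("maximum", 10, 8) == 8
    assert intersect("pattern", r"^a", r"b$") == "(?=^a)(?=b$)"
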
+ def _build_allof_type( # noqa: PLR0911, PLR0912
+ self,
+ allof_items: list[JsonSchemaObject],
+ depth: int,
+ visited: frozenset[int],
+ max_depth: int,
+ max_union_elements: int,
+ ) -> DataType | None:
+ """Build a DataType from allOf schema items."""
+ if len(allof_items) == 1:
+ item = allof_items[0]
+ if item.ref:
+ return self.get_ref_data_type(item.ref)
+ return self._build_lightweight_type(item, depth + 1, visited, max_depth, max_union_elements)
+
+ ref_items: list[JsonSchemaObject] = []
+ primitive_items: list[JsonSchemaObject] = []
+ constraint_only_items: list[JsonSchemaObject] = []
+ object_items: list[JsonSchemaObject] = []
+
+ for item in allof_items:
+ if item.ref:
+ ref_items.append(item)
+ elif item.type and item.type != "object" and not isinstance(item.type, list):
+ primitive_items.append(item)
+ elif item.properties or item.additionalProperties or item.type == "object":
+ object_items.append(item)
+ elif item.allOf or item.anyOf or item.oneOf:
+ nested_type = self._build_lightweight_type(item, depth + 1, visited, max_depth, max_union_elements)
+ if nested_type is None: # pragma: no cover
+ return None
+ if nested_type.reference: # pragma: no cover
+ ref_items.append(item)
+ else:
+ primitive_items.append(item)
+ elif item.enum: # pragma: no cover
+ primitive_items.append(item)
+ elif item.has_constraint:
+ constraint_only_items.append(item)
+
+ if ref_items and not primitive_items and not object_items:
+ ref = ref_items[0].ref
+ if ref:
+ return self.get_ref_data_type(ref)
+ return None # pragma: no cover
+
+ if ref_items and (primitive_items or object_items or constraint_only_items):
+ ignored_count = len(primitive_items) + len(constraint_only_items)
+ if ignored_count > 0: # pragma: no branch
+ warn(
+ f"allOf combines $ref with {ignored_count} constraint(s) that will be ignored "
+ f"in inherited field type resolution. Consider defining constraints in the referenced schema.",
+ stacklevel=4,
+ )
+ ref = ref_items[0].ref
+ if ref:
+ return self.get_ref_data_type(ref)
+ return None # pragma: no cover
+
+ if primitive_items and not object_items:
+ all_primitives = primitive_items + constraint_only_items
+ merged_schema = self._merge_primitive_schemas(all_primitives)
+ return self._build_lightweight_type(merged_schema, depth + 1, visited, max_depth, max_union_elements)
+
+ if object_items:
+ additional_props_types: list[DataType] = []
+
+ for obj_item in object_items:
+ if isinstance(obj_item.additionalProperties, JsonSchemaObject):
+ ap_type = self._build_lightweight_type(
+ obj_item.additionalProperties, depth + 1, visited, max_depth, max_union_elements
+ )
+ if ap_type:
+ additional_props_types.append(ap_type)
+
+ if additional_props_types:
+ best_type = additional_props_types[0]
+ for ap_type in additional_props_types[1:]: # pragma: no branch
+ is_better = best_type.type == ANY and ap_type.type != ANY
+ is_better = is_better or (ap_type.reference and not best_type.reference)
+ if is_better: # pragma: no cover
+ best_type = ap_type
+ return self.data_type(data_types=[best_type], is_dict=True)
+
+ return self.data_type(data_types=[DataType(type=ANY, import_=IMPORT_ANY)], is_dict=True)
+
+ return None
+
+ def _build_lightweight_type( # noqa: PLR0911, PLR0912
+ self,
+ schema: JsonSchemaObject,
+ depth: int = 0,
+ visited: frozenset[int] | None = None,
+ max_depth: int = 3,
+ max_union_elements: int = 5,
+ ) -> DataType | None:
+ """Build a DataType from schema without generating models."""
+ if depth > max_depth: # pragma: no cover
+ return None
+ if visited is None:
+ visited = frozenset()
+
+ schema_id = id(schema)
+ if schema_id in visited: # pragma: no cover
+ return None
+ visited |= {schema_id}
+
+ if schema.ref:
+ return self.get_ref_data_type(schema.ref)
+
+ if schema.enum: # pragma: no cover
+ return self.get_data_type(schema)
+
+ if schema.is_array and schema.items and isinstance(schema.items, JsonSchemaObject):
+ if schema.items.ref:
+ item_type = self.get_ref_data_type(schema.items.ref)
+ else:
+ item_type = self._build_lightweight_type(
+ schema.items, depth + 1, visited, max_depth, max_union_elements
+ )
+ if item_type is None: # pragma: no cover
+ item_type = DataType(type=ANY, import_=IMPORT_ANY)
+ return self.data_type(data_types=[item_type], is_list=True)
+
+ if schema.type and not isinstance(schema.type, list) and schema.type != "object":
+ return self.get_data_type(schema)
+ if isinstance(schema.type, list):
+ return self.get_data_type(schema)
+
+ combined_items = schema.anyOf or schema.oneOf
+ if combined_items:
+ if len(combined_items) > max_union_elements: # pragma: no cover
+ return None
+ data_types: list[DataType] = []
+ for item in combined_items:
+ if item.ref: # pragma: no cover
+ data_types.append(self.get_ref_data_type(item.ref))
+ else:
+ item_type = self._build_lightweight_type(item, depth + 1, visited, max_depth, max_union_elements)
+ if item_type is None: # pragma: no cover
+ return None
+ data_types.append(item_type)
+ if len(data_types) == 1: # pragma: no cover
+ return data_types[0]
+ return self.data_type(data_types=data_types)
+
+ if schema.allOf: # pragma: no cover
+ return self._build_allof_type(schema.allOf, depth, visited, max_depth, max_union_elements)
+
+ if isinstance(schema.additionalProperties, JsonSchemaObject): # pragma: no cover
+ value_type = self._build_lightweight_type(
+ schema.additionalProperties, depth + 1, visited, max_depth, max_union_elements
+ )
+ if value_type is None:
+ value_type = DataType(type=ANY, import_=IMPORT_ANY)
+ return self.data_type(data_types=[value_type], is_dict=True)
+
+ if schema.properties or schema.type == "object":
+ return self.data_type(data_types=[DataType(type=ANY, import_=IMPORT_ANY)], is_dict=True)
+
+ return None
+
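Both lightweight-type builders guard recursion the same way: a depth cap plus an immutable `visited` set keyed on `id()`, so cyclic schemas terminate instead of recursing forever. The pattern in isolation (a sketch with plain dicts):

    def walk(node: dict, depth: int = 0,
             visited: frozenset = frozenset(), max_depth: int = 3) -> list:
        if depth > max_depth or id(node) in visited:
            return []  # bail out: too deep, or a reference cycle
        visited |= {id(node)}  # frozenset union: a fresh set per branch
        found = [node.get("type")]
        for child in node.get("children", []):
            found += walk(child, depth + 1, visited, max_depth)
        return found

    cyclic: dict = {"type": "object", "children": []}
    cyclic["children"].append(cyclic)  # schema referring to itself
    assert walk(cyclic) == ["object"]  # terminates despite the cycle
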
+ def _is_list_with_any_item_type(self, data_type: DataType | None) -> bool: # noqa: PLR6301
+ """Return True when data_type represents List[Any] (including nested lists)."""
+ if not data_type: # pragma: no cover
+ return False
+
+ candidate = data_type
+ if not candidate.is_list and len(candidate.data_types) == 1 and candidate.data_types[0].is_list:
+ candidate = candidate.data_types[0]
+
+ if not candidate.is_list or len(candidate.data_types) != 1:
+ return False
+
+ item_type = candidate.data_types[0]
+ while len(item_type.data_types) == 1:
+ inner = item_type.data_types[0]
+ if (not item_type.is_list and inner.is_list) or item_type.is_list:
+ item_type = inner
+ else:
+ break
+ return item_type.type == ANY
+
+ def _merge_property_schemas(self, parent_dict: dict[str, Any], child_dict: dict[str, Any]) -> dict[str, Any]:
+ """Merge parent and child property schemas for allOf."""
+ if self.allof_merge_mode == AllOfMergeMode.NoMerge:
+ return child_dict.copy()
+
+ non_merged_fields: set[str] = set()
+ if self.allof_merge_mode == AllOfMergeMode.Constraints:
+ non_merged_fields = {"default", "examples", "example"}
+
+ result = {key: value for key, value in parent_dict.items() if key not in non_merged_fields}
+
+ for key, value in child_dict.items():
+ if key in result and isinstance(result[key], dict) and isinstance(value, dict):
+ result[key] = self._merge_property_schemas(result[key], value)
+ else:
+ result[key] = value
+ return result
+
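The merge mode only changes what a child property inherits from its allOf parent: everything, everything except value-like keywords, or nothing. A simplified, non-recursive rendition (the mode strings are stand-ins for the AllOfMergeMode members):

    def merge_property(parent: dict, child: dict, mode: str) -> dict:
        if mode == "no-merge":
            return child.copy()
        # "constraints" mode merges constraints but not value-like keywords.
        skipped = {"default", "examples", "example"} if mode == "constraints" else set()
        result = {k: v for k, v in parent.items() if k not in skipped}
        result.update(child)  # child keywords always win
        return result

    parent = {"type": "string", "maxLength": 10, "default": "x"}
    child = {"description": "name"}
    assert merge_property(parent, child, "constraints") == {
        "type": "string", "maxLength": 10, "description": "name",
    }
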
+ def _merge_properties_with_parent_constraints(
+ self, child_obj: JsonSchemaObject, parent_refs: list[str]
+ ) -> JsonSchemaObject:
+ """Merge child properties with parent property constraints for allOf inheritance."""
+ if not child_obj.properties:
+ return child_obj
+
+ parent_properties: dict[str, JsonSchemaObject] = {}
+ for ref in parent_refs:
+ try:
+ parent_schema = self._load_ref_schema_object(ref)
+ except Exception: # pragma: no cover # noqa: BLE001, S112
+ continue
+ if parent_schema.properties:
+ for prop_name, prop_schema in parent_schema.properties.items():
+ if isinstance(prop_schema, JsonSchemaObject) and prop_name not in parent_properties:
+ parent_properties[prop_name] = prop_schema
+
+ if not parent_properties:
+ return child_obj
+
+ merged_properties: dict[str, JsonSchemaObject | bool] = {}
+ for prop_name, child_prop in child_obj.properties.items():
+ if not isinstance(child_prop, JsonSchemaObject):
+ merged_properties[prop_name] = child_prop
+ continue
+
+ parent_prop = parent_properties.get(prop_name)
+ if parent_prop is None:
+ merged_properties[prop_name] = child_prop
+ continue
+
+ parent_dict = parent_prop.dict(exclude_unset=True, by_alias=True)
+ child_dict = child_prop.dict(exclude_unset=True, by_alias=True)
+ merged_dict = self._merge_property_schemas(parent_dict, child_dict)
+ merged_properties[prop_name] = self.SCHEMA_OBJECT_TYPE.parse_obj(merged_dict)
+
+ merged_obj_dict = child_obj.dict(exclude_unset=True, by_alias=True)
+ merged_obj_dict["properties"] = {
+ k: v.dict(exclude_unset=True, by_alias=True) if isinstance(v, JsonSchemaObject) else v
+ for k, v in merged_properties.items()
+ }
+ return self.SCHEMA_OBJECT_TYPE.parse_obj(merged_obj_dict)
+
+ def _get_inherited_field_type(self, prop_name: str, base_classes: list[Reference]) -> DataType | None:
+ """Get the data type for an inherited property from parent schemas."""
+ for base in base_classes:
+ if not base.path: # pragma: no cover
+ continue
+ if "#" in base.path:
+ file_part, fragment = base.path.split("#", 1)
+ ref = f"{file_part}#{fragment}" if file_part else f"#{fragment}"
+ else: # pragma: no cover
+ ref = f"#{base.path}"
+ try:
+ parent_schema = self._load_ref_schema_object(ref)
+ except Exception: # pragma: no cover # noqa: BLE001, S112
+ continue
+ if not parent_schema.properties: # pragma: no cover
+ continue
+ prop_schema = parent_schema.properties.get(prop_name)
+ if not isinstance(prop_schema, JsonSchemaObject): # pragma: no cover
+ continue
+ result = self._build_lightweight_type(prop_schema)
+ if result is not None:
+ return result
+ return None
+
+ def _schema_signature(self, prop_schema: JsonSchemaObject | bool) -> str | bool: # noqa: FBT001, PLR6301
+ """Normalize property schema for comparison across allOf items."""
+ if isinstance(prop_schema, bool):
+ return prop_schema
+ return json.dumps(prop_schema.dict(exclude_unset=True, by_alias=True), sort_keys=True, default=repr)
+
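Canonicalizing to sorted-key JSON means two schemas that differ only in key order produce the same signature, which is what the duplicate-property detection relies on. In miniature:

    import json

    def signature(schema: dict) -> str:
        # Sorted keys give a stable, order-independent comparison string.
        return json.dumps(schema, sort_keys=True, default=repr)

    a = {"type": "string", "maxLength": 5}
    b = {"maxLength": 5, "type": "string"}  # same schema, different order
    assert signature(a) == signature(b)
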
+ def _is_root_model_schema(self, obj: JsonSchemaObject) -> bool: # noqa: PLR6301
+ """Check if schema represents a root model (primitive type with constraints).
+
+ Based on parse_raw_obj() else branch conditions. Returns True when
+ the schema would be processed by parse_root_type().
+ """
+ if obj.is_array:
+ return False
+ if obj.allOf or obj.oneOf or obj.anyOf:
+ return False
+ if obj.properties:
+ return False
+ if obj.patternProperties:
+ return False
+ if obj.type == "object":
+ return False
+ return not obj.enum
+
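A few spot checks of that predicate with plain dicts, approximating `is_array` as an explicit array type or an `items` keyword (an illustrative sketch, not the JsonSchemaObject property itself):

    def is_root_model_schema(obj: dict) -> bool:
        if obj.get("type") == "array" or obj.get("items"):
            return False
        if obj.get("allOf") or obj.get("oneOf") or obj.get("anyOf"):
            return False
        if obj.get("properties") or obj.get("patternProperties"):
            return False
        if obj.get("type") == "object":
            return False
        return not obj.get("enum")

    assert is_root_model_schema({"type": "string", "maxLength": 36})
    assert not is_root_model_schema({"type": "object", "properties": {"a": {}}})
    assert not is_root_model_schema({"enum": ["a", "b"]})
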
+ def _handle_allof_root_model_with_constraints( # noqa: PLR0911, PLR0912
  self,
  name: str,
  obj: JsonSchemaObject,
- path: List[str],
- ignore_duplicate_model: bool = False,
- ) -> DataType:
- fields: List[DataModelFieldBase] = []
- base_classes: List[Reference] = []
- if len(obj.allOf) == 1 and not obj.properties:
- single_obj = obj.allOf[0]
- if single_obj.ref and single_obj.ref_type == JSONReference.LOCAL:
- if get_model_by_path(self.raw_obj, single_obj.ref[2:].split('/')).get(
- 'enum'
- ):
- return self.get_ref_data_type(single_obj.ref)
- for all_of_item in obj.allOf:
- if all_of_item.ref: # $ref
- base_classes.append(self.model_resolver.add_ref(all_of_item.ref))
+ path: list[str],
+ ) -> DataType | None:
+ """Handle allOf that combines a root model $ref with additional constraints.
+
+ This handler is for generating a root model from a root model reference.
+ Object inheritance (with properties) is handled by existing _parse_all_of_item() path.
+ Only applies to named schema definitions, not inline properties.
+ """
+ for path_element in path:
+ if SPECIAL_PATH_MARKER in path_element:
+ return None # pragma: no cover
+
+ ref_items = [item for item in obj.allOf if item.ref]
+
+ if len(ref_items) != 1:
+ return None
+
+ ref_item = ref_items[0]
+ ref_value = ref_item.ref
+ if ref_value is None:
+ return None # pragma: no cover
+
+ if ref_item.has_ref_with_schema_keywords:
+ ref_schema = self._merge_ref_with_schema(ref_item)
+ else:
+ ref_schema = self._load_ref_schema_object(ref_value)
+
+ if not self._is_root_model_schema(ref_schema):
+ return None
+
+ constraint_items: list[JsonSchemaObject] = []
+ for item in obj.allOf:
+ if item.ref:
+ continue
+ if item.properties or item.items:
+ return None
+ if item.has_constraint or item.type or item.format:
+ if item.type and ref_schema.type:
+ compatible_type_pairs = {
+ ("integer", "number"),
+ ("number", "integer"),
+ }
+ if item.type != ref_schema.type and (item.type, ref_schema.type) not in compatible_type_pairs:
+ return None
+ constraint_items.append(item)
+
+ if not constraint_items:
+ return None
+
+ all_items = [ref_schema, *constraint_items]
+ merged_schema = self._merge_primitive_schemas_for_allof(all_items)
+ if merged_schema is None:
+ return None
+
+ if obj.description:
+ merged_dict = merged_schema.dict(exclude_unset=True, by_alias=True)
+ merged_dict["description"] = obj.description
+ merged_schema = self.SCHEMA_OBJECT_TYPE.parse_obj(merged_dict)
+
+ return self.parse_root_type(name, merged_schema, path)
+
+ def _merge_all_of_object(self, obj: JsonSchemaObject) -> JsonSchemaObject | None:
+ """Merge allOf items when they share object properties to avoid duplicate models.
+
+ Skip merging when there is exactly one $ref (inheritance with property overrides).
+ Continue merging when multiple $refs share properties to avoid duplicate fields.
+ """
+ ref_count = sum(1 for item in obj.allOf if item.ref)
+ if ref_count == 1:
+ return None
+
+ resolved_items: list[JsonSchemaObject] = []
+ property_signatures: dict[str, set[str | bool]] = {}
+ for item in obj.allOf:
+ resolved_item = self._load_ref_schema_object(item.ref) if item.ref else item
+ resolved_items.append(resolved_item)
+ if resolved_item.properties:
+ for prop_name, prop_schema in resolved_item.properties.items():
+ property_signatures.setdefault(prop_name, set()).add(self._schema_signature(prop_schema))
+
+ if obj.properties:
+ for prop_name, prop_schema in obj.properties.items():
+ property_signatures.setdefault(prop_name, set()).add(self._schema_signature(prop_schema))
+
+ if not any(len(signatures) > 1 for signatures in property_signatures.values()):
+ return None
+
+ merged_schema: dict[str, Any] = obj.dict(exclude={"allOf"}, exclude_unset=True, by_alias=True)
+ for resolved_item in resolved_items:
+ merged_schema = self._deep_merge(merged_schema, resolved_item.dict(exclude_unset=True, by_alias=True))
+
+ if "required" in merged_schema and isinstance(merged_schema["required"], list):
+ merged_schema["required"] = list(dict.fromkeys(merged_schema["required"]))
+
+ merged_schema.pop("allOf", None)
+ return self.SCHEMA_OBJECT_TYPE.parse_obj(merged_schema)
+
+ def parse_combined_schema(
+ self,
+ name: str,
+ obj: JsonSchemaObject,
+ path: list[str],
+ target_attribute_name: str,
+ ) -> list[DataType]:
+ """Parse combined schema (anyOf, oneOf, allOf) into a list of data types."""
+ base_object = obj.dict(exclude={target_attribute_name}, exclude_unset=True, by_alias=True)
+ combined_schemas: list[JsonSchemaObject] = []
+ refs = []
+ for index, target_attribute in enumerate(getattr(obj, target_attribute_name, [])):
+ if target_attribute.ref:
+ if target_attribute.has_ref_with_schema_keywords:
+ merged_attr = self._merge_ref_with_schema(target_attribute)
+ combined_schemas.append(
+ self.SCHEMA_OBJECT_TYPE.parse_obj(
+ self._deep_merge(base_object, merged_attr.dict(exclude_unset=True, by_alias=True))
+ )
+ )
+ else:
+ combined_schemas.append(target_attribute)
+ refs.append(index)
  else:
- fields.extend(
- self.parse_object_fields(
- all_of_item,
- path,
- get_module_name(name, None),
+ combined_schemas.append(
+ self.SCHEMA_OBJECT_TYPE.parse_obj(
+ self._deep_merge(
+ base_object,
+ target_attribute.dict(exclude_unset=True, by_alias=True),
+ )
  )
  )
+
+ parsed_schemas = self.parse_list_item(
+ name,
+ combined_schemas,
+ path,
+ obj,
+ singular_name=False,
+ )
+ common_path_keyword = f"{target_attribute_name}Common"
+ return [
+ self._parse_object_common_part(
+ name,
+ obj,
+ [*get_special_path(common_path_keyword, path), str(i)],
+ ignore_duplicate_model=True,
+ fields=[],
+ base_classes=[d.reference],
+ required=[],
+ )
+ if i in refs and d.reference
+ else d
+ for i, d in enumerate(parsed_schemas)
+ ]
+
+ def parse_any_of(self, name: str, obj: JsonSchemaObject, path: list[str]) -> list[DataType]:
+ """Parse anyOf schema into a list of data types."""
+ return self.parse_combined_schema(name, obj, path, "anyOf")
+
+ def parse_one_of(self, name: str, obj: JsonSchemaObject, path: list[str]) -> list[DataType]:
+ """Parse oneOf schema into a list of data types."""
+ return self.parse_combined_schema(name, obj, path, "oneOf")
+
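The key move in parse_combined_schema is pushing keywords that sit beside anyOf/oneOf down into every branch before parsing, so shared metadata survives. Roughly, for a flat case:

    schema = {
        "description": "shared",
        "anyOf": [{"type": "string"}, {"type": "integer"}],
    }
    # Siblings of the combinator are merged into each alternative.
    siblings = {k: v for k, v in schema.items() if k != "anyOf"}
    branches = [{**siblings, **alt} for alt in schema["anyOf"]]
    assert branches == [
        {"description": "shared", "type": "string"},
        {"description": "shared", "type": "integer"},
    ]
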
+ def _create_data_model(self, model_type: type[DataModel] | None = None, **kwargs: Any) -> DataModel:
+ """Create data model instance with dataclass_arguments support for DataClass."""
+ data_model_class = model_type or self.data_model_type
+ if issubclass(data_model_class, DataClass):
+ # Use dataclass_arguments from kwargs, or fall back to self.dataclass_arguments
+ # If both are None, construct from legacy frozen_dataclasses/keyword_only flags
+ dataclass_arguments = kwargs.pop("dataclass_arguments", None)
+ if dataclass_arguments is None:
+ dataclass_arguments = self.dataclass_arguments
+ if dataclass_arguments is None:
+ # Construct from legacy flags for library API compatibility
+ dataclass_arguments = {}
+ if self.frozen_dataclasses:
+ dataclass_arguments["frozen"] = True
+ if self.keyword_only:
+ dataclass_arguments["kw_only"] = True
+ kwargs["dataclass_arguments"] = dataclass_arguments
+ kwargs.pop("frozen", None)
+ kwargs.pop("keyword_only", None)
+ else:
+ kwargs.pop("dataclass_arguments", None)
+ return data_model_class(**kwargs)
+
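The fallback order in `_create_data_model` keeps older library callers working: explicit `dataclass_arguments` win, then the parser-level setting, then the legacy boolean flags. The same precedence in isolation (sketch):

    def resolve_dataclass_arguments(explicit, parser_level,
                                    frozen_dataclasses: bool, keyword_only: bool) -> dict:
        if explicit is not None:
            return explicit
        if parser_level is not None:
            return parser_level
        arguments = {}  # reconstructed from the legacy flags
        if frozen_dataclasses:
            arguments["frozen"] = True
        if keyword_only:
            arguments["kw_only"] = True
        return arguments

    assert resolve_dataclass_arguments(None, None, True, False) == {"frozen": True}
    assert resolve_dataclass_arguments({"slots": True}, None, True, True) == {"slots": True}
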
+ def _parse_object_common_part( # noqa: PLR0912, PLR0913, PLR0915
+ self,
+ name: str,
+ obj: JsonSchemaObject,
+ path: list[str],
+ *,
+ ignore_duplicate_model: bool,
+ fields: list[DataModelFieldBase],
+ base_classes: list[Reference],
+ required: list[str],
+ ) -> DataType:
+ if self.read_only_write_only_model_type is not None and obj.properties:
+ for prop in obj.properties.values():
+ if isinstance(prop, JsonSchemaObject) and prop.ref:
+ self._load_ref_schema_object(prop.ref)
  if obj.properties:
  fields.extend(
- self.parse_object_fields(obj, path, get_module_name(name, None))
+ self.parse_object_fields(
+ obj,
+ path,
+ get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module),
+ class_name=name,
+ )
  )
+ if base_classes:
+ for field in fields:
+ current_type = field.data_type
+ field_name = field.original_name or field.name
+ if current_type and current_type.type == ANY and field_name:
+ inherited_type = self._get_inherited_field_type(field_name, base_classes)
+ if inherited_type is not None:
+ if PYDANTIC_V2:
+ new_type = inherited_type.model_copy(deep=True)
+ else:
+ new_type = inherited_type.copy(deep=True)
+ new_type.is_optional = new_type.is_optional or current_type.is_optional
+ new_type.is_dict = new_type.is_dict or current_type.is_dict
+ new_type.is_list = new_type.is_list or current_type.is_list
+ new_type.is_set = new_type.is_set or current_type.is_set
+ if new_type.kwargs is None and current_type.kwargs is not None: # pragma: no cover
+ new_type.kwargs = current_type.kwargs
+ field.data_type = new_type
+ # Handle List[Any] case: inherit item type from parent if items have Any type
+ elif field_name and self._is_list_with_any_item_type(current_type):
+ inherited_type = self._get_inherited_field_type(field_name, base_classes)
+ if inherited_type is None or not inherited_type.is_list or not inherited_type.data_types:
+ continue
+
+ new_type = inherited_type.model_copy(deep=True) if PYDANTIC_V2 else inherited_type.copy(deep=True)
+
+ # Preserve modifiers coming from the overriding schema.
+ if current_type is not None: # pragma: no branch
+ new_type.is_optional = new_type.is_optional or current_type.is_optional
+ new_type.is_dict = new_type.is_dict or current_type.is_dict
+ new_type.is_list = new_type.is_list or current_type.is_list
+ new_type.is_set = new_type.is_set or current_type.is_set
+ if new_type.kwargs is None and current_type.kwargs is not None: # pragma: no cover
+ new_type.kwargs = current_type.kwargs
+
+ # Some code paths represent the list type inside an outer container.
+ is_wrapped = (
+ current_type is not None
+ and not current_type.is_list
+ and len(current_type.data_types) == 1
+ and current_type.data_types[0].is_list
+ )
+ if is_wrapped:
+ wrapper = current_type.model_copy(deep=True) if PYDANTIC_V2 else current_type.copy(deep=True)
+ wrapper.data_types[0] = new_type
+ field.data_type = wrapper
+ continue
+
+ field.data_type = new_type # pragma: no cover
  # ignore an undetected object
  if ignore_duplicate_model and not fields and len(base_classes) == 1:
- return self.data_type(reference=base_classes[0])
- if self.use_title_as_name and obj.title:
- name = obj.title
+ with self.model_resolver.current_base_path_context(self.model_resolver._base_path): # noqa: SLF001
+ self.model_resolver.delete(path)
+ return self.data_type(reference=base_classes[0])
+ if required:
+ for field in fields:
+ if self.force_optional_for_required_fields or ( # pragma: no cover
+ self.apply_default_values_for_required_fields and field.has_default
+ ):
+ continue # pragma: no cover
+ if (field.original_name or field.name) in required:
+ field.required = True
+ if obj.required:
+ field_name_to_field = {f.original_name or f.name: f for f in fields}
+ for required_ in obj.required:
+ if required_ in field_name_to_field:
+ field = field_name_to_field[required_]
+ if self.force_optional_for_required_fields or (
+ self.apply_default_values_for_required_fields and field.has_default
+ ):
+ continue
+ field.required = True
+ else:
+ fields.append(
+ self.data_model_field_type(required=True, original_name=required_, data_type=DataType())
+ )
+ name = self._apply_title_as_name(name, obj) # pragma: no cover
  reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
- self.set_additional_properties(reference.name, obj)
- data_model_type = self.data_model_type(
- reference=reference,
+ self.set_additional_properties(reference.path, obj)
+
+ generates_separate = self._should_generate_separate_models(fields, base_classes)
+ if generates_separate:
+ self._create_request_response_models(
+ name=reference.name,
+ obj=obj,
+ path=path,
+ fields=fields,
+ data_model_type_class=self.data_model_type,
+ base_classes=base_classes,
+ )
+
+ # Generate base model if needed
+ if self._should_generate_base_model(generates_separate_models=generates_separate):
+ data_model_type = self._create_data_model(
+ reference=reference,
+ fields=fields,
+ base_classes=base_classes,
+ custom_base_class=obj.custom_base_path or self.base_class,
+ custom_template_dir=self.custom_template_dir,
+ extra_template_data=self.extra_template_data,
+ path=self.current_source_path,
+ description=obj.description if self.use_schema_description else None,
+ keyword_only=self.keyword_only,
+ treat_dot_as_module=self.treat_dot_as_module,
+ dataclass_arguments=self.dataclass_arguments,
+ )
+ self.results.append(data_model_type)
+
+ return self.data_type(reference=reference)
+
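The `required` handling above reconciles names in two directions: a listed name with a matching field flips that field to required, while a name with no matching property gets a placeholder required field. Schematically:

    fields = {"id": {"required": False}, "name": {"required": False}}
    for listed in ["id", "extra"]:
        if listed in fields:
            fields[listed]["required"] = True
        else:  # no matching property: placeholder required field
            fields[listed] = {"required": True, "placeholder": True}
    assert fields["id"]["required"] and fields["extra"]["placeholder"]
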
+ def _parse_all_of_item( # noqa: PLR0912, PLR0913, PLR0917
+ self,
+ name: str,
+ obj: JsonSchemaObject,
+ path: list[str],
+ fields: list[DataModelFieldBase],
+ base_classes: list[Reference],
+ required: list[str],
+ union_models: list[Reference],
+ ) -> None:
+ parent_refs = [item.ref for item in obj.allOf if item.ref]
+
+ for all_of_item in obj.allOf: # noqa: PLR1702
+ if all_of_item.ref: # $ref
+ ref_schema = self._load_ref_schema_object(all_of_item.ref)
+
+ if ref_schema.oneOf or ref_schema.anyOf:
+ self.model_resolver.add(path, name, class_name=True, loaded=True)
+ if ref_schema.anyOf:
+ union_models.extend(
+ d.reference for d in self.parse_any_of(name, ref_schema, path) if d.reference
+ )
+ if ref_schema.oneOf:
+ union_models.extend(
+ d.reference for d in self.parse_one_of(name, ref_schema, path) if d.reference
+ )
+ else:
+ ref = self.model_resolver.add_ref(all_of_item.ref)
+ if ref.path not in {b.path for b in base_classes}:
+ base_classes.append(ref)
+ else:
+ # Merge child properties with parent constraints before processing
+ merged_item = self._merge_properties_with_parent_constraints(all_of_item, parent_refs)
+ module_name = get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
+ object_fields = self.parse_object_fields(
+ merged_item,
+ path,
+ module_name,
+ class_name=name,
+ )
+
+ if object_fields:
+ fields.extend(object_fields)
+ if all_of_item.required:
+ required.extend(all_of_item.required)
+ field_names: set[str] = set()
+ for f in object_fields:
+ if f.original_name:
+ field_names.add(f.original_name)
+ elif f.name: # pragma: no cover
+ field_names.add(f.name)
+ existing_field_names: set[str] = set()
+ for f in fields:
+ if f.original_name:
+ existing_field_names.add(f.original_name)
+ elif f.name: # pragma: no cover
+ existing_field_names.add(f.name)
+ for request in all_of_item.required:
+ if request in field_names or request in existing_field_names:
+ continue
+ if self.force_optional_for_required_fields:
+ continue
+ field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
+ request,
+ excludes=existing_field_names,
+ model_type=self.field_name_model_type,
+ class_name=name,
+ )
+ data_type = self._get_inherited_field_type(request, base_classes)
+ if data_type is None:
+ data_type = DataType(type=ANY, import_=IMPORT_ANY)
+ fields.append(
+ self.data_model_field_type(
+ name=field_name,
+ required=True,
+ original_name=request,
+ alias=alias,
+ data_type=data_type,
+ )
+ )
+ existing_field_names.update({request, field_name})
+ elif all_of_item.required:
+ required.extend(all_of_item.required)
+ self._parse_all_of_item(
+ name,
+ all_of_item,
+ path,
+ fields,
+ base_classes,
+ required,
+ union_models,
+ )
+ if all_of_item.anyOf:
+ self.model_resolver.add(path, name, class_name=True, loaded=True)
+ union_models.extend(d.reference for d in self.parse_any_of(name, all_of_item, path) if d.reference)
+ if all_of_item.oneOf:
+ self.model_resolver.add(path, name, class_name=True, loaded=True)
+ union_models.extend(d.reference for d in self.parse_one_of(name, all_of_item, path) if d.reference)
+
+ def parse_all_of(
+ self,
+ name: str,
+ obj: JsonSchemaObject,
+ path: list[str],
+ ignore_duplicate_model: bool = False, # noqa: FBT001, FBT002
+ ) -> DataType:
+ """Parse allOf schema into a single data type with combined properties."""
+ if len(obj.allOf) == 1 and not obj.properties:
+ single_obj = obj.allOf[0]
+ if (
+ single_obj.ref
+ and single_obj.ref_type == JSONReference.LOCAL
+ and get_model_by_path(self.raw_obj, single_obj.ref[2:].split("/")).get("enum")
+ ):
+ ref_data_type = self.get_ref_data_type(single_obj.ref)
+
+ full_path = self.model_resolver.join_path(path)
+ existing_ref = self.model_resolver.references.get(full_path)
+ if existing_ref is not None and not existing_ref.loaded:
+ reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
+ field = self.data_model_field_type(
+ name=None,
+ data_type=ref_data_type,
+ required=True,
+ )
+ data_model_root = self.data_model_root_type(
+ reference=reference,
+ fields=[field],
+ custom_base_class=obj.custom_base_path or self.base_class,
+ custom_template_dir=self.custom_template_dir,
+ extra_template_data=self.extra_template_data,
+ path=self.current_source_path,
+ description=obj.description if self.use_schema_description else None,
+ nullable=obj.type_has_null,
+ treat_dot_as_module=self.treat_dot_as_module,
+ )
+ self.results.append(data_model_root)
+ return self.data_type(reference=reference)
+
+ return ref_data_type
+
+ merged_all_of_obj = self._merge_all_of_object(obj)
+ if merged_all_of_obj:
+ return self._parse_object_common_part(
+ name,
+ merged_all_of_obj,
+ path,
+ ignore_duplicate_model=ignore_duplicate_model,
+ fields=[],
+ base_classes=[],
+ required=[],
+ )
+
+ root_model_result = self._handle_allof_root_model_with_constraints(name, obj, path)
+ if root_model_result is not None:
+ return root_model_result
+
+ fields: list[DataModelFieldBase] = []
+ base_classes: list[Reference] = []
+ required: list[str] = []
+ union_models: list[Reference] = []
+ self._parse_all_of_item(name, obj, path, fields, base_classes, required, union_models)
+ if not union_models:
+ return self._parse_object_common_part(
+ name,
+ obj,
+ path,
+ ignore_duplicate_model=ignore_duplicate_model,
+ fields=fields,
+ base_classes=base_classes,
+ required=required,
+ )
+ reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
+ all_of_data_type = self._parse_object_common_part(
+ name,
+ obj,
+ get_special_path("allOf", path),
+ ignore_duplicate_model=ignore_duplicate_model,
  fields=fields,
  base_classes=base_classes,
- custom_base_class=self.base_class,
+ required=required,
+ )
+ assert all_of_data_type.reference is not None
+ data_type = self.data_type(
+ data_types=[
+ self._parse_object_common_part(
+ name,
+ obj,
+ get_special_path(f"union_model-{index}", path),
+ ignore_duplicate_model=ignore_duplicate_model,
+ fields=[],
+ base_classes=[union_model, all_of_data_type.reference],
+ required=[],
+ )
+ for index, union_model in enumerate(union_models)
+ ]
+ )
+ field = self.get_object_field(
+ field_name=None,
+ field=obj,
+ required=True,
+ field_type=data_type,
+ alias=None,
+ original_field_name=None,
+ )
+ data_model_root = self.data_model_root_type(
+ reference=reference,
+ fields=[field],
+ custom_base_class=obj.custom_base_path or self.base_class,
  custom_template_dir=self.custom_template_dir,
  extra_template_data=self.extra_template_data,
  path=self.current_source_path,
  description=obj.description if self.use_schema_description else None,
+ nullable=obj.type_has_null,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
- self.results.append(data_model_type)
-
+ self.results.append(data_model_root)
  return self.data_type(reference=reference)

  def parse_object_fields(
- self, obj: JsonSchemaObject, path: List[str], module_name: Optional[str] = None
- ) -> List[DataModelFieldBase]:
- properties: Dict[str, JsonSchemaObject] = (
- {} if obj.properties is None else obj.properties
- )
- requires: Set[str] = {*()} if obj.required is None else {*obj.required}
- fields: List[DataModelFieldBase] = []
-
- exclude_field_names: Set[str] = set()
+ self,
+ obj: JsonSchemaObject,
+ path: list[str],
+ module_name: Optional[str] = None, # noqa: UP045
+ class_name: Optional[str] = None, # noqa: UP045
+ ) -> list[DataModelFieldBase]:
+ """Parse object properties into a list of data model fields."""
+ properties: dict[str, JsonSchemaObject | bool] = {} if obj.properties is None else obj.properties
+ requires: set[str] = {*()} if obj.required is None else {*obj.required}
+ fields: list[DataModelFieldBase] = []
+
+ exclude_field_names: set[str] = set()
  for original_field_name, field in properties.items():
-
- if field.is_array or (
- self.field_constraints
- and not (
- field.ref
- or field.anyOf
- or field.oneOf
- or field.allOf
- or field.is_object
- or field.enum
- )
- ):
- constraints: Optional[Mapping[str, Any]] = field.dict()
- else:
- constraints = None
-
  field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
- original_field_name, exclude_field_names
+ original_field_name,
+ excludes=exclude_field_names,
+ model_type=self.field_name_model_type,
+ class_name=class_name,
  )
- modular_name = f'{module_name}.{field_name}' if module_name else field_name
+ modular_name = f"{module_name}.{field_name}" if module_name else field_name

  exclude_field_names.add(field_name)

+ if isinstance(field, bool):
+ fields.append(
+ self.data_model_field_type(
+ name=field_name,
+ data_type=self.data_type_manager.get_data_type(
+ Types.any,
+ ),
+ required=False if self.force_optional_for_required_fields else original_field_name in requires,
+ alias=alias,
+ strip_default_none=self.strip_default_none,
+ use_annotated=self.use_annotated,
+ use_field_description=self.use_field_description,
+ use_inline_field_description=self.use_inline_field_description,
+ original_name=original_field_name,
+ )
+ )
+ continue
+
  field_type = self.parse_item(modular_name, field, [*path, field_name])

  if self.force_optional_for_required_fields or (
@@ -536,19 +2107,13 @@ class JsonSchemaParser(Parser):
  else:
  required = original_field_name in requires
  fields.append(
- self.data_model_field_type(
- name=field_name,
- default=field.default,
- data_type=field_type,
+ self.get_object_field(
+ field_name=field_name,
+ field=field,
  required=required,
+ field_type=field_type,
  alias=alias,
- constraints=constraints,
- nullable=field.nullable
- if self.strict_nullable and (field.has_default or required)
- else None,
- strip_default_none=self.strip_default_none,
- extras={**self.get_field_extras(field)},
- use_annotated=self.use_annotated,
+ original_field_name=original_field_name,
  )
  )
  return fields
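One behavioral note on the boolean branch above: a property given as a bare boolean schema is now handled explicitly and typed as `Any`, with requiredness still taken from the `required` list. For example, an input like this parses cleanly:

    schema = {
        "type": "object",
        "properties": {"anything": True},  # boolean schema: any value allowed
        "required": ["anything"],
    }
    # Yields a model with a required `anything: Any` field.
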
@@ -557,18 +2122,19 @@ class JsonSchemaParser(Parser):
  self,
  name: str,
  obj: JsonSchemaObject,
- path: List[str],
- singular_name: bool = False,
- unique: bool = True,
+ path: list[str],
+ singular_name: bool = False, # noqa: FBT001, FBT002
+ unique: bool = True, # noqa: FBT001, FBT002
  ) -> DataType:
+ """Parse object schema into a data model."""
  if not unique: # pragma: no cover
  warn(
- f'{self.__class__.__name__}.parse_object() ignore `unique` argument.'
- f'An object name must be unique.'
- f'This argument will be removed in a future version'
+ f"{self.__class__.__name__}.parse_object() ignore `unique` argument."
+ f"An object name must be unique."
+ f"This argument will be removed in a future version",
+ stacklevel=2,
  )
- if self.use_title_as_name and obj.title:
- name = obj.title
+ name = self._apply_title_as_name(name, obj)
  reference = self.model_resolver.add(
  path,
  name,
@@ -577,39 +2143,115 @@ class JsonSchemaParser(Parser):
  loaded=True,
  )
  class_name = reference.name
- self.set_title(class_name, obj)
- self.set_additional_properties(class_name, obj)
- data_model_type = self.data_model_type(
- reference=reference,
- fields=self.parse_object_fields(
- obj, path, get_module_name(class_name, None)
- ),
- custom_base_class=self.base_class,
- custom_template_dir=self.custom_template_dir,
- extra_template_data=self.extra_template_data,
- path=self.current_source_path,
- description=obj.description if self.use_schema_description else None,
+ self.set_title(reference.path, obj)
+ if self.read_only_write_only_model_type is not None and obj.properties:
+ for prop in obj.properties.values():
+ if isinstance(prop, JsonSchemaObject) and prop.ref:
+ self._load_ref_schema_object(prop.ref)
+ fields = self.parse_object_fields(
+ obj,
+ path,
+ get_module_name(class_name, None, treat_dot_as_module=self.treat_dot_as_module),
+ class_name=class_name,
  )
- self.results.append(data_model_type)
+ if fields or not isinstance(obj.additionalProperties, JsonSchemaObject):
+ data_model_type_class = self.data_model_type
+ else:
+ fields.append(
+ self.get_object_field(
+ field_name=None,
+ field=obj.additionalProperties,
+ required=True,
+ original_field_name=None,
+ field_type=self.data_type(
+ data_types=[
+ self.parse_item(
+ # TODO: Improve naming for nested ClassName
+ name,
+ obj.additionalProperties,
+ [*path, "additionalProperties"],
+ )
+ ],
+ is_dict=True,
+ ),
+ alias=None,
+ )
+ )
+ data_model_type_class = self.data_model_root_type
+
+ self.set_additional_properties(reference.path, obj)
+
+ generates_separate = self._should_generate_separate_models(fields, None)
+ if generates_separate:
+ self._create_request_response_models(
+ name=class_name,
+ obj=obj,
+ path=path,
+ fields=fields,
+ data_model_type_class=data_model_type_class,
+ )
+
+ # Generate base model if needed
+ if self._should_generate_base_model(generates_separate_models=generates_separate):
+ data_model_type = self._create_data_model(
+ model_type=data_model_type_class,
+ reference=reference,
+ fields=fields,
+ custom_base_class=obj.custom_base_path or self.base_class,
+ custom_template_dir=self.custom_template_dir,
+ extra_template_data=self.extra_template_data,
+ path=self.current_source_path,
+ description=obj.description if self.use_schema_description else None,
+ nullable=obj.type_has_null,
+ keyword_only=self.keyword_only,
+ treat_dot_as_module=self.treat_dot_as_module,
+ dataclass_arguments=self.dataclass_arguments,
+ )
+ self.results.append(data_model_type)
+
  return self.data_type(reference=reference)

- def parse_item(
+ def parse_pattern_properties(
+ self,
+ name: str,
+ pattern_properties: dict[str, JsonSchemaObject],
+ path: list[str],
+ ) -> DataType:
+ """Parse patternProperties into a dict data type with regex keys."""
+ return self.data_type(
+ data_types=[
+ self.data_type(
+ data_types=[
+ self.parse_item(
+ name,
+ kv[1],
+ get_special_path(f"patternProperties/{i}", path),
+ )
+ ],
+ is_dict=True,
+ dict_key=self.data_type_manager.get_data_type(
+ Types.string,
+ pattern=kv[0] if not self.field_constraints else None,
+ ),
+ )
+ for i, kv in enumerate(pattern_properties.items())
+ ],
+ )
+
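As an intuition for parse_pattern_properties: each pattern becomes its own dict type whose key type carries the regex (when field constraints are off), and multiple patterns form a union. So a schema like the one below maps, roughly, to the annotation in the comment (pydantic v1 spelling assumed; the exact rendering depends on the target model type):

    schema = {
        "patternProperties": {
            "^x-": {"type": "string"},
            "^n-": {"type": "integer"},
        }
    }
    # Roughly: Union[Dict[constr(regex=r"^x-"), str],
    #                Dict[constr(regex=r"^n-"), int]]
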
2241
+ def parse_item( # noqa: PLR0911, PLR0912
597
2242
  self,
598
2243
  name: str,
599
2244
  item: JsonSchemaObject,
600
- path: List[str],
601
- singular_name: bool = False,
602
- parent: Optional[JsonSchemaObject] = None,
2245
+ path: list[str],
2246
+ singular_name: bool = False, # noqa: FBT001, FBT002
2247
+ parent: JsonSchemaObject | None = None,
603
2248
  ) -> DataType:
2249
+ """Parse a single JSON Schema item into a data type."""
604
2250
  if self.use_title_as_name and item.title:
605
- name = item.title
2251
+ name = sanitize_module_name(item.title, treat_dot_as_module=self.treat_dot_as_module)
606
2252
  singular_name = False
607
- if (
608
- parent
609
- and item.has_constraint
610
- and (parent.has_constraint or self.field_constraints)
611
- ):
612
- root_type_path = get_special_path('array', path)
2253
+ if parent and not item.enum and item.has_constraint and (parent.has_constraint or self.field_constraints):
2254
+ root_type_path = get_special_path("array", path)
613
2255
  return self.parse_root_type(
614
2256
  self.model_resolver.add(
615
2257
  root_type_path,
@@ -620,86 +2262,84 @@ class JsonSchemaParser(Parser):
620
2262
  item,
621
2263
  root_type_path,
622
2264
  )
623
- elif item.ref:
2265
+ if item.has_ref_with_schema_keywords:
2266
+ item = self._merge_ref_with_schema(item)
2267
+ if item.ref:
624
2268
  return self.get_ref_data_type(item.ref)
625
- elif item.custom_type_path:
626
- return self.data_type_manager.get_data_type_from_full_path(
627
- item.custom_type_path, is_custom_type=True
628
- )
629
- elif item.is_array:
630
- return self.parse_array_fields(
631
- name, item, get_special_path('array', path)
632
- ).data_type
633
- elif item.anyOf:
634
- return self.data_type(
635
- data_types=self.parse_any_of(
636
- name, item, get_special_path('anyOf', path)
637
- )
638
- )
639
- elif item.oneOf:
640
- return self.data_type(
641
- data_types=self.parse_one_of(
642
- name, item, get_special_path('oneOf', path)
643
- )
644
- )
645
- elif item.allOf:
646
- all_of_path = get_special_path('allOf', path)
2269
+ if item.custom_type_path: # pragma: no cover
2270
+ return self.data_type_manager.get_data_type_from_full_path(item.custom_type_path, is_custom_type=True)
2271
+ if item.is_array:
2272
+ return self.parse_array_fields(name, item, get_special_path("array", path)).data_type
2273
+ if item.discriminator and parent and parent.is_array and (item.oneOf or item.anyOf):
2274
+ return self.parse_root_type(name, item, path)
2275
+ if item.anyOf:
2276
+ const_enum_data = self._extract_const_enum_from_combined(item.anyOf, item.type)
2277
+ if const_enum_data is not None:
2278
+ enum_values, varnames, enum_type, nullable = const_enum_data
2279
+ synthetic_obj = self._create_synthetic_enum_obj(item, enum_values, varnames, enum_type, nullable)
2280
+ if self.should_parse_enum_as_literal(synthetic_obj):
2281
+ return self.parse_enum_as_literal(synthetic_obj)
2282
+ return self.parse_enum(name, synthetic_obj, get_special_path("enum", path), singular_name=singular_name)
2283
+                return self.data_type(data_types=self.parse_any_of(name, item, get_special_path("anyOf", path)))
+        if item.oneOf:
+            const_enum_data = self._extract_const_enum_from_combined(item.oneOf, item.type)
+            if const_enum_data is not None:
+                enum_values, varnames, enum_type, nullable = const_enum_data
+                synthetic_obj = self._create_synthetic_enum_obj(item, enum_values, varnames, enum_type, nullable)
+                if self.should_parse_enum_as_literal(synthetic_obj):
+                    return self.parse_enum_as_literal(synthetic_obj)
+                return self.parse_enum(name, synthetic_obj, get_special_path("enum", path), singular_name=singular_name)
+            return self.data_type(data_types=self.parse_one_of(name, item, get_special_path("oneOf", path)))
+        if item.allOf:
+            all_of_path = get_special_path("allOf", path)
+            all_of_path = [self.model_resolver.resolve_ref(all_of_path)]
             return self.parse_all_of(
-                self.model_resolver.add(
-                    all_of_path, name, singular_name=singular_name, class_name=True
-                ).name,
+                self.model_resolver.add(all_of_path, name, singular_name=singular_name, class_name=True).name,
                 item,
                 all_of_path,
                 ignore_duplicate_model=True,
             )
-        elif item.is_object or item.patternProperties:
-            object_path = get_special_path('object', path)
+        if item.is_object or item.patternProperties:
+            object_path = get_special_path("object", path)
             if item.properties:
-                return self.parse_object(
-                    name, item, object_path, singular_name=singular_name
-                )
-            elif item.patternProperties:
-                # support only single key dict.
-                return self.data_type(
-                    data_types=[
-                        self.data_type(
-                            data_types=[self.parse_item(name, v, object_path)],
-                            is_dict=True,
-                            dict_key=self.data_type_manager.get_data_type(
-                                Types.string,
-                                pattern=k,
-                            ),
+                if item.has_multiple_types and isinstance(item.type, list):
+                    data_types: list[DataType] = []
+                    data_types.append(self.parse_object(name, item, object_path, singular_name=singular_name))
+                    data_types.extend(
+                        self.data_type_manager.get_data_type(
+                            self._get_type_with_mappings(t, item.format or "default"),
                         )
-                        for k, v in item.patternProperties.items()
-                    ],
-                )
-            elif isinstance(item.additionalProperties, JsonSchemaObject):
+                        for t in item.type
+                        if t not in {"object", "null"}
+                    )
+                    return self.data_type(data_types=data_types)
+                return self.parse_object(name, item, object_path, singular_name=singular_name)
+            if item.patternProperties:
+                # support only single key dict.
+                return self.parse_pattern_properties(name, item.patternProperties, object_path)
+            if isinstance(item.additionalProperties, JsonSchemaObject):
                 return self.data_type(
-                    data_types=[
-                        self.parse_item(name, item.additionalProperties, object_path)
-                    ],
+                    data_types=[self.parse_item(name, item.additionalProperties, object_path)],
                     is_dict=True,
                 )
-            return self.data_type_manager.get_data_type(Types.object)
-        elif item.enum:
-            if self.should_parse_enum_as_literal(item):
-                enum_literals = item.enum
-                if item.nullable:
-                    enum_literals = [i for i in item.enum if i is not None]
-                return self.data_type(literals=enum_literals)
-            return self.parse_enum(
-                name, item, get_special_path('enum', path), singular_name=singular_name
+            return self.data_type_manager.get_data_type(
+                Types.object,
             )
+        if item.enum:
+            if self.should_parse_enum_as_literal(item):
+                return self.parse_enum_as_literal(item)
+            return self.parse_enum(name, item, get_special_path("enum", path), singular_name=singular_name)
         return self.get_data_type(item)

     def parse_list_item(
         self,
         name: str,
-        target_items: List[JsonSchemaObject],
-        path: List[str],
+        target_items: list[JsonSchemaObject],
+        path: list[str],
         parent: JsonSchemaObject,
-        singular_name: bool = True,
-    ) -> List[DataType]:
+        singular_name: bool = True,  # noqa: FBT001, FBT002
+    ) -> list[DataType]:
+        """Parse a list of items into data types."""
         return [
             self.parse_item(
                 name,
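The rewritten `anyOf`/`oneOf` handling above collapses a combinator whose branches are all `const` values into a single synthetic enum. A minimal standalone sketch of that detection, assuming dict-shaped subschemas (the names below are illustrative, not the parser's internal API):

from typing import Any, Optional


def extract_const_enum(branches: list[dict[str, Any]]) -> Optional[list[Any]]:
    """Return the enum values if every branch is a bare `const`, else None."""
    values = []
    for branch in branches:
        if set(branch) - {"const", "title", "description", "type"}:
            return None  # branch carries other keywords; not a pure const
        if "const" not in branch:
            return None
        values.append(branch["const"])
    return values


# {"oneOf": [{"const": "a"}, {"const": "b"}]} behaves like {"enum": ["a", "b"]}
assert extract_const_enum([{"const": "a"}, {"const": "b"}]) == ["a", "b"]
assert extract_const_enum([{"const": "a"}, {"type": "integer"}]) is None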
@@ -715,50 +2355,51 @@ class JsonSchemaParser(Parser):
         self,
         name: str,
         obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = True,
+        path: list[str],
+        singular_name: bool = True,  # noqa: FBT001, FBT002
     ) -> DataModelFieldBase:
+        """Parse array schema into a data model field with list type."""
         if self.force_optional_for_required_fields:
             required: bool = False
-            nullable: Optional[bool] = None
+            nullable: Optional[bool] = None  # noqa: UP045
         else:
-            required = not (
-                obj.has_default and self.apply_default_values_for_required_fields
-            )
+            required = not (obj.has_default and self.apply_default_values_for_required_fields)
             if self.strict_nullable:
                 nullable = obj.nullable if obj.has_default or required else True
             else:
                 required = not obj.nullable and required
                 nullable = None
+        if isinstance(obj.items, JsonSchemaObject):
+            items: list[JsonSchemaObject] = [obj.items]
+        elif isinstance(obj.items, list):
+            items = obj.items
+        else:
+            items = []
+
+        if items:
+            item_data_types = self.parse_list_item(
+                name,
+                items,
+                path,
+                obj,
+                singular_name=singular_name,
+            )
+        else:
+            item_data_types = [self.data_type_manager.get_data_type(Types.any)]

-        data_types: List[DataType] = [
+        data_types: list[DataType] = [
             self.data_type(
-                data_types=self.parse_list_item(
-                    name,
-                    [obj.items]
-                    if isinstance(obj.items, JsonSchemaObject)
-                    else obj.items or [],
-                    path,
-                    obj,
-                    singular_name=singular_name,
-                ),
+                data_types=item_data_types,
                 is_list=True,
             )
         ]
         # TODO: decide special path word for a combined data model.
         if obj.allOf:
-            data_types.append(
-                self.parse_all_of(name, obj, get_special_path('allOf', path))
-            )
+            data_types.append(self.parse_all_of(name, obj, get_special_path("allOf", path)))
         elif obj.is_object:
-            data_types.append(
-                self.parse_object(name, obj, get_special_path('object', path))
-            )
+            data_types.append(self.parse_object(name, obj, get_special_path("object", path)))
         if obj.enum:
-            data_types.append(
-                self.parse_enum(name, obj, get_special_path('enum', path))
-            )
-
+            data_types.append(self.parse_enum(name, obj, get_special_path("enum", path)))
         return self.data_model_field_type(
             data_type=self.data_type(data_types=data_types),
             default=obj.default,
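`parse_array_fields` now normalizes the three legal shapes of JSON Schema `items` up front instead of inline, falling back to `Any` when no item schema exists. The same normalization over plain dicts, as a sketch (`Schema` and `normalize_items` are illustrative names):

from typing import Any, Union

Schema = dict[str, Any]


def normalize_items(items: Union[Schema, list[Schema], None]) -> list[Schema]:
    if isinstance(items, dict):
        return [items]  # {"items": {...}}: one schema for every element
    if isinstance(items, list):
        return items  # {"items": [...]}: positional (tuple) validation
    return []  # absent: the element type degrades to Any


assert normalize_items({"type": "string"}) == [{"type": "string"}]
assert normalize_items(None) == []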
@@ -768,30 +2409,31 @@ class JsonSchemaParser(Parser):
             strip_default_none=self.strip_default_none,
             extras=self.get_field_extras(obj),
             use_annotated=self.use_annotated,
+            use_serialize_as_any=self.use_serialize_as_any,
+            use_field_description=self.use_field_description,
+            use_inline_field_description=self.use_inline_field_description,
+            original_name=None,
+            has_default=obj.has_default,
         )

     def parse_array(
         self,
         name: str,
         obj: JsonSchemaObject,
-        path: List[str],
-        original_name: Optional[str] = None,
+        path: list[str],
+        original_name: str | None = None,
     ) -> DataType:
-        if self.use_title_as_name and obj.title:
-            name = obj.title
+        """Parse array schema into a root model with array type."""
+        name = self._apply_title_as_name(name, obj)
         reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
         field = self.parse_array_fields(original_name or name, obj, [*path, name])

-        if reference in [
-            d.reference for d in field.data_type.all_data_types if d.reference
-        ]:
+        if reference in [d.reference for d in field.data_type.all_data_types if d.reference]:
             # self-reference
             field = self.data_model_field_type(
                 data_type=self.data_type(
                     data_types=[
-                        self.data_type(
-                            data_types=field.data_type.data_types[1:], is_list=True
-                        ),
+                        self.data_type(data_types=field.data_type.data_types[1:], is_list=True),
                         *field.data_type.data_types[1:],
                     ]
                 ),
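The self-reference guard above fires when an array's element types resolve back to the model being defined. An input shape that would reach it, as a hedged illustration (not a fixture from this package):

# A recursive list: every element may be a string or the list type itself.
tree_schema = {
    "$id": "StringTree",
    "type": "array",
    "items": {
        "anyOf": [{"type": "string"}, {"$ref": "#"}],  # "#" points back at the root schema
    },
}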
@@ -802,68 +2444,88 @@ class JsonSchemaParser(Parser):
                 strip_default_none=field.strip_default_none,
                 extras=field.extras,
                 use_annotated=self.use_annotated,
+                use_field_description=self.use_field_description,
+                use_inline_field_description=self.use_inline_field_description,
+                original_name=None,
+                has_default=field.has_default,
             )

         data_model_root = self.data_model_root_type(
             reference=reference,
             fields=[field],
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
             description=obj.description if self.use_schema_description else None,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_root)
         return self.data_type(reference=reference)

-    def parse_root_type(
+    def parse_root_type(  # noqa: PLR0912
         self,
         name: str,
         obj: JsonSchemaObject,
-        path: List[str],
+        path: list[str],
     ) -> DataType:
+        """Parse a root-level type into a root model."""
+        reference: Reference | None = None
         if obj.ref:
             data_type: DataType = self.get_ref_data_type(obj.ref)
         elif obj.custom_type_path:
             data_type = self.data_type_manager.get_data_type_from_full_path(
                 obj.custom_type_path, is_custom_type=True
-            )
-        elif obj.is_object or obj.anyOf or obj.oneOf:
-            data_types: List[DataType] = []
-            object_path = [*path, name]
-            if obj.is_object:
-                data_types.append(
-                    self.parse_object(
-                        name, obj, get_special_path('object', object_path)
-                    )
-                )
-            if obj.anyOf:
-                data_types.extend(
-                    self.parse_any_of(name, obj, get_special_path('anyOf', object_path))
-                )
-            if obj.oneOf:
-                data_types.extend(
-                    self.parse_one_of(name, obj, get_special_path('oneOf', object_path))
-                )
-            if len(data_types) > 1:
-                data_type = self.data_type(data_types=data_types)
+            )  # pragma: no cover
+        elif obj.is_array:
+            data_type = self.parse_array_fields(
+                name, obj, get_special_path("array", path)
+            ).data_type  # pragma: no cover
+        elif obj.anyOf or obj.oneOf:
+            combined_items = obj.anyOf or obj.oneOf
+            const_enum_data = self._extract_const_enum_from_combined(combined_items, obj.type)
+            if const_enum_data is not None:  # pragma: no cover
+                enum_values, varnames, enum_type, nullable = const_enum_data
+                synthetic_obj = self._create_synthetic_enum_obj(obj, enum_values, varnames, enum_type, nullable)
+                if self.should_parse_enum_as_literal(synthetic_obj):
+                    data_type = self.parse_enum_as_literal(synthetic_obj)
+                else:
+                    data_type = self.parse_enum(name, synthetic_obj, path)
+            else:
+                reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+                if obj.anyOf:
+                    data_types: list[DataType] = self.parse_any_of(name, obj, get_special_path("anyOf", path))
+                else:
+                    data_types = self.parse_one_of(name, obj, get_special_path("oneOf", path))
+
+                if len(data_types) > 1:  # pragma: no cover
+                    data_type = self.data_type(data_types=data_types)
+                elif not data_types:  # pragma: no cover
+                    return EmptyDataType()
+                else:  # pragma: no cover
+                    data_type = data_types[0]
+        elif obj.patternProperties:
+            data_type = self.parse_pattern_properties(name, obj.patternProperties, path)
+        elif obj.enum:
+            if self.should_parse_enum_as_literal(obj):
+                data_type = self.parse_enum_as_literal(obj)
             else:  # pragma: no cover
-                data_type = data_types[0]
+                data_type = self.parse_enum(name, obj, path)
         elif obj.type:
             data_type = self.get_data_type(obj)
         else:
-            data_type = self.data_type_manager.get_data_type(Types.any)
-        if self.force_optional_for_required_fields:
-            required: bool = False
-        else:
-            required = not obj.nullable and not (
-                obj.has_default and self.apply_default_values_for_required_fields
+            data_type = self.data_type_manager.get_data_type(
+                Types.any,
             )
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
-        self.set_title(name, obj)
-        self.set_additional_properties(name, obj)
+        required = self._should_field_be_required(
+            has_default=obj.has_default,
+            is_nullable=bool(obj.nullable),
+        )
+        name = self._apply_title_as_name(name, obj)
+        if not reference:
+            reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+        self._set_schema_metadata(reference.path, obj)
         data_model_root_type = self.data_model_root_type(
             reference=reference,
             fields=[
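`parse_root_type` wraps top-level schemas that are not plain objects in a root model. For a scalar schema such as {"type": "string"}, the pydantic v2 flavour of that output is shaped roughly like the sketch below (the emitted code varies with the selected output model type; `Code` is an illustrative name):

from pydantic import RootModel


class Code(RootModel[str]):
    root: str


print(Code(root="abc").root)  # abc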
@@ -876,33 +2538,116 @@ class JsonSchemaParser(Parser):
                     strip_default_none=self.strip_default_none,
                     extras=self.get_field_extras(obj),
                     use_annotated=self.use_annotated,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
+                    has_default=obj.has_default,
                 )
             ],
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+            default=obj.default if obj.has_default else UNDEFINED,
         )
         self.results.append(data_model_root_type)
         return self.data_type(reference=reference)

+    def _parse_multiple_types_with_properties(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        type_list: list[str],
+        path: list[str],
+    ) -> None:
+        """Parse a schema with multiple types including object with properties."""
+        data_types: list[DataType] = []
+
+        object_path = get_special_path("object", path)
+        object_data_type = self.parse_object(name, obj, object_path)
+        data_types.append(object_data_type)
+
+        data_types.extend(
+            self.data_type_manager.get_data_type(
+                self._get_type_with_mappings(t, obj.format or "default"),
+            )
+            for t in type_list
+            if t not in {"object", "null"}
+        )
+
+        is_nullable = obj.nullable or obj.type_has_null
+        required = self._should_field_be_required(
+            has_default=obj.has_default,
+            is_nullable=bool(is_nullable),
+        )
+
+        reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+        self._set_schema_metadata(reference.path, obj)
+
+        data_model_root_type = self.data_model_root_type(
+            reference=reference,
+            fields=[
+                self.data_model_field_type(
+                    data_type=self.data_type(data_types=data_types),
+                    default=obj.default,
+                    required=required,
+                    constraints=obj.dict() if self.field_constraints else {},
+                    nullable=obj.type_has_null if self.strict_nullable else None,
+                    strip_default_none=self.strip_default_none,
+                    extras=self.get_field_extras(obj),
+                    use_annotated=self.use_annotated,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
+                    has_default=obj.has_default,
+                )
+            ],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+            default=obj.default if obj.has_default else UNDEFINED,
+        )
+        self.results.append(data_model_root_type)
+
+    def parse_enum_as_literal(self, obj: JsonSchemaObject) -> DataType:
+        """Parse enum values as a Literal type."""
+        return self.data_type(literals=[i for i in obj.enum if i is not None])
+
+    @classmethod
+    def _get_field_name_from_dict_enum(cls, enum_part: dict[str, Any], index: int) -> str:
+        """Extract field name from dict enum value using title, name, or const keys."""
+        if enum_part.get("title"):
+            return str(enum_part["title"])
+        if enum_part.get("name"):
+            return str(enum_part["name"])
+        if "const" in enum_part:
+            return str(enum_part["const"])
+        return f"value_{index}"
+
     def parse_enum(
         self,
         name: str,
         obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = False,
-        unique: bool = True,
+        path: list[str],
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        unique: bool = True,  # noqa: FBT001, FBT002
     ) -> DataType:
+        """Parse enum schema into an Enum class."""
         if not unique:  # pragma: no cover
             warn(
-                f'{self.__class__.__name__}.parse_enum() ignore `unique` argument.'
-                f'An object name must be unique.'
-                f'This argument will be removed in a future version'
+                f"{self.__class__.__name__}.parse_enum() ignore `unique` argument."
+                f"An object name must be unique."
+                f"This argument will be removed in a future version",
+                stacklevel=2,
             )
-        enum_fields: List[DataModelFieldBase] = []
+        enum_fields: list[DataModelFieldBase] = []

-        if None in obj.enum and obj.type == 'string':
+        if None in obj.enum and obj.type == "string":
             # Nullable is valid in only OpenAPI
             nullable: bool = True
             enum_times = [e for e in obj.enum if e is not None]
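The `None in obj.enum` check above encodes the OpenAPI convention that a null among a string enum's values marks the schema nullable instead of contributing a member. Illustratively:

values = ["a", "b", None]
nullable = None in values
members = [v for v in values if v is not None]
assert nullable and members == ["a", "b"]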
@@ -910,30 +2655,23 @@ class JsonSchemaParser(Parser):
             enum_times = obj.enum
             nullable = False

-        exclude_field_names: Set[str] = set()
+        exclude_field_names: set[str] = set()
+
+        enum_names = obj.x_enum_varnames or obj.x_enum_names

         for i, enum_part in enumerate(enum_times):
-            if obj.type == 'string' or isinstance(enum_part, str):
-                default = (
-                    f"'{enum_part.translate(escape_characters)}'"
-                    if isinstance(enum_part, str)
-                    else enum_part
-                )
-                if obj.x_enum_varnames:
-                    field_name = obj.x_enum_varnames[i]
-                else:
-                    field_name = str(enum_part)
+            if obj.type == "string" or isinstance(enum_part, str):
+                default = f"'{enum_part.translate(escape_characters)}'" if isinstance(enum_part, str) else enum_part
+                field_name = enum_names[i] if enum_names and i < len(enum_names) and enum_names[i] else str(enum_part)
             else:
                 default = enum_part
-                if obj.x_enum_varnames:
-                    field_name = obj.x_enum_varnames[i]
+                if enum_names and i < len(enum_names) and enum_names[i]:
+                    field_name = enum_names[i]
+                elif isinstance(enum_part, dict):
+                    field_name = self._get_field_name_from_dict_enum(enum_part, i)
                 else:
-                    prefix = (
-                        obj.type
-                        if isinstance(obj.type, str)
-                        else type(enum_part).__name__
-                    )
-                    field_name = f'{prefix}_{enum_part}'
+                    prefix = obj.type if isinstance(obj.type, str) else type(enum_part).__name__
+                    field_name = f"{prefix}_{enum_part}"
             field_name = self.model_resolver.get_valid_field_name(
                 field_name, excludes=exclude_field_names, model_type=ModelType.ENUM
             )
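Member names now come from `x-enum-varnames` or `x-enum-names` when present (with bounds checking), and dict-valued enum entries fall back to `_get_field_name_from_dict_enum`. A schema exercising the explicit names, as a hedged illustration:

schema = {
    "type": "integer",
    "enum": [1, 2],
    "x-enum-varnames": ["LOW", "HIGH"],  # expected members: LOW = 1, HIGH = 2
}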
@@ -942,31 +2680,96 @@ class JsonSchemaParser(Parser):
                 self.data_model_field_type(
                     name=field_name,
                     default=default,
-                    data_type=self.data_type_manager.get_data_type(Types.any),
+                    data_type=self.data_type_manager.get_data_type(
+                        Types.any,
+                    ),
                     required=True,
                     strip_default_none=self.strip_default_none,
+                    has_default=obj.has_default,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
                 )
             )

+        if not enum_fields:
+            if not nullable:
+                return self.data_type_manager.get_data_type(Types.null)
+            name = self._apply_title_as_name(name, obj)
+            reference = self.model_resolver.add(
+                path,
+                name,
+                class_name=True,
+                singular_name=singular_name,
+                singular_name_suffix="Enum",
+                loaded=True,
+            )
+            data_model_root_type = self.data_model_root_type(
+                reference=reference,
+                fields=[
+                    self.data_model_field_type(
+                        data_type=self.data_type_manager.get_data_type(Types.null),
+                        default=obj.default,
+                        required=False,
+                        nullable=True,
+                        strip_default_none=self.strip_default_none,
+                        extras=self.get_field_extras(obj),
+                        use_annotated=self.use_annotated,
+                        has_default=obj.has_default,
+                        use_field_description=self.use_field_description,
+                        use_inline_field_description=self.use_inline_field_description,
+                        original_name=None,
+                    )
+                ],
+                custom_base_class=obj.custom_base_path or self.base_class,
+                custom_template_dir=self.custom_template_dir,
+                extra_template_data=self.extra_template_data,
+                path=self.current_source_path,
+                default=obj.default if obj.has_default else UNDEFINED,
+                nullable=obj.type_has_null,
+                treat_dot_as_module=self.treat_dot_as_module,
+            )
+            self.results.append(data_model_root_type)
+            return self.data_type(reference=reference)
+
         def create_enum(reference_: Reference) -> DataType:
-            enum = Enum(
+            type_: Types | None = (
+                self._get_type_with_mappings(obj.type, obj.format) if isinstance(obj.type, str) else None
+            )
+
+            enum_cls: type[Enum] = Enum
+            if (
+                self.use_specialized_enum
+                and type_
+                and (specialized_type := SPECIALIZED_ENUM_TYPE_MATCH.get(type_))
+                # StrEnum is available only in Python 3.11+
+                and (specialized_type != StrEnum or self.target_python_version.has_strenum)
+            ):
+                # If specialized enum is available in the target Python version,
+                # use it and ignore `self.use_subclass_enum` setting.
+                type_ = None
+                enum_cls = specialized_type
+
+            enum = enum_cls(
                 reference=reference_,
                 fields=enum_fields,
                 path=self.current_source_path,
                 description=obj.description if self.use_schema_description else None,
                 custom_template_dir=self.custom_template_dir,
+                type_=type_ if self.use_subclass_enum else None,
+                default=obj.default if obj.has_default else UNDEFINED,
+                treat_dot_as_module=self.treat_dot_as_module,
             )
             self.results.append(enum)
             return self.data_type(reference=reference_)

-        if self.use_title_as_name and obj.title:
-            name = obj.title
+        name = self._apply_title_as_name(name, obj)
         reference = self.model_resolver.add(
             path,
             name,
             class_name=True,
             singular_name=singular_name,
-            singular_name_suffix='Enum',
+            singular_name_suffix="Enum",
             loaded=True,
         )

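`create_enum` can now emit `StrEnum`/`IntEnum` subclasses instead of a plain `Enum` when `use_specialized_enum` is enabled; the guard exists because `StrEnum` only landed in Python 3.11. A sketch of the two spellings being chosen between (class names are illustrative):

import sys
from enum import Enum, IntEnum

if sys.version_info >= (3, 11):
    from enum import StrEnum

    class Color(StrEnum):  # specialized form on 3.11+
        RED = "red"
else:
    class Color(str, Enum):  # pre-3.11 spelling of a string-valued enum
        RED = "red"


class Priority(IntEnum):
    LOW = 1


print(Color.RED.value, Priority.LOW + 1)  # red 2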
@@ -974,11 +2777,11 @@ class JsonSchemaParser(Parser):
             return create_enum(reference)

         enum_reference = self.model_resolver.add(
-            [*path, 'Enum'],
-            f'{reference.name}Enum',
+            [*path, "Enum"],
+            f"{reference.name}Enum",
             class_name=True,
             singular_name=singular_name,
-            singular_name_suffix='Enum',
+            singular_name_suffix="Enum",
             loaded=True,
         )

@@ -993,165 +2796,219 @@ class JsonSchemaParser(Parser):
                     strip_default_none=self.strip_default_none,
                     extras=self.get_field_extras(obj),
                     use_annotated=self.use_annotated,
+                    has_default=obj.has_default,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
                 )
             ],
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
+            default=obj.default if obj.has_default else UNDEFINED,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_root_type)
         return self.data_type(reference=reference)

-    def _get_ref_body(self, resolved_ref: str) -> Dict[Any, Any]:
+    def _get_ref_body(self, resolved_ref: str) -> dict[str, YamlValue]:
+        """Get the body of a reference from URL or remote file."""
         if is_url(resolved_ref):
             return self._get_ref_body_from_url(resolved_ref)
         return self._get_ref_body_from_remote(resolved_ref)

-    def _get_ref_body_from_url(self, ref: str) -> Dict[Any, Any]:
-        # URL Reference $ref: 'http://path/to/your/resource' Uses the whole document located on the different server.
+    def _get_ref_body_from_url(self, ref: str) -> dict[str, YamlValue]:
+        """Get reference body from a URL (HTTP, HTTPS, or file scheme)."""
+        if ref.startswith("file://"):
+            from urllib.parse import urlparse  # noqa: PLC0415
+            from urllib.request import url2pathname  # noqa: PLC0415
+
+            parsed = urlparse(ref)
+            # url2pathname handles percent-decoding and Windows drive letters
+            path = url2pathname(parsed.path)
+            # Handle UNC paths (file://server/share/path)
+            if parsed.netloc:
+                path = f"//{parsed.netloc}{path}"
+            file_path = Path(path)
+            return self.remote_object_cache.get_or_put(
+                ref, default_factory=lambda _: load_yaml_dict_from_path(file_path, self.encoding)
+            )
         return self.remote_object_cache.get_or_put(
-            ref, default_factory=lambda key: load_yaml(self._get_text_from_url(key))
+            ref, default_factory=lambda key: load_yaml_dict(self._get_text_from_url(key))
         )
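The `file://` branch above can be exercised standalone: `url2pathname` undoes percent-encoding and Windows drive-letter quirks, and a non-empty netloc signals a UNC share. A hedged, self-contained illustration of the same conversion:

from urllib.parse import urlparse
from urllib.request import url2pathname

ref = "file:///tmp/schemas/person%20v2.json"  # hypothetical path
parsed = urlparse(ref)
path = url2pathname(parsed.path)
if parsed.netloc:  # file://server/share/... becomes //server/share/...
    path = f"//{parsed.netloc}{path}"
print(path)  # /tmp/schemas/person v2.json on POSIX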

-    def _get_ref_body_from_remote(self, resolved_ref: str) -> Dict[Any, Any]:
-        # Remote Reference $ref: 'document.json' Uses the whole document located on the same server and in
+    def _get_ref_body_from_remote(self, resolved_ref: str) -> dict[str, YamlValue]:
+        """Get reference body from a remote file path."""
+        # Remote Reference: $ref: 'document.json' Uses the whole document located on the same server and in
         # the same location. TODO treat edge case
         full_path = self.base_path / resolved_ref

         return self.remote_object_cache.get_or_put(
             str(full_path),
-            default_factory=lambda _: load_yaml_from_path(full_path, self.encoding),
+            default_factory=lambda _: load_yaml_dict_from_path(full_path, self.encoding),
         )

     def resolve_ref(self, object_ref: str) -> Reference:
+        """Resolve a reference by loading and parsing the referenced schema."""
         reference = self.model_resolver.add_ref(object_ref)
         if reference.loaded:
             return reference

         # https://swagger.io/docs/specification/using-ref/
         ref = self.model_resolver.resolve_ref(object_ref)
-        if get_ref_type(object_ref) == JSONReference.LOCAL:
-            # Local Reference – $ref: '#/definitions/myElement'
-            self.reserved_refs[tuple(self.model_resolver.current_root)].add(ref)  # type: ignore
+        if get_ref_type(object_ref) == JSONReference.LOCAL or get_ref_type(ref) == JSONReference.LOCAL:
+            self.reserved_refs[tuple(self.model_resolver.current_root)].add(ref)
             return reference
-        elif self.model_resolver.is_after_load(ref):
-            self.reserved_refs[tuple(ref.split('#')[0].split('/'))].add(ref)  # type: ignore
+        if self.model_resolver.is_after_load(ref):
+            self.reserved_refs[tuple(ref.split("#")[0].split("/"))].add(ref)
             return reference

         if is_url(ref):
-            relative_path, object_path = ref.split('#')
+            relative_path, object_path = ref.split("#")
             relative_paths = [relative_path]
             base_path = None
         else:
             if self.model_resolver.is_external_root_ref(ref):
-                relative_path, object_path = ref[:-1], ''
+                relative_path, object_path = ref[:-1], ""
             else:
-                relative_path, object_path = ref.split('#')
-            relative_paths = relative_path.split('/')
+                relative_path, object_path = ref.split("#")
+            relative_paths = relative_path.split("/")
             base_path = Path(*relative_paths).parent
-        with self.model_resolver.current_base_path_context(
-            base_path
-        ), self.model_resolver.base_url_context(relative_path):
+        with (
+            self.model_resolver.current_base_path_context(base_path),
+            self.model_resolver.base_url_context(relative_path),
+        ):
             self._parse_file(
                 self._get_ref_body(relative_path),
                 self.model_resolver.add_ref(ref, resolved=True).name,
                 relative_paths,
-                object_path.split('/') if object_path else None,
+                object_path.split("/") if object_path else None,
             )
         reference.loaded = True
         return reference

-    def parse_ref(self, obj: JsonSchemaObject, path: List[str]) -> None:
-        if obj.ref:
-            self.resolve_ref(obj.ref)
+    def _traverse_schema_objects(  # noqa: PLR0912
+        self,
+        obj: JsonSchemaObject,
+        path: list[str],
+        callback: Callable[[JsonSchemaObject, list[str]], None],
+        *,
+        include_one_of: bool = True,
+    ) -> None:
+        """Traverse schema objects recursively and apply callback."""
+        callback(obj, path)
         if obj.items:
             if isinstance(obj.items, JsonSchemaObject):
-                self.parse_ref(obj.items, path)
-            else:
+                self._traverse_schema_objects(obj.items, path, callback, include_one_of=include_one_of)
+            elif isinstance(obj.items, list):
                 for item in obj.items:
-                    self.parse_ref(item, path)
+                    self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
         if isinstance(obj.additionalProperties, JsonSchemaObject):
-            self.parse_ref(obj.additionalProperties, path)
+            self._traverse_schema_objects(obj.additionalProperties, path, callback, include_one_of=include_one_of)
         if obj.patternProperties:
             for value in obj.patternProperties.values():
-                self.parse_ref(value, path)
+                self._traverse_schema_objects(value, path, callback, include_one_of=include_one_of)
         for item in obj.anyOf:
-            self.parse_ref(item, path)
+            self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
         for item in obj.allOf:
-            self.parse_ref(item, path)
-        for item in obj.oneOf:
-            self.parse_ref(item, path)
+            self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
+        if include_one_of:
+            for item in obj.oneOf:
+                self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
         if obj.properties:
             for value in obj.properties.values():
-                self.parse_ref(value, path)
+                if isinstance(value, JsonSchemaObject):
+                    self._traverse_schema_objects(value, path, callback, include_one_of=include_one_of)

-    def parse_id(self, obj: JsonSchemaObject, path: List[str]) -> None:
+    def _resolve_ref_callback(self, obj: JsonSchemaObject, path: list[str]) -> None:  # noqa: ARG002
+        """Resolve $ref in schema object."""
+        if obj.ref:
+            self.resolve_ref(obj.ref)
+
+    def _add_id_callback(self, obj: JsonSchemaObject, path: list[str]) -> None:
+        """Add $id to model resolver."""
         if obj.id:
             self.model_resolver.add_id(obj.id, path)
-        if obj.items:
-            if isinstance(obj.items, JsonSchemaObject):
-                self.parse_id(obj.items, path)
-            else:
-                for item in obj.items:
-                    self.parse_id(item, path)
-        if isinstance(obj.additionalProperties, JsonSchemaObject):
-            self.parse_id(obj.additionalProperties, path)
-        if obj.patternProperties:
-            for value in obj.patternProperties.values():
-                self.parse_id(value, path)
-        for item in obj.anyOf:
-            self.parse_id(item, path)
-        for item in obj.allOf:
-            self.parse_id(item, path)
-        if obj.properties:
-            for value in obj.properties.values():
-                self.parse_id(value, path)
+
+    def parse_ref(self, obj: JsonSchemaObject, path: list[str]) -> None:
+        """Recursively parse all $ref references in a schema object."""
+        self._traverse_schema_objects(obj, path, self._resolve_ref_callback)
+
+    def parse_id(self, obj: JsonSchemaObject, path: list[str]) -> None:
+        """Recursively parse all $id fields in a schema object."""
+        self._traverse_schema_objects(obj, path, self._add_id_callback, include_one_of=False)
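`parse_ref` and `parse_id` previously duplicated the same recursive walk; both are now thin wrappers over `_traverse_schema_objects` with a callback. The pattern in miniature over plain dicts (illustrative, not the parser's API):

from typing import Any, Callable

Schema = dict[str, Any]


def walk(schema: Schema, visit: Callable[[Schema], None]) -> None:
    visit(schema)
    for key in ("anyOf", "allOf", "oneOf"):
        for sub in schema.get(key, []):
            walk(sub, visit)
    for sub in schema.get("properties", {}).values():
        walk(sub, visit)


refs: list[str] = []
walk(
    {"anyOf": [{"$ref": "#/a"}], "properties": {"x": {"$ref": "#/b"}}},
    lambda s: refs.append(s["$ref"]) if "$ref" in s else None,
)
assert refs == ["#/a", "#/b"]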

     @contextmanager
-    def root_id_context(self, root_raw: Dict[str, Any]) -> Generator[None, None, None]:
-        root_id: Optional[str] = root_raw.get('$id')
-        previous_root_id: Optional[str] = self.root_id
-        self.root_id = root_id if root_id else None
+    def root_id_context(self, root_raw: dict[str, Any]) -> Generator[None, None, None]:
+        """Context manager to temporarily set the root $id during parsing."""
+        previous_root_id = self.root_id
+        self.root_id = root_raw.get("$id") or None
         yield
         self.root_id = previous_root_id

     def parse_raw_obj(
         self,
         name: str,
-        raw: Dict[str, Any],
-        path: List[str],
+        raw: dict[str, YamlValue] | YamlValue,
+        path: list[str],
     ) -> None:
-        self.parse_obj(name, JsonSchemaObject.parse_obj(raw), path)
+        """Parse a raw dictionary into a JsonSchemaObject and process it."""
+        obj: JsonSchemaObject = (
+            self.SCHEMA_OBJECT_TYPE.model_validate(raw) if PYDANTIC_V2 else self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
+        )
+        self.parse_obj(name, obj, path)

-    def parse_obj(
+    def parse_obj(  # noqa: PLR0912
         self,
         name: str,
         obj: JsonSchemaObject,
-        path: List[str],
+        path: list[str],
     ) -> None:
+        """Parse a JsonSchemaObject by dispatching to appropriate parse methods."""
+        if obj.has_ref_with_schema_keywords:
+            obj = self._merge_ref_with_schema(obj)
+
         if obj.is_array:
             self.parse_array(name, obj, path)
         elif obj.allOf:
             self.parse_all_of(name, obj, path)
-        elif obj.oneOf:
+        elif obj.oneOf or obj.anyOf:
+            combined_items = obj.oneOf or obj.anyOf
+            const_enum_data = self._extract_const_enum_from_combined(combined_items, obj.type)
+            if const_enum_data is not None:
+                enum_values, varnames, enum_type, nullable = const_enum_data
+                synthetic_obj = self._create_synthetic_enum_obj(obj, enum_values, varnames, enum_type, nullable)
+                if not self.should_parse_enum_as_literal(synthetic_obj):
+                    self.parse_enum(name, synthetic_obj, path)
+                else:
+                    self.parse_root_type(name, synthetic_obj, path)
+            else:
+                data_type = self.parse_root_type(name, obj, path)
+                if isinstance(data_type, EmptyDataType) and obj.properties:
+                    self.parse_object(name, obj, path)  # pragma: no cover
+        elif obj.properties:
+            if obj.has_multiple_types and isinstance(obj.type, list):
+                self._parse_multiple_types_with_properties(name, obj, obj.type, path)
+            else:
+                self.parse_object(name, obj, path)
+        elif obj.patternProperties:
             self.parse_root_type(name, obj, path)
-        elif obj.is_object:
+        elif obj.type == "object":
             self.parse_object(name, obj, path)
-        elif obj.enum:
+        elif obj.enum and not self.should_parse_enum_as_literal(obj):
             self.parse_enum(name, obj, path)
         else:
             self.parse_root_type(name, obj, path)
         self.parse_ref(obj, path)

-    def parse_raw(self) -> None:
-        if isinstance(self.source, list) or (
-            isinstance(self.source, Path) and self.source.is_dir()
-        ):
+    def _get_context_source_path_parts(self) -> Iterator[tuple[Source, list[str]]]:
+        """Get source and path parts for each input file with context managers."""
+        if isinstance(self.source, list) or (isinstance(self.source, Path) and self.source.is_dir()):
             self.current_source_path = Path()
             self.model_resolver.after_load_files = {
-                self.base_path.joinpath(s.path).resolve().as_posix()
-                for s in self.iter_source
+                self.base_path.joinpath(s.path).resolve().as_posix() for s in self.iter_source
             }

         for source in self.iter_source:
@@ -1161,98 +3018,114 @@ class JsonSchemaParser(Parser):
             path_parts = list(source.path.parts)
             if self.current_source_path is not None:
                 self.current_source_path = source.path
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
-                self.raw_obj = load_yaml(source.text)
-                if self.custom_class_name_generator:
-                    obj_name = self.raw_obj.get('title', 'Model')
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
+                yield source, path_parts
+
+    def parse_raw(self) -> None:
+        """Parse all raw input sources into data models."""
+        for source, path_parts in self._get_context_source_path_parts():
+            raw_obj = load_yaml(source.text)
+            if not isinstance(raw_obj, dict):  # pragma: no cover
+                warn(f"{source.path} is empty or not a dict. Skipping this file", stacklevel=2)
+                continue
+            self.raw_obj = raw_obj
+            title = self.raw_obj.get("title")
+            title_str = str(title) if title is not None else "Model"
+            if self.custom_class_name_generator:
+                obj_name = title_str
+            else:
+                if self.class_name:
+                    obj_name = self.class_name
                 else:
-                    if self.class_name:
-                        obj_name = self.class_name
-                    else:
-                        # backward compatible
-                        obj_name = self.raw_obj.get('title', 'Model')
-                        if not self.model_resolver.validate_name(obj_name):
-                            obj_name = title_to_class_name(obj_name)
+                    # backward compatible
+                    obj_name = title_str
                     if not self.model_resolver.validate_name(obj_name):
-                        raise InvalidClassNameError(obj_name)
-                self._parse_file(self.raw_obj, obj_name, path_parts)
+                        obj_name = title_to_class_name(obj_name)
+                    if not self.model_resolver.validate_name(obj_name):
+                        raise InvalidClassNameError(obj_name)
+            self._parse_file(self.raw_obj, obj_name, path_parts)

         self._resolve_unparsed_json_pointer()
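`parse_raw` is now split from the source iteration and skips inputs whose YAML root is not a mapping instead of failing later. The guard's effect, shown with PyYAML directly (illustrative):

import yaml

for text in ("[1, 2, 3]", "plain scalar", "title: Model"):
    doc = yaml.safe_load(text)
    status = "parsed" if isinstance(doc, dict) else "skipped"
    print(f"{status}: {doc!r}")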

     def _resolve_unparsed_json_pointer(self) -> None:
+        """Resolve any remaining unparsed JSON pointer references recursively."""
         model_count: int = len(self.results)
         for source in self.iter_source:
             path_parts = list(source.path.parts)
-            reserved_refs = self.reserved_refs.get(tuple(path_parts))  # type: ignore
-            if not reserved_refs:
+            if not (reserved_refs := self.reserved_refs.get(tuple(path_parts))):
                 continue
             if self.current_source_path is not None:
                 self.current_source_path = source.path

-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
                 for reserved_ref in sorted(reserved_refs):
                     if self.model_resolver.add_ref(reserved_ref, resolved=True).loaded:
                         continue
                     # for root model
-                    self.raw_obj = load_yaml(source.text)
+                    self.raw_obj = load_yaml_dict(source.text)
                     self.parse_json_pointer(self.raw_obj, reserved_ref, path_parts)

         if model_count != len(self.results):
             # New model have been generated. It try to resolve json pointer again.
             self._resolve_unparsed_json_pointer()

-    def parse_json_pointer(
-        self, raw: Dict[str, Any], ref: str, path_parts: List[str]
-    ) -> None:
-        path = ref.split('#', 1)[-1]
-        if path[0] == '/':  # pragma: no cover
+    def parse_json_pointer(self, raw: dict[str, YamlValue], ref: str, path_parts: list[str]) -> None:
+        """Parse a JSON pointer reference into a model."""
+        path = ref.split("#", 1)[-1]
+        if path[0] == "/":  # pragma: no cover
             path = path[1:]
-        object_paths = path.split('/')
+        object_paths = path.split("/")
         models = get_model_by_path(raw, object_paths)
         model_name = object_paths[-1]

-        self.parse_raw_obj(
-            model_name, models, [*path_parts, f'#/{object_paths[0]}', *object_paths[1:]]
-        )
+        self.parse_raw_obj(model_name, models, [*path_parts, f"#/{object_paths[0]}", *object_paths[1:]])

     def _parse_file(
         self,
-        raw: Dict[str, Any],
+        raw: dict[str, Any],
         obj_name: str,
-        path_parts: List[str],
-        object_paths: Optional[List[str]] = None,
+        path_parts: list[str],
+        object_paths: list[str] | None = None,
     ) -> None:
+        """Parse a file containing JSON Schema definitions and references."""
         object_paths = [o for o in object_paths or [] if o]
-        if object_paths:
-            path = [*path_parts, f'#/{object_paths[0]}', *object_paths[1:]]
-        else:
-            path = path_parts
+        path = [*path_parts, f"#/{object_paths[0]}", *object_paths[1:]] if object_paths else path_parts
         with self.model_resolver.current_root_context(path_parts):
-            obj_name = self.model_resolver.add(
-                path, obj_name, unique=False, class_name=True
-            ).name
+            obj_name = self.model_resolver.add(path, obj_name, unique=False, class_name=True).name
             with self.root_id_context(raw):
-
+                # Some jsonschema docs include attribute self to have include version details
+                raw.pop("self", None)
                 # parse $id before parsing $ref
-                root_obj = JsonSchemaObject.parse_obj(raw)
+                root_obj = self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
                 self.parse_id(root_obj, path_parts)
-                definitions = raw.get('definitions', {})
+                definitions: dict[str, YamlValue] = {}
+                schema_path = ""
+                for schema_path_candidate, split_schema_path in self.schema_paths:
+                    try:
+                        if definitions := get_model_by_path(raw, split_schema_path):
+                            schema_path = schema_path_candidate
+                            break
+                    except KeyError:  # pragma: no cover
+                        continue
+
                 for key, model in definitions.items():
-                    obj = JsonSchemaObject.parse_obj(model)
-                    self.parse_id(obj, [*path_parts, '#/definitions', key])
+                    obj = self.SCHEMA_OBJECT_TYPE.parse_obj(model)
+                    self.parse_id(obj, [*path_parts, schema_path, key])
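The hard-coded `definitions` lookup is gone: `_parse_file` probes a list of candidate container paths, which is how both draft-07 `definitions` and 2019-09+ `$defs` are picked up. A minimal standalone equivalent of the probe, assuming a candidate list shaped like `("#/$defs", ["$defs"])` (illustrative):

raw = {"$defs": {"Pet": {"type": "object"}}}
candidates = (("#/definitions", ["definitions"]), ("#/$defs", ["$defs"]))
for schema_path, parts in candidates:
    node = raw
    for part in parts:
        node = node.get(part, {}) if isinstance(node, dict) else {}
    if node:
        print(schema_path, "->", list(node))  # #/$defs -> ['Pet']
        break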

                 if object_paths:
                     models = get_model_by_path(raw, object_paths)
                     model_name = object_paths[-1]
-                    self.parse_obj(model_name, JsonSchemaObject.parse_obj(models), path)
-                else:
-                    self.parse_obj(obj_name, root_obj, path_parts or ['#'])
+                    self.parse_obj(model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path)
+                elif not self.skip_root_model:
+                    self.parse_obj(obj_name, root_obj, path_parts or ["#"])
                 for key, model in definitions.items():
-                    path = [*path_parts, '#/definitions', key]
+                    path = [*path_parts, schema_path, key]
                     reference = self.model_resolver.get(path)
                     if not reference or not reference.loaded:
                         self.parse_raw_obj(key, model, path)
@@ -1261,16 +3134,14 @@ class JsonSchemaParser(Parser):
         reserved_refs = set(self.reserved_refs.get(key) or [])
         while reserved_refs:
             for reserved_path in sorted(reserved_refs):
-                reference = self.model_resolver.get(reserved_path)
+                reference = self.model_resolver.references.get(reserved_path)
                 if not reference or reference.loaded:
                     continue
-                path = reserved_path.split('/')
-                _, *object_paths = path
+                object_paths = reserved_path.split("#/", 1)[-1].split("/")
+                path = reserved_path.split("/")
                 models = get_model_by_path(raw, object_paths)
                 model_name = object_paths[-1]
-                self.parse_obj(
-                    model_name, JsonSchemaObject.parse_obj(models), path
-                )
+                self.parse_obj(model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path)
             previous_reserved_refs = reserved_refs
             reserved_refs = set(self.reserved_refs.get(key) or [])
             if previous_reserved_refs == reserved_refs: