datamodel-code-generator 0.31.2__py3-none-any.whl → 0.33.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator might be problematic. See the registry's advisory page for this release for more details.

@@ -20,6 +20,7 @@ from typing import (
20
20
  from urllib.parse import ParseResult
21
21
 
22
22
  import yaml
23
+ import yaml.parser
23
24
 
24
25
  import datamodel_code_generator.pydantic_patch # noqa: F401
25
26
  from datamodel_code_generator.format import (
@@ -116,8 +117,8 @@ def chdir(path: Path | None) -> Iterator[None]:
116
117
  os.chdir(prev_cwd)
117
118
 
118
119
 
119
- def is_openapi(text: str) -> bool:
120
- return "openapi" in load_yaml(text)
120
+ def is_openapi(data: dict) -> bool:
121
+ return "openapi" in data
121
122
 
122
123
 
123
124
  JSON_SCHEMA_URLS: tuple[str, ...] = (
@@ -126,10 +127,7 @@ JSON_SCHEMA_URLS: tuple[str, ...] = (
126
127
  )
127
128
 
128
129
 
129
- def is_schema(text: str) -> bool:
130
- data = load_yaml(text)
131
- if not isinstance(data, dict):
132
- return False
130
+ def is_schema(data: dict) -> bool:
133
131
  schema = data.get("$schema")
134
132
  if isinstance(schema, str) and any(schema.startswith(u) for u in JSON_SCHEMA_URLS): # pragma: no cover
135
133
  return True
@@ -144,7 +142,7 @@ def is_schema(text: str) -> bool:
144
142
  )
145
143
  ):
146
144
  return True
147
- return bool(isinstance(data.get("properties"), dict))
145
+ return isinstance(data.get("properties"), dict)
148
146
 
149
147
 
150
148
  class InputFileType(Enum):
@@ -208,8 +206,8 @@ def get_first_file(path: Path) -> Path: # pragma: no cover
208
206
  for child in path.rglob("*"):
209
207
  if child.is_file():
210
208
  return child
211
- msg = "File not found"
212
- raise Error(msg)
209
+ msg = f"No file found in: {path}"
210
+ raise FileNotFoundError(msg)
213
211
 
214
212
 
215
213
  def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
@@ -258,6 +256,7 @@ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
258
256
  field_include_all_keys: bool = False,
259
257
  field_extra_keys_without_x_prefix: set[str] | None = None,
260
258
  openapi_scopes: list[OpenAPIScope] | None = None,
259
+ include_path_parameters: bool = False,
261
260
  graphql_scopes: list[GraphQLScope] | None = None, # noqa: ARG001
262
261
  wrap_string_literal: bool | None = None,
263
262
  use_title_as_name: bool = False,
@@ -311,15 +310,21 @@ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
311
310
  input_text_ = (
312
311
  get_first_file(input_).read_text(encoding=encoding) if isinstance(input_, Path) else input_text
313
312
  )
313
+ except FileNotFoundError as exc:
314
+ msg = "File not found"
315
+ raise Error(msg) from exc
316
+
317
+ try:
314
318
  assert isinstance(input_text_, str)
315
319
  input_file_type = infer_input_type(input_text_)
320
+ except Exception as exc:
321
+ msg = "Invalid file format"
322
+ raise Error(msg) from exc
323
+ else:
316
324
  print( # noqa: T201
317
325
  inferred_message.format(input_file_type.value),
318
326
  file=sys.stderr,
319
327
  )
320
- except Exception as exc:
321
- msg = "Invalid file format"
322
- raise Error(msg) from exc
323
328
 
324
329
  kwargs: dict[str, Any] = {}
325
330
  if input_file_type == InputFileType.OpenAPI: # noqa: PLR1702
@@ -327,6 +332,7 @@ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
327
332
 
328
333
  parser_class: type[Parser] = OpenAPIParser
329
334
  kwargs["openapi_scopes"] = openapi_scopes
335
+ kwargs["include_path_parameters"] = include_path_parameters
330
336
  elif input_file_type == InputFileType.GraphQL:
331
337
  from datamodel_code_generator.parser.graphql import GraphQLParser # noqa: PLC0415
332
338
 
@@ -407,7 +413,7 @@ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
407
413
 
408
414
  from datamodel_code_generator.model import get_data_model_types # noqa: PLC0415
409
415
 
410
- data_model_types = get_data_model_types(output_model_type, target_python_version, output_datetime_class)
416
+ data_model_types = get_data_model_types(output_model_type, target_python_version)
411
417
  source = input_text or input_
412
418
  assert not isinstance(source, Mapping)
413
419
  parser = parser_class(
@@ -558,11 +564,21 @@ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
558
564
 
559
565
 
560
566
  def infer_input_type(text: str) -> InputFileType:
561
- if is_openapi(text):
562
- return InputFileType.OpenAPI
563
- if is_schema(text):
564
- return InputFileType.JsonSchema
565
- return InputFileType.Json
567
+ try:
568
+ data = load_yaml(text)
569
+ except yaml.parser.ParserError:
570
+ return InputFileType.CSV
571
+ if isinstance(data, dict):
572
+ if is_openapi(data):
573
+ return InputFileType.OpenAPI
574
+ if is_schema(data):
575
+ return InputFileType.JsonSchema
576
+ return InputFileType.Json
577
+ msg = (
578
+ "Can't infer input file type from the input data. "
579
+ "Please specify the input file type explicitly with --input-file-type option."
580
+ )
581
+ raise Error(msg)
566
582
 
567
583
 
568
584
  inferred_message = (
@@ -573,6 +589,7 @@ inferred_message = (
573
589
  __all__ = [
574
590
  "MAX_VERSION",
575
591
  "MIN_VERSION",
592
+ "DatetimeClassType",
576
593
  "DefaultPutDict",
577
594
  "Error",
578
595
  "InputFileType",
@@ -13,7 +13,7 @@ from collections.abc import Sequence # noqa: TC003 # pydantic needs it
13
13
  from enum import IntEnum
14
14
  from io import TextIOBase
15
15
  from pathlib import Path
16
- from typing import TYPE_CHECKING, Any, Optional, Union, cast
16
+ from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast
17
17
  from urllib.parse import ParseResult, urlparse
18
18
 
19
19
  import argcomplete
@@ -136,48 +136,6 @@ class Config(BaseModel):
136
136
  msg = f"This protocol doesn't support only http/https. --input={value}"
137
137
  raise Error(msg) # pragma: no cover
138
138
 
139
- @model_validator()
140
- def validate_original_field_name_delimiter(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
141
- if values.get("original_field_name_delimiter") is not None and not values.get("snake_case_field"):
142
- msg = "`--original-field-name-delimiter` can not be used without `--snake-case-field`."
143
- raise Error(msg)
144
- return values
145
-
146
- @model_validator()
147
- def validate_custom_file_header(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
148
- if values.get("custom_file_header") and values.get("custom_file_header_path"):
149
- msg = "`--custom_file_header_path` can not be used with `--custom_file_header`."
150
- raise Error(msg) # pragma: no cover
151
- return values
152
-
153
- @model_validator()
154
- def validate_keyword_only(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
155
- output_model_type: DataModelType = values.get("output_model_type") # pyright: ignore[reportAssignmentType]
156
- python_target: PythonVersion = values.get("target_python_version") # pyright: ignore[reportAssignmentType]
157
- if (
158
- values.get("keyword_only")
159
- and output_model_type == DataModelType.DataclassesDataclass
160
- and not python_target.has_kw_only_dataclass
161
- ):
162
- msg = f"`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher."
163
- raise Error(msg)
164
- return values
165
-
166
- @model_validator()
167
- def validate_output_datetime_class(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
168
- datetime_class_type: DatetimeClassType | None = values.get("output_datetime_class")
169
- if (
170
- datetime_class_type
171
- and datetime_class_type is not DatetimeClassType.Datetime
172
- and values.get("output_model_type") == DataModelType.DataclassesDataclass
173
- ):
174
- msg = (
175
- '`--output-datetime-class` only allows "datetime" for '
176
- f"`--output-model-type` {DataModelType.DataclassesDataclass.value}"
177
- )
178
- raise Error(msg)
179
- return values
180
-
181
139
  # Pydantic 1.5.1 doesn't support each_item=True correctly
182
140
  @field_validator("http_headers", mode="before")
183
141
  def validate_http_headers(cls, value: Any) -> list[tuple[str, str]] | None: # noqa: N805
@@ -225,18 +183,104 @@ class Config(BaseModel):
225
183
  values["custom_formatters"] = custom_formatters.split(",")
226
184
  return values
227
185
 
186
+ __validate_output_datetime_class_err: ClassVar[str] = (
187
+ '`--output-datetime-class` only allows "datetime" for '
188
+ f"`--output-model-type` {DataModelType.DataclassesDataclass.value}"
189
+ )
190
+
191
+ __validate_original_field_name_delimiter_err: ClassVar[str] = (
192
+ "`--original-field-name-delimiter` can not be used without `--snake-case-field`."
193
+ )
194
+
195
+ __validate_custom_file_header_err: ClassVar[str] = (
196
+ "`--custom_file_header_path` can not be used with `--custom_file_header`."
197
+ )
198
+ __validate_keyword_only_err: ClassVar[str] = (
199
+ f"`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher."
200
+ )
201
+
228
202
  if PYDANTIC_V2:
229
203
 
230
204
  @model_validator() # pyright: ignore[reportArgumentType]
231
- def validate_root(self: Self) -> Self:
205
+ def validate_output_datetime_class(self: Self) -> Self: # pyright: ignore[reportRedeclaration]
206
+ datetime_class_type: DatetimeClassType | None = self.output_datetime_class
207
+ if (
208
+ datetime_class_type
209
+ and datetime_class_type is not DatetimeClassType.Datetime
210
+ and self.output_model_type == DataModelType.DataclassesDataclass
211
+ ):
212
+ raise Error(self.__validate_output_datetime_class_err)
213
+ return self
214
+
215
+ @model_validator() # pyright: ignore[reportArgumentType]
216
+ def validate_original_field_name_delimiter(self: Self) -> Self: # pyright: ignore[reportRedeclaration]
217
+ if self.original_field_name_delimiter is not None and not self.snake_case_field:
218
+ raise Error(self.__validate_original_field_name_delimiter_err)
219
+ return self
220
+
221
+ @model_validator() # pyright: ignore[reportArgumentType]
222
+ def validate_custom_file_header(self: Self) -> Self: # pyright: ignore[reportRedeclaration]
223
+ if self.custom_file_header and self.custom_file_header_path:
224
+ raise Error(self.__validate_custom_file_header_err)
225
+ return self
226
+
227
+ @model_validator() # pyright: ignore[reportArgumentType]
228
+ def validate_keyword_only(self: Self) -> Self: # pyright: ignore[reportRedeclaration]
229
+ output_model_type: DataModelType = self.output_model_type
230
+ python_target: PythonVersion = self.target_python_version
231
+ if (
232
+ self.keyword_only
233
+ and output_model_type == DataModelType.DataclassesDataclass
234
+ and not python_target.has_kw_only_dataclass
235
+ ):
236
+ raise Error(self.__validate_keyword_only_err)
237
+ return self
238
+
239
+ @model_validator() # pyright: ignore[reportArgumentType]
240
+ def validate_root(self: Self) -> Self: # pyright: ignore[reportRedeclaration]
232
241
  if self.use_annotated:
233
242
  self.field_constraints = True
234
243
  return self
235
244
 
236
245
  else:
237
246
 
238
- @model_validator()
239
- def validate_root(cls, values: Any) -> Any: # noqa: N805
247
+ @model_validator() # pyright: ignore[reportArgumentType]
248
+ def validate_output_datetime_class(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
249
+ datetime_class_type: DatetimeClassType | None = values.get("output_datetime_class")
250
+ if (
251
+ datetime_class_type
252
+ and datetime_class_type is not DatetimeClassType.Datetime
253
+ and values.get("output_model_type") == DataModelType.DataclassesDataclass
254
+ ):
255
+ raise Error(cls.__validate_output_datetime_class_err)
256
+ return values
257
+
258
+ @model_validator() # pyright: ignore[reportArgumentType]
259
+ def validate_original_field_name_delimiter(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
260
+ if values.get("original_field_name_delimiter") is not None and not values.get("snake_case_field"):
261
+ raise Error(cls.__validate_original_field_name_delimiter_err)
262
+ return values
263
+
264
+ @model_validator() # pyright: ignore[reportArgumentType]
265
+ def validate_custom_file_header(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
266
+ if values.get("custom_file_header") and values.get("custom_file_header_path"):
267
+ raise Error(cls.__validate_custom_file_header_err)
268
+ return values
269
+
270
+ @model_validator() # pyright: ignore[reportArgumentType]
271
+ def validate_keyword_only(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
272
+ output_model_type: DataModelType = cast("DataModelType", values.get("output_model_type"))
273
+ python_target: PythonVersion = cast("PythonVersion", values.get("target_python_version"))
274
+ if (
275
+ values.get("keyword_only")
276
+ and output_model_type == DataModelType.DataclassesDataclass
277
+ and not python_target.has_kw_only_dataclass
278
+ ):
279
+ raise Error(cls.__validate_keyword_only_err)
280
+ return values
281
+
282
+ @model_validator() # pyright: ignore[reportArgumentType]
283
+ def validate_root(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
240
284
  if values.get("use_annotated"):
241
285
  values["field_constraints"] = True
242
286
  return values
@@ -287,6 +331,7 @@ class Config(BaseModel):
287
331
  field_include_all_keys: bool = False
288
332
  field_extra_keys_without_x_prefix: Optional[set[str]] = None # noqa: UP045
289
333
  openapi_scopes: Optional[list[OpenAPIScope]] = [OpenAPIScope.Schemas] # noqa: UP045
334
+ include_path_parameters: bool = False
290
335
  wrap_string_literal: Optional[bool] = None # noqa: UP045
291
336
  use_title_as_name: bool = False
292
337
  use_operation_id_as_name: bool = False
@@ -500,6 +545,7 @@ def main(args: Sequence[str] | None = None) -> Exit: # noqa: PLR0911, PLR0912,
500
545
  field_include_all_keys=config.field_include_all_keys,
501
546
  field_extra_keys_without_x_prefix=config.field_extra_keys_without_x_prefix,
502
547
  openapi_scopes=config.openapi_scopes,
548
+ include_path_parameters=config.include_path_parameters,
503
549
  wrap_string_literal=config.wrap_string_literal,
504
550
  use_title_as_name=config.use_title_as_name,
505
551
  use_operation_id_as_name=config.use_operation_id_as_name,
@@ -437,7 +437,11 @@ template_options.add_argument(
437
437
  )
438
438
  template_options.add_argument(
439
439
  "--extra-template-data",
440
- help="Extra template data",
440
+ help="Extra template data for output models. Input is supposed to be a json/yaml file. "
441
+ "For OpenAPI and Jsonschema the keys are the spec path of the object, or the name of the object if you want to "
442
+ "apply the template data to multiple objects with the same name. "
443
+ "If you are using another input file type (e.g. GraphQL), the key is the name of the object. "
444
+ "The value is a dictionary of the template data to add.",
441
445
  type=FileType("rt"),
442
446
  )
443
447
  template_options.add_argument(
@@ -500,6 +504,12 @@ openapi_options.add_argument(
500
504
  action="store_true",
501
505
  default=None,
502
506
  )
507
+ openapi_options.add_argument(
508
+ "--include-path-parameters",
509
+ help="Include path parameters in generated parameter models in addition to query parameters (Only OpenAPI)",
510
+ action="store_true",
511
+ default=None,
512
+ )
503
513
  openapi_options.add_argument(
504
514
  "--validation",
505
515
  help="Deprecated: Enable validation (Only OpenAPI). this option is deprecated. it will be removed in future "
@@ -3,7 +3,7 @@ from __future__ import annotations
3
3
  import sys
4
4
  from typing import TYPE_CHECKING, Callable, NamedTuple
5
5
 
6
- from datamodel_code_generator import DatetimeClassType, PythonVersion
6
+ from datamodel_code_generator import PythonVersion
7
7
 
8
8
  from .base import ConstraintsBase, DataModel, DataModelFieldBase
9
9
 
@@ -13,7 +13,6 @@ if TYPE_CHECKING:
13
13
  from datamodel_code_generator import DataModelType
14
14
  from datamodel_code_generator.types import DataTypeManager as DataTypeManagerABC
15
15
 
16
- DEFAULT_TARGET_DATETIME_CLASS = DatetimeClassType.Datetime
17
16
  DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(f"{sys.version_info.major}.{sys.version_info.minor}")
18
17
 
19
18
 
@@ -29,15 +28,12 @@ class DataModelSet(NamedTuple):
29
28
  def get_data_model_types(
30
29
  data_model_type: DataModelType,
31
30
  target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
32
- target_datetime_class: DatetimeClassType | None = None,
33
31
  ) -> DataModelSet:
34
32
  from datamodel_code_generator import DataModelType # noqa: PLC0415
35
33
 
36
34
  from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict # noqa: PLC0415
37
35
  from .types import DataTypeManager # noqa: PLC0415
38
36
 
39
- if target_datetime_class is None:
40
- target_datetime_class = DEFAULT_TARGET_DATETIME_CLASS
41
37
  if data_model_type == DataModelType.PydanticBaseModel:
42
38
  return DataModelSet(
43
39
  data_model=pydantic.BaseModel,
@@ -298,11 +298,18 @@ class DataModel(TemplateBase, Nullable, ABC):
298
298
 
299
299
  self.reference.source = self
300
300
 
301
- self.extra_template_data = (
301
+ if extra_template_data is not None:
302
302
  # The supplied defaultdict will either create a new entry,
303
303
  # or already contain a predefined entry for this type
304
- extra_template_data[self.name] if extra_template_data is not None else defaultdict(dict)
305
- )
304
+ self.extra_template_data = extra_template_data[self.reference.path]
305
+
306
+ # We use the full object reference path as dictionary key, but
307
+ # we still support `name` as key because it was used for
308
+ # `--extra-template-data` input file and we don't want to break the
309
+ # existing behavior.
310
+ self.extra_template_data.update(extra_template_data[self.name])
311
+ else:
312
+ self.extra_template_data = defaultdict(dict)
306
313
 
307
314
  self.fields = self._validate_fields(fields) if fields else []
308
315
 
@@ -288,7 +288,7 @@ class DataTypeManager(_DataTypeManager):
288
288
  use_non_positive_negative_number_constrained_types: bool = False, # noqa: FBT001, FBT002
289
289
  use_union_operator: bool = False, # noqa: FBT001, FBT002
290
290
  use_pendulum: bool = False, # noqa: FBT001, FBT002
291
- target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
291
+ target_datetime_class: DatetimeClassType | None = None,
292
292
  treat_dot_as_module: bool = False, # noqa: FBT001, FBT002
293
293
  ) -> None:
294
294
  super().__init__(
@@ -62,7 +62,6 @@ def type_map_factory(
62
62
  strict_types: Sequence[StrictTypes],
63
63
  pattern_key: str,
64
64
  use_pendulum: bool, # noqa: FBT001
65
- target_datetime_class: DatetimeClassType, # noqa: ARG001
66
65
  ) -> dict[Types, DataType]:
67
66
  data_type_int = data_type(type="int")
68
67
  data_type_float = data_type(type="float")
@@ -141,7 +140,7 @@ number_kwargs: set[str] = {
141
140
 
142
141
  string_kwargs: set[str] = {"minItems", "maxItems", "minLength", "maxLength", "pattern"}
143
142
 
144
- byes_kwargs: set[str] = {"minLength", "maxLength"}
143
+ bytes_kwargs: set[str] = {"minLength", "maxLength"}
145
144
 
146
145
  escape_characters = str.maketrans({
147
146
  "'": r"\'",
@@ -208,14 +207,13 @@ class DataTypeManager(_DataTypeManager):
208
207
  data_type: type[DataType],
209
208
  strict_types: Sequence[StrictTypes],
210
209
  pattern_key: str,
211
- target_datetime_class: DatetimeClassType, # noqa: ARG002
210
+ target_datetime_class: DatetimeClassType | None, # noqa: ARG002
212
211
  ) -> dict[Types, DataType]:
213
212
  return type_map_factory(
214
213
  data_type,
215
214
  strict_types,
216
215
  pattern_key,
217
216
  self.use_pendulum,
218
- self.target_datetime_class,
219
217
  )
220
218
 
221
219
  def transform_kwargs(self, kwargs: dict[str, Any], filter_: set[str]) -> dict[str, str]:
@@ -296,7 +294,7 @@ class DataTypeManager(_DataTypeManager):
296
294
  return self.type_map[types]
297
295
 
298
296
  def get_data_bytes_type(self, types: Types, **kwargs: Any) -> DataType:
299
- data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, byes_kwargs)
297
+ data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, bytes_kwargs)
300
298
  strict = StrictTypes.bytes in self.strict_types
301
299
  if data_type_kwargs and not strict:
302
300
  return self.data_type.from_import(IMPORT_CONBYTES, kwargs=data_type_kwargs)
@@ -45,6 +45,6 @@ class DataTypeManager(_DataTypeManager):
45
45
  }
46
46
  if target_datetime_class == DatetimeClassType.Awaredatetime:
47
47
  result[Types.date_time] = data_type.from_import(IMPORT_AWARE_DATETIME)
48
- if target_datetime_class == DatetimeClassType.Naivedatetime:
48
+ elif target_datetime_class == DatetimeClassType.Naivedatetime:
49
49
  result[Types.date_time] = data_type.from_import(IMPORT_NAIVE_DATETIME)
50
50
  return result
@@ -1,5 +1,5 @@
1
1
  {%- if description %}
2
- # {{ description }}
2
+ # {{ description | replace('\n', '\n# ') }}
3
3
  {%- endif %}
4
4
  {%- if fields|length > 1 %}
5
5
  {{ class_name }}: TypeAlias = Union[
@@ -67,7 +67,7 @@ class DataTypeManager(_DataTypeManager):
67
67
  use_non_positive_negative_number_constrained_types: bool = False, # noqa: FBT001, FBT002
68
68
  use_union_operator: bool = False, # noqa: FBT001, FBT002
69
69
  use_pendulum: bool = False, # noqa: FBT001, FBT002
70
- target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
70
+ target_datetime_class: DatetimeClassType | None = None,
71
71
  treat_dot_as_module: bool = False, # noqa: FBT001, FBT002
72
72
  ) -> None:
73
73
  super().__init__(
@@ -376,7 +376,7 @@ class Parser(ABC):
376
376
  treat_dot_as_module: bool = False,
377
377
  use_exact_imports: bool = False,
378
378
  default_field_extras: dict[str, Any] | None = None,
379
- target_datetime_class: DatetimeClassType | None = DatetimeClassType.Datetime,
379
+ target_datetime_class: DatetimeClassType | None = None,
380
380
  keyword_only: bool = False,
381
381
  frozen_dataclasses: bool = False,
382
382
  no_alias: bool = False,
@@ -389,6 +389,7 @@ class Parser(ABC):
389
389
  python_version=target_python_version,
390
390
  use_standard_collections=use_standard_collections,
391
391
  use_generic_container_types=use_generic_container_types,
392
+ use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
392
393
  strict_types=strict_types,
393
394
  use_union_operator=use_union_operator,
394
395
  use_pendulum=use_pendulum,
@@ -934,6 +935,8 @@ class Parser(ABC):
934
935
  for data_type in model_field.data_type.all_data_types:
935
936
  reference = data_type.reference
936
937
  if not reference or not isinstance(reference.source, self.data_model_root_type):
938
+ # If the data type is not a reference, we can't collapse it.
939
+ # If it's a reference to a root model type, we don't do anything.
937
940
  continue
938
941
 
939
942
  # Use root-type as model_field type
@@ -948,6 +951,10 @@ class Parser(ABC):
948
951
  ):
949
952
  continue # pragma: no cover
950
953
 
954
+ if root_type_field.data_type.reference:
955
+ # If the root type field is a reference, we aren't able to collapse it yet.
956
+ continue
957
+
951
958
  # set copied data_type
952
959
  copied_data_type = root_type_field.data_type.copy()
953
960
  if isinstance(data_type.parent, self.data_model_field_type):
@@ -308,6 +308,7 @@ class GraphQLParser(Parser):
308
308
  required=False,
309
309
  alias="__typename",
310
310
  use_one_literal_as_default=True,
311
+ use_default_kwarg=self.use_default_kwarg,
311
312
  has_default=True,
312
313
  )
313
314
 
@@ -211,6 +211,33 @@ class JsonSchemaObject(BaseModel):
211
211
  return value.replace("#", "#/")
212
212
  return value
213
213
 
214
+ @field_validator("required", mode="before")
215
+ def validate_required(cls, value: Any) -> Any: # noqa: N805
216
+ if value is None:
217
+ return []
218
+ if isinstance(value, list): # noqa: PLR1702
219
+ # Filter to only include valid strings, excluding invalid objects
220
+ required_fields: list[str] = []
221
+ for item in value:
222
+ if isinstance(item, str):
223
+ required_fields.append(item)
224
+
225
+ # In some cases, the required field can include "anyOf", "oneOf", or "allOf" as a dict (#2297)
226
+ elif isinstance(item, dict):
227
+ for key, val in item.items():
228
+ if isinstance(val, list):
229
+ # If 'anyOf' or "oneOf" is present, we won't include it in required fields
230
+ if key in {"anyOf", "oneOf"}:
231
+ continue
232
+
233
+ if key == "allOf":
234
+ # If 'allOf' is present, we include them as required fields
235
+ required_fields.extend(sub_item for sub_item in val if isinstance(sub_item, str))
236
+
237
+ value = required_fields
238
+
239
+ return value
240
+
214
241
  items: Optional[Union[list[JsonSchemaObject], JsonSchemaObject, bool]] = None # noqa: UP007, UP045
215
242
  uniqueItems: Optional[bool] = None # noqa: N815, UP045
216
243
  type: Optional[Union[str, list[str]]] = None # noqa: UP007, UP045
@@ -431,13 +458,14 @@ class JsonSchemaParser(Parser):
431
458
  treat_dot_as_module: bool = False,
432
459
  use_exact_imports: bool = False,
433
460
  default_field_extras: dict[str, Any] | None = None,
434
- target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
461
+ target_datetime_class: DatetimeClassType | None = None,
435
462
  keyword_only: bool = False,
436
463
  frozen_dataclasses: bool = False,
437
464
  no_alias: bool = False,
438
465
  formatters: list[Formatter] = DEFAULT_FORMATTERS,
439
466
  parent_scoped_naming: bool = False,
440
467
  ) -> None:
468
+ target_datetime_class = target_datetime_class or DatetimeClassType.Awaredatetime
441
469
  super().__init__(
442
470
  source=source,
443
471
  data_model_type=data_model_type,
@@ -626,13 +654,13 @@ class JsonSchemaParser(Parser):
626
654
  reference = self.model_resolver.add_ref(ref)
627
655
  return self.data_type(reference=reference)
628
656
 
629
- def set_additional_properties(self, name: str, obj: JsonSchemaObject) -> None:
657
+ def set_additional_properties(self, path: str, obj: JsonSchemaObject) -> None:
630
658
  if isinstance(obj.additionalProperties, bool):
631
- self.extra_template_data[name]["additionalProperties"] = obj.additionalProperties
659
+ self.extra_template_data[path]["additionalProperties"] = obj.additionalProperties
632
660
 
633
- def set_title(self, name: str, obj: JsonSchemaObject) -> None:
661
+ def set_title(self, path: str, obj: JsonSchemaObject) -> None:
634
662
  if obj.title:
635
- self.extra_template_data[name]["title"] = obj.title
663
+ self.extra_template_data[path]["title"] = obj.title
636
664
 
637
665
  def _deep_merge(self, dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]:
638
666
  result = dict1.copy()
@@ -754,7 +782,7 @@ class JsonSchemaParser(Parser):
754
782
  if self.use_title_as_name and obj.title: # pragma: no cover
755
783
  name = obj.title
756
784
  reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
757
- self.set_additional_properties(reference.name, obj)
785
+ self.set_additional_properties(reference.path, obj)
758
786
 
759
787
  data_model_type = self._create_data_model(
760
788
  reference=reference,
@@ -965,7 +993,7 @@ class JsonSchemaParser(Parser):
965
993
  loaded=True,
966
994
  )
967
995
  class_name = reference.name
968
- self.set_title(class_name, obj)
996
+ self.set_title(reference.path, obj)
969
997
  fields = self.parse_object_fields(
970
998
  obj, path, get_module_name(class_name, None, treat_dot_as_module=self.treat_dot_as_module)
971
999
  )
@@ -994,7 +1022,7 @@ class JsonSchemaParser(Parser):
994
1022
  )
995
1023
  data_model_type_class = self.data_model_root_type
996
1024
 
997
- self.set_additional_properties(class_name, obj)
1025
+ self.set_additional_properties(reference.path, obj)
998
1026
 
999
1027
  data_model_type = self._create_data_model(
1000
1028
  model_type=data_model_type_class,
@@ -1277,8 +1305,8 @@ class JsonSchemaParser(Parser):
1277
1305
  name = obj.title
1278
1306
  if not reference:
1279
1307
  reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
1280
- self.set_title(name, obj)
1281
- self.set_additional_properties(name, obj)
1308
+ self.set_title(reference.path, obj)
1309
+ self.set_additional_properties(reference.path, obj)
1282
1310
  data_model_root_type = self.data_model_root_type(
1283
1311
  reference=reference,
1284
1312
  fields=[
@@ -1376,6 +1404,7 @@ class JsonSchemaParser(Parser):
1376
1404
  custom_template_dir=self.custom_template_dir,
1377
1405
  type_=_get_type(obj.type, obj.format) if self.use_subclass_enum and isinstance(obj.type, str) else None,
1378
1406
  default=obj.default if obj.has_default else UNDEFINED,
1407
+ treat_dot_as_module=self.treat_dot_as_module,
1379
1408
  )
1380
1409
  self.results.append(enum)
1381
1410
  return self.data_type(reference=reference_)
@@ -1552,7 +1581,7 @@ class JsonSchemaParser(Parser):
1552
1581
  raw: dict[str, Any],
1553
1582
  path: list[str],
1554
1583
  ) -> None:
1555
- self.parse_obj(name, self.SCHEMA_OBJECT_TYPE.parse_obj(raw), path)
1584
+ self.parse_obj(name, self.SCHEMA_OBJECT_TYPE.model_validate(raw), path)
1556
1585
 
1557
1586
  def parse_obj(
1558
1587
  self,
@@ -189,6 +189,7 @@ class OpenAPIParser(JsonSchemaParser):
189
189
  field_include_all_keys: bool = False,
190
190
  field_extra_keys_without_x_prefix: set[str] | None = None,
191
191
  openapi_scopes: list[OpenAPIScope] | None = None,
192
+ include_path_parameters: bool = False,
192
193
  wrap_string_literal: bool | None = False,
193
194
  use_title_as_name: bool = False,
194
195
  use_operation_id_as_name: bool = False,
@@ -214,13 +215,14 @@ class OpenAPIParser(JsonSchemaParser):
214
215
  treat_dot_as_module: bool = False,
215
216
  use_exact_imports: bool = False,
216
217
  default_field_extras: dict[str, Any] | None = None,
217
- target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
218
+ target_datetime_class: DatetimeClassType | None = None,
218
219
  keyword_only: bool = False,
219
220
  frozen_dataclasses: bool = False,
220
221
  no_alias: bool = False,
221
222
  formatters: list[Formatter] = DEFAULT_FORMATTERS,
222
223
  parent_scoped_naming: bool = False,
223
224
  ) -> None:
225
+ target_datetime_class = target_datetime_class or DatetimeClassType.Awaredatetime
224
226
  super().__init__(
225
227
  source=source,
226
228
  data_model_type=data_model_type,
@@ -299,6 +301,7 @@ class OpenAPIParser(JsonSchemaParser):
299
301
  parent_scoped_naming=parent_scoped_naming,
300
302
  )
301
303
  self.open_api_scopes: list[OpenAPIScope] = openapi_scopes or [OpenAPIScope.Schemas]
304
+ self.include_path_parameters: bool = include_path_parameters
302
305
 
303
306
  def get_ref_model(self, ref: str) -> dict[str, Any]:
304
307
  ref_file, ref_path = self.model_resolver.resolve_ref(ref).split("#", 1)
@@ -351,13 +354,17 @@ class OpenAPIParser(JsonSchemaParser):
351
354
  name: str,
352
355
  request_body: RequestBodyObject,
353
356
  path: list[str],
354
- ) -> None:
357
+ ) -> dict[str, DataType]:
358
+ data_types: dict[str, DataType] = {}
355
359
  for (
356
360
  media_type,
357
361
  media_obj,
358
362
  ) in request_body.content.items():
359
363
  if isinstance(media_obj.schema_, JsonSchemaObject):
360
- self.parse_schema(name, media_obj.schema_, [*path, media_type])
364
+ data_types[media_type] = self.parse_schema(name, media_obj.schema_, [*path, media_type])
365
+ elif media_obj.schema_ is not None:
366
+ data_types[media_type] = self.get_ref_data_type(media_obj.schema_.ref)
367
+ return data_types
361
368
 
362
369
  def parse_responses(
363
370
  self,
@@ -414,15 +421,24 @@ class OpenAPIParser(JsonSchemaParser):
414
421
  name: str,
415
422
  parameters: list[ReferenceObject | ParameterObject],
416
423
  path: list[str],
417
- ) -> None:
424
+ ) -> DataType | None:
418
425
  fields: list[DataModelFieldBase] = []
419
426
  exclude_field_names: set[str] = set()
420
427
  reference = self.model_resolver.add(path, name, class_name=True, unique=True)
421
428
  for parameter_ in parameters:
422
429
  parameter = self.resolve_object(parameter_, ParameterObject)
423
430
  parameter_name = parameter.name
424
- if not parameter_name or parameter.in_ != ParameterLocation.query:
431
+ if (
432
+ not parameter_name
433
+ or parameter.in_ not in {ParameterLocation.query, ParameterLocation.path}
434
+ or (parameter.in_ == ParameterLocation.path and not self.include_path_parameters)
435
+ ):
425
436
  continue
437
+
438
+ if any(field.original_name == parameter_name for field in fields):
439
+ msg = f"Parameter name '{parameter_name}' is used more than once."
440
+ raise Exception(msg) # noqa: TRY002
441
+
426
442
  field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
427
443
  field_name=parameter_name, excludes=exclude_field_names
428
444
  )
@@ -500,6 +516,9 @@ class OpenAPIParser(JsonSchemaParser):
500
516
  treat_dot_as_module=self.treat_dot_as_module,
501
517
  )
502
518
  )
519
+ return self.data_type(reference=reference)
520
+
521
+ return None
503
522
 
504
523
  def parse_operation(
505
524
  self,
@@ -518,7 +537,9 @@ class OpenAPIParser(JsonSchemaParser):
518
537
  path_name = operation.operationId
519
538
  method = ""
520
539
  self.parse_all_parameters(
521
- self._get_model_name(path_name, method, suffix="ParametersQuery"),
540
+ self._get_model_name(
541
+ path_name, method, suffix="Parameters" if self.include_path_parameters else "ParametersQuery"
542
+ ),
522
543
  operation.parameters,
523
544
  [*path, "parameters"],
524
545
  )
@@ -602,7 +602,7 @@ class DataTypeManager(ABC):
602
602
  )
603
603
  self.use_union_operator: bool = use_union_operator
604
604
  self.use_pendulum: bool = use_pendulum
605
- self.target_datetime_class: DatetimeClassType = target_datetime_class or DatetimeClassType.Datetime
605
+ self.target_datetime_class: DatetimeClassType | None = target_datetime_class
606
606
  self.treat_dot_as_module: bool = treat_dot_as_module
607
607
 
608
608
  if TYPE_CHECKING:
@@ -1,38 +1,33 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import copy
4
- from typing import TYPE_CHECKING, Any, Callable, TypeVar
4
+ from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, overload
5
5
 
6
6
  import pydantic
7
7
  from packaging import version
8
8
  from pydantic import BaseModel as _BaseModel
9
9
 
10
+ if TYPE_CHECKING:
11
+ from pathlib import Path
12
+
10
13
  PYDANTIC_VERSION = version.parse(pydantic.VERSION if isinstance(pydantic.VERSION, str) else str(pydantic.VERSION))
11
14
 
12
15
  PYDANTIC_V2: bool = version.parse("2.0b3") <= PYDANTIC_VERSION
13
16
 
14
- if TYPE_CHECKING:
15
- from pathlib import Path
16
- from typing import Literal
17
-
17
+ try:
18
+ from yaml import CSafeLoader as SafeLoader
19
+ except ImportError: # pragma: no cover
18
20
  from yaml import SafeLoader
19
21
 
20
- def load_toml(path: Path) -> dict[str, Any]: ...
22
+ try:
23
+ from tomllib import load as load_tomllib # type: ignore[ignoreMissingImports]
24
+ except ImportError:
25
+ from tomli import load as load_tomllib # type: ignore[ignoreMissingImports]
21
26
 
22
- else:
23
- try:
24
- from yaml import CSafeLoader as SafeLoader
25
- except ImportError: # pragma: no cover
26
- from yaml import SafeLoader
27
-
28
- try:
29
- from tomllib import load as load_tomllib
30
- except ImportError:
31
- from tomli import load as load_tomllib
32
27
 
33
- def load_toml(path: Path) -> dict[str, Any]:
34
- with path.open("rb") as f:
35
- return load_tomllib(f)
28
+ def load_toml(path: Path) -> dict[str, Any]:
29
+ with path.open("rb") as f:
30
+ return load_tomllib(f)
36
31
 
37
32
 
38
33
  SafeLoaderTemp = copy.deepcopy(SafeLoader)
@@ -44,16 +39,70 @@ SafeLoaderTemp.add_constructor(
44
39
  SafeLoader = SafeLoaderTemp
45
40
 
46
41
  Model = TypeVar("Model", bound=_BaseModel)
42
+ T = TypeVar("T")
43
+
44
+
45
+ @overload
46
+ def model_validator(
47
+ mode: Literal["before"],
48
+ ) -> (
49
+ Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
50
+ | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
51
+ ): ...
47
52
 
48
53
 
54
+ @overload
49
55
  def model_validator(
56
+ mode: Literal["after"],
57
+ ) -> (
58
+ Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
59
+ | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
60
+ | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
61
+ ): ...
62
+
63
+
64
+ @overload
65
+ def model_validator() -> (
66
+ Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
67
+ | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
68
+ | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
69
+ ): ...
70
+
71
+
72
+ def model_validator( # pyright: ignore[reportInconsistentOverload]
50
73
  mode: Literal["before", "after"] = "after",
51
- ) -> Callable[[Callable[[Model, Any], Any]], Callable[[Model, Any], Any]]:
52
- def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
74
+ ) -> (
75
+ Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
76
+ | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
77
+ | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
78
+ ):
79
+ """
80
+ Decorator for model validators in Pydantic models.
81
+
82
+ Uses `model_validator` in Pydantic v2 and `root_validator` in Pydantic v1.
83
+
84
+ We support only `before` mode because `after` mode needs different validator
85
+ implementation for v1 and v2.
86
+ """
87
+
88
+ @overload
89
+ def inner(method: Callable[[type[Model], T], T]) -> Callable[[type[Model], T], T]: ...
90
+
91
+ @overload
92
+ def inner(method: Callable[[Model, T], T]) -> Callable[[Model, T], T]: ...
93
+
94
+ @overload
95
+ def inner(method: Callable[[Model], Model]) -> Callable[[Model], Model]: ...
96
+
97
+ def inner(
98
+ method: Callable[[type[Model], T], T] | Callable[[Model, T], T] | Callable[[Model], Model],
99
+ ) -> Callable[[type[Model], T], T] | Callable[[Model, T], T] | Callable[[Model], Model]:
53
100
  if PYDANTIC_V2:
54
101
  from pydantic import model_validator as model_validator_v2 # noqa: PLC0415
55
102
 
56
- return model_validator_v2(mode=mode)(method) # pyright: ignore[reportReturnType]
103
+ if method == "before":
104
+ return model_validator_v2(mode=mode)(classmethod(method)) # type: ignore[reportReturnType]
105
+ return model_validator_v2(mode=mode)(method) # type: ignore[reportReturnType]
57
106
  from pydantic import root_validator # noqa: PLC0415
58
107
 
59
108
  return root_validator(method, pre=mode == "before") # pyright: ignore[reportCallIssue]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datamodel-code-generator
3
- Version: 0.31.2
3
+ Version: 0.33.0
4
4
  Summary: Datamodel Code Generator
5
5
  Project-URL: Homepage, https://github.com/koxudaxi/datamodel-code-generator
6
6
  Project-URL: Source, https://github.com/koxudaxi/datamodel-code-generator
@@ -543,7 +543,12 @@ Template customization:
543
543
  Custom template directory
544
544
  --encoding ENCODING The encoding of input and output (default: utf-8)
545
545
  --extra-template-data EXTRA_TEMPLATE_DATA
546
- Extra template data
546
+ Extra template data for output models. Input is supposed to be a
547
+ json/yaml file. For OpenAPI and Jsonschema the keys are the spec
548
+ path of the object, or the name of the object if you want to apply
549
+ the template data to multiple objects with the same name. If you are
550
+ using another input file type (e.g. GraphQL), the key is the name of
551
+ the object. The value is a dictionary of the template data to add.
547
552
  --use-double-quotes Model generated with double quotes. Single quotes or your black
548
553
  config skip_string_normalization value will be used without this
549
554
  option.
@@ -552,6 +557,9 @@ Template customization:
552
557
  option (require black 20.8b0 or later)
553
558
 
554
559
  OpenAPI-only options:
560
+ --include-path-parameters
561
+ Include path parameters in generated parameter models in addition to
562
+ query parameters (Only OpenAPI)
555
563
  --openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]
556
564
  Scopes of OpenAPI model generation (default: schemas)
557
565
  --strict-nullable Treat default field as a non-nullable field (Only OpenAPI)
@@ -1,42 +1,42 @@
1
- datamodel_code_generator/__init__.py,sha256=TO6OUS0RCHi22nEZ7IeGKbqZHEC1XjciEG3PkLIkMvY,20698
2
- datamodel_code_generator/__main__.py,sha256=Z9g_NGrnxV63MRuqtUUpPW48felXmVxwGBtWWkgNlj4,22803
3
- datamodel_code_generator/arguments.py,sha256=3GrwOmcqOpBIuZgkg0_3AFO1iKH9Mj0MryQ4PqJmThY,17147
1
+ datamodel_code_generator/__init__.py,sha256=y2SP5bJH_RsykmqaXt0MfxS1uqyMLqLvVb6fYLMYswY,21243
2
+ datamodel_code_generator/__main__.py,sha256=kmT3ith19I5Mia9VX7z4W66e8g1SznxOU4pL6aK5m2I,25449
3
+ datamodel_code_generator/arguments.py,sha256=In_PyAU_jB6NGtkuP-3fTra0BEpFWocTG_ososRVPrM,17778
4
4
  datamodel_code_generator/format.py,sha256=ZlnTCAl1H4og685smvCBSzexgpYbZtyYLIrt7lwUNcY,8934
5
5
  datamodel_code_generator/http.py,sha256=LE94GC7I9D8lWIg_YAGWedfy0XNxOXTmiYKuNMTwouo,887
6
6
  datamodel_code_generator/imports.py,sha256=Nq83WbEGCegntg3WX4VbKfzAIs84alZ7IrYyNPrlUbc,5517
7
7
  datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
8
  datamodel_code_generator/pydantic_patch.py,sha256=co1IUDvZqQ-xEZ3C9gbV-BVm2Cin1vfyZNr2Dr0LdHY,718
9
9
  datamodel_code_generator/reference.py,sha256=OobfjN5hWaKzv4ECdCPc9Q3ODkoG93B4qaKlzDEcDrY,26748
10
- datamodel_code_generator/types.py,sha256=1g4RZZ1e5oC4EHaNyyDmsIfF4o1WAjQ2KaLjwc9mYss,21883
11
- datamodel_code_generator/util.py,sha256=mZW8-6CbFe6T4IY5OM9Av6cH-0VknQGe2eIKjTM6Jzo,2729
12
- datamodel_code_generator/model/__init__.py,sha256=pJlJ1juQ-Gv17ZKXy6OAfJSSoOAmYQ7QCbdneu1BENU,3594
13
- datamodel_code_generator/model/base.py,sha256=ZQ3Xy4Fs_I8M01tk1ps0EhWJM9vR-n7umPkz5NmHTjw,15050
10
+ datamodel_code_generator/types.py,sha256=Ofr3QpcJ1E-7ZqJkAglW55zx3YpapN6MapUWoynVkiU,21860
11
+ datamodel_code_generator/util.py,sha256=mi5jwvMNjzIS_EuL4jikcO3WPViBK_oQRAw99db0T48,4465
12
+ datamodel_code_generator/model/__init__.py,sha256=qbldMWjMiDhOphB6n75k3v42SLU17OlCuJQeTPOvgts,3356
13
+ datamodel_code_generator/model/base.py,sha256=KaotBuXbid4KMYglaxDBd5rJKaFwI2WoForNKrb57sc,15431
14
14
  datamodel_code_generator/model/dataclass.py,sha256=8Z02XY3S6byNe9Pb46LisE5opQcvpx8FVvPjUrlAacE,6309
15
15
  datamodel_code_generator/model/enum.py,sha256=yriQslY1hag_Qk-Xv3vl_LkPnbmMZ3iRTAGiiyMN0Io,4003
16
16
  datamodel_code_generator/model/imports.py,sha256=PTc09UzIBSsa5yAPoieb6hCGIohU2T1Y7igNy_pYarg,820
17
- datamodel_code_generator/model/msgspec.py,sha256=qL2DIEwBfpn-vd8p8KEmUViMUce6RgI4Ql-drOmPR7M,11845
17
+ datamodel_code_generator/model/msgspec.py,sha256=gTb9H9Dc13nXOYJDYPGO889Y5A4zBKKJOdpzrXz09JU,11830
18
18
  datamodel_code_generator/model/rootmodel.py,sha256=pY8G2SPjkafzfJ1L9P5sNdp8qe45UclpUYN86guRB3M,202
19
19
  datamodel_code_generator/model/scalar.py,sha256=xfONEK30eYJ2mSL9PK9zXqEG5-xApYMI_gmKOn5qhK4,2664
20
20
  datamodel_code_generator/model/typed_dict.py,sha256=FJi_fEZWuFe3nvidfl-jqr9PMRFTvfusoEFQkx1BqHI,4685
21
- datamodel_code_generator/model/types.py,sha256=ZyEwi76EBI5RS5JfoNUoRHmjOAZDup8oNFvQQDizfwQ,3502
21
+ datamodel_code_generator/model/types.py,sha256=kGnQ7SpjyovXYKwCCIPmAi3gr1HOCqiu7C9k-PnMuo0,3487
22
22
  datamodel_code_generator/model/union.py,sha256=zwq1ayGFW3KbI4SxPCcdZcrM7X4Px25IdujDedtwgOw,1929
23
23
  datamodel_code_generator/model/pydantic/__init__.py,sha256=ggJNv7_6Vv-BgY50O-0Pa6IHGavkxGAjSa9lLEmFOnE,1149
24
24
  datamodel_code_generator/model/pydantic/base_model.py,sha256=0ZsoQfjhhRTMlvsThGEsXJgYMubjrakkuOHUt5-f6HY,12492
25
25
  datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=VJpEAmGFe3TzMKrR5YvR7PJ3pfGHcYytO1zhQrWyoWg,299
26
26
  datamodel_code_generator/model/pydantic/dataclass.py,sha256=jgjkqQk71CQP4RbTcPGSEOQDNqjTQnzFavvl5LjWTBw,455
27
27
  datamodel_code_generator/model/pydantic/imports.py,sha256=nWPiLgDeYNPHcAs8M-gaUUZg1daQRHdBPpjYuX3b5u4,2225
28
- datamodel_code_generator/model/pydantic/types.py,sha256=ttTiDsQ6FV3h4C_NTEhvPUmUpeqxBNQt-DJJFpKZS8s,13356
28
+ datamodel_code_generator/model/pydantic/types.py,sha256=NAtbr61vwrjPQ4BQWHanWwL8U-di4kbJS6O5VIaOpNk,13263
29
29
  datamodel_code_generator/model/pydantic_v2/__init__.py,sha256=YHfOs8prPBsrQmYm0kJATSyU0wafQTqKNd24e2-KGJE,1344
30
30
  datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=ztDpXrifLVGABElvAgg-h5rulgQHxDG40DQ_Nr91JHg,8723
31
31
  datamodel_code_generator/model/pydantic_v2/imports.py,sha256=K3XD2kF9YCKmo5_7b2ipV5bGUrjz0avS-SiyDMVIpF0,299
32
32
  datamodel_code_generator/model/pydantic_v2/root_model.py,sha256=H4rwtg56N65-I3QHlPvlNhDcSPV0m56KSAgfGmxYXAQ,888
33
- datamodel_code_generator/model/pydantic_v2/types.py,sha256=apEuIhCBa15WdwGA0R9cqjpiH8mTLgAgu02CjcU4Css,2064
33
+ datamodel_code_generator/model/pydantic_v2/types.py,sha256=ZEWQ9CiUX-f7bMytzih67Kk_PffizVy54KV-adjUr8Y,2066
34
34
  datamodel_code_generator/model/template/Enum.jinja2,sha256=k9lB8iQUsB94bPi8e3xJEd0AGk2ciWL-pSZuGY5kNPQ,378
35
35
  datamodel_code_generator/model/template/Scalar.jinja2,sha256=Ss22-mYG3Vez-pbqmW2zFzwxGVhXkbQcAVTMV7POpg8,104
36
36
  datamodel_code_generator/model/template/TypedDict.jinja2,sha256=J_Pe_CiuvTOb-EUCExXPaeTEFzn2keyrKB0wglZ8HgA,135
37
37
  datamodel_code_generator/model/template/TypedDictClass.jinja2,sha256=URwp5__WyR8G21Hoyc17aMzoast-NppXnXe19VFi5wQ,377
38
38
  datamodel_code_generator/model/template/TypedDictFunction.jinja2,sha256=KjSij5_w4ow4a12SR3orYOndmXGkIvJBBUN735bQ6G0,321
39
- datamodel_code_generator/model/template/Union.jinja2,sha256=sq7o--2ESUSfIL4kCfgnr5ZXPFa_VeioqbATTY-N-5I,258
39
+ datamodel_code_generator/model/template/Union.jinja2,sha256=Sx_aqvNPOr75YmbVDfgfzZiBo71DWjzqu7kz-6j8JMY,282
40
40
  datamodel_code_generator/model/template/dataclass.jinja2,sha256=c3gs1ZwDEwLpmZ2PpOEWjHjfdl6kPP64xm18mt9lZMk,1007
41
41
  datamodel_code_generator/model/template/msgspec.jinja2,sha256=qMuFOH6SFFh558wImdI6uIjG4Mtam3J_ox8Hmgqkv0g,1174
42
42
  datamodel_code_generator/model/template/root.jinja2,sha256=3OTtibxLcGA-FMdR0QDCJUJQgf_kRW0OafeCTPFSFFo,162
@@ -48,12 +48,12 @@ datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2,sha256=i1Wg
48
48
  datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2,sha256=xHvBYrh__32O1xRCSl6_u5zbyYIjB8a5k8fZiTo0spY,149
49
49
  datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2,sha256=XQBlML7Hm5hN6_AExENNvVc_yxNWijcIfTTbbmegCpE,1223
50
50
  datamodel_code_generator/parser/__init__.py,sha256=3XtFcDPocaetfjmWFqj_CubqNCDipb7vXZHsYKdJXXU,851
51
- datamodel_code_generator/parser/base.py,sha256=4B4UeEjCfl_IdRak_qi5Wx8sUx5bX8Xt-Z0CmfsWsks,62940
52
- datamodel_code_generator/parser/graphql.py,sha256=TNLxy-0wWJSpCm6HK5fKooZdaJodcwQqVr0J29ao3J8,23330
53
- datamodel_code_generator/parser/jsonschema.py,sha256=oFSZOyLL3K0tneH09t2lNKdWcbmR_r5OjCyWKPq2Jxc,71114
54
- datamodel_code_generator/parser/openapi.py,sha256=kI4kqVBG96rdAVb8z0l4gF6QpcylgVoGjlK_-KToZRg,27607
55
- datamodel_code_generator-0.31.2.dist-info/METADATA,sha256=v3R32Y4sivpjW4M-a1qvq2ug4sz2Vpb9BSdHA5pLMAM,25577
56
- datamodel_code_generator-0.31.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
57
- datamodel_code_generator-0.31.2.dist-info/entry_points.txt,sha256=cJVcHiEViQMANaoM5C1xR5hzmyCqH6hHHMpV8W00in8,77
58
- datamodel_code_generator-0.31.2.dist-info/licenses/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
59
- datamodel_code_generator-0.31.2.dist-info/RECORD,,
51
+ datamodel_code_generator/parser/base.py,sha256=YqhgQMSsrsAuVIMiGUNwJ4vMtW8up8pMI55sFepqa28,63402
52
+ datamodel_code_generator/parser/graphql.py,sha256=x5Jge8xZiaup9jMhX6jVKncme_D5FmSoEWmXIKtguVo,23384
53
+ datamodel_code_generator/parser/jsonschema.py,sha256=inLczqOz1Tl8tC-qR1hiS2SDmVWtppWnp0U44MWnLgI,72514
54
+ datamodel_code_generator/parser/openapi.py,sha256=puBXUaq4zXDl7wj-VsZmmmt_D672RfS6qY9WID0VRPw,28603
55
+ datamodel_code_generator-0.33.0.dist-info/METADATA,sha256=lb9MUWpEd8NRvuijqRabotA8GjYjvyCCEudEcSIe-rA,26256
56
+ datamodel_code_generator-0.33.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
57
+ datamodel_code_generator-0.33.0.dist-info/entry_points.txt,sha256=cJVcHiEViQMANaoM5C1xR5hzmyCqH6hHHMpV8W00in8,77
58
+ datamodel_code_generator-0.33.0.dist-info/licenses/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
59
+ datamodel_code_generator-0.33.0.dist-info/RECORD,,