datamodel-code-generator 0.26.3.tar.gz → 0.26.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator might be problematic. See the package registry page for more details.

Files changed (60)
  1. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/PKG-INFO +5 -3
  2. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/README.md +4 -2
  3. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/__init__.py +28 -3
  4. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/__main__.py +8 -1
  5. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/arguments.py +7 -0
  6. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/format.py +1 -0
  7. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/__init__.py +20 -9
  8. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/base.py +10 -2
  9. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/msgspec.py +3 -2
  10. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/base.py +6 -5
  11. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/graphql.py +2 -0
  12. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/jsonschema.py +6 -0
  13. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/openapi.py +9 -1
  14. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/reference.py +12 -3
  15. datamodel_code_generator-0.26.4/datamodel_code_generator/version.py +1 -0
  16. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/pyproject.toml +4 -3
  17. datamodel_code_generator-0.26.3/datamodel_code_generator/version.py +0 -1
  18. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/LICENSE +0 -0
  19. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/http.py +0 -0
  20. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/imports.py +0 -0
  21. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/dataclass.py +0 -0
  22. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/enum.py +0 -0
  23. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/imports.py +0 -0
  24. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/__init__.py +0 -0
  25. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/base_model.py +0 -0
  26. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/custom_root_type.py +0 -0
  27. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/dataclass.py +0 -0
  28. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/imports.py +0 -0
  29. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/types.py +0 -0
  30. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/__init__.py +0 -0
  31. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/base_model.py +0 -0
  32. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/imports.py +0 -0
  33. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/root_model.py +0 -0
  34. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/types.py +0 -0
  35. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/rootmodel.py +0 -0
  36. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/scalar.py +0 -0
  37. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/Enum.jinja2 +0 -0
  38. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/Scalar.jinja2 +0 -0
  39. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/TypedDict.jinja2 +0 -0
  40. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/TypedDictClass.jinja2 +0 -0
  41. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/TypedDictFunction.jinja2 +0 -0
  42. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/Union.jinja2 +0 -0
  43. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/dataclass.jinja2 +0 -0
  44. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/msgspec.jinja2 +0 -0
  45. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +0 -0
  46. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +0 -0
  47. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/Config.jinja2 +0 -0
  48. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +0 -0
  49. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +0 -0
  50. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +0 -0
  51. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +0 -0
  52. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/root.jinja2 +0 -0
  53. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/typed_dict.py +0 -0
  54. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/types.py +0 -0
  55. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/union.py +0 -0
  56. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/__init__.py +0 -0
  57. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/py.typed +0 -0
  58. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/pydantic_patch.py +0 -0
  59. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/types.py +0 -0
  60. {datamodel_code_generator-0.26.3 → datamodel_code_generator-0.26.4}/datamodel_code_generator/util.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datamodel-code-generator
3
- Version: 0.26.3
3
+ Version: 0.26.4
4
4
  Summary: Datamodel Code Generator
5
5
  Home-page: https://github.com/koxudaxi/datamodel-code-generator
6
6
  License: MIT
@@ -458,6 +458,8 @@ Field customization:
458
458
  --field-include-all-keys
459
459
  Add all keys to field parameters
460
460
  --force-optional Force optional for required fields
461
+ --no-alias Do not add a field alias. E.g., if --snake-case-field is used along
462
+ with a base class, which has an alias_generator
461
463
  --original-field-name-delimiter ORIGINAL_FIELD_NAME_DELIMITER
462
464
  Set delimiter to convert to snake case. This option only can be used
463
465
  with --snake-case-field (default: `_` )
@@ -499,8 +501,8 @@ Model customization:
499
501
  dataclass(kw_only=True)).
500
502
  --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
501
503
  Choose Datetime class between AwareDatetime, NaiveDatetime or
502
- datetime. Each output model has its default mapping, and only
503
- pydantic, dataclass, and msgspec support this override"
504
+ datetime. Each output model has its default mapping (for example
505
+ pydantic: datetime, dataclass: str, ...)
504
506
  --reuse-model Reuse models on the field when a module has the model with the same
505
507
  content
506
508
  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
@@ -413,6 +413,8 @@ Field customization:
413
413
  --field-include-all-keys
414
414
  Add all keys to field parameters
415
415
  --force-optional Force optional for required fields
416
+ --no-alias Do not add a field alias. E.g., if --snake-case-field is used along
417
+ with a base class, which has an alias_generator
416
418
  --original-field-name-delimiter ORIGINAL_FIELD_NAME_DELIMITER
417
419
  Set delimiter to convert to snake case. This option only can be used
418
420
  with --snake-case-field (default: `_` )
@@ -454,8 +456,8 @@ Model customization:
454
456
  dataclass(kw_only=True)).
455
457
  --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
456
458
  Choose Datetime class between AwareDatetime, NaiveDatetime or
457
- datetime. Each output model has its default mapping, and only
458
- pydantic, dataclass, and msgspec support this override"
459
+ datetime. Each output model has its default mapping (for example
460
+ pydantic: datetime, dataclass: str, ...)
459
461
  --reuse-model Reuse models on the field when a module has the model with the same
460
462
  content
461
463
  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
@@ -231,7 +231,7 @@ def get_first_file(path: Path) -> Path: # pragma: no cover
231
231
 
232
232
 
233
233
  def generate(
234
- input_: Union[Path, str, ParseResult],
234
+ input_: Union[Path, str, ParseResult, Mapping[str, Any]],
235
235
  *,
236
236
  input_filename: Optional[str] = None,
237
237
  input_file_type: InputFileType = InputFileType.Auto,
@@ -303,6 +303,7 @@ def generate(
303
303
  union_mode: Optional[UnionMode] = None,
304
304
  output_datetime_class: Optional[DatetimeClassType] = None,
305
305
  keyword_only: bool = False,
306
+ no_alias: bool = False,
306
307
  ) -> None:
307
308
  remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
308
309
  if isinstance(input_, str):
@@ -353,6 +354,8 @@ def generate(
353
354
  parser_class = JsonSchemaParser
354
355
 
355
356
  if input_file_type in RAW_DATA_TYPES:
357
+ import json
358
+
356
359
  try:
357
360
  if isinstance(input_, Path) and input_.is_dir(): # pragma: no cover
358
361
  raise Error(f'Input must be a file for {input_file_type}')
@@ -371,15 +374,33 @@ def generate(
371
374
  import io
372
375
 
373
376
  obj = get_header_and_first_line(io.StringIO(input_text))
374
- else:
377
+ elif input_file_type == InputFileType.Yaml:
375
378
  obj = load_yaml(
376
379
  input_.read_text(encoding=encoding) # type: ignore
377
380
  if isinstance(input_, Path)
378
381
  else input_text
379
382
  )
383
+ elif input_file_type == InputFileType.Json:
384
+ obj = json.loads(
385
+ input_.read_text(encoding=encoding) # type: ignore
386
+ if isinstance(input_, Path)
387
+ else input_text
388
+ )
389
+ elif input_file_type == InputFileType.Dict:
390
+ import ast
391
+
392
+ # Input can be a dict object stored in a python file
393
+ obj = (
394
+ ast.literal_eval(
395
+ input_.read_text(encoding=encoding) # type: ignore
396
+ )
397
+ if isinstance(input_, Path)
398
+ else input_
399
+ )
400
+ else: # pragma: no cover
401
+ raise Error(f'Unsupported input file type: {input_file_type}')
380
402
  except: # noqa
381
403
  raise Error('Invalid file format')
382
- import json
383
404
 
384
405
  from genson import SchemaBuilder
385
406
 
@@ -478,6 +499,7 @@ def generate(
478
499
  default_field_extras=default_field_extras,
479
500
  target_datetime_class=output_datetime_class,
480
501
  keyword_only=keyword_only,
502
+ no_alias=no_alias,
481
503
  **kwargs,
482
504
  )
483
505
 
@@ -488,6 +510,9 @@ def generate(
488
510
  input_filename = '<stdin>'
489
511
  elif isinstance(input_, ParseResult):
490
512
  input_filename = input_.geturl()
513
+ elif input_file_type == InputFileType.Dict:
514
+ # input_ might be a dict object provided directly, and missing a name field
515
+ input_filename = getattr(input_, 'name', '<dict>')
491
516
  else:
492
517
  input_filename = input_.name
493
518
  if not results:
@@ -186,8 +186,13 @@ class Config(BaseModel):
186
186
 
187
187
  @model_validator(mode='after')
188
188
  def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
189
+ output_model_type: DataModelType = values.get('output_model_type')
189
190
  python_target: PythonVersion = values.get('target_python_version')
190
- if values.get('keyword_only') and not python_target.has_kw_only_dataclass:
191
+ if (
192
+ values.get('keyword_only')
193
+ and output_model_type == DataModelType.DataclassesDataclass
194
+ and not python_target.has_kw_only_dataclass
195
+ ):
191
196
  raise Error(
192
197
  f'`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher.'
193
198
  )
@@ -341,6 +346,7 @@ class Config(BaseModel):
341
346
  union_mode: Optional[UnionMode] = None
342
347
  output_datetime_class: Optional[DatetimeClassType] = None
343
348
  keyword_only: bool = False
349
+ no_alias: bool = False
344
350
 
345
351
  def merge_args(self, args: Namespace) -> None:
346
352
  set_args = {
@@ -542,6 +548,7 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
542
548
  union_mode=config.union_mode,
543
549
  output_datetime_class=config.output_datetime_class,
544
550
  keyword_only=config.keyword_only,
551
+ no_alias=config.no_alias,
545
552
  )
546
553
  return Exit.OK
547
554
  except InvalidClassNameError as e:
@@ -381,6 +381,13 @@ field_options.add_argument(
381
381
  choices=[u.value for u in UnionMode],
382
382
  default=None,
383
383
  )
384
+ field_options.add_argument(
385
+ '--no-alias',
386
+ help="""Do not add a field alias. E.g., if --snake-case-field is used along with a base class, which has an
387
+ alias_generator""",
388
+ action='store_true',
389
+ default=None,
390
+ )
384
391
 
385
392
  # ======================================================================================
386
393
  # Options for templating output
@@ -31,6 +31,7 @@ class PythonVersion(Enum):
31
31
  PY_310 = '3.10'
32
32
  PY_311 = '3.11'
33
33
  PY_312 = '3.12'
34
+ PY_313 = '3.13'
34
35
 
35
36
  @cached_property
36
37
  def _is_py_38_or_later(self) -> bool: # pragma: no cover
@@ -1,12 +1,19 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import sys
3
4
  from typing import TYPE_CHECKING, Callable, Iterable, List, NamedTuple, Optional, Type
4
5
 
6
+ from .. import DatetimeClassType, PythonVersion
5
7
  from ..types import DataTypeManager as DataTypeManagerABC
6
8
  from .base import ConstraintsBase, DataModel, DataModelFieldBase
7
9
 
8
10
  if TYPE_CHECKING:
9
- from .. import DataModelType, DatetimeClassType, PythonVersion
11
+ from .. import DataModelType
12
+
13
+ DEFAULT_TARGET_DATETIME_CLASS = DatetimeClassType.Datetime
14
+ DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(
15
+ f'{sys.version_info.major}.{sys.version_info.minor}'
16
+ )
10
17
 
11
18
 
12
19
  class DataModelSet(NamedTuple):
@@ -20,8 +27,8 @@ class DataModelSet(NamedTuple):
20
27
 
21
28
  def get_data_model_types(
22
29
  data_model_type: DataModelType,
23
- target_python_version: PythonVersion,
24
- target_datetime_class: DatetimeClassType,
30
+ target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
31
+ target_datetime_class: DatetimeClassType = DEFAULT_TARGET_DATETIME_CLASS,
25
32
  ) -> DataModelSet:
26
33
  from .. import DataModelType
27
34
  from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
@@ -53,13 +60,17 @@ def get_data_model_types(
53
60
  )
54
61
  elif data_model_type == DataModelType.TypingTypedDict:
55
62
  return DataModelSet(
56
- data_model=typed_dict.TypedDict
57
- if target_python_version.has_typed_dict
58
- else typed_dict.TypedDictBackport,
63
+ data_model=(
64
+ typed_dict.TypedDict
65
+ if target_python_version.has_typed_dict
66
+ else typed_dict.TypedDictBackport
67
+ ),
59
68
  root_model=rootmodel.RootModel,
60
- field_model=typed_dict.DataModelField
61
- if target_python_version.has_typed_dict_non_required
62
- else typed_dict.DataModelFieldBackport,
69
+ field_model=(
70
+ typed_dict.DataModelField
71
+ if target_python_version.has_typed_dict_non_required
72
+ else typed_dict.DataModelFieldBackport
73
+ ),
63
74
  data_type_manager=DataTypeManager,
64
75
  dump_resolve_reference_action=None,
65
76
  )
@@ -1,5 +1,6 @@
1
1
  from abc import ABC, abstractmethod
2
2
  from collections import defaultdict
3
+ from copy import deepcopy
3
4
  from functools import lru_cache
4
5
  from pathlib import Path
5
6
  from typing import (
@@ -118,6 +119,7 @@ class DataModelFieldBase(_BaseModel):
118
119
  _exclude_fields: ClassVar[Set[str]] = {'parent'}
119
120
  _pass_fields: ClassVar[Set[str]] = {'parent', 'data_type'}
120
121
  can_have_extra_keys: ClassVar[bool] = True
122
+ type_has_null: Optional[bool] = None
121
123
 
122
124
  if not TYPE_CHECKING:
123
125
 
@@ -150,6 +152,8 @@ class DataModelFieldBase(_BaseModel):
150
152
  return get_optional_type(type_hint, self.data_type.use_union_operator)
151
153
  return type_hint
152
154
  elif self.required:
155
+ if self.type_has_null:
156
+ return get_optional_type(type_hint, self.data_type.use_union_operator)
153
157
  return type_hint
154
158
  elif self.fall_back_to_nullable:
155
159
  return get_optional_type(type_hint, self.data_type.use_union_operator)
@@ -316,6 +320,8 @@ class DataModel(TemplateBase, Nullable, ABC):
316
320
  self.reference.source = self
317
321
 
318
322
  self.extra_template_data = (
323
+ # The supplied defaultdict will either create a new entry,
324
+ # or already contain a predefined entry for this type
319
325
  extra_template_data[self.name]
320
326
  if extra_template_data is not None
321
327
  else defaultdict(dict)
@@ -327,10 +333,12 @@ class DataModel(TemplateBase, Nullable, ABC):
327
333
  if base_class.reference:
328
334
  base_class.reference.children.append(self)
329
335
 
330
- if extra_template_data:
336
+ if extra_template_data is not None:
331
337
  all_model_extra_template_data = extra_template_data.get(ALL_MODEL)
332
338
  if all_model_extra_template_data:
333
- self.extra_template_data.update(all_model_extra_template_data)
339
+ # The deepcopy is needed here to ensure that different models don't
340
+ # end up inadvertently sharing state (such as "base_class_kwargs")
341
+ self.extra_template_data.update(deepcopy(all_model_extra_template_data))
334
342
 
335
343
  self.methods: List[str] = methods or []
336
344
 
@@ -31,7 +31,6 @@ from datamodel_code_generator.model.imports import (
31
31
  IMPORT_MSGSPEC_CONVERT,
32
32
  IMPORT_MSGSPEC_FIELD,
33
33
  IMPORT_MSGSPEC_META,
34
- IMPORT_MSGSPEC_STRUCT,
35
34
  )
36
35
  from datamodel_code_generator.model.pydantic.base_model import (
37
36
  Constraints as _Constraints,
@@ -88,7 +87,7 @@ class RootModel(_RootModel):
88
87
  class Struct(DataModel):
89
88
  TEMPLATE_FILE_PATH: ClassVar[str] = 'msgspec.jinja2'
90
89
  BASE_CLASS: ClassVar[str] = 'msgspec.Struct'
91
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_MSGSPEC_STRUCT,)
90
+ DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
92
91
 
93
92
  def __init__(
94
93
  self,
@@ -123,6 +122,8 @@ class Struct(DataModel):
123
122
  keyword_only=keyword_only,
124
123
  )
125
124
  self.extra_template_data.setdefault('base_class_kwargs', {})
125
+ if self.keyword_only:
126
+ self.add_base_class_kwarg('kw_only', 'True')
126
127
 
127
128
  def add_base_class_kwarg(self, name: str, value):
128
129
  self.extra_template_data['base_class_kwargs'][name] = value
@@ -412,6 +412,7 @@ class Parser(ABC):
412
412
  default_field_extras: Optional[Dict[str, Any]] = None,
413
413
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
414
414
  keyword_only: bool = False,
415
+ no_alias: bool = False,
415
416
  ) -> None:
416
417
  self.keyword_only = keyword_only
417
418
  self.data_type_manager: DataTypeManager = data_type_manager_type(
@@ -512,6 +513,7 @@ class Parser(ABC):
512
513
  special_field_name_prefix=special_field_name_prefix,
513
514
  remove_special_field_name_prefix=remove_special_field_name_prefix,
514
515
  capitalise_enum_members=capitalise_enum_members,
516
+ no_alias=no_alias,
515
517
  )
516
518
  self.class_name: Optional[str] = class_name
517
519
  self.wrap_string_literal: Optional[bool] = wrap_string_literal
@@ -866,10 +868,8 @@ class Parser(ABC):
866
868
  ) != property_name:
867
869
  continue
868
870
  literals = discriminator_field.data_type.literals
869
- if (
870
- len(literals) == 1 and literals[0] == type_names[0]
871
- if type_names
872
- else None
871
+ if len(literals) == 1 and literals[0] == (
872
+ type_names[0] if type_names else None
873
873
  ):
874
874
  has_one_literal = True
875
875
  if isinstance(
@@ -882,7 +882,8 @@ class Parser(ABC):
882
882
  'tag', discriminator_field.represented_default
883
883
  )
884
884
  discriminator_field.extras['is_classvar'] = True
885
- continue
885
+ # Found the discriminator field, no need to keep looking
886
+ break
886
887
  for (
887
888
  field_data_type
888
889
  ) in discriminator_field.data_type.all_data_types:
@@ -160,6 +160,7 @@ class GraphQLParser(Parser):
160
160
  default_field_extras: Optional[Dict[str, Any]] = None,
161
161
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
162
162
  keyword_only: bool = False,
163
+ no_alias: bool = False,
163
164
  ) -> None:
164
165
  super().__init__(
165
166
  source=source,
@@ -232,6 +233,7 @@ class GraphQLParser(Parser):
232
233
  default_field_extras=default_field_extras,
233
234
  target_datetime_class=target_datetime_class,
234
235
  keyword_only=keyword_only,
236
+ no_alias=no_alias,
235
237
  )
236
238
 
237
239
  self.data_model_scalar_type = data_model_scalar_type
@@ -448,6 +448,7 @@ class JsonSchemaParser(Parser):
448
448
  default_field_extras: Optional[Dict[str, Any]] = None,
449
449
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
450
450
  keyword_only: bool = False,
451
+ no_alias: bool = False,
451
452
  ) -> None:
452
453
  super().__init__(
453
454
  source=source,
@@ -520,6 +521,7 @@ class JsonSchemaParser(Parser):
520
521
  default_field_extras=default_field_extras,
521
522
  target_datetime_class=target_datetime_class,
522
523
  keyword_only=keyword_only,
524
+ no_alias=no_alias,
523
525
  )
524
526
 
525
527
  self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
@@ -618,6 +620,7 @@ class JsonSchemaParser(Parser):
618
620
  use_default_kwarg=self.use_default_kwarg,
619
621
  original_name=original_field_name,
620
622
  has_default=field.has_default,
623
+ type_has_null=field.type_has_null,
621
624
  )
622
625
 
623
626
  def get_data_type(self, obj: JsonSchemaObject) -> DataType:
@@ -1715,6 +1718,9 @@ class JsonSchemaParser(Parser):
1715
1718
  def parse_raw(self) -> None:
1716
1719
  for source, path_parts in self._get_context_source_path_parts():
1717
1720
  self.raw_obj = load_yaml(source.text)
1721
+ if self.raw_obj is None: # pragma: no cover
1722
+ warn(f'{source.path} is empty. Skipping this file')
1723
+ continue
1718
1724
  if self.custom_class_name_generator:
1719
1725
  obj_name = self.raw_obj.get('title', 'Model')
1720
1726
  else:
@@ -228,6 +228,7 @@ class OpenAPIParser(JsonSchemaParser):
228
228
  default_field_extras: Optional[Dict[str, Any]] = None,
229
229
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
230
230
  keyword_only: bool = False,
231
+ no_alias: bool = False,
231
232
  ):
232
233
  super().__init__(
233
234
  source=source,
@@ -300,6 +301,7 @@ class OpenAPIParser(JsonSchemaParser):
300
301
  default_field_extras=default_field_extras,
301
302
  target_datetime_class=target_datetime_class,
302
303
  keyword_only=keyword_only,
304
+ no_alias=no_alias,
303
305
  )
304
306
  self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
305
307
  OpenAPIScope.Schemas
@@ -314,8 +316,10 @@ class OpenAPIParser(JsonSchemaParser):
314
316
  return get_model_by_path(ref_body, ref_path.split('/')[1:])
315
317
 
316
318
  def get_data_type(self, obj: JsonSchemaObject) -> DataType:
317
- # OpenAPI doesn't allow `null` in `type` field and list of types
319
+ # OpenAPI 3.0 doesn't allow `null` in the `type` field and list of types
318
320
  # https://swagger.io/docs/specification/data-models/data-types/#null
321
+ # OpenAPI 3.1 does allow `null` in the `type` field and is equivalent to
322
+ # a `nullable` flag on the property itself
319
323
  if obj.nullable and self.strict_nullable and isinstance(obj.type, str):
320
324
  obj.type = [obj.type, 'null']
321
325
 
@@ -504,6 +508,9 @@ class OpenAPIParser(JsonSchemaParser):
504
508
  has_default=object_schema.has_default
505
509
  if object_schema
506
510
  else False,
511
+ type_has_null=object_schema.type_has_null
512
+ if object_schema
513
+ else None,
507
514
  )
508
515
  )
509
516
 
@@ -513,6 +520,7 @@ class OpenAPIParser(JsonSchemaParser):
513
520
  fields=fields,
514
521
  reference=reference,
515
522
  custom_base_class=self.base_class,
523
+ custom_template_dir=self.custom_template_dir,
516
524
  keyword_only=self.keyword_only,
517
525
  )
518
526
  )
@@ -198,6 +198,7 @@ class FieldNameResolver:
198
198
  special_field_name_prefix: Optional[str] = None,
199
199
  remove_special_field_name_prefix: bool = False,
200
200
  capitalise_enum_members: bool = False,
201
+ no_alias: bool = False,
201
202
  ):
202
203
  self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
203
204
  self.empty_field_name: str = empty_field_name or '_'
@@ -208,6 +209,7 @@ class FieldNameResolver:
208
209
  )
209
210
  self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
210
211
  self.capitalise_enum_members: bool = capitalise_enum_members
212
+ self.no_alias = no_alias
211
213
 
212
214
  @classmethod
213
215
  def _validate_field_name(cls, field_name: str) -> bool:
@@ -274,7 +276,10 @@ class FieldNameResolver:
274
276
  if field_name in self.aliases:
275
277
  return self.aliases[field_name], field_name
276
278
  valid_name = self.get_valid_name(field_name, excludes=excludes)
277
- return valid_name, None if field_name == valid_name else field_name
279
+ return (
280
+ valid_name,
281
+ None if self.no_alias or field_name == valid_name else field_name,
282
+ )
278
283
 
279
284
 
280
285
  class PydanticFieldNameResolver(FieldNameResolver):
@@ -354,6 +359,7 @@ class ModelResolver:
354
359
  special_field_name_prefix: Optional[str] = None,
355
360
  remove_special_field_name_prefix: bool = False,
356
361
  capitalise_enum_members: bool = False,
362
+ no_alias: bool = False,
357
363
  ) -> None:
358
364
  self.references: Dict[str, Reference] = {}
359
365
  self._current_root: Sequence[str] = []
@@ -383,6 +389,7 @@ class ModelResolver:
383
389
  capitalise_enum_members=capitalise_enum_members
384
390
  if k == ModelType.ENUM
385
391
  else False,
392
+ no_alias=no_alias,
386
393
  )
387
394
  for k, v in merged_field_name_resolver_classes.items()
388
395
  }
@@ -566,11 +573,13 @@ class ModelResolver:
566
573
  split_ref = ref.rsplit('/', 1)
567
574
  if len(split_ref) == 1:
568
575
  original_name = Path(
569
- split_ref[0][:-1] if self.is_external_root_ref(path) else split_ref[0]
576
+ split_ref[0].rstrip('#')
577
+ if self.is_external_root_ref(path)
578
+ else split_ref[0]
570
579
  ).stem
571
580
  else:
572
581
  original_name = (
573
- Path(split_ref[1][:-1]).stem
582
+ Path(split_ref[1].rstrip('#')).stem
574
583
  if self.is_external_root_ref(path)
575
584
  else split_ref[1]
576
585
  )
@@ -0,0 +1 @@
1
+ version: str = '0.26.4'
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "datamodel-code-generator"
3
- version = "0.26.3"
3
+ version = "0.26.4"
4
4
  description = "Datamodel Code Generator"
5
5
  authors = ["Koudai Aono <koxudaxi@gmail.com>"]
6
6
  readme = "README.md"
@@ -19,6 +19,7 @@ classifiers = [
19
19
  "Programming Language :: Python :: 3.10",
20
20
  "Programming Language :: Python :: 3.11",
21
21
  "Programming Language :: Python :: 3.12",
22
+ "Programming Language :: Python :: 3.13",
22
23
  "Programming Language :: Python :: Implementation :: CPython"]
23
24
 
24
25
  [build-system]
@@ -77,8 +78,8 @@ types-setuptools = ">=67.6.0.5,<70.0.0.0"
77
78
  pydantic = "*"
78
79
  httpx = ">=0.24.1"
79
80
  PySnooper = "*"
80
- ruff = ">=0.0.290,<0.6.10"
81
- ruff-lsp = ">=0.0.39,<0.0.41"
81
+ ruff = ">=0.0.290,<0.7.5"
82
+ ruff-lsp = ">=0.0.39,<0.0.60"
82
83
  pre-commit = "*"
83
84
  pytest-xdist = "^3.3.1"
84
85
  prance = "*"
@@ -1 +0,0 @@
1
- version: str = '0.26.3'