datamodel-code-generator 0.26.2__tar.gz → 0.26.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (60)
  1. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/PKG-INFO +5 -3
  2. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/README.md +4 -2
  3. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/__init__.py +28 -3
  4. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/__main__.py +8 -1
  5. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/arguments.py +7 -0
  6. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/format.py +1 -0
  7. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/__init__.py +21 -10
  8. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/base.py +10 -2
  9. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/imports.py +1 -0
  10. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/msgspec.py +75 -6
  11. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/__init__.py +1 -0
  12. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/msgspec.jinja2 +3 -1
  13. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/base.py +54 -22
  14. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/graphql.py +2 -0
  15. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/jsonschema.py +6 -0
  16. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/openapi.py +9 -1
  17. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/reference.py +12 -3
  18. datamodel_code_generator-0.26.4/datamodel_code_generator/version.py +1 -0
  19. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/pyproject.toml +5 -4
  20. datamodel_code_generator-0.26.2/datamodel_code_generator/version.py +0 -1
  21. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/LICENSE +0 -0
  22. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/http.py +0 -0
  23. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/imports.py +0 -0
  24. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/dataclass.py +0 -0
  25. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/enum.py +0 -0
  26. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/__init__.py +0 -0
  27. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/base_model.py +0 -0
  28. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/custom_root_type.py +0 -0
  29. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/dataclass.py +0 -0
  30. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/imports.py +0 -0
  31. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic/types.py +0 -0
  32. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/base_model.py +0 -0
  33. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/imports.py +0 -0
  34. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/root_model.py +0 -0
  35. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/pydantic_v2/types.py +0 -0
  36. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/rootmodel.py +0 -0
  37. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/scalar.py +0 -0
  38. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/Enum.jinja2 +0 -0
  39. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/Scalar.jinja2 +0 -0
  40. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/TypedDict.jinja2 +0 -0
  41. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/TypedDictClass.jinja2 +0 -0
  42. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/TypedDictFunction.jinja2 +0 -0
  43. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/Union.jinja2 +0 -0
  44. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/dataclass.jinja2 +0 -0
  45. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +0 -0
  46. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +0 -0
  47. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/Config.jinja2 +0 -0
  48. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +0 -0
  49. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +0 -0
  50. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +0 -0
  51. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +0 -0
  52. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/template/root.jinja2 +0 -0
  53. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/typed_dict.py +0 -0
  54. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/types.py +0 -0
  55. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/model/union.py +0 -0
  56. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/parser/__init__.py +0 -0
  57. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/py.typed +0 -0
  58. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/pydantic_patch.py +0 -0
  59. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/types.py +0 -0
  60. {datamodel_code_generator-0.26.2 → datamodel_code_generator-0.26.4}/datamodel_code_generator/util.py +0 -0

--- datamodel_code_generator-0.26.2/PKG-INFO
+++ datamodel_code_generator-0.26.4/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: datamodel-code-generator
- Version: 0.26.2
+ Version: 0.26.4
  Summary: Datamodel Code Generator
  Home-page: https://github.com/koxudaxi/datamodel-code-generator
  License: MIT
@@ -458,6 +458,8 @@ Field customization:
    --field-include-all-keys
                          Add all keys to field parameters
    --force-optional      Force optional for required fields
+   --no-alias            Do not add a field alias. E.g., if --snake-case-field is used along
+                         with a base class, which has an alias_generator
    --original-field-name-delimiter ORIGINAL_FIELD_NAME_DELIMITER
                          Set delimiter to convert to snake case. This option only can be used
                          with --snake-case-field (default: `_` )
@@ -499,8 +501,8 @@ Model customization:
                          dataclass(kw_only=True)).
    --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
                          Choose Datetime class between AwareDatetime, NaiveDatetime or
-                         datetime. Each output model has its default mapping, and only
-                         pydantic and dataclass support this override"
+                         datetime. Each output model has its default mapping (for example
+                         pydantic: datetime, dataclass: str, ...)
    --reuse-model         Reuse models on the field when a module has the model with the same
                          content
    --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}

--- datamodel_code_generator-0.26.2/README.md
+++ datamodel_code_generator-0.26.4/README.md
@@ -413,6 +413,8 @@ Field customization:
    --field-include-all-keys
                          Add all keys to field parameters
    --force-optional      Force optional for required fields
+   --no-alias            Do not add a field alias. E.g., if --snake-case-field is used along
+                         with a base class, which has an alias_generator
    --original-field-name-delimiter ORIGINAL_FIELD_NAME_DELIMITER
                          Set delimiter to convert to snake case. This option only can be used
                          with --snake-case-field (default: `_` )
@@ -454,8 +456,8 @@ Model customization:
                          dataclass(kw_only=True)).
    --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
                          Choose Datetime class between AwareDatetime, NaiveDatetime or
-                         datetime. Each output model has its default mapping, and only
-                         pydantic and dataclass support this override"
+                         datetime. Each output model has its default mapping (for example
+                         pydantic: datetime, dataclass: str, ...)
    --reuse-model         Reuse models on the field when a module has the model with the same
                          content
    --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}

--- datamodel_code_generator-0.26.2/datamodel_code_generator/__init__.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/__init__.py
@@ -231,7 +231,7 @@ def get_first_file(path: Path) -> Path:  # pragma: no cover


  def generate(
-     input_: Union[Path, str, ParseResult],
+     input_: Union[Path, str, ParseResult, Mapping[str, Any]],
      *,
      input_filename: Optional[str] = None,
      input_file_type: InputFileType = InputFileType.Auto,
@@ -303,6 +303,7 @@ def generate(
      union_mode: Optional[UnionMode] = None,
      output_datetime_class: Optional[DatetimeClassType] = None,
      keyword_only: bool = False,
+     no_alias: bool = False,
  ) -> None:
      remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
      if isinstance(input_, str):
@@ -353,6 +354,8 @@ def generate(
          parser_class = JsonSchemaParser

      if input_file_type in RAW_DATA_TYPES:
+         import json
+
          try:
              if isinstance(input_, Path) and input_.is_dir():  # pragma: no cover
                  raise Error(f'Input must be a file for {input_file_type}')
@@ -371,15 +374,33 @@ def generate(
                      import io

                      obj = get_header_and_first_line(io.StringIO(input_text))
-             else:
+             elif input_file_type == InputFileType.Yaml:
                  obj = load_yaml(
                      input_.read_text(encoding=encoding)  # type: ignore
                      if isinstance(input_, Path)
                      else input_text
                  )
+             elif input_file_type == InputFileType.Json:
+                 obj = json.loads(
+                     input_.read_text(encoding=encoding)  # type: ignore
+                     if isinstance(input_, Path)
+                     else input_text
+                 )
+             elif input_file_type == InputFileType.Dict:
+                 import ast
+
+                 # Input can be a dict object stored in a python file
+                 obj = (
+                     ast.literal_eval(
+                         input_.read_text(encoding=encoding)  # type: ignore
+                     )
+                     if isinstance(input_, Path)
+                     else input_
+                 )
+             else:  # pragma: no cover
+                 raise Error(f'Unsupported input file type: {input_file_type}')
          except:  # noqa
              raise Error('Invalid file format')
-         import json

          from genson import SchemaBuilder

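
A minimal sketch of what the new Dict input path in __init__.py allows (the schema mapping and output path below are invented for illustration; generate, InputFileType.Dict, and the Mapping input type come from the hunks above):

    from pathlib import Path

    from datamodel_code_generator import InputFileType, generate

    # A plain mapping can now be passed straight to generate(); for
    # InputFileType.Dict the object is handed to genson's SchemaBuilder,
    # which infers a JSON Schema before the models are generated.
    sample = {'name': 'foo', 'count': 3, 'tags': ['a', 'b']}

    generate(
        sample,
        input_file_type=InputFileType.Dict,
        output=Path('model.py'),
    )
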
@@ -478,6 +499,7 @@ def generate(
          default_field_extras=default_field_extras,
          target_datetime_class=output_datetime_class,
          keyword_only=keyword_only,
+         no_alias=no_alias,
          **kwargs,
      )

@@ -488,6 +510,9 @@
              input_filename = '<stdin>'
          elif isinstance(input_, ParseResult):
              input_filename = input_.geturl()
+         elif input_file_type == InputFileType.Dict:
+             # input_ might be a dict object provided directly, and missing a name field
+             input_filename = getattr(input_, 'name', '<dict>')
          else:
              input_filename = input_.name
      if not results:

--- datamodel_code_generator-0.26.2/datamodel_code_generator/__main__.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/__main__.py
@@ -186,8 +186,13 @@ class Config(BaseModel):

      @model_validator(mode='after')
      def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+         output_model_type: DataModelType = values.get('output_model_type')
          python_target: PythonVersion = values.get('target_python_version')
-         if values.get('keyword_only') and not python_target.has_kw_only_dataclass:
+         if (
+             values.get('keyword_only')
+             and output_model_type == DataModelType.DataclassesDataclass
+             and not python_target.has_kw_only_dataclass
+         ):
              raise Error(
                  f'`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher.'
              )
@@ -341,6 +346,7 @@ class Config(BaseModel):
      union_mode: Optional[UnionMode] = None
      output_datetime_class: Optional[DatetimeClassType] = None
      keyword_only: bool = False
+     no_alias: bool = False

      def merge_args(self, args: Namespace) -> None:
          set_args = {
@@ -542,6 +548,7 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
              union_mode=config.union_mode,
              output_datetime_class=config.output_datetime_class,
              keyword_only=config.keyword_only,
+             no_alias=config.no_alias,
          )
          return Exit.OK
      except InvalidClassNameError as e:

--- datamodel_code_generator-0.26.2/datamodel_code_generator/arguments.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/arguments.py
@@ -381,6 +381,13 @@ field_options.add_argument(
      choices=[u.value for u in UnionMode],
      default=None,
  )
+ field_options.add_argument(
+     '--no-alias',
+     help="""Do not add a field alias. E.g., if --snake-case-field is used along with a base class, which has an
+     alias_generator""",
+     action='store_true',
+     default=None,
+ )

  # ======================================================================================
  # Options for templating output
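
A hedged sketch of the new option in use through the Python API (the schema and field name are invented; the no_alias parameter itself is the one threaded through the __init__.py and parser hunks):

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    # With no_alias=True, --snake-case-field still renames userName to
    # user_name but no alias='userName' is emitted, which matters when a
    # base class already applies an alias_generator.
    generate(
        '{"type": "object", "properties": {"userName": {"type": "string"}}}',
        input_file_type=InputFileType.JsonSchema,
        output_model_type=DataModelType.PydanticV2BaseModel,
        snake_case_field=True,
        no_alias=True,
        output=Path('model.py'),
    )
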

--- datamodel_code_generator-0.26.2/datamodel_code_generator/format.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/format.py
@@ -31,6 +31,7 @@ class PythonVersion(Enum):
      PY_310 = '3.10'
      PY_311 = '3.11'
      PY_312 = '3.12'
+     PY_313 = '3.13'

      @cached_property
      def _is_py_38_or_later(self) -> bool:  # pragma: no cover

--- datamodel_code_generator-0.26.2/datamodel_code_generator/model/__init__.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/model/__init__.py
@@ -1,12 +1,19 @@
  from __future__ import annotations

+ import sys
  from typing import TYPE_CHECKING, Callable, Iterable, List, NamedTuple, Optional, Type

+ from .. import DatetimeClassType, PythonVersion
  from ..types import DataTypeManager as DataTypeManagerABC
  from .base import ConstraintsBase, DataModel, DataModelFieldBase

  if TYPE_CHECKING:
-     from .. import DataModelType, DatetimeClassType, PythonVersion
+     from .. import DataModelType
+
+ DEFAULT_TARGET_DATETIME_CLASS = DatetimeClassType.Datetime
+ DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(
+     f'{sys.version_info.major}.{sys.version_info.minor}'
+ )


  class DataModelSet(NamedTuple):
@@ -20,8 +27,8 @@ class DataModelSet(NamedTuple):

  def get_data_model_types(
      data_model_type: DataModelType,
-     target_python_version: PythonVersion,
-     target_datetime_class: DatetimeClassType,
+     target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
+     target_datetime_class: DatetimeClassType = DEFAULT_TARGET_DATETIME_CLASS,
  ) -> DataModelSet:
      from .. import DataModelType
      from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
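
Both parameters now carry defaults, so callers may omit them; a minimal sketch (the model type is chosen arbitrarily):

    from datamodel_code_generator import DataModelType
    from datamodel_code_generator.model import get_data_model_types

    # target_python_version falls back to the running interpreter's
    # major.minor; target_datetime_class to DatetimeClassType.Datetime.
    data_model_types = get_data_model_types(DataModelType.PydanticV2BaseModel)
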
@@ -53,13 +60,17 @@ def get_data_model_types(
          )
      elif data_model_type == DataModelType.TypingTypedDict:
          return DataModelSet(
-             data_model=typed_dict.TypedDict
-             if target_python_version.has_typed_dict
-             else typed_dict.TypedDictBackport,
+             data_model=(
+                 typed_dict.TypedDict
+                 if target_python_version.has_typed_dict
+                 else typed_dict.TypedDictBackport
+             ),
              root_model=rootmodel.RootModel,
-             field_model=typed_dict.DataModelField
-             if target_python_version.has_typed_dict_non_required
-             else typed_dict.DataModelFieldBackport,
+             field_model=(
+                 typed_dict.DataModelField
+                 if target_python_version.has_typed_dict_non_required
+                 else typed_dict.DataModelFieldBackport
+             ),
              data_type_manager=DataTypeManager,
              dump_resolve_reference_action=None,
          )
@@ -68,7 +79,7 @@ def get_data_model_types(
              data_model=msgspec.Struct,
              root_model=msgspec.RootModel,
              field_model=msgspec.DataModelField,
-             data_type_manager=DataTypeManager,
+             data_type_manager=msgspec.DataTypeManager,
              dump_resolve_reference_action=None,
              known_third_party=['msgspec'],
          )

--- datamodel_code_generator-0.26.2/datamodel_code_generator/model/base.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/model/base.py
@@ -1,5 +1,6 @@
  from abc import ABC, abstractmethod
  from collections import defaultdict
+ from copy import deepcopy
  from functools import lru_cache
  from pathlib import Path
  from typing import (
@@ -118,6 +119,7 @@ class DataModelFieldBase(_BaseModel):
      _exclude_fields: ClassVar[Set[str]] = {'parent'}
      _pass_fields: ClassVar[Set[str]] = {'parent', 'data_type'}
      can_have_extra_keys: ClassVar[bool] = True
+     type_has_null: Optional[bool] = None

      if not TYPE_CHECKING:

@@ -150,6 +152,8 @@
                  return get_optional_type(type_hint, self.data_type.use_union_operator)
              return type_hint
          elif self.required:
+             if self.type_has_null:
+                 return get_optional_type(type_hint, self.data_type.use_union_operator)
              return type_hint
          elif self.fall_back_to_nullable:
              return get_optional_type(type_hint, self.data_type.use_union_operator)
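
Roughly, the effect (the schema fragment and model name are invented): a property that is required but whose type list includes null now renders with an Optional hint even though it stays required.

    from typing import Optional

    from pydantic import BaseModel

    # Sketch of output for a *required* property declared as
    # {"type": ["string", "null"]}: the field keeps no default,
    # but its hint becomes Optional[str] instead of plain str.
    class Model(BaseModel):
        name: Optional[str]
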
@@ -316,6 +320,8 @@ class DataModel(TemplateBase, Nullable, ABC):
              self.reference.source = self

          self.extra_template_data = (
+             # The supplied defaultdict will either create a new entry,
+             # or already contain a predefined entry for this type
              extra_template_data[self.name]
              if extra_template_data is not None
              else defaultdict(dict)
@@ -327,10 +333,12 @@
              if base_class.reference:
                  base_class.reference.children.append(self)

-         if extra_template_data:
+         if extra_template_data is not None:
              all_model_extra_template_data = extra_template_data.get(ALL_MODEL)
              if all_model_extra_template_data:
-                 self.extra_template_data.update(all_model_extra_template_data)
+                 # The deepcopy is needed here to ensure that different models don't
+                 # end up inadvertently sharing state (such as "base_class_kwargs")
+                 self.extra_template_data.update(deepcopy(all_model_extra_template_data))

          self.methods: List[str] = methods or []


--- datamodel_code_generator-0.26.2/datamodel_code_generator/model/imports.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/model/imports.py
@@ -2,6 +2,7 @@ from datamodel_code_generator.imports import Import

  IMPORT_DATACLASS = Import.from_full_path('dataclasses.dataclass')
  IMPORT_FIELD = Import.from_full_path('dataclasses.field')
+ IMPORT_CLASSVAR = Import.from_full_path('typing.ClassVar')
  IMPORT_TYPED_DICT = Import.from_full_path('typing.TypedDict')
  IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path('typing_extensions.TypedDict')
  IMPORT_NOT_REQUIRED = Import.from_full_path('typing.NotRequired')

--- datamodel_code_generator-0.26.2/datamodel_code_generator/model/msgspec.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/model/msgspec.py
@@ -7,6 +7,7 @@ from typing import (
      Dict,
      List,
      Optional,
+     Sequence,
      Set,
      Tuple,
      Type,
@@ -15,21 +16,36 @@ from typing import (

  from pydantic import Field

- from datamodel_code_generator.imports import Import
+ from datamodel_code_generator import DatetimeClassType, PythonVersion
+ from datamodel_code_generator.imports import (
+     IMPORT_DATE,
+     IMPORT_DATETIME,
+     IMPORT_TIME,
+     IMPORT_TIMEDELTA,
+     Import,
+ )
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
  from datamodel_code_generator.model.base import UNDEFINED
  from datamodel_code_generator.model.imports import (
+     IMPORT_CLASSVAR,
      IMPORT_MSGSPEC_CONVERT,
      IMPORT_MSGSPEC_FIELD,
      IMPORT_MSGSPEC_META,
-     IMPORT_MSGSPEC_STRUCT,
  )
  from datamodel_code_generator.model.pydantic.base_model import (
      Constraints as _Constraints,
  )
  from datamodel_code_generator.model.rootmodel import RootModel as _RootModel
+ from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
+ from datamodel_code_generator.model.types import type_map_factory
  from datamodel_code_generator.reference import Reference
- from datamodel_code_generator.types import chain_as_tuple, get_optional_type
+ from datamodel_code_generator.types import (
+     DataType,
+     StrictTypes,
+     Types,
+     chain_as_tuple,
+     get_optional_type,
+ )


  def _has_field_assignment(field: DataModelFieldBase) -> bool:
@@ -56,6 +72,8 @@ def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]
              extra_imports.append(IMPORT_MSGSPEC_CONVERT)
          if self.annotated:
              extra_imports.append(IMPORT_MSGSPEC_META)
+         if self.extras.get('is_classvar'):
+             extra_imports.append(IMPORT_CLASSVAR)
          return chain_as_tuple(original_imports.fget(self), extra_imports)  # type: ignore

      setattr(cls, 'imports', property(new_imports))
@@ -69,7 +87,7 @@ class RootModel(_RootModel):
  class Struct(DataModel):
      TEMPLATE_FILE_PATH: ClassVar[str] = 'msgspec.jinja2'
      BASE_CLASS: ClassVar[str] = 'msgspec.Struct'
-     DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_MSGSPEC_STRUCT,)
+     DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()

      def __init__(
          self,
@@ -103,6 +121,12 @@ class Struct(DataModel):
              nullable=nullable,
              keyword_only=keyword_only,
          )
+         self.extra_template_data.setdefault('base_class_kwargs', {})
+         if self.keyword_only:
+             self.add_base_class_kwarg('kw_only', 'True')
+
+     def add_base_class_kwarg(self, name: str, value):
+         self.extra_template_data['base_class_kwargs'][name] = value


  class Constraints(_Constraints):
@@ -241,11 +265,16 @@ class DataModelField(DataModelFieldBase):

          meta = f'Meta({", ".join(meta_arguments)})'

-         if not self.required:
+         if not self.required and not self.extras.get('is_classvar'):
              type_hint = self.data_type.type_hint
              annotated_type = f'Annotated[{type_hint}, {meta}]'
              return get_optional_type(annotated_type, self.data_type.use_union_operator)
-         return f'Annotated[{self.type_hint}, {meta}]'
+
+         annotated_type = f'Annotated[{self.type_hint}, {meta}]'
+         if self.extras.get('is_classvar'):
+             annotated_type = f'ClassVar[{annotated_type}]'
+
+         return annotated_type

      def _get_default_as_struct_model(self) -> Optional[str]:
          for data_type in self.data_type.data_types or (self.data_type,):
@@ -267,3 +296,43 @@ class DataModelField(DataModelFieldBase):
          elif data_type.reference and isinstance(data_type.reference.source, Struct):
              return f'lambda: {self._PARSE_METHOD}({repr(self.default)}, type={data_type.alias or data_type.reference.source.class_name})'
      return None
+
+
+ class DataTypeManager(_DataTypeManager):
+     def __init__(
+         self,
+         python_version: PythonVersion = PythonVersion.PY_38,
+         use_standard_collections: bool = False,
+         use_generic_container_types: bool = False,
+         strict_types: Optional[Sequence[StrictTypes]] = None,
+         use_non_positive_negative_number_constrained_types: bool = False,
+         use_union_operator: bool = False,
+         use_pendulum: bool = False,
+         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+     ):
+         super().__init__(
+             python_version,
+             use_standard_collections,
+             use_generic_container_types,
+             strict_types,
+             use_non_positive_negative_number_constrained_types,
+             use_union_operator,
+             use_pendulum,
+             target_datetime_class,
+         )
+
+         datetime_map = (
+             {
+                 Types.time: self.data_type.from_import(IMPORT_TIME),
+                 Types.date: self.data_type.from_import(IMPORT_DATE),
+                 Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
+                 Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
+             }
+             if target_datetime_class is DatetimeClassType.Datetime
+             else {}
+         )
+
+         self.type_map: Dict[Types, DataType] = {
+             **type_map_factory(self.data_type),
+             **datetime_map,
+         }
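
A quick way to observe the change (the names come from the hunks above): the msgspec-specific manager maps temporal types onto the datetime module instead of the generic defaults.

    from datamodel_code_generator import DataModelType
    from datamodel_code_generator.model import get_data_model_types
    from datamodel_code_generator.types import Types

    types = get_data_model_types(DataModelType.MsgspecStruct)
    manager = types.data_type_manager()  # now msgspec.DataTypeManager
    print(manager.type_map[Types.date_time])  # resolves to datetime.datetime
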

--- datamodel_code_generator-0.26.2/datamodel_code_generator/model/pydantic_v2/__init__.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/model/pydantic_v2/__init__.py
@@ -23,6 +23,7 @@ class ConfigDict(_BaseModel):
      arbitrary_types_allowed: Optional[bool] = None
      protected_namespaces: Optional[Tuple[str, ...]] = None
      regex_engine: Optional[str] = None
+     use_enum_values: Optional[bool] = None


  __all__ = [
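
The new field lets generated pydantic v2 models carry this setting; in pydantic itself it corresponds to:

    from pydantic import BaseModel, ConfigDict

    class Model(BaseModel):
        model_config = ConfigDict(use_enum_values=True)
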

--- datamodel_code_generator-0.26.2/datamodel_code_generator/model/template/msgspec.jinja2
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/model/template/msgspec.jinja2
@@ -2,7 +2,9 @@
  {{ decorator }}
  {% endfor -%}
  {%- if base_class %}
- class {{ class_name }}({{ base_class }}):
+ class {{ class_name }}({{ base_class }}{%- for key, value in (base_class_kwargs|default({})).items() -%}
+ , {{ key }}={{ value }}
+ {%- endfor -%}):
  {%- else %}
  class {{ class_name }}:
  {%- endif %}
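
With base_class_kwargs populated (see the Struct.__init__ hunk above), the template renders keyword arguments into the class statement; the output would look roughly like:

    import msgspec

    # rendered with base_class_kwargs = {'kw_only': 'True'}
    class Pet(msgspec.Struct, kw_only=True):
        name: str
        age: int
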

--- datamodel_code_generator-0.26.2/datamodel_code_generator/parser/base.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/parser/base.py
@@ -39,6 +39,7 @@ from datamodel_code_generator.imports import (
      Imports,
  )
  from datamodel_code_generator.model import dataclass as dataclass_model
+ from datamodel_code_generator.model import msgspec as msgspec_model
  from datamodel_code_generator.model import pydantic as pydantic_model
  from datamodel_code_generator.model import pydantic_v2 as pydantic_model_v2
  from datamodel_code_generator.model.base import (
@@ -411,6 +412,7 @@ class Parser(ABC):
          default_field_extras: Optional[Dict[str, Any]] = None,
          target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
          keyword_only: bool = False,
+         no_alias: bool = False,
      ) -> None:
          self.keyword_only = keyword_only
          self.data_type_manager: DataTypeManager = data_type_manager_type(
@@ -511,6 +513,7 @@ class Parser(ABC):
              special_field_name_prefix=special_field_name_prefix,
              remove_special_field_name_prefix=remove_special_field_name_prefix,
              capitalise_enum_members=capitalise_enum_members,
+             no_alias=no_alias,
          )
          self.class_name: Optional[str] = class_name
          self.wrap_string_literal: Optional[bool] = wrap_string_literal
@@ -803,35 +806,54 @@ class Parser(ABC):
                  if not data_type.reference:  # pragma: no cover
                      continue
                  discriminator_model = data_type.reference.source
+
                  if not isinstance(  # pragma: no cover
                      discriminator_model,
                      (
                          pydantic_model.BaseModel,
                          pydantic_model_v2.BaseModel,
                          dataclass_model.DataClass,
+                         msgspec_model.Struct,
                      ),
                  ):
                      continue  # pragma: no cover
-                 type_names = []
-                 if mapping:
+
+                 type_names: List[str] = []
+
+                 def check_paths(
+                     model: Union[
+                         pydantic_model.BaseModel,
+                         pydantic_model_v2.BaseModel,
+                         Reference,
+                     ],
+                     mapping: Dict[str, str],
+                     type_names: List[str] = type_names,
+                 ) -> None:
+                     """Helper function to validate paths for a given model."""
                      for name, path in mapping.items():
                          if (
-                             discriminator_model.path.split('#/')[-1]
-                             != path.split('#/')[-1]
+                             model.path.split('#/')[-1] != path.split('#/')[-1]
+                         ) and (
+                             path.startswith('#/')
+                             or model.path[:-1] != path.split('/')[-1]
                          ):
-                             if (
-                                 path.startswith('#/')
-                                 or discriminator_model.path[:-1]
-                                 != path.split('/')[-1]
-                             ):
-                                 t_path = path[str(path).find('/') + 1 :]
-                                 t_disc = discriminator_model.path[
-                                     : str(discriminator_model.path).find('#')
-                                 ].lstrip('../')
-                                 t_disc_2 = '/'.join(t_disc.split('/')[1:])
-                                 if t_path != t_disc and t_path != t_disc_2:
-                                     continue
+                             t_path = path[str(path).find('/') + 1 :]
+                             t_disc = model.path[: str(model.path).find('#')].lstrip(
+                                 '../'
+                             )
+                             t_disc_2 = '/'.join(t_disc.split('/')[1:])
+                             if t_path != t_disc and t_path != t_disc_2:
+                                 continue
                          type_names.append(name)
+
+                 # Check the main discriminator model path
+                 if mapping:
+                     check_paths(discriminator_model, mapping)
+
+                 # Check the base_classes if they exist
+                 if len(type_names) == 0:
+                     for base_class in discriminator_model.base_classes:
+                         check_paths(base_class.reference, mapping)
                  else:
                      type_names = [discriminator_model.path.split('/')[-1]]
                  if not type_names:  # pragma: no cover
@@ -846,13 +868,22 @@ class Parser(ABC):
                      ) != property_name:
                          continue
                      literals = discriminator_field.data_type.literals
-                     if (
-                         len(literals) == 1 and literals[0] == type_names[0]
-                         if type_names
-                         else None
+                     if len(literals) == 1 and literals[0] == (
+                         type_names[0] if type_names else None
                      ):
                          has_one_literal = True
-                         continue
+                         if isinstance(
+                             discriminator_model, msgspec_model.Struct
+                         ):  # pragma: no cover
+                             discriminator_model.add_base_class_kwarg(
+                                 'tag_field', f"'{property_name}'"
+                             )
+                             discriminator_model.add_base_class_kwarg(
+                                 'tag', discriminator_field.represented_default
+                             )
+                             discriminator_field.extras['is_classvar'] = True
+                         # Found the discriminator field, no need to keep looking
+                         break
                      for (
                          field_data_type
                      ) in discriminator_field.data_type.all_data_types:
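
A rough sketch of the msgspec output this enables for a discriminated union (the class and field names are invented; tag_field, tag, and the ClassVar treatment come from the hunks above):

    from typing import ClassVar, Literal

    import msgspec

    class Dog(msgspec.Struct, tag_field='pet_type', tag='dog'):
        # The discriminator itself becomes a ClassVar, so msgspec encodes
        # it through the struct's tag rather than as an instance field.
        pet_type: ClassVar[Literal['dog']] = 'dog'
        bark: str
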
@@ -879,7 +910,8 @@
                      else IMPORT_LITERAL_BACKPORT
                  )
                  has_imported_literal = any(
-                     literal == import_ for import_ in imports
+                     literal == import_  # type: ignore [comparison-overlap]
+                     for import_ in imports
                  )
                  if has_imported_literal:  # pragma: no cover
                      imports.append(literal)

--- datamodel_code_generator-0.26.2/datamodel_code_generator/parser/graphql.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/parser/graphql.py
@@ -160,6 +160,7 @@ class GraphQLParser(Parser):
          default_field_extras: Optional[Dict[str, Any]] = None,
          target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
          keyword_only: bool = False,
+         no_alias: bool = False,
      ) -> None:
          super().__init__(
              source=source,
@@ -232,6 +233,7 @@
              default_field_extras=default_field_extras,
              target_datetime_class=target_datetime_class,
              keyword_only=keyword_only,
+             no_alias=no_alias,
          )

          self.data_model_scalar_type = data_model_scalar_type

--- datamodel_code_generator-0.26.2/datamodel_code_generator/parser/jsonschema.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/parser/jsonschema.py
@@ -448,6 +448,7 @@ class JsonSchemaParser(Parser):
          default_field_extras: Optional[Dict[str, Any]] = None,
          target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
          keyword_only: bool = False,
+         no_alias: bool = False,
      ) -> None:
          super().__init__(
              source=source,
@@ -520,6 +521,7 @@ class JsonSchemaParser(Parser):
              default_field_extras=default_field_extras,
              target_datetime_class=target_datetime_class,
              keyword_only=keyword_only,
+             no_alias=no_alias,
          )

          self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
@@ -618,6 +620,7 @@ class JsonSchemaParser(Parser):
              use_default_kwarg=self.use_default_kwarg,
              original_name=original_field_name,
              has_default=field.has_default,
+             type_has_null=field.type_has_null,
          )

      def get_data_type(self, obj: JsonSchemaObject) -> DataType:
@@ -1715,6 +1718,9 @@
      def parse_raw(self) -> None:
          for source, path_parts in self._get_context_source_path_parts():
              self.raw_obj = load_yaml(source.text)
+             if self.raw_obj is None:  # pragma: no cover
+                 warn(f'{source.path} is empty. Skipping this file')
+                 continue
              if self.custom_class_name_generator:
                  obj_name = self.raw_obj.get('title', 'Model')
              else:

--- datamodel_code_generator-0.26.2/datamodel_code_generator/parser/openapi.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/parser/openapi.py
@@ -228,6 +228,7 @@ class OpenAPIParser(JsonSchemaParser):
          default_field_extras: Optional[Dict[str, Any]] = None,
          target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
          keyword_only: bool = False,
+         no_alias: bool = False,
      ):
          super().__init__(
              source=source,
@@ -300,6 +301,7 @@
              default_field_extras=default_field_extras,
              target_datetime_class=target_datetime_class,
              keyword_only=keyword_only,
+             no_alias=no_alias,
          )
          self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
              OpenAPIScope.Schemas
@@ -314,8 +316,10 @@
          return get_model_by_path(ref_body, ref_path.split('/')[1:])

      def get_data_type(self, obj: JsonSchemaObject) -> DataType:
-         # OpenAPI doesn't allow `null` in `type` field and list of types
+         # OpenAPI 3.0 doesn't allow `null` in the `type` field and list of types
          # https://swagger.io/docs/specification/data-models/data-types/#null
+         # OpenAPI 3.1 does allow `null` in the `type` field and is equivalent to
+         # a `nullable` flag on the property itself
          if obj.nullable and self.strict_nullable and isinstance(obj.type, str):
              obj.type = [obj.type, 'null']

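
For contrast, the two spellings the comment distinguishes (hypothetical property bodies):

    # OpenAPI 3.0: nullability is a separate flag next to a single type
    prop_3_0 = {'type': 'string', 'nullable': True}

    # OpenAPI 3.1: null may appear directly in the type list
    prop_3_1 = {'type': ['string', 'null']}
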
@@ -504,6 +508,9 @@
                      has_default=object_schema.has_default
                      if object_schema
                      else False,
+                     type_has_null=object_schema.type_has_null
+                     if object_schema
+                     else None,
                  )
              )

@@ -513,6 +520,7 @@
                  fields=fields,
                  reference=reference,
                  custom_base_class=self.base_class,
+                 custom_template_dir=self.custom_template_dir,
                  keyword_only=self.keyword_only,
              )
          )

--- datamodel_code_generator-0.26.2/datamodel_code_generator/reference.py
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/reference.py
@@ -198,6 +198,7 @@ class FieldNameResolver:
          special_field_name_prefix: Optional[str] = None,
          remove_special_field_name_prefix: bool = False,
          capitalise_enum_members: bool = False,
+         no_alias: bool = False,
      ):
          self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
          self.empty_field_name: str = empty_field_name or '_'
@@ -208,6 +209,7 @@
          )
          self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
          self.capitalise_enum_members: bool = capitalise_enum_members
+         self.no_alias = no_alias

      @classmethod
      def _validate_field_name(cls, field_name: str) -> bool:
@@ -274,7 +276,10 @@
          if field_name in self.aliases:
              return self.aliases[field_name], field_name
          valid_name = self.get_valid_name(field_name, excludes=excludes)
-         return valid_name, None if field_name == valid_name else field_name
+         return (
+             valid_name,
+             None if self.no_alias or field_name == valid_name else field_name,
+         )


  class PydanticFieldNameResolver(FieldNameResolver):
@@ -354,6 +359,7 @@ class ModelResolver:
          special_field_name_prefix: Optional[str] = None,
          remove_special_field_name_prefix: bool = False,
          capitalise_enum_members: bool = False,
+         no_alias: bool = False,
      ) -> None:
          self.references: Dict[str, Reference] = {}
          self._current_root: Sequence[str] = []
@@ -383,6 +389,7 @@
                  capitalise_enum_members=capitalise_enum_members
                  if k == ModelType.ENUM
                  else False,
+                 no_alias=no_alias,
              )
              for k, v in merged_field_name_resolver_classes.items()
          }
@@ -566,11 +573,13 @@ class ModelResolver:
          split_ref = ref.rsplit('/', 1)
          if len(split_ref) == 1:
              original_name = Path(
-                 split_ref[0][:-1] if self.is_external_root_ref(path) else split_ref[0]
+                 split_ref[0].rstrip('#')
+                 if self.is_external_root_ref(path)
+                 else split_ref[0]
              ).stem
          else:
              original_name = (
-                 Path(split_ref[1][:-1]).stem
+                 Path(split_ref[1].rstrip('#')).stem
                  if self.is_external_root_ref(path)
                  else split_ref[1]
              )
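
The old [:-1] slice assumed exactly one trailing '#'; rstrip('#') gives the same result there and is a no-op when the character is absent, e.g.:

    assert 'pet.json#'[:-1] == 'pet.json#'.rstrip('#') == 'pet.json'
    assert 'pet.json'[:-1] == 'pet.jso'          # slice would eat a real character
    assert 'pet.json'.rstrip('#') == 'pet.json'  # rstrip is safe
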

--- /dev/null
+++ datamodel_code_generator-0.26.4/datamodel_code_generator/version.py
@@ -0,0 +1 @@
+ version: str = '0.26.4'

--- datamodel_code_generator-0.26.2/pyproject.toml
+++ datamodel_code_generator-0.26.4/pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "datamodel-code-generator"
- version = "0.26.2"
+ version = "0.26.4"
  description = "Datamodel Code Generator"
  authors = ["Koudai Aono <koxudaxi@gmail.com>"]
  readme = "README.md"
@@ -19,6 +19,7 @@ classifiers = [
      "Programming Language :: Python :: 3.10",
      "Programming Language :: Python :: 3.11",
      "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
      "Programming Language :: Python :: Implementation :: CPython"]

  [build-system]
@@ -68,7 +69,7 @@ pytest-benchmark = "*"
  pytest-cov = ">=2.12.1"
  pytest-mock = "*"
  mypy = ">=1.4.1,<1.5.0"
- black = "^23.3.0"
+ black = ">=23.3,<25.0"
  freezegun = "*"
  types-Jinja2 = "*"
  types-PyYAML = "*"
@@ -77,8 +78,8 @@ types-setuptools = ">=67.6.0.5,<70.0.0.0"
  pydantic = "*"
  httpx = ">=0.24.1"
  PySnooper = "*"
- ruff = ">=0.0.290,<0.5.7"
- ruff-lsp = ">=0.0.39,<0.0.41"
+ ruff = ">=0.0.290,<0.7.5"
+ ruff-lsp = ">=0.0.39,<0.0.60"
  pre-commit = "*"
  pytest-xdist = "^3.3.1"
  prance = "*"

--- datamodel_code_generator-0.26.2/datamodel_code_generator/version.py
+++ /dev/null
@@ -1 +0,0 @@
- version: str = '0.26.2'