datamodel-code-generator 0.26.1__tar.gz → 0.26.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datamodel-code-generator might be problematic.

Files changed (60)
  1. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/PKG-INFO +8 -1
  2. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/README.md +6 -0
  3. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/__init__.py +9 -2
  4. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/__main__.py +31 -1
  5. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/arguments.py +14 -1
  6. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/format.py +10 -0
  7. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/__init__.py +6 -4
  8. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/base.py +3 -0
  9. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/dataclass.py +65 -4
  10. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/enum.py +2 -0
  11. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/imports.py +1 -0
  12. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/msgspec.py +74 -4
  13. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic/base_model.py +4 -0
  14. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic/types.py +13 -2
  15. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic_v2/__init__.py +1 -0
  16. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic_v2/base_model.py +2 -0
  17. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic_v2/imports.py +1 -0
  18. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic_v2/types.py +16 -5
  19. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/scalar.py +2 -0
  20. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/dataclass.jinja2 +1 -1
  21. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/msgspec.jinja2 +3 -1
  22. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/typed_dict.py +2 -0
  23. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/types.py +5 -7
  24. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/union.py +2 -0
  25. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/parser/base.py +63 -19
  26. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/parser/graphql.py +6 -0
  27. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/parser/jsonschema.py +8 -0
  28. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/parser/openapi.py +6 -0
  29. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/types.py +3 -1
  30. datamodel_code_generator-0.26.3/datamodel_code_generator/version.py +1 -0
  31. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/pyproject.toml +3 -3
  32. datamodel_code_generator-0.26.1/datamodel_code_generator/version.py +0 -1
  33. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/LICENSE +0 -0
  34. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/http.py +0 -0
  35. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/imports.py +0 -0
  36. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic/__init__.py +0 -0
  37. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic/custom_root_type.py +0 -0
  38. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic/dataclass.py +0 -0
  39. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic/imports.py +0 -0
  40. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/pydantic_v2/root_model.py +0 -0
  41. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/rootmodel.py +0 -0
  42. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/Enum.jinja2 +0 -0
  43. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/Scalar.jinja2 +0 -0
  44. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/TypedDict.jinja2 +0 -0
  45. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/TypedDictClass.jinja2 +0 -0
  46. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/TypedDictFunction.jinja2 +0 -0
  47. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/Union.jinja2 +0 -0
  48. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +0 -0
  49. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +0 -0
  50. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/pydantic/Config.jinja2 +0 -0
  51. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +0 -0
  52. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +0 -0
  53. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +0 -0
  54. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +0 -0
  55. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/model/template/root.jinja2 +0 -0
  56. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/parser/__init__.py +0 -0
  57. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/py.typed +0 -0
  58. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/pydantic_patch.py +0 -0
  59. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/reference.py +0 -0
  60. {datamodel_code_generator-0.26.1 → datamodel_code_generator-0.26.3}/datamodel_code_generator/util.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: datamodel-code-generator
- Version: 0.26.1
+ Version: 0.26.3
  Summary: Datamodel Code Generator
  Home-page: https://github.com/koxudaxi/datamodel-code-generator
  License: MIT
@@ -16,6 +16,7 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: Implementation :: CPython
  Provides-Extra: debug
  Provides-Extra: graphql
@@ -494,6 +495,12 @@ Model customization:
  --enable-version-header
  Enable package version on file headers
  --keep-model-order Keep generated models'' order
+ --keyword-only Defined models as keyword only (for example
+ dataclass(kw_only=True)).
+ --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
+ Choose Datetime class between AwareDatetime, NaiveDatetime or
+ datetime. Each output model has its default mapping, and only
+ pydantic, dataclass, and msgspec support this override"
  --reuse-model Reuse models on the field when a module has the model with the same
  content
  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
README.md

@@ -450,6 +450,12 @@ Model customization:
  --enable-version-header
  Enable package version on file headers
  --keep-model-order Keep generated models'' order
+ --keyword-only Defined models as keyword only (for example
+ dataclass(kw_only=True)).
+ --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
+ Choose Datetime class between AwareDatetime, NaiveDatetime or
+ datetime. Each output model has its default mapping, and only
+ pydantic, dataclass, and msgspec support this override"
  --reuse-model Reuse models on the field when a module has the model with the same
  content
  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
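
For orientation, a hedged example of how the two new options might be combined with existing flags on the command line (input and output file names are illustrative, not part of this release):

    # keyword-only models need a 3.10+ target, per the validator added in __main__.py below
    datamodel-codegen --input api.yaml --input-file-type openapi --output models.py \
        --output-model-type dataclasses.dataclass --target-python-version 3.10 --keyword-only

    # override the date-time mapping for pydantic v2 output
    datamodel-codegen --input api.yaml --input-file-type openapi --output models.py \
        --output-model-type pydantic_v2.BaseModel --output-datetime-class AwareDatetime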
datamodel_code_generator/__init__.py

@@ -30,7 +30,7 @@ from urllib.parse import ParseResult
  import yaml

  import datamodel_code_generator.pydantic_patch # noqa: F401
- from datamodel_code_generator.format import PythonVersion
+ from datamodel_code_generator.format import DatetimeClassType, PythonVersion
  from datamodel_code_generator.model.pydantic_v2 import UnionMode
  from datamodel_code_generator.parser import DefaultPutDict, LiteralType
  from datamodel_code_generator.parser.base import Parser
@@ -301,6 +301,8 @@ def generate(
  treat_dots_as_module: bool = False,
  use_exact_imports: bool = False,
  union_mode: Optional[UnionMode] = None,
+ output_datetime_class: Optional[DatetimeClassType] = None,
+ keyword_only: bool = False,
  ) -> None:
  remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
  if isinstance(input_, str):
@@ -395,9 +397,12 @@ def generate(
  raise Error('union_mode is only supported for pydantic_v2.BaseModel')
  else:
  default_field_extras = None
+
  from datamodel_code_generator.model import get_data_model_types

- data_model_types = get_data_model_types(output_model_type, target_python_version)
+ data_model_types = get_data_model_types(
+ output_model_type, target_python_version, output_datetime_class
+ )
  parser = parser_class(
  source=input_text or input_,
  data_model_type=data_model_types.data_model,
@@ -471,6 +476,8 @@ def generate(
  treat_dots_as_module=treat_dots_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
+ target_datetime_class=output_datetime_class,
+ keyword_only=keyword_only,
  **kwargs,
  )

datamodel_code_generator/__main__.py

@@ -51,6 +51,7 @@ from datamodel_code_generator import (
  )
  from datamodel_code_generator.arguments import DEFAULT_ENCODING, arg_parser, namespace
  from datamodel_code_generator.format import (
+ DatetimeClassType,
  PythonVersion,
  black_find_project_root,
  is_supported_in_black,
@@ -159,7 +160,7 @@ class Config(BaseModel):
  target_python_version: PythonVersion = values['target_python_version']
  if target_python_version == target_python_version.PY_36:
  raise Error(
- f'`--use-generic-container-types` can not be used with `--target-python_version` {target_python_version.PY_36.value}.\n'
+ f'`--use-generic-container-types` can not be used with `--target-python-version` {target_python_version.PY_36.value}.\n'
  ' The version will be not supported in a future version'
  )
  return values
@@ -183,6 +184,31 @@ class Config(BaseModel):
  ) # pragma: no cover
  return values

+ @model_validator(mode='after')
+ def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ python_target: PythonVersion = values.get('target_python_version')
+ if values.get('keyword_only') and not python_target.has_kw_only_dataclass:
+ raise Error(
+ f'`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher.'
+ )
+ return values
+
+ @model_validator(mode='after')
+ def validate_output_datetime_class(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+ datetime_class_type: Optional[DatetimeClassType] = values.get(
+ 'output_datetime_class'
+ )
+ if (
+ datetime_class_type
+ and datetime_class_type is not DatetimeClassType.Datetime
+ and values.get('output_model_type') == DataModelType.DataclassesDataclass
+ ):
+ raise Error(
+ '`--output-datetime-class` only allows "datetime" for '
+ f'`--output-model-type` {DataModelType.DataclassesDataclass.value}'
+ )
+ return values
+
  # Pydantic 1.5.1 doesn't support each_item=True correctly
  @field_validator('http_headers', mode='before')
  def validate_http_headers(cls, value: Any) -> Optional[List[Tuple[str, str]]]:
@@ -313,6 +339,8 @@ class Config(BaseModel):
  treat_dot_as_module: bool = False
  use_exact_imports: bool = False
  union_mode: Optional[UnionMode] = None
+ output_datetime_class: Optional[DatetimeClassType] = None
+ keyword_only: bool = False

  def merge_args(self, args: Namespace) -> None:
  set_args = {
@@ -512,6 +540,8 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
  treat_dots_as_module=config.treat_dot_as_module,
  use_exact_imports=config.use_exact_imports,
  union_mode=config.union_mode,
+ output_datetime_class=config.output_datetime_class,
+ keyword_only=config.keyword_only,
  )
  return Exit.OK
  except InvalidClassNameError as e:
datamodel_code_generator/arguments.py

@@ -6,7 +6,7 @@ from operator import attrgetter
  from typing import TYPE_CHECKING

  from datamodel_code_generator import DataModelType, InputFileType, OpenAPIScope
- from datamodel_code_generator.format import PythonVersion
+ from datamodel_code_generator.format import DatetimeClassType, PythonVersion
  from datamodel_code_generator.model.pydantic_v2 import UnionMode
  from datamodel_code_generator.parser import LiteralType
  from datamodel_code_generator.types import StrictTypes
@@ -150,6 +150,12 @@ model_options.add_argument(
  action='store_true',
  default=None,
  )
+ model_options.add_argument(
+ '--keyword-only',
+ help='Defined models as keyword only (for example dataclass(kw_only=True)).',
+ action='store_true',
+ default=None,
+ )
  model_options.add_argument(
  '--reuse-model',
  help='Reuse models on the field when a module has the model with the same content',
@@ -192,6 +198,13 @@ model_options.add_argument(
  action='store_true',
  default=False,
  )
+ model_options.add_argument(
+ '--output-datetime-class',
+ help='Choose Datetime class between AwareDatetime, NaiveDatetime or datetime. '
+ 'Each output model has its default mapping (for example pydantic: datetime, dataclass: str, ...)',
+ choices=[i.value for i in DatetimeClassType],
+ default=None,
+ )

  # ======================================================================================
  # Typing options for generated models
datamodel_code_generator/format.py

@@ -17,6 +17,12 @@ except ImportError: # pragma: no cover
  black.mode = None


+ class DatetimeClassType(Enum):
+ Datetime = 'datetime'
+ Awaredatetime = 'AwareDatetime'
+ Naivedatetime = 'NaiveDatetime'
+
+
  class PythonVersion(Enum):
  PY_36 = '3.6'
  PY_37 = '3.7'
@@ -73,6 +79,10 @@ class PythonVersion(Enum):
  def has_typed_dict_non_required(self) -> bool:
  return self._is_py_311_or_later

+ @property
+ def has_kw_only_dataclass(self) -> bool:
+ return self._is_py_310_or_later
+

  if TYPE_CHECKING:

datamodel_code_generator/model/__init__.py

@@ -6,7 +6,7 @@ from ..types import DataTypeManager as DataTypeManagerABC
  from .base import ConstraintsBase, DataModel, DataModelFieldBase

  if TYPE_CHECKING:
- from .. import DataModelType, PythonVersion
+ from .. import DataModelType, DatetimeClassType, PythonVersion


  class DataModelSet(NamedTuple):
@@ -19,7 +19,9 @@ class DataModelSet(NamedTuple):


  def get_data_model_types(
- data_model_type: DataModelType, target_python_version: PythonVersion
+ data_model_type: DataModelType,
+ target_python_version: PythonVersion,
+ target_datetime_class: DatetimeClassType,
  ) -> DataModelSet:
  from .. import DataModelType
  from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
@@ -46,7 +48,7 @@
  data_model=dataclass.DataClass,
  root_model=rootmodel.RootModel,
  field_model=dataclass.DataModelField,
- data_type_manager=DataTypeManager,
+ data_type_manager=dataclass.DataTypeManager,
  dump_resolve_reference_action=None,
  )
  elif data_model_type == DataModelType.TypingTypedDict:
@@ -66,7 +68,7 @@
  data_model=msgspec.Struct,
  root_model=msgspec.RootModel,
  field_model=msgspec.DataModelField,
- data_type_manager=DataTypeManager,
+ data_type_manager=msgspec.DataTypeManager,
  dump_resolve_reference_action=None,
  known_third_party=['msgspec'],
  )
datamodel_code_generator/model/base.py

@@ -293,7 +293,9 @@ class DataModel(TemplateBase, Nullable, ABC):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ) -> None:
+ self.keyword_only = keyword_only
  if not self.TEMPLATE_FILE_PATH:
  raise Exception('TEMPLATE_FILE_PATH is undefined')

@@ -452,6 +454,7 @@ class DataModel(TemplateBase, Nullable, ABC):
  base_class=self.base_class,
  methods=self.methods,
  description=self.description,
+ keyword_only=self.keyword_only,
  **self.extra_template_data,
  )
  return response
datamodel_code_generator/model/dataclass.py

@@ -1,13 +1,32 @@
  from pathlib import Path
- from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Set, Tuple
-
- from datamodel_code_generator.imports import Import
+ from typing import (
+ Any,
+ ClassVar,
+ DefaultDict,
+ Dict,
+ List,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ )
+
+ from datamodel_code_generator import DatetimeClassType, PythonVersion
+ from datamodel_code_generator.imports import (
+ IMPORT_DATE,
+ IMPORT_DATETIME,
+ IMPORT_TIME,
+ IMPORT_TIMEDELTA,
+ Import,
+ )
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
  from datamodel_code_generator.model.base import UNDEFINED
  from datamodel_code_generator.model.imports import IMPORT_DATACLASS, IMPORT_FIELD
  from datamodel_code_generator.model.pydantic.base_model import Constraints
+ from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
+ from datamodel_code_generator.model.types import type_map_factory
  from datamodel_code_generator.reference import Reference
- from datamodel_code_generator.types import chain_as_tuple
+ from datamodel_code_generator.types import DataType, StrictTypes, Types, chain_as_tuple


  def _has_field_assignment(field: DataModelFieldBase) -> bool:
@@ -36,6 +55,7 @@ class DataClass(DataModel):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -50,6 +70,7 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )


@@ -118,3 +139,43 @@ class DataModelField(DataModelFieldBase):
  f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
  ]
  return f'field({", ".join(kwargs)})'
+
+
+ class DataTypeManager(_DataTypeManager):
+ def __init__(
+ self,
+ python_version: PythonVersion = PythonVersion.PY_38,
+ use_standard_collections: bool = False,
+ use_generic_container_types: bool = False,
+ strict_types: Optional[Sequence[StrictTypes]] = None,
+ use_non_positive_negative_number_constrained_types: bool = False,
+ use_union_operator: bool = False,
+ use_pendulum: bool = False,
+ target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ ):
+ super().__init__(
+ python_version,
+ use_standard_collections,
+ use_generic_container_types,
+ strict_types,
+ use_non_positive_negative_number_constrained_types,
+ use_union_operator,
+ use_pendulum,
+ target_datetime_class,
+ )
+
+ datetime_map = (
+ {
+ Types.time: self.data_type.from_import(IMPORT_TIME),
+ Types.date: self.data_type.from_import(IMPORT_DATE),
+ Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
+ Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
+ }
+ if target_datetime_class is DatetimeClassType.Datetime
+ else {}
+ )
+
+ self.type_map: Dict[Types, DataType] = {
+ **type_map_factory(self.data_type),
+ **datetime_map,
+ }
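The new dataclass DataTypeManager only overlays the temporal types when target_datetime_class is exactly DatetimeClassType.Datetime (for example via `--output-datetime-class datetime`); otherwise the base type_map_factory mapping is kept (the CLI help above notes `str` as the dataclass default). A rough sketch of generated output under that override, with schema and field names illustrative:

    from dataclasses import dataclass
    from datetime import date, datetime, time, timedelta

    @dataclass
    class Event:
        starts_at: datetime   # format: date-time
        day: date             # format: date
        doors_open: time      # format: time
        duration: timedelta   # format: duration (assumed to map to Types.timedelta)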
datamodel_code_generator/model/enum.py

@@ -47,6 +47,7 @@ class Enum(DataModel):
  type_: Optional[Types] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ):
  super().__init__(
  reference=reference,
@@ -61,6 +62,7 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )

  if not base_classes and type_:

datamodel_code_generator/model/imports.py

@@ -2,6 +2,7 @@ from datamodel_code_generator.imports import Import

  IMPORT_DATACLASS = Import.from_full_path('dataclasses.dataclass')
  IMPORT_FIELD = Import.from_full_path('dataclasses.field')
+ IMPORT_CLASSVAR = Import.from_full_path('typing.ClassVar')
  IMPORT_TYPED_DICT = Import.from_full_path('typing.TypedDict')
  IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path('typing_extensions.TypedDict')
  IMPORT_NOT_REQUIRED = Import.from_full_path('typing.NotRequired')
datamodel_code_generator/model/msgspec.py

@@ -7,6 +7,7 @@ from typing import (
  Dict,
  List,
  Optional,
+ Sequence,
  Set,
  Tuple,
  Type,
@@ -15,10 +16,18 @@ from typing import (

  from pydantic import Field

- from datamodel_code_generator.imports import Import
+ from datamodel_code_generator import DatetimeClassType, PythonVersion
+ from datamodel_code_generator.imports import (
+ IMPORT_DATE,
+ IMPORT_DATETIME,
+ IMPORT_TIME,
+ IMPORT_TIMEDELTA,
+ Import,
+ )
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
  from datamodel_code_generator.model.base import UNDEFINED
  from datamodel_code_generator.model.imports import (
+ IMPORT_CLASSVAR,
  IMPORT_MSGSPEC_CONVERT,
  IMPORT_MSGSPEC_FIELD,
  IMPORT_MSGSPEC_META,
@@ -28,8 +37,16 @@ from datamodel_code_generator.model.pydantic.base_model import (
  Constraints as _Constraints,
  )
  from datamodel_code_generator.model.rootmodel import RootModel as _RootModel
+ from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
+ from datamodel_code_generator.model.types import type_map_factory
  from datamodel_code_generator.reference import Reference
- from datamodel_code_generator.types import chain_as_tuple, get_optional_type
+ from datamodel_code_generator.types import (
+ DataType,
+ StrictTypes,
+ Types,
+ chain_as_tuple,
+ get_optional_type,
+ )


  def _has_field_assignment(field: DataModelFieldBase) -> bool:
@@ -56,6 +73,8 @@ def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]
  extra_imports.append(IMPORT_MSGSPEC_CONVERT)
  if self.annotated:
  extra_imports.append(IMPORT_MSGSPEC_META)
+ if self.extras.get('is_classvar'):
+ extra_imports.append(IMPORT_CLASSVAR)
  return chain_as_tuple(original_imports.fget(self), extra_imports) # type: ignore

  setattr(cls, 'imports', property(new_imports))
@@ -86,6 +105,7 @@ class Struct(DataModel):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -100,7 +120,12 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )
+ self.extra_template_data.setdefault('base_class_kwargs', {})
+
+ def add_base_class_kwarg(self, name: str, value):
+ self.extra_template_data['base_class_kwargs'][name] = value


  class Constraints(_Constraints):
@@ -239,11 +264,16 @@ class DataModelField(DataModelFieldBase):

  meta = f'Meta({", ".join(meta_arguments)})'

- if not self.required:
+ if not self.required and not self.extras.get('is_classvar'):
  type_hint = self.data_type.type_hint
  annotated_type = f'Annotated[{type_hint}, {meta}]'
  return get_optional_type(annotated_type, self.data_type.use_union_operator)
- return f'Annotated[{self.type_hint}, {meta}]'
+
+ annotated_type = f'Annotated[{self.type_hint}, {meta}]'
+ if self.extras.get('is_classvar'):
+ annotated_type = f'ClassVar[{annotated_type}]'
+
+ return annotated_type

  def _get_default_as_struct_model(self) -> Optional[str]:
  for data_type in self.data_type.data_types or (self.data_type,):
@@ -265,3 +295,43 @@
  elif data_type.reference and isinstance(data_type.reference.source, Struct):
  return f'lambda: {self._PARSE_METHOD}({repr(self.default)}, type={data_type.alias or data_type.reference.source.class_name})'
  return None
+
+
+ class DataTypeManager(_DataTypeManager):
+ def __init__(
+ self,
+ python_version: PythonVersion = PythonVersion.PY_38,
+ use_standard_collections: bool = False,
+ use_generic_container_types: bool = False,
+ strict_types: Optional[Sequence[StrictTypes]] = None,
+ use_non_positive_negative_number_constrained_types: bool = False,
+ use_union_operator: bool = False,
+ use_pendulum: bool = False,
+ target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ ):
+ super().__init__(
+ python_version,
+ use_standard_collections,
+ use_generic_container_types,
+ strict_types,
+ use_non_positive_negative_number_constrained_types,
+ use_union_operator,
+ use_pendulum,
+ target_datetime_class,
+ )
+
+ datetime_map = (
+ {
+ Types.time: self.data_type.from_import(IMPORT_TIME),
+ Types.date: self.data_type.from_import(IMPORT_DATE),
+ Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
+ Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
+ }
+ if target_datetime_class is DatetimeClassType.Datetime
+ else {}
+ )
+
+ self.type_map: Dict[Types, DataType] = {
+ **type_map_factory(self.data_type),
+ **datetime_map,
+ }
datamodel_code_generator/model/pydantic/base_model.py

@@ -225,6 +225,7 @@ class BaseModelBase(DataModel, ABC):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ) -> None:
  methods: List[str] = [field.method for field in fields if field.method]

@@ -241,6 +242,7 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )

  @cached_property
@@ -275,6 +277,7 @@ class BaseModel(BaseModelBase):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -288,6 +291,7 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )
  config_parameters: Dict[str, Any] = {}

datamodel_code_generator/model/pydantic/types.py

@@ -3,7 +3,7 @@ from __future__ import annotations
  from decimal import Decimal
  from typing import Any, ClassVar, Dict, Optional, Sequence, Set, Type

- from datamodel_code_generator.format import PythonVersion
+ from datamodel_code_generator.format import DatetimeClassType, PythonVersion
  from datamodel_code_generator.imports import (
  IMPORT_ANY,
  IMPORT_DATE,
@@ -59,6 +59,7 @@ def type_map_factory(
  strict_types: Sequence[StrictTypes],
  pattern_key: str,
  use_pendulum: bool,
+ target_datetime_class: DatetimeClassType,
  ) -> Dict[Types, DataType]:
  data_type_int = data_type(type='int')
  data_type_float = data_type(type='float')
@@ -162,6 +163,7 @@ class DataTypeManager(_DataTypeManager):
  use_non_positive_negative_number_constrained_types: bool = False,
  use_union_operator: bool = False,
  use_pendulum: bool = False,
+ target_datetime_class: Optional[DatetimeClassType] = None,
  ):
  super().__init__(
  python_version,
@@ -171,12 +173,14 @@
  use_non_positive_negative_number_constrained_types,
  use_union_operator,
  use_pendulum,
+ target_datetime_class,
  )

  self.type_map: Dict[Types, DataType] = self.type_map_factory(
  self.data_type,
  strict_types=self.strict_types,
  pattern_key=self.PATTERN_KEY,
+ target_datetime_class=target_datetime_class,
  )
  self.strict_type_map: Dict[StrictTypes, DataType] = strict_type_map_factory(
  self.data_type,
@@ -200,8 +204,15 @@
  data_type: Type[DataType],
  strict_types: Sequence[StrictTypes],
  pattern_key: str,
+ target_datetime_class: DatetimeClassType,
  ) -> Dict[Types, DataType]:
- return type_map_factory(data_type, strict_types, pattern_key, self.use_pendulum)
+ return type_map_factory(
+ data_type,
+ strict_types,
+ pattern_key,
+ self.use_pendulum,
+ self.target_datetime_class,
+ )

  def transform_kwargs(
  self, kwargs: Dict[str, Any], filter_: Set[str]
datamodel_code_generator/model/pydantic_v2/__init__.py

@@ -23,6 +23,7 @@ class ConfigDict(_BaseModel):
  arbitrary_types_allowed: Optional[bool] = None
  protected_namespaces: Optional[Tuple[str, ...]] = None
  regex_engine: Optional[str] = None
+ use_enum_values: Optional[bool] = None


  __all__ = [

datamodel_code_generator/model/pydantic_v2/base_model.py

@@ -183,6 +183,7 @@ class BaseModel(BaseModelBase):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -196,6 +197,7 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )
  config_parameters: Dict[str, Any] = {}


datamodel_code_generator/model/pydantic_v2/imports.py

@@ -2,3 +2,4 @@ from datamodel_code_generator.imports import Import

  IMPORT_CONFIG_DICT = Import.from_full_path('pydantic.ConfigDict')
  IMPORT_AWARE_DATETIME = Import.from_full_path('pydantic.AwareDatetime')
+ IMPORT_NAIVE_DATETIME = Import.from_full_path('pydantic.NaiveDatetime')
datamodel_code_generator/model/pydantic_v2/types.py

@@ -1,10 +1,14 @@
  from __future__ import annotations

- from typing import ClassVar, Dict, Sequence, Type
+ from typing import ClassVar, Dict, Optional, Sequence, Type

+ from datamodel_code_generator.format import DatetimeClassType
  from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
  from datamodel_code_generator.model.pydantic.imports import IMPORT_CONSTR
- from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_AWARE_DATETIME
+ from datamodel_code_generator.model.pydantic_v2.imports import (
+ IMPORT_AWARE_DATETIME,
+ IMPORT_NAIVE_DATETIME,
+ )
  from datamodel_code_generator.types import DataType, StrictTypes, Types


@@ -16,9 +20,12 @@ class DataTypeManager(_DataTypeManager):
  data_type: Type[DataType],
  strict_types: Sequence[StrictTypes],
  pattern_key: str,
+ target_datetime_class: Optional[DatetimeClassType] = None,
  ) -> Dict[Types, DataType]:
- return {
- **super().type_map_factory(data_type, strict_types, pattern_key),
+ result = {
+ **super().type_map_factory(
+ data_type, strict_types, pattern_key, target_datetime_class
+ ),
  Types.hostname: self.data_type.from_import(
  IMPORT_CONSTR,
  strict=StrictTypes.str in strict_types,
@@ -28,5 +35,9 @@
  **({'strict': True} if StrictTypes.str in strict_types else {}),
  },
  ),
- Types.date_time: data_type.from_import(IMPORT_AWARE_DATETIME),
  }
+ if target_datetime_class == DatetimeClassType.Awaredatetime:
+ result[Types.date_time] = data_type.from_import(IMPORT_AWARE_DATETIME)
+ if target_datetime_class == DatetimeClassType.Naivedatetime:
+ result[Types.date_time] = data_type.from_import(IMPORT_NAIVE_DATETIME)
+ return result
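For pydantic v2 output the date_time entry is no longer forced to pydantic.AwareDatetime; it is now an opt-in override, and without the flag the mapping falls back to whatever the parent pydantic manager produces (plain datetime in its pre-existing default map, as far as this diff shows). A hedged sketch of the three outcomes for a single `format: date-time` property (model and field names illustrative):

    from datetime import datetime

    from pydantic import AwareDatetime, BaseModel, NaiveDatetime

    class EventAware(BaseModel):    # --output-datetime-class AwareDatetime
        created_at: AwareDatetime

    class EventNaive(BaseModel):    # --output-datetime-class NaiveDatetime
        created_at: NaiveDatetime

    class EventDefault(BaseModel):  # no flag: inherited default mapping
        created_at: datetime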
datamodel_code_generator/model/scalar.py

@@ -46,6 +46,7 @@ class DataTypeScalar(DataModel):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ):
  extra_template_data = extra_template_data or defaultdict(dict)

@@ -75,4 +76,5 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )

datamodel_code_generator/model/template/dataclass.jinja2

@@ -1,7 +1,7 @@
  {% for decorator in decorators -%}
  {{ decorator }}
  {% endfor -%}
- @dataclass
+ @dataclass{%- if keyword_only -%}(kw_only=True){%- endif %}
  {%- if base_class %}
  class {{ class_name }}({{ base_class }}):
  {%- else %}
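Rendered with keyword_only set, the decorator line gains the kw_only argument; a hedged sketch of the output for an illustrative model (without the flag it stays a bare @dataclass):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass(kw_only=True)
    class Pet:
        name: str
        age: Optional[int] = None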
datamodel_code_generator/model/template/msgspec.jinja2

@@ -2,7 +2,9 @@
  {{ decorator }}
  {% endfor -%}
  {%- if base_class %}
- class {{ class_name }}({{ base_class }}):
+ class {{ class_name }}({{ base_class }}{%- for key, value in (base_class_kwargs|default({})).items() -%}
+ , {{ key }}={{ value }}
+ {%- endfor -%}):
  {%- else %}
  class {{ class_name }}:
  {%- endif %}

datamodel_code_generator/model/typed_dict.py

@@ -63,6 +63,7 @@ class TypedDict(DataModel):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -77,6 +78,7 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )

  @property
datamodel_code_generator/model/types.py

@@ -1,6 +1,6 @@
  from typing import Any, Dict, Optional, Sequence, Type

- from datamodel_code_generator import PythonVersion
+ from datamodel_code_generator import DatetimeClassType, PythonVersion
  from datamodel_code_generator.imports import (
  IMPORT_ANY,
  IMPORT_DECIMAL,
@@ -10,9 +10,7 @@ from datamodel_code_generator.types import DataType, StrictTypes, Types
  from datamodel_code_generator.types import DataTypeManager as _DataTypeManager


- def type_map_factory(
- data_type: Type[DataType],
- ) -> Dict[Types, DataType]:
+ def type_map_factory(data_type: Type[DataType]) -> Dict[Types, DataType]:
  data_type_int = data_type(type='int')
  data_type_float = data_type(type='float')
  data_type_str = data_type(type='str')
@@ -64,6 +62,7 @@ class DataTypeManager(_DataTypeManager):
  use_non_positive_negative_number_constrained_types: bool = False,
  use_union_operator: bool = False,
  use_pendulum: bool = False,
+ target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
  ):
  super().__init__(
  python_version,
@@ -73,11 +72,10 @@
  use_non_positive_negative_number_constrained_types,
  use_union_operator,
  use_pendulum,
+ target_datetime_class,
  )

- self.type_map: Dict[Types, DataType] = type_map_factory(
- self.data_type,
- )
+ self.type_map: Dict[Types, DataType] = type_map_factory(self.data_type)

  def get_data_type(
  self,

datamodel_code_generator/model/union.py

@@ -32,6 +32,7 @@ class DataTypeUnion(DataModel):
  description: Optional[str] = None,
  default: Any = UNDEFINED,
  nullable: bool = False,
+ keyword_only: bool = False,
  ):
  super().__init__(
  reference=reference,
@@ -46,4 +47,5 @@
  description=description,
  default=default,
  nullable=nullable,
+ keyword_only=keyword_only,
  )
datamodel_code_generator/parser/base.py

@@ -26,7 +26,11 @@ from urllib.parse import ParseResult

  from pydantic import BaseModel

- from datamodel_code_generator.format import CodeFormatter, PythonVersion
+ from datamodel_code_generator.format import (
+ CodeFormatter,
+ DatetimeClassType,
+ PythonVersion,
+ )
  from datamodel_code_generator.imports import (
  IMPORT_ANNOTATIONS,
  IMPORT_LITERAL,
@@ -34,6 +38,8 @@ from datamodel_code_generator.imports import (
  Import,
  Imports,
  )
+ from datamodel_code_generator.model import dataclass as dataclass_model
+ from datamodel_code_generator.model import msgspec as msgspec_model
  from datamodel_code_generator.model import pydantic as pydantic_model
  from datamodel_code_generator.model import pydantic_v2 as pydantic_model_v2
  from datamodel_code_generator.model.base import (
@@ -404,7 +410,10 @@ class Parser(ABC):
  treat_dots_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: Optional[Dict[str, Any]] = None,
+ target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ keyword_only: bool = False,
  ) -> None:
+ self.keyword_only = keyword_only
  self.data_type_manager: DataTypeManager = data_type_manager_type(
  python_version=target_python_version,
  use_standard_collections=use_standard_collections,
@@ -412,6 +421,7 @@
  strict_types=strict_types,
  use_union_operator=use_union_operator,
  use_pendulum=use_pendulum,
+ target_datetime_class=target_datetime_class,
  )
  self.data_model_type: Type[DataModel] = data_model_type
  self.data_model_root_type: Type[DataModel] = data_model_root_type
@@ -794,31 +804,54 @@ class Parser(ABC):
  if not data_type.reference: # pragma: no cover
  continue
  discriminator_model = data_type.reference.source
+
  if not isinstance( # pragma: no cover
  discriminator_model,
- (pydantic_model.BaseModel, pydantic_model_v2.BaseModel),
+ (
+ pydantic_model.BaseModel,
+ pydantic_model_v2.BaseModel,
+ dataclass_model.DataClass,
+ msgspec_model.Struct,
+ ),
  ):
  continue # pragma: no cover
- type_names = []
- if mapping:
+
+ type_names: List[str] = []
+
+ def check_paths(
+ model: Union[
+ pydantic_model.BaseModel,
+ pydantic_model_v2.BaseModel,
+ Reference,
+ ],
+ mapping: Dict[str, str],
+ type_names: List[str] = type_names,
+ ) -> None:
+ """Helper function to validate paths for a given model."""
  for name, path in mapping.items():
  if (
- discriminator_model.path.split('#/')[-1]
- != path.split('#/')[-1]
+ model.path.split('#/')[-1] != path.split('#/')[-1]
+ ) and (
+ path.startswith('#/')
+ or model.path[:-1] != path.split('/')[-1]
  ):
- if (
- path.startswith('#/')
- or discriminator_model.path[:-1]
- != path.split('/')[-1]
- ):
- t_path = path[str(path).find('/') + 1 :]
- t_disc = discriminator_model.path[
- : str(discriminator_model.path).find('#')
- ].lstrip('../')
- t_disc_2 = '/'.join(t_disc.split('/')[1:])
- if t_path != t_disc and t_path != t_disc_2:
- continue
+ t_path = path[str(path).find('/') + 1 :]
+ t_disc = model.path[: str(model.path).find('#')].lstrip(
+ '../'
+ )
+ t_disc_2 = '/'.join(t_disc.split('/')[1:])
+ if t_path != t_disc and t_path != t_disc_2:
+ continue
  type_names.append(name)
+
+ # Check the main discriminator model path
+ if mapping:
+ check_paths(discriminator_model, mapping)
+
+ # Check the base_classes if they exist
+ if len(type_names) == 0:
+ for base_class in discriminator_model.base_classes:
+ check_paths(base_class.reference, mapping)
  else:
  type_names = [discriminator_model.path.split('/')[-1]]
  if not type_names: # pragma: no cover
@@ -839,6 +872,16 @@ class Parser(ABC):
  else None
  ):
  has_one_literal = True
+ if isinstance(
+ discriminator_model, msgspec_model.Struct
+ ): # pragma: no cover
+ discriminator_model.add_base_class_kwarg(
+ 'tag_field', f"'{property_name}'"
+ )
+ discriminator_model.add_base_class_kwarg(
+ 'tag', discriminator_field.represented_default
+ )
+ discriminator_field.extras['is_classvar'] = True
  continue
  for (
  field_data_type
@@ -866,7 +909,8 @@ class Parser(ABC):
  else IMPORT_LITERAL_BACKPORT
  )
  has_imported_literal = any(
- literal == import_ for import_ in imports
+ literal == import_ # type: ignore [comparison-overlap]
+ for import_ in imports
  )
  if has_imported_literal: # pragma: no cover
  imports.append(literal)
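For msgspec output, the net effect of the new branch is that a discriminated-union member gets its tag configuration attached to the class statement through add_base_class_kwarg (rendered by the updated msgspec.jinja2 template above), while the discriminator property itself is marked is_classvar so it is emitted as a ClassVar annotation rather than an instance field. A hedged sketch of the resulting shape, with every schema name illustrative:

    import msgspec

    class Pet(msgspec.Struct):
        name: str

    # discriminator propertyName 'pet_type', mapping entry 'cat' -> Cat
    class Cat(Pet, tag_field='pet_type', tag='cat'):
        meows: bool = True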
datamodel_code_generator/parser/graphql.py

@@ -46,6 +46,7 @@ except ImportError: # pragma: no cover
  "Please run `$pip install 'datamodel-code-generator[graphql]`' to generate data-model from a GraphQL schema."
  )

+ from datamodel_code_generator.format import DatetimeClassType

  graphql_resolver = graphql.type.introspection.TypeResolvers()

@@ -157,6 +158,8 @@ class GraphQLParser(Parser):
  treat_dots_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: Optional[Dict[str, Any]] = None,
+ target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ keyword_only: bool = False,
  ) -> None:
  super().__init__(
  source=source,
@@ -227,6 +230,8 @@
  treat_dots_as_module=treat_dots_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
+ target_datetime_class=target_datetime_class,
+ keyword_only=keyword_only,
  )

  self.data_model_scalar_type = data_model_scalar_type
@@ -460,6 +465,7 @@
  extra_template_data=self.extra_template_data,
  path=self.current_source_path,
  description=obj.description,
+ keyword_only=self.keyword_only,
  )
  self.results.append(data_model_type)

datamodel_code_generator/parser/jsonschema.py

@@ -71,6 +71,8 @@ from datamodel_code_generator.util import (
  if PYDANTIC_V2:
  from pydantic import ConfigDict

+ from datamodel_code_generator.format import DatetimeClassType
+

  def get_model_by_path(
  schema: Union[Dict[str, Any], List[Any]], keys: Union[List[str], List[int]]
@@ -444,6 +446,8 @@ class JsonSchemaParser(Parser):
  treat_dots_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: Optional[Dict[str, Any]] = None,
+ target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ keyword_only: bool = False,
  ) -> None:
  super().__init__(
  source=source,
@@ -514,6 +518,8 @@
  treat_dots_as_module=treat_dots_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
+ target_datetime_class=target_datetime_class,
+ keyword_only=keyword_only,
  )

  self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
@@ -804,6 +810,7 @@
  extra_template_data=self.extra_template_data,
  path=self.current_source_path,
  description=obj.description if self.use_schema_description else None,
+ keyword_only=self.keyword_only,
  )
  self.results.append(data_model_type)

@@ -1047,6 +1054,7 @@
  path=self.current_source_path,
  description=obj.description if self.use_schema_description else None,
  nullable=obj.type_has_null,
+ keyword_only=self.keyword_only,
  )
  self.results.append(data_model_type)
  return self.data_type(reference=reference)
datamodel_code_generator/parser/openapi.py

@@ -36,6 +36,7 @@ from datamodel_code_generator import (
  load_yaml,
  snooper_to_methods,
  )
+ from datamodel_code_generator.format import DatetimeClassType
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
  from datamodel_code_generator.model import pydantic as pydantic_model
  from datamodel_code_generator.parser.base import get_special_path
@@ -225,6 +226,8 @@ class OpenAPIParser(JsonSchemaParser):
  treat_dots_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: Optional[Dict[str, Any]] = None,
+ target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ keyword_only: bool = False,
  ):
  super().__init__(
  source=source,
@@ -295,6 +298,8 @@
  treat_dots_as_module=treat_dots_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
+ target_datetime_class=target_datetime_class,
+ keyword_only=keyword_only,
  )
  self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
  OpenAPIScope.Schemas
@@ -508,6 +513,7 @@
  fields=fields,
  reference=reference,
  custom_base_class=self.base_class,
+ keyword_only=self.keyword_only,
  )
  )

datamodel_code_generator/types.py

@@ -27,7 +27,7 @@ import pydantic
  from packaging import version
  from pydantic import StrictBool, StrictInt, StrictStr, create_model

- from datamodel_code_generator.format import PythonVersion
+ from datamodel_code_generator.format import DatetimeClassType, PythonVersion
  from datamodel_code_generator.imports import (
  IMPORT_ABC_MAPPING,
  IMPORT_ABC_SEQUENCE,
@@ -575,6 +575,7 @@ class DataTypeManager(ABC):
  use_non_positive_negative_number_constrained_types: bool = False,
  use_union_operator: bool = False,
  use_pendulum: bool = False,
+ target_datetime_class: Optional[DatetimeClassType] = None,
  ) -> None:
  self.python_version = python_version
  self.use_standard_collections: bool = use_standard_collections
@@ -585,6 +586,7 @@
  )
  self.use_union_operator: bool = use_union_operator
  self.use_pendulum: bool = use_pendulum
+ self.target_datetime_class: DatetimeClassType = target_datetime_class

  if (
  use_generic_container_types and python_version == PythonVersion.PY_36
datamodel_code_generator-0.26.3/datamodel_code_generator/version.py (added)

@@ -0,0 +1 @@
+ version: str = '0.26.3'

pyproject.toml

@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "datamodel-code-generator"
- version = "0.26.1"
+ version = "0.26.3"
  description = "Datamodel Code Generator"
  authors = ["Koudai Aono <koxudaxi@gmail.com>"]
  readme = "README.md"
@@ -68,7 +68,7 @@ pytest-benchmark = "*"
  pytest-cov = ">=2.12.1"
  pytest-mock = "*"
  mypy = ">=1.4.1,<1.5.0"
- black = "^23.3.0"
+ black = ">=23.3,<25.0"
  freezegun = "*"
  types-Jinja2 = "*"
  types-PyYAML = "*"
@@ -77,7 +77,7 @@ types-setuptools = ">=67.6.0.5,<70.0.0.0"
  pydantic = "*"
  httpx = ">=0.24.1"
  PySnooper = "*"
- ruff = ">=0.0.290,<0.5.7"
+ ruff = ">=0.0.290,<0.6.10"
  ruff-lsp = ">=0.0.39,<0.0.41"
  pre-commit = "*"
  pytest-xdist = "^3.3.1"

datamodel_code_generator-0.26.1/datamodel_code_generator/version.py (removed)

@@ -1 +0,0 @@
- version: str = '0.26.1'