datamodel-code-generator 0.26.3__py3-none-any.whl → 0.26.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datamodel-code-generator might be problematic.

@@ -231,7 +231,7 @@ def get_first_file(path: Path) -> Path:  # pragma: no cover


 def generate(
-    input_: Union[Path, str, ParseResult],
+    input_: Union[Path, str, ParseResult, Mapping[str, Any]],
     *,
     input_filename: Optional[str] = None,
     input_file_type: InputFileType = InputFileType.Auto,
@@ -303,6 +303,7 @@ def generate(
     union_mode: Optional[UnionMode] = None,
     output_datetime_class: Optional[DatetimeClassType] = None,
     keyword_only: bool = False,
+    no_alias: bool = False,
 ) -> None:
     remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
     if isinstance(input_, str):
@@ -353,6 +354,8 @@ def generate(
        parser_class = JsonSchemaParser

     if input_file_type in RAW_DATA_TYPES:
+        import json
+
         try:
             if isinstance(input_, Path) and input_.is_dir():  # pragma: no cover
                 raise Error(f'Input must be a file for {input_file_type}')
@@ -371,15 +374,33 @@ def generate(
                    import io

                    obj = get_header_and_first_line(io.StringIO(input_text))
-            else:
+            elif input_file_type == InputFileType.Yaml:
                 obj = load_yaml(
                     input_.read_text(encoding=encoding)  # type: ignore
                     if isinstance(input_, Path)
                     else input_text
                 )
+            elif input_file_type == InputFileType.Json:
+                obj = json.loads(
+                    input_.read_text(encoding=encoding)  # type: ignore
+                    if isinstance(input_, Path)
+                    else input_text
+                )
+            elif input_file_type == InputFileType.Dict:
+                import ast
+
+                # Input can be a dict object stored in a python file
+                obj = (
+                    ast.literal_eval(
+                        input_.read_text(encoding=encoding)  # type: ignore
+                    )
+                    if isinstance(input_, Path)
+                    else input_
+                )
+            else:  # pragma: no cover
+                raise Error(f'Unsupported input file type: {input_file_type}')
         except:  # noqa
             raise Error('Invalid file format')
-        import json

         from genson import SchemaBuilder

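The hunks above are the core of the new raw-data input handling: generate() now accepts a mapping directly and dispatches on the declared input type instead of treating everything after CSV as YAML. A minimal usage sketch (the sample payload and output path are made up; the call otherwise only uses parameters visible in this diff):

from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate

raw_sample = {'name': 'Alice', 'age': 30, 'tags': ['admin']}  # hypothetical payload

generate(
    raw_sample,                           # dict passed directly, no temp file needed
    input_file_type=InputFileType.Dict,   # raw-data mode: a schema is inferred via genson
    output=Path('model.py'),
    output_model_type=DataModelType.PydanticV2BaseModel,
)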
@@ -478,6 +499,7 @@ def generate(
         default_field_extras=default_field_extras,
         target_datetime_class=output_datetime_class,
         keyword_only=keyword_only,
+        no_alias=no_alias,
         **kwargs,
     )

@@ -488,6 +510,9 @@ def generate(
             input_filename = '<stdin>'
         elif isinstance(input_, ParseResult):
             input_filename = input_.geturl()
+        elif input_file_type == InputFileType.Dict:
+            # input_ might be a dict object provided directly, and missing a name field
+            input_filename = getattr(input_, 'name', '<dict>')
         else:
             input_filename = input_.name
     if not results:
@@ -53,7 +53,6 @@ from datamodel_code_generator.arguments import DEFAULT_ENCODING, arg_parser, nam
 from datamodel_code_generator.format import (
     DatetimeClassType,
     PythonVersion,
-    black_find_project_root,
     is_supported_in_black,
 )
 from datamodel_code_generator.parser import LiteralType
@@ -186,8 +185,13 @@ class Config(BaseModel):

     @model_validator(mode='after')
     def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        output_model_type: DataModelType = values.get('output_model_type')
         python_target: PythonVersion = values.get('target_python_version')
-        if values.get('keyword_only') and not python_target.has_kw_only_dataclass:
+        if (
+            values.get('keyword_only')
+            and output_model_type == DataModelType.DataclassesDataclass
+            and not python_target.has_kw_only_dataclass
+        ):
             raise Error(
                 f'`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher.'
             )
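For context on the relaxed validator above: dataclasses.dataclass(kw_only=True) only exists on Python 3.10+, so the version gate now applies only when the output model type is actually a dataclass. A standalone reminder of the underlying 3.10 feature (not code from this package):

from dataclasses import dataclass

@dataclass(kw_only=True)  # TypeError on Python < 3.10, fine on 3.10+
class Point:
    x: int
    y: int

Point(x=1, y=2)  # Point(1, 2) would be rejected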
@@ -341,6 +345,7 @@ class Config(BaseModel):
     union_mode: Optional[UnionMode] = None
     output_datetime_class: Optional[DatetimeClassType] = None
     keyword_only: bool = False
+    no_alias: bool = False

     def merge_args(self, args: Namespace) -> None:
         set_args = {
@@ -360,6 +365,26 @@ class Config(BaseModel):
             setattr(self, field_name, getattr(parsed_args, field_name))


+def _get_pyproject_toml_config(source: Path) -> Optional[Dict[str, Any]]:
+    """Find and return the [tool.datamodel-codgen] section of the closest
+    pyproject.toml if it exists.
+    """
+
+    current_path = source
+    while current_path != current_path.parent:
+        if (current_path / 'pyproject.toml').is_file():
+            pyproject_toml = load_toml(current_path / 'pyproject.toml')
+            if 'datamodel-codegen' in pyproject_toml.get('tool', {}):
+                return pyproject_toml['tool']['datamodel-codegen']
+
+        if (current_path / '.git').exists():
+            # Stop early if we see a git repository root.
+            return None
+
+        current_path = current_path.parent
+    return None
+
+
 def main(args: Optional[Sequence[str]] = None) -> Exit:
     """Main function."""

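A hedged sketch of the lookup the new helper performs (directory layout and config values are hypothetical, and the helper is private, so the import below is for illustration only): the walk starts at the given directory, returns the first [tool.datamodel-codegen] table it finds, and stops at a .git root so an unrelated pyproject.toml higher up is never used.

from pathlib import Path

from datamodel_code_generator.__main__ import _get_pyproject_toml_config

# repo/
# ├── .git/
# ├── pyproject.toml            # contains a [tool.datamodel-codegen] table
# └── services/billing/         # <- the CLI is run from here
config = _get_pyproject_toml_config(Path('repo/services/billing').resolve())
# -> the table from repo/pyproject.toml, or None if no table was found on the way up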
@@ -377,16 +402,9 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
         print(version)
         exit(0)

-    root = black_find_project_root((Path().resolve(),))
-    pyproject_toml_path = root / 'pyproject.toml'
-    if pyproject_toml_path.is_file():
-        pyproject_toml: Dict[str, Any] = {
-            k.replace('-', '_'): v
-            for k, v in load_toml(pyproject_toml_path)
-            .get('tool', {})
-            .get('datamodel-codegen', {})
-            .items()
-        }
+    pyproject_config = _get_pyproject_toml_config(Path().resolve())
+    if pyproject_config is not None:
+        pyproject_toml = {k.replace('-', '_'): v for k, v in pyproject_config.items()}
     else:
         pyproject_toml = {}

@@ -542,6 +560,7 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
             union_mode=config.union_mode,
             output_datetime_class=config.output_datetime_class,
             keyword_only=config.keyword_only,
+            no_alias=config.no_alias,
         )
         return Exit.OK
     except InvalidClassNameError as e:
@@ -98,7 +98,7 @@ base_options.add_argument(
 # ======================================================================================
 model_options.add_argument(
     '--allow-extra-fields',
-    help='Allow to pass extra fields, if this flag is not passed, extra fields are forbidden.',
+    help='Allow passing extra fields, if this flag is not passed, extra fields are forbidden.',
     action='store_true',
     default=None,
 )
@@ -381,6 +381,13 @@ field_options.add_argument(
     choices=[u.value for u in UnionMode],
     default=None,
 )
+field_options.add_argument(
+    '--no-alias',
+    help="""Do not add a field alias. E.g., if --snake-case-field is used along with a base class, which has an
+    alias_generator""",
+    action='store_true',
+    default=None,
+)

 # ======================================================================================
 # Options for templating output
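The scenario the new --no-alias help text describes, sketched with pydantic v2 (class and field names are invented): when a custom base class already supplies an alias_generator, the per-field alias= arguments that --snake-case-field would otherwise emit are redundant, so the flag suppresses them.

from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel

class CamelBase(BaseModel):
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)

# With --no-alias, a generated model can rely on the base class alone:
class User(CamelBase):
    user_id: int  # parsed/serialized as "userId" via the alias_generator

print(User(userId=1).user_id)  # 1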
@@ -31,6 +31,7 @@ class PythonVersion(Enum):
     PY_310 = '3.10'
     PY_311 = '3.11'
     PY_312 = '3.12'
+    PY_313 = '3.13'

     @cached_property
     def _is_py_38_or_later(self) -> bool:  # pragma: no cover
@@ -43,7 +43,7 @@ class Imports(DefaultDict[Optional[str], Set[str]]):

     def create_line(self, from_: Optional[str], imports: Set[str]) -> str:
         if from_:
-            return f"from {from_} import {', '.join(self._set_alias(from_, imports))}"
+            return f'from {from_} import {", ".join(self._set_alias(from_, imports))}'
         return '\n'.join(f'import {i}' for i in self._set_alias(from_, imports))

     def dump(self) -> str:
@@ -1,12 +1,19 @@
 from __future__ import annotations

+import sys
 from typing import TYPE_CHECKING, Callable, Iterable, List, NamedTuple, Optional, Type

+from .. import DatetimeClassType, PythonVersion
 from ..types import DataTypeManager as DataTypeManagerABC
 from .base import ConstraintsBase, DataModel, DataModelFieldBase

 if TYPE_CHECKING:
-    from .. import DataModelType, DatetimeClassType, PythonVersion
+    from .. import DataModelType
+
+DEFAULT_TARGET_DATETIME_CLASS = DatetimeClassType.Datetime
+DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(
+    f'{sys.version_info.major}.{sys.version_info.minor}'
+)


 class DataModelSet(NamedTuple):
@@ -20,8 +27,8 @@ class DataModelSet(NamedTuple):

 def get_data_model_types(
     data_model_type: DataModelType,
-    target_python_version: PythonVersion,
-    target_datetime_class: DatetimeClassType,
+    target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
+    target_datetime_class: DatetimeClassType = DEFAULT_TARGET_DATETIME_CLASS,
 ) -> DataModelSet:
     from .. import DataModelType
     from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
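With the defaults introduced above, get_data_model_types can be called with just the output model type; a hedged usage sketch (return value abbreviated):

from datamodel_code_generator import DataModelType
from datamodel_code_generator.model import get_data_model_types

# target_python_version falls back to the running interpreter's version,
# target_datetime_class falls back to DatetimeClassType.Datetime
types = get_data_model_types(DataModelType.PydanticV2BaseModel)
print(types.data_model, types.field_model)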
@@ -53,13 +60,17 @@ def get_data_model_types(
         )
     elif data_model_type == DataModelType.TypingTypedDict:
         return DataModelSet(
-            data_model=typed_dict.TypedDict
-            if target_python_version.has_typed_dict
-            else typed_dict.TypedDictBackport,
+            data_model=(
+                typed_dict.TypedDict
+                if target_python_version.has_typed_dict
+                else typed_dict.TypedDictBackport
+            ),
             root_model=rootmodel.RootModel,
-            field_model=typed_dict.DataModelField
-            if target_python_version.has_typed_dict_non_required
-            else typed_dict.DataModelFieldBackport,
+            field_model=(
+                typed_dict.DataModelField
+                if target_python_version.has_typed_dict_non_required
+                else typed_dict.DataModelFieldBackport
+            ),
             data_type_manager=DataTypeManager,
             dump_resolve_reference_action=None,
         )
@@ -1,5 +1,6 @@
 from abc import ABC, abstractmethod
 from collections import defaultdict
+from copy import deepcopy
 from functools import lru_cache
 from pathlib import Path
 from typing import (
@@ -118,6 +119,7 @@ class DataModelFieldBase(_BaseModel):
     _exclude_fields: ClassVar[Set[str]] = {'parent'}
     _pass_fields: ClassVar[Set[str]] = {'parent', 'data_type'}
     can_have_extra_keys: ClassVar[bool] = True
+    type_has_null: Optional[bool] = None

     if not TYPE_CHECKING:

@@ -150,6 +152,8 @@ class DataModelFieldBase(_BaseModel):
                 return get_optional_type(type_hint, self.data_type.use_union_operator)
             return type_hint
         elif self.required:
+            if self.type_has_null:
+                return get_optional_type(type_hint, self.data_type.use_union_operator)
             return type_hint
         elif self.fall_back_to_nullable:
             return get_optional_type(type_hint, self.data_type.use_union_operator)
@@ -316,6 +320,8 @@ class DataModel(TemplateBase, Nullable, ABC):
         self.reference.source = self

         self.extra_template_data = (
+            # The supplied defaultdict will either create a new entry,
+            # or already contain a predefined entry for this type
             extra_template_data[self.name]
             if extra_template_data is not None
             else defaultdict(dict)
@@ -327,10 +333,12 @@ class DataModel(TemplateBase, Nullable, ABC):
             if base_class.reference:
                 base_class.reference.children.append(self)

-        if extra_template_data:
+        if extra_template_data is not None:
             all_model_extra_template_data = extra_template_data.get(ALL_MODEL)
             if all_model_extra_template_data:
-                self.extra_template_data.update(all_model_extra_template_data)
+                # The deepcopy is needed here to ensure that different models don't
+                # end up inadvertently sharing state (such as "base_class_kwargs")
+                self.extra_template_data.update(deepcopy(all_model_extra_template_data))

         self.methods: List[str] = methods or []

@@ -415,7 +423,7 @@ class DataModel(TemplateBase, Nullable, ABC):
     def class_name(self, class_name: str) -> None:
         if '.' in self.reference.name:
             self.reference.name = (
-                f"{self.reference.name.rsplit('.', 1)[0]}.{class_name}"
+                f'{self.reference.name.rsplit(".", 1)[0]}.{class_name}'
             )
         else:
             self.reference.name = class_name
@@ -82,10 +82,22 @@ class Enum(DataModel):

     def find_member(self, value: Any) -> Optional[Member]:
         repr_value = repr(value)
-        for field in self.fields:  # pragma: no cover
-            if field.default == repr_value:
+        # Remove surrounding quotes from the string representation
+        str_value = str(value).strip('\'"')
+
+        for field in self.fields:
+            # Remove surrounding quotes from field default value
+            field_default = field.default.strip('\'"')
+
+            # Compare values after removing quotes
+            if field_default == str_value:
                 return self.get_member(field)
-        return None  # pragma: no cover
+
+            # Keep original comparison for backwards compatibility
+            if field.default == repr_value:  # pragma: no cover
+                return self.get_member(field)
+
+        return None

     @property
     def imports(self) -> Tuple[Import, ...]:
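A standalone reproduction of the quote-insensitive comparison find_member now performs (values are made up; the real method iterates parsed enum fields whose defaults are stored as repr() strings):

value = 'active'            # raw value looked up in the enum
field_default = "'active'"  # default as stored on the generated field
# New check: compare both sides with surrounding quotes stripped.
print(str(value).strip('\'"') == field_default.strip('\'"'))  # True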
@@ -31,7 +31,6 @@ from datamodel_code_generator.model.imports import (
     IMPORT_MSGSPEC_CONVERT,
     IMPORT_MSGSPEC_FIELD,
     IMPORT_MSGSPEC_META,
-    IMPORT_MSGSPEC_STRUCT,
 )
 from datamodel_code_generator.model.pydantic.base_model import (
     Constraints as _Constraints,
@@ -88,7 +87,7 @@ class RootModel(_RootModel):
 class Struct(DataModel):
     TEMPLATE_FILE_PATH: ClassVar[str] = 'msgspec.jinja2'
     BASE_CLASS: ClassVar[str] = 'msgspec.Struct'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_MSGSPEC_STRUCT,)
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()

     def __init__(
         self,
@@ -123,6 +122,8 @@ class Struct(DataModel):
             keyword_only=keyword_only,
         )
         self.extra_template_data.setdefault('base_class_kwargs', {})
+        if self.keyword_only:
+            self.add_base_class_kwarg('kw_only', 'True')

     def add_base_class_kwarg(self, name: str, value):
         self.extra_template_data['base_class_kwargs'][name] = value
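What the forwarded kw_only kwarg means for the generated output, shown directly with msgspec (field names are illustrative):

import msgspec

class User(msgspec.Struct, kw_only=True):
    name: str
    age: int

User(name='alice', age=30)  # fine
# User('alice', 30) raises a TypeError because positional arguments are disallowed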
@@ -65,6 +65,7 @@ def get_special_path(keyword: str, path: List[str]) -> List[str]:

 escape_characters = str.maketrans(
     {
+        '\u0000': r'\x00',  # Null byte
         '\\': r'\\',
         "'": r'\'',
         '\b': r'\b',
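A standalone reproduction of the escape-table entry added above, showing how a stray null byte in schema text is rendered harmless in generated source:

escape_characters = str.maketrans({
    '\u0000': r'\x00',  # Null byte
    '\\': r'\\',
    "'": r'\'',
})
print('bad\u0000value'.translate(escape_characters))  # bad\x00value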
@@ -412,6 +413,7 @@ class Parser(ABC):
         default_field_extras: Optional[Dict[str, Any]] = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        no_alias: bool = False,
     ) -> None:
         self.keyword_only = keyword_only
         self.data_type_manager: DataTypeManager = data_type_manager_type(
@@ -512,6 +514,7 @@ class Parser(ABC):
             special_field_name_prefix=special_field_name_prefix,
             remove_special_field_name_prefix=remove_special_field_name_prefix,
             capitalise_enum_members=capitalise_enum_members,
+            no_alias=no_alias,
         )
         self.class_name: Optional[str] = class_name
         self.wrap_string_literal: Optional[bool] = wrap_string_literal
@@ -866,10 +869,8 @@ class Parser(ABC):
                        ) != property_name:
                            continue
                        literals = discriminator_field.data_type.literals
-                        if (
-                            len(literals) == 1 and literals[0] == type_names[0]
-                            if type_names
-                            else None
+                        if len(literals) == 1 and literals[0] == (
+                            type_names[0] if type_names else None
                        ):
                            has_one_literal = True
                            if isinstance(
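Why the regrouping above matters: in the old expression the conditional applied to the whole and-chain, so an empty type_names turned the test into None rather than False. A minimal reproduction:

literals, type_names = ['Cat'], []

old = (len(literals) == 1 and literals[0] == type_names[0] if type_names else None)
new = len(literals) == 1 and literals[0] == (type_names[0] if type_names else None)

print(old, new)  # None False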
@@ -882,7 +883,8 @@ class Parser(ABC):
                                'tag', discriminator_field.represented_default
                            )
                            discriminator_field.extras['is_classvar'] = True
-                            continue
+                            # Found the discriminator field, no need to keep looking
+                            break
                        for (
                            field_data_type
                        ) in discriminator_field.data_type.all_data_types:
@@ -160,6 +160,7 @@ class GraphQLParser(Parser):
         default_field_extras: Optional[Dict[str, Any]] = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        no_alias: bool = False,
     ) -> None:
         super().__init__(
             source=source,
@@ -232,6 +233,7 @@ class GraphQLParser(Parser):
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
+            no_alias=no_alias,
         )

         self.data_model_scalar_type = data_model_scalar_type
@@ -338,7 +338,7 @@ def _get_type(type_: str, format__: Optional[str] = None) -> Types:
     if data_formats is not None:
         return data_formats

-    warn(f'format of {format__!r} not understood for {type_!r} - using default' '')
+    warn(f'format of {format__!r} not understood for {type_!r} - using default')
     return json_schema_data_formats[type_]['default']


@@ -448,6 +448,7 @@ class JsonSchemaParser(Parser):
         default_field_extras: Optional[Dict[str, Any]] = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        no_alias: bool = False,
     ) -> None:
         super().__init__(
             source=source,
@@ -520,6 +521,7 @@ class JsonSchemaParser(Parser):
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
+            no_alias=no_alias,
         )

         self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
@@ -618,6 +620,7 @@ class JsonSchemaParser(Parser):
             use_default_kwarg=self.use_default_kwarg,
             original_name=original_field_name,
             has_default=field.has_default,
+            type_has_null=field.type_has_null,
         )

     def get_data_type(self, obj: JsonSchemaObject) -> DataType:
@@ -1715,6 +1718,9 @@ class JsonSchemaParser(Parser):
     def parse_raw(self) -> None:
         for source, path_parts in self._get_context_source_path_parts():
             self.raw_obj = load_yaml(source.text)
+            if self.raw_obj is None:  # pragma: no cover
+                warn(f'{source.path} is empty. Skipping this file')
+                continue
             if self.custom_class_name_generator:
                 obj_name = self.raw_obj.get('title', 'Model')
             else:
@@ -228,6 +228,7 @@ class OpenAPIParser(JsonSchemaParser):
         default_field_extras: Optional[Dict[str, Any]] = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        no_alias: bool = False,
     ):
         super().__init__(
             source=source,
@@ -300,6 +301,7 @@ class OpenAPIParser(JsonSchemaParser):
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
+            no_alias=no_alias,
         )
         self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
             OpenAPIScope.Schemas
@@ -314,8 +316,10 @@ class OpenAPIParser(JsonSchemaParser):
         return get_model_by_path(ref_body, ref_path.split('/')[1:])

     def get_data_type(self, obj: JsonSchemaObject) -> DataType:
-        # OpenAPI doesn't allow `null` in `type` field and list of types
+        # OpenAPI 3.0 doesn't allow `null` in the `type` field and list of types
         # https://swagger.io/docs/specification/data-models/data-types/#null
+        # OpenAPI 3.1 does allow `null` in the `type` field and is equivalent to
+        # a `nullable` flag on the property itself
         if obj.nullable and self.strict_nullable and isinstance(obj.type, str):
             obj.type = [obj.type, 'null']

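A hedged illustration of the OpenAPI 3.1 style the updated comment refers to, and what the new type_has_null plumbing is for: a property whose type list contains "null" should come out optional even when it is listed as required (the fragment and expected output below are illustrative, not taken from a test in this diff).

schema_fragment = {
    'type': 'object',
    'required': ['name'],
    'properties': {'name': {'type': ['string', 'null']}},
}
# Expected generated field, roughly: name: Optional[str] = Field(...)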
@@ -504,6 +508,9 @@ class OpenAPIParser(JsonSchemaParser):
                        has_default=object_schema.has_default
                        if object_schema
                        else False,
+                        type_has_null=object_schema.type_has_null
+                        if object_schema
+                        else None,
                    )
                )

@@ -513,6 +520,7 @@ class OpenAPIParser(JsonSchemaParser):
                 fields=fields,
                 reference=reference,
                 custom_base_class=self.base_class,
+                custom_template_dir=self.custom_template_dir,
                 keyword_only=self.keyword_only,
             )
         )
@@ -198,6 +198,7 @@ class FieldNameResolver:
         special_field_name_prefix: Optional[str] = None,
         remove_special_field_name_prefix: bool = False,
         capitalise_enum_members: bool = False,
+        no_alias: bool = False,
     ):
         self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
         self.empty_field_name: str = empty_field_name or '_'
@@ -208,6 +209,7 @@ class FieldNameResolver:
         )
         self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
         self.capitalise_enum_members: bool = capitalise_enum_members
+        self.no_alias = no_alias

     @classmethod
     def _validate_field_name(cls, field_name: str) -> bool:
@@ -274,7 +276,10 @@ class FieldNameResolver:
         if field_name in self.aliases:
             return self.aliases[field_name], field_name
         valid_name = self.get_valid_name(field_name, excludes=excludes)
-        return valid_name, None if field_name == valid_name else field_name
+        return (
+            valid_name,
+            None if self.no_alias or field_name == valid_name else field_name,
+        )


 class PydanticFieldNameResolver(FieldNameResolver):
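A standalone reproduction of the aliasing decision the resolver change above implements (not the library's API, just the same logic):

def resolve(field_name: str, valid_name: str, no_alias: bool):
    # An alias is emitted only when the name actually changed and aliasing is enabled.
    return valid_name, None if no_alias or field_name == valid_name else field_name

print(resolve('userId', 'user_id', no_alias=False))  # ('user_id', 'userId')
print(resolve('userId', 'user_id', no_alias=True))   # ('user_id', None)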
@@ -354,6 +359,7 @@ class ModelResolver:
         special_field_name_prefix: Optional[str] = None,
         remove_special_field_name_prefix: bool = False,
         capitalise_enum_members: bool = False,
+        no_alias: bool = False,
     ) -> None:
         self.references: Dict[str, Reference] = {}
         self._current_root: Sequence[str] = []
@@ -383,6 +389,7 @@ class ModelResolver:
                 capitalise_enum_members=capitalise_enum_members
                 if k == ModelType.ENUM
                 else False,
+                no_alias=no_alias,
             )
             for k, v in merged_field_name_resolver_classes.items()
         }
@@ -466,7 +473,7 @@ class ModelResolver:
         else:
             joined_path = self.join_path(path)
         if joined_path == '#':
-            return f"{'/'.join(self.current_root)}#"
+            return f'{"/".join(self.current_root)}#'
         if (
             self.current_base_path
             and not self.base_url
@@ -491,7 +498,7 @@ class ModelResolver:

         delimiter = joined_path.index('#')
         file_path = ''.join(joined_path[:delimiter])
-        ref = f"{''.join(joined_path[:delimiter])}#{''.join(joined_path[delimiter + 1:])}"
+        ref = f'{"".join(joined_path[:delimiter])}#{"".join(joined_path[delimiter + 1 :])}'
         if self.root_id_base_path and not (
             is_url(joined_path) or Path(self._base_path, file_path).is_file()
         ):
@@ -566,11 +573,13 @@ class ModelResolver:
         split_ref = ref.rsplit('/', 1)
         if len(split_ref) == 1:
             original_name = Path(
-                split_ref[0][:-1] if self.is_external_root_ref(path) else split_ref[0]
+                split_ref[0].rstrip('#')
+                if self.is_external_root_ref(path)
+                else split_ref[0]
             ).stem
         else:
             original_name = (
-                Path(split_ref[1][:-1]).stem
+                Path(split_ref[1].rstrip('#')).stem
                 if self.is_external_root_ref(path)
                 else split_ref[1]
             )
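A small illustration of why rstrip('#') is the safer spelling in the hunk above (values are made up): the old [:-1] slice always dropped the final character, whereas rstrip only removes a trailing '#' when one is present.

print('definitions.json#'[:-1])         # definitions.json
print('definitions.json#'.rstrip('#'))  # definitions.json
print('definitions.json'[:-1])          # definitions.jso  <- the risky case
print('definitions.json'.rstrip('#'))   # definitions.json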
@@ -362,22 +362,21 @@ class DataType(_BaseModel):

     @property
     def imports(self) -> Iterator[Import]:
+        # Add base import if exists
         if self.import_:
             yield self.import_
+
+        # Define required imports based on type features and conditions
         imports: Tuple[Tuple[bool, Import], ...] = (
             (self.is_optional and not self.use_union_operator, IMPORT_OPTIONAL),
             (len(self.data_types) > 1 and not self.use_union_operator, IMPORT_UNION),
-        )
-        if any(self.literals):
-            import_literal = (
+            (
+                bool(self.literals),
                 IMPORT_LITERAL
                 if self.python_version.has_literal_type
-                else IMPORT_LITERAL_BACKPORT
-            )
-            imports = (
-                *imports,
-                (any(self.literals), import_literal),
-            )
+                else IMPORT_LITERAL_BACKPORT,
+            ),
+        )

         if self.use_generic_container:
             if self.use_standard_collections:
@@ -401,10 +400,13 @@ class DataType(_BaseModel):
                 (self.is_set, IMPORT_SET),
                 (self.is_dict, IMPORT_DICT),
             )
+
+        # Yield imports based on conditions
         for field, import_ in imports:
             if field and import_ != self.import_:
                 yield import_

+        # Propagate imports from any dict_key type
         if self.dict_key:
             yield from self.dict_key.imports

@@ -463,7 +465,7 @@ class DataType(_BaseModel):
         elif len(self.data_types) == 1:
             type_ = self.data_types[0].type_hint
         elif self.literals:
-            type_ = f"{LITERAL}[{', '.join(repr(literal) for literal in self.literals)}]"
+            type_ = f'{LITERAL}[{", ".join(repr(literal) for literal in self.literals)}]'
         else:
             if self.reference:
                 type_ = self.reference.short_name
@@ -1 +1 @@
-version: str = '0.26.3'
+version: str = '0.26.5'
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamodel-code-generator
-Version: 0.26.3
+Version: 0.26.5
 Summary: Datamodel Code Generator
 Home-page: https://github.com/koxudaxi/datamodel-code-generator
 License: MIT
@@ -458,6 +458,8 @@ Field customization:
   --field-include-all-keys
                         Add all keys to field parameters
   --force-optional      Force optional for required fields
+  --no-alias            Do not add a field alias. E.g., if --snake-case-field is used along
+                        with a base class, which has an alias_generator
   --original-field-name-delimiter ORIGINAL_FIELD_NAME_DELIMITER
                         Set delimiter to convert to snake case. This option only can be used
                         with --snake-case-field (default: `_` )
@@ -478,7 +480,7 @@ Field customization:
                         Use schema description to populate field docstring

 Model customization:
-  --allow-extra-fields  Allow to pass extra fields, if this flag is not passed, extra fields
+  --allow-extra-fields  Allow passing extra fields, if this flag is not passed, extra fields
                         are forbidden.
   --allow-population-by-field-name
                         Allow population by field name
@@ -499,11 +501,11 @@ Model customization:
                         dataclass(kw_only=True)).
   --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
                         Choose Datetime class between AwareDatetime, NaiveDatetime or
-                        datetime. Each output model has its default mapping, and only
-                        pydantic, dataclass, and msgspec support this override"
+                        datetime. Each output model has its default mapping (for example
+                        pydantic: datetime, dataclass: str, ...)
   --reuse-model         Reuse models on the field when a module has the model with the same
                         content
-  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
+  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12,3.13}
                         target python version (default: 3.8)
   --treat-dot-as-module
                         treat dotted module names as modules
@@ -1,15 +1,15 @@
-datamodel_code_generator/__init__.py,sha256=iNtaR-iP5McYCDelHc_67DiuKlx-EU7j9rLbCJnYHII,18757
-datamodel_code_generator/__main__.py,sha256=oWeH6qccmFP5DJJWqCGmtLCXCExizYbro6hUb6Q4uGk,21085
-datamodel_code_generator/arguments.py,sha256=Q3LxZut72yqazXGjEG4CN1w4WFvpmldwOrgRhi_gL2o,16070
-datamodel_code_generator/format.py,sha256=kA2rESd51uCDGAzTCz4wX24WitU7X1Zi68v4a_59wmc,8884
+datamodel_code_generator/__init__.py,sha256=OrU_lId2PgS_G7Mhn8ih2AmgGc-CXPMtWDgqONlCzfA,19937
+datamodel_code_generator/__main__.py,sha256=QA6GA3USdZRjv47HeFhznM2L3rXiu3-PN3biVt25678,21835
+datamodel_code_generator/arguments.py,sha256=jf5LnhDl6LnCqRs05iAzvnUwt3bFnfUA43PIbv1xhks,16306
+datamodel_code_generator/format.py,sha256=M2lag7AeB4eIHaTORu1A_RzMdIflINbypoeqsEYEEGY,8904
 datamodel_code_generator/http.py,sha256=CwLVnXO4_W_fWKJsHnJp6Q_3GuF3qjCjeAe48Ihawrs,714
-datamodel_code_generator/imports.py,sha256=RagA3hne8OtrbZwDA7TaORQll6tKIzket-A6ShOWf8s,5728
-datamodel_code_generator/model/__init__.py,sha256=RMSfRzRLsMd264ck0go02DnwXDcZVHylIP3qXjyYXUM,3179
-datamodel_code_generator/model/base.py,sha256=KMrVsxoQEeKhA5akGLxM-nUzHINM0GTt2t6HTF9SrKQ,14180
+datamodel_code_generator/imports.py,sha256=utqG4OCL_z92-nY6qpp_ctZbFvRtrcw2NH-mbT3HeLk,5728
+datamodel_code_generator/model/__init__.py,sha256=PywJfSVTqeTh74jv0uLRIs1dcVrrO2OXPRoP39udqUM,3514
+datamodel_code_generator/model/base.py,sha256=NaZJFLzVngLjGbAFn65-Y0lDOQSk_bwxTJUJvDKmoQA,14695
 datamodel_code_generator/model/dataclass.py,sha256=Ebn48PRvCOCcyKhxYxgYBbRcpIvXko-VomZ6N8gKrLA,5871
-datamodel_code_generator/model/enum.py,sha256=HTFMCNoHmJRHUoNqQCat3kLbtdmSVX666BubN3bcF1I,3413
+datamodel_code_generator/model/enum.py,sha256=ftQCZ73JBNiLgBzFl2WzbBTrshlnYXvyapK2WxK0xxE,3835
 datamodel_code_generator/model/imports.py,sha256=9-JLfcilbRz9LI4Q9_YAVpRdIusULBaLsMhHE_6j0-w,784
-datamodel_code_generator/model/msgspec.py,sha256=1izuaoIjNjATCYaT8oklH23Hlndk-woTX2eudPAF_XA,11476
+datamodel_code_generator/model/msgspec.py,sha256=TevwsJDtgEzlpd7TvIpcMZ1HGw6gwLkm6yR86b_w8fY,11514
 datamodel_code_generator/model/pydantic/__init__.py,sha256=AYMjDCtnV4vweYqe1asTRCYdOo8IGLBhd8pEdxyY8ok,1372
 datamodel_code_generator/model/pydantic/base_model.py,sha256=sTxrFItp7wpL0OxBYOGLyyjWWrzGDuV_pFeHPsSQ4Gs,12120
 datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=XOeJqzUEAYE21C3hPAnRIz9iDWIjZvUOWDc9MCrpdvw,299
@@ -43,18 +43,18 @@ datamodel_code_generator/model/typed_dict.py,sha256=W1r3NRy8uFkYe3YVnjL9PAGZdGyo
 datamodel_code_generator/model/types.py,sha256=T3Xxa7MToHXIH1zXHT1P6PzE49aah0IhnwkCbYVc79c,3157
 datamodel_code_generator/model/union.py,sha256=4LT5E46c2OH1dvQmvRWM7mX1Pziu_oWBHwXsGsylUbY,1791
 datamodel_code_generator/parser/__init__.py,sha256=zHbw6RPlJC0SAQjb-XyVlyZhcOu5PfYgPidy6jlUM8M,793
-datamodel_code_generator/parser/base.py,sha256=15yBC7_szuRBrhuSK5s8_XjjrzX4_Ty0k_CEkaOAALg,61858
-datamodel_code_generator/parser/graphql.py,sha256=QpLHwyCw5F51w6_Iui_cL1cAblQ74ZrRx1vfzYh0Uo0,22354
-datamodel_code_generator/parser/jsonschema.py,sha256=L_QxO4lG6lzHbcVl33ImEQP6mNlZWUH3ootPMrtmHQI,70621
-datamodel_code_generator/parser/openapi.py,sha256=ENpjBKcqZyjcxgVfLKMbI5sDWfQufY1JmPXzir_IwCE,26255
+datamodel_code_generator/parser/base.py,sha256=MBwA77ht66rSoQtuXDVRaOQ_LGCaXo9InqJJvpfruP8,61987
+datamodel_code_generator/parser/graphql.py,sha256=MwaLVmEcYky8CRueOYxB2n5MKU4FOmASy9stKIUGBVo,22417
+datamodel_code_generator/parser/jsonschema.py,sha256=sjqpGMXq-Nym4cF-ML6utBiZMYYS0-4QPTGPUBXS1u8,70878
+datamodel_code_generator/parser/openapi.py,sha256=UMvLzvlNcNxik9ttPvjBl3PGUBhGhYedKcbGnrSNaIQ,26666
 datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamodel_code_generator/pydantic_patch.py,sha256=5UKotLqor9HixzXk463CUevyCcH2zmQljMAPRyTOxiM,570
-datamodel_code_generator/reference.py,sha256=EoalUNUP5us31bfMrHDa1iiKqxL8gHrpZKY_IxgZrOg,26347
-datamodel_code_generator/types.py,sha256=Mtp-owOLiNlbbXU9tTFJH0MBFP3UYyFLL4jENdDD6es,19818
+datamodel_code_generator/reference.py,sha256=Vaqni43Z1176rL5L83U6OvUM_btRwafQXW-7aB-LuoU,26580
+datamodel_code_generator/types.py,sha256=vTa7-I_NECFkpaLI0-azHErdmr1txdLDdR7xKx5yhYo,19896
 datamodel_code_generator/util.py,sha256=OmYaVP0z0fGPnvmZQx63qmdMwFnMAIVHfTwSkExpoKk,2829
-datamodel_code_generator/version.py,sha256=ryqv7c3qpo-SIhTIAOIk0ZmjSS4hwlZq1FoIiWCePAE,24
-datamodel_code_generator-0.26.3.dist-info/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
-datamodel_code_generator-0.26.3.dist-info/METADATA,sha256=aKI2vaAIWQFJv8hoLImiH_CYCeqzAd3qQb7XMD07etI,24803
-datamodel_code_generator-0.26.3.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-datamodel_code_generator-0.26.3.dist-info/entry_points.txt,sha256=bykbUWqOCiKfxJPGe8jpNqTqD1NG7uyRmozdnwzu7rk,76
-datamodel_code_generator-0.26.3.dist-info/RECORD,,
+datamodel_code_generator/version.py,sha256=TNxw8_M4v5PH0eredAbnMHbocjoh_Mh-PTaI4X2JiFY,24
+datamodel_code_generator-0.26.5.dist-info/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
+datamodel_code_generator-0.26.5.dist-info/METADATA,sha256=1j7kIzH6carcuQiFE5nMo45gOd8G4VNaL5p_hsu9ZS8,24960
+datamodel_code_generator-0.26.5.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+datamodel_code_generator-0.26.5.dist-info/entry_points.txt,sha256=bykbUWqOCiKfxJPGe8jpNqTqD1NG7uyRmozdnwzu7rk,76
+datamodel_code_generator-0.26.5.dist-info/RECORD,,