datamodel-code-generator 0.30.1__py3-none-any.whl → 0.31.0__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.

--- a/datamodel_code_generator/__init__.py
+++ b/datamodel_code_generator/__init__.py
@@ -233,6 +233,7 @@ def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
     enable_version_header: bool = False,
     allow_population_by_field_name: bool = False,
     allow_extra_fields: bool = False,
+    extra_fields: str | None = None,
     apply_default_values_for_required_fields: bool = False,
     force_optional_for_required_fields: bool = False,
     class_name: str | None = None,
@@ -285,6 +286,7 @@ def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
     union_mode: UnionMode | None = None,
     output_datetime_class: DatetimeClassType | None = None,
     keyword_only: bool = False,
+    frozen_dataclasses: bool = False,
     no_alias: bool = False,
     formatters: list[Formatter] = DEFAULT_FORMATTERS,
     parent_scoped_naming: bool = False,
@@ -427,6 +429,7 @@ def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
         aliases=aliases,
         allow_population_by_field_name=allow_population_by_field_name,
         allow_extra_fields=allow_extra_fields,
+        extra_fields=extra_fields,
         apply_default_values_for_required_fields=apply_default_values_for_required_fields,
         force_optional_for_required_fields=force_optional_for_required_fields,
         class_name=class_name,
@@ -481,6 +484,7 @@ def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
         default_field_extras=default_field_extras,
         target_datetime_class=output_datetime_class,
         keyword_only=keyword_only,
+        frozen_dataclasses=frozen_dataclasses,
         no_alias=no_alias,
         formatters=formatters,
         encoding=encoding,
--- a/datamodel_code_generator/__main__.py
+++ b/datamodel_code_generator/__main__.py
@@ -261,6 +261,7 @@ class Config(BaseModel):
     enable_version_header: bool = False
     allow_population_by_field_name: bool = False
     allow_extra_fields: bool = False
+    extra_fields: Optional[str] = None  # noqa: UP045
     use_default: bool = False
     force_optional: bool = False
     class_name: Optional[str] = None  # noqa: UP045
@@ -312,6 +313,7 @@ class Config(BaseModel):
     union_mode: Optional[UnionMode] = None  # noqa: UP045
     output_datetime_class: Optional[DatetimeClassType] = None  # noqa: UP045
     keyword_only: bool = False
+    frozen_dataclasses: bool = False
     no_alias: bool = False
     formatters: list[Formatter] = DEFAULT_FORMATTERS
     parent_scoped_naming: bool = False
@@ -474,6 +476,7 @@ def main(args: Sequence[str] | None = None) -> Exit:  # noqa: PLR0911, PLR0912,
         enable_version_header=config.enable_version_header,
         allow_population_by_field_name=config.allow_population_by_field_name,
         allow_extra_fields=config.allow_extra_fields,
+        extra_fields=config.extra_fields,
         apply_default_values_for_required_fields=config.use_default,
         force_optional_for_required_fields=config.force_optional,
         class_name=config.class_name,
@@ -524,6 +527,7 @@ def main(args: Sequence[str] | None = None) -> Exit:  # noqa: PLR0911, PLR0912,
         union_mode=config.union_mode,
         output_datetime_class=config.output_datetime_class,
         keyword_only=config.keyword_only,
+        frozen_dataclasses=config.frozen_dataclasses,
         no_alias=config.no_alias,
         formatters=config.formatters,
         parent_scoped_naming=config.parent_scoped_naming,
--- a/datamodel_code_generator/arguments.py
+++ b/datamodel_code_generator/arguments.py
@@ -43,6 +43,7 @@ base_options = arg_parser.add_argument_group("Options")
 typing_options = arg_parser.add_argument_group("Typing customization")
 field_options = arg_parser.add_argument_group("Field customization")
 model_options = arg_parser.add_argument_group("Model customization")
+extra_fields_model_options = model_options.add_mutually_exclusive_group()
 template_options = arg_parser.add_argument_group("Template customization")
 openapi_options = arg_parser.add_argument_group("OpenAPI-only options")
 general_options = arg_parser.add_argument_group("General options")
@@ -94,9 +95,10 @@ base_options.add_argument(
 # ======================================================================================
 # Customization options for generated models
 # ======================================================================================
-model_options.add_argument(
+extra_fields_model_options.add_argument(
     "--allow-extra-fields",
-    help="Allow passing extra fields, if this flag is not passed, extra fields are forbidden.",
+    help="Deprecated: Allow passing extra fields, if this flag is not passed, extra fields are forbidden. "
+    "This flag is deprecated. Use `--extra-fields=allow` instead.",
     action="store_true",
     default=None,
 )
@@ -141,6 +143,12 @@ model_options.add_argument(
     action="store_true",
     default=None,
 )
+extra_fields_model_options.add_argument(
+    "--extra-fields",
+    help="Set the generated models to allow, forbid, or ignore extra fields.",
+    choices=["allow", "ignore", "forbid"],
+    default=None,
+)
 model_options.add_argument(
     "--keep-model-order",
     help="Keep generated models' order",
@@ -153,6 +161,12 @@ model_options.add_argument(
     action="store_true",
     default=None,
 )
+model_options.add_argument(
+    "--frozen-dataclasses",
+    help="Generate frozen dataclasses (dataclass(frozen=True)). Only applies to dataclass output.",
+    action="store_true",
+    default=None,
+)
 model_options.add_argument(
     "--reuse-model",
     help="Reuse models on the field when a module has the model with the same content",
--- a/datamodel_code_generator/format.py
+++ b/datamodel_code_generator/format.py
@@ -222,7 +222,7 @@ class CodeFormatter:
         )
 
     def apply_ruff_lint(self, code: str) -> str:
-        result = subprocess.run(  # noqa: S603
+        result = subprocess.run(
             ("ruff", "check", "--fix", "-"),
             input=code.encode(self.encoding),
             capture_output=True,
@@ -231,7 +231,7 @@ class CodeFormatter:
         return result.stdout.decode(self.encoding)
 
     def apply_ruff_formatter(self, code: str) -> str:
-        result = subprocess.run(  # noqa: S603
+        result = subprocess.run(
             ("ruff", "format", "-"),
             input=code.encode(self.encoding),
             capture_output=True,
--- a/datamodel_code_generator/model/base.py
+++ b/datamodel_code_generator/model/base.py
@@ -275,9 +275,11 @@ class DataModel(TemplateBase, Nullable, ABC):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        frozen: bool = False,
         treat_dot_as_module: bool = False,
     ) -> None:
         self.keyword_only = keyword_only
+        self.frozen = frozen
         if not self.TEMPLATE_FILE_PATH:
             msg = "TEMPLATE_FILE_PATH is undefined"
             raise Exception(msg)  # noqa: TRY002
@@ -434,5 +436,6 @@ class DataModel(TemplateBase, Nullable, ABC):
             methods=self.methods,
             description=self.description,
             keyword_only=self.keyword_only,
+            frozen=self.frozen,
             **self.extra_template_data,
         )
--- a/datamodel_code_generator/model/dataclass.py
+++ b/datamodel_code_generator/model/dataclass.py
@@ -53,6 +53,7 @@ class DataClass(DataModel):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        frozen: bool = False,
         treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
@@ -69,6 +70,7 @@ class DataClass(DataModel):
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            frozen=frozen,
             treat_dot_as_module=treat_dot_as_module,
         )
 
--- a/datamodel_code_generator/model/pydantic/__init__.py
+++ b/datamodel_code_generator/model/pydantic/__init__.py
@@ -22,6 +22,7 @@ class Config(_BaseModel):
     title: Optional[str] = None  # noqa: UP045
     allow_population_by_field_name: Optional[bool] = None  # noqa: UP045
    allow_extra_fields: Optional[bool] = None  # noqa: UP045
+    extra_fields: Optional[str] = None  # noqa: UP045
     allow_mutation: Optional[bool] = None  # noqa: UP045
     arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
     orm_mode: Optional[bool] = None  # noqa: UP045
--- a/datamodel_code_generator/model/pydantic/base_model.py
+++ b/datamodel_code_generator/model/pydantic/base_model.py
@@ -286,12 +286,20 @@ class BaseModel(BaseModelBase):
 
         additional_properties = self.extra_template_data.get("additionalProperties")
         allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
-        if additional_properties is not None or allow_extra_fields:
-            config_parameters["extra"] = (
-                "Extra.allow" if additional_properties or allow_extra_fields else "Extra.forbid"
-            )
+        extra_fields = self.extra_template_data.get("extra_fields")
+
+        if allow_extra_fields or extra_fields or additional_properties is not None:
             self._additional_imports.append(IMPORT_EXTRA)
 
+        if allow_extra_fields:
+            config_parameters["extra"] = "Extra.allow"
+        elif extra_fields:
+            config_parameters["extra"] = f"Extra.{extra_fields}"
+        elif additional_properties is True:
+            config_parameters["extra"] = "Extra.allow"
+        elif additional_properties is False:
+            config_parameters["extra"] = "Extra.forbid"
+
         for config_attribute in "allow_population_by_field_name", "allow_mutation":
             if config_attribute in self.extra_template_data:
                 config_parameters[config_attribute] = self.extra_template_data[config_attribute]
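Under the branching above, the pydantic v1 renderer maps the selected mode onto `Extra` members. A model generated with `--extra-fields=ignore` would be expected to carry a config roughly like the following sketch (class and field names invented):

from pydantic import BaseModel, Extra


class Pet(BaseModel):
    class Config:
        extra = Extra.ignore  # from --extra-fields=ignore; "allow" and "forbid" map the same way

    name: str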
--- a/datamodel_code_generator/model/pydantic_v2/__init__.py
+++ b/datamodel_code_generator/model/pydantic_v2/__init__.py
@@ -21,12 +21,14 @@ class ConfigDict(_BaseModel):
     title: Optional[str] = None  # noqa: UP045
     populate_by_name: Optional[bool] = None  # noqa: UP045
     allow_extra_fields: Optional[bool] = None  # noqa: UP045
+    extra_fields: Optional[str] = None  # noqa: UP045
     from_attributes: Optional[bool] = None  # noqa: UP045
     frozen: Optional[bool] = None  # noqa: UP045
     arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
     protected_namespaces: Optional[tuple[str, ...]] = None  # noqa: UP045
     regex_engine: Optional[str] = None  # noqa: UP045
     use_enum_values: Optional[bool] = None  # noqa: UP045
+    coerce_numbers_to_str: Optional[bool] = None  # noqa: UP045
 
 
 __all__ = [
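Both new keys mirror settings that exist in pydantic v2's own `ConfigDict`, so generated v2 models can carry them through. Independent of the generator, plain pydantic behaves roughly as below; the model is an invented illustration:

from pydantic import BaseModel, ConfigDict


class Item(BaseModel):
    model_config = ConfigDict(extra="allow", coerce_numbers_to_str=True)

    name: str


item = Item(name=123, color="red")
print(item.name)         # "123" — the int was coerced to str
print(item.model_extra)  # {'color': 'red'} — unknown keys are kept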
--- a/datamodel_code_generator/model/template/dataclass.jinja2
+++ b/datamodel_code_generator/model/template/dataclass.jinja2
@@ -1,7 +1,14 @@
 {% for decorator in decorators -%}
 {{ decorator }}
 {% endfor -%}
-@dataclass{%- if keyword_only -%}(kw_only=True){%- endif %}
+@dataclass
+{%- if keyword_only or frozen -%}
+(
+{%- if keyword_only -%}kw_only=True{%- endif -%}
+{%- if keyword_only and frozen -%}, {% endif -%}
+{%- if frozen -%}frozen=True{%- endif -%}
+)
+{%- endif %}
 {%- if base_class %}
 class {{ class_name }}({{ base_class }}):
 {%- else %}
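With `--keyword-only` and `--frozen-dataclasses` both set, the template above renders the decorator with both arguments. A rough sketch of the resulting generated model (class and field names invented; `kw_only` requires Python 3.10+):

from dataclasses import dataclass


@dataclass(kw_only=True, frozen=True)
class Pet:
    name: str
    age: int


pet = Pet(name="Tama", age=3)
# Mutating a field, e.g. pet.age = 4, now raises dataclasses.FrozenInstanceError.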
--- a/datamodel_code_generator/parser/base.py
+++ b/datamodel_code_generator/parser/base.py
@@ -326,6 +326,7 @@ class Parser(ABC):
         allow_population_by_field_name: bool = False,
         apply_default_values_for_required_fields: bool = False,
         allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
         force_optional_for_required_fields: bool = False,
         class_name: str | None = None,
         use_standard_collections: bool = False,
@@ -377,11 +378,13 @@ class Parser(ABC):
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType | None = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
         no_alias: bool = False,
         formatters: list[Formatter] = DEFAULT_FORMATTERS,
         parent_scoped_naming: bool = False,
     ) -> None:
         self.keyword_only = keyword_only
+        self.frozen_dataclasses = frozen_dataclasses
         self.data_type_manager: DataTypeManager = data_type_manager_type(
             python_version=target_python_version,
             use_standard_collections=use_standard_collections,
@@ -450,6 +453,9 @@ class Parser(ABC):
         if allow_extra_fields:
             self.extra_template_data[ALL_MODEL]["allow_extra_fields"] = True
 
+        if extra_fields:
+            self.extra_template_data[ALL_MODEL]["extra_fields"] = extra_fields
+
         if enable_faux_immutability:
             self.extra_template_data[ALL_MODEL]["allow_mutation"] = False
 
@@ -770,19 +776,31 @@ class Parser(ABC):
                     continue
                 type_names.append(name)
 
-            # Check the main discriminator model path
-            if mapping:
-                check_paths(discriminator_model, mapping)  # pyright: ignore[reportArgumentType]
+            # First try to get the discriminator value from the const field
+            for discriminator_field in discriminator_model.fields:
+                if field_name not in {discriminator_field.original_name, discriminator_field.name}:
+                    continue
+                if discriminator_field.extras.get("const"):
+                    type_names = [discriminator_field.extras["const"]]
+                    break
+
+            # If no const value found, try to get it from the mapping
+            if not type_names:
+                # Check the main discriminator model path
+                if mapping:
+                    check_paths(discriminator_model, mapping)  # pyright: ignore[reportArgumentType]
+
+                    # Check the base_classes if they exist
+                    if len(type_names) == 0:
+                        for base_class in discriminator_model.base_classes:
+                            check_paths(base_class.reference, mapping)  # pyright: ignore[reportArgumentType]
+                else:
+                    type_names = [discriminator_model.path.split("/")[-1]]
 
-                # Check the base_classes if they exist
-                if len(type_names) == 0:
-                    for base_class in discriminator_model.base_classes:
-                        check_paths(base_class.reference, mapping)  # pyright: ignore[reportArgumentType]
-            else:
-                type_names = [discriminator_model.path.split("/")[-1]]
             if not type_names:  # pragma: no cover
                 msg = f"Discriminator type is not found. {data_type.reference.path}"
                 raise RuntimeError(msg)
+
             has_one_literal = False
             for discriminator_field in discriminator_model.fields:
                 if field_name not in {discriminator_field.original_name, discriminator_field.name}:
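The reworked block above prefers a `const` value pinned on the discriminator property and only falls back to the discriminator mapping or the model path. An illustrative schema shape this targets, written as a Python dict (all names invented):

# Each oneOf variant pins the discriminator property with `const`, so the parser
# can take the Literal value straight from the field instead of the mapping.
schema = {
    "oneOf": [
        {"$ref": "#/definitions/Cat"},
        {"$ref": "#/definitions/Dog"},
    ],
    "discriminator": {"propertyName": "pet_type"},
    "definitions": {
        "Cat": {
            "type": "object",
            "properties": {"pet_type": {"const": "cat"}, "meows": {"type": "boolean"}},
            "required": ["pet_type"],
        },
        "Dog": {
            "type": "object",
            "properties": {"pet_type": {"const": "dog"}, "barks": {"type": "boolean"}},
            "required": ["pet_type"],
        },
    },
}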
@@ -1121,7 +1139,7 @@ class Parser(ABC):
         if self.data_model_type != pydantic_model_v2.BaseModel:
             return
         for model in models:
-            if model.base_class == "Enum":
+            if "Enum" in model.base_class:
                 continue
 
             for field in model.fields:
@@ -1202,7 +1220,10 @@ class Parser(ABC):
     ) -> None:
         for model in models:
             for model_field in model.fields:
-                if model_field.data_type.type in all_model_field_names:
+                if (
+                    model_field.data_type.type in all_model_field_names
+                    and model_field.data_type.type == model_field.name
+                ):
                     alias = model_field.data_type.type + "_aliased"
                     model_field.data_type.type = alias
                     if model_field.data_type.import_:  # pragma: no cover
--- a/datamodel_code_generator/parser/graphql.py
+++ b/datamodel_code_generator/parser/graphql.py
@@ -17,6 +17,7 @@ from datamodel_code_generator import (
 )
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.model.dataclass import DataClass
 from datamodel_code_generator.model.enum import Enum
 from datamodel_code_generator.model.scalar import DataTypeScalar
 from datamodel_code_generator.model.union import DataTypeUnion
@@ -101,6 +102,7 @@ class GraphQLParser(Parser):
         allow_population_by_field_name: bool = False,
         apply_default_values_for_required_fields: bool = False,
         allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
         force_optional_for_required_fields: bool = False,
         class_name: str | None = None,
         use_standard_collections: bool = False,
@@ -152,6 +154,7 @@ class GraphQLParser(Parser):
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
         no_alias: bool = False,
         formatters: list[Formatter] = DEFAULT_FORMATTERS,
         parent_scoped_naming: bool = False,
@@ -175,6 +178,7 @@ class GraphQLParser(Parser):
             aliases=aliases,
             allow_population_by_field_name=allow_population_by_field_name,
             allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
             apply_default_values_for_required_fields=apply_default_values_for_required_fields,
             force_optional_for_required_fields=force_optional_for_required_fields,
             class_name=class_name,
@@ -227,6 +231,7 @@ class GraphQLParser(Parser):
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
             no_alias=no_alias,
             formatters=formatters,
             parent_scoped_naming=parent_scoped_naming,
@@ -283,6 +288,13 @@ class GraphQLParser(Parser):
 
             self.support_graphql_types[resolved_type].append(type_)
 
+    def _create_data_model(self, model_type: type[DataModel] | None = None, **kwargs: Any) -> DataModel:
+        """Create data model instance with conditional frozen parameter for DataClass."""
+        data_model_class = model_type or self.data_model_type
+        if issubclass(data_model_class, DataClass):
+            kwargs["frozen"] = self.frozen_dataclasses
+        return data_model_class(**kwargs)
+
     def _typename_field(self, name: str) -> DataModelFieldBase:
         return self.data_model_field_type(
             name="typename__",
@@ -445,7 +457,7 @@ class GraphQLParser(Parser):
         if hasattr(obj, "interfaces"):  # pragma: no cover
            base_classes = [self.references[i.name] for i in obj.interfaces]  # pyright: ignore[reportAttributeAccessIssue]
 
-        data_model_type = self.data_model_type(
+        data_model_type = self._create_data_model(
            reference=self.references[obj.name],
            fields=fields,
            base_classes=base_classes,
--- a/datamodel_code_generator/parser/jsonschema.py
+++ b/datamodel_code_generator/parser/jsonschema.py
@@ -6,7 +6,7 @@ from contextlib import contextmanager
 from functools import cached_property, lru_cache
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union
-from urllib.parse import ParseResult
+from urllib.parse import ParseResult, unquote
 from warnings import warn
 
 from pydantic import (
@@ -23,6 +23,7 @@ from datamodel_code_generator.format import DEFAULT_FORMATTERS, Formatter, Pytho
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model import pydantic as pydantic_model
 from datamodel_code_generator.model.base import UNDEFINED, get_module_name
+from datamodel_code_generator.model.dataclass import DataClass
 from datamodel_code_generator.model.enum import Enum
 from datamodel_code_generator.parser import DefaultPutDict, LiteralType
 from datamodel_code_generator.parser.base import (
@@ -58,16 +59,26 @@ if TYPE_CHECKING:
     from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
 
 
+def unescape_json_pointer_segment(segment: str) -> str:
+    # Unescape ~1, ~0, and percent-encoding
+    return unquote(segment.replace("~1", "/").replace("~0", "~"))
+
+
 def get_model_by_path(schema: dict[str, Any] | list[Any], keys: list[str] | list[int]) -> dict[Any, Any]:
     model: dict[Any, Any] | list[Any]
     if not keys:
         model = schema
-    elif len(keys) == 1:
-        model = schema.get(str(keys[0]), {}) if isinstance(schema, dict) else schema[int(keys[0])]
-    elif isinstance(schema, dict):
-        model = get_model_by_path(schema[str(keys[0])], keys[1:])
     else:
-        model = get_model_by_path(schema[int(keys[0])], keys[1:])
+        # Unescape the key if it's a string (JSON pointer segment)
+        key = keys[0]
+        if isinstance(key, str):
+            key = unescape_json_pointer_segment(key)
+        if len(keys) == 1:
+            model = schema.get(str(key), {}) if isinstance(schema, dict) else schema[int(key)]
+        elif isinstance(schema, dict):
+            model = get_model_by_path(schema[str(key)], keys[1:])
+        else:
+            model = get_model_by_path(schema[int(key)], keys[1:])
     if isinstance(model, dict):
         return model
     msg = f"Does not support json pointer to array. schema={schema}, key={keys}"
@@ -370,6 +381,7 @@ class JsonSchemaParser(Parser):
         allow_population_by_field_name: bool = False,
         apply_default_values_for_required_fields: bool = False,
         allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
         force_optional_for_required_fields: bool = False,
         class_name: str | None = None,
         use_standard_collections: bool = False,
@@ -421,6 +433,7 @@ class JsonSchemaParser(Parser):
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
         no_alias: bool = False,
         formatters: list[Formatter] = DEFAULT_FORMATTERS,
         parent_scoped_naming: bool = False,
@@ -444,6 +457,7 @@ class JsonSchemaParser(Parser):
             aliases=aliases,
             allow_population_by_field_name=allow_population_by_field_name,
             allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
             apply_default_values_for_required_fields=apply_default_values_for_required_fields,
             force_optional_for_required_fields=force_optional_for_required_fields,
             class_name=class_name,
@@ -496,6 +510,7 @@ class JsonSchemaParser(Parser):
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
             no_alias=no_alias,
             formatters=formatters,
             parent_scoped_naming=parent_scoped_naming,
@@ -627,7 +642,7 @@ class JsonSchemaParser(Parser):
                     result[key] = self._deep_merge(result[key], value)
                     continue
                 if isinstance(result[key], list) and isinstance(value, list):
-                    result[key] += value
+                    result[key] = result[key] + value  # noqa: PLR6104
                     continue
             result[key] = value
         return result
@@ -686,6 +701,13 @@ class JsonSchemaParser(Parser):
     def parse_one_of(self, name: str, obj: JsonSchemaObject, path: list[str]) -> list[DataType]:
         return self.parse_combined_schema(name, obj, path, "oneOf")
 
+    def _create_data_model(self, model_type: type[DataModel] | None = None, **kwargs: Any) -> DataModel:
+        """Create data model instance with conditional frozen parameter for DataClass."""
+        data_model_class = model_type or self.data_model_type
+        if issubclass(data_model_class, DataClass):
+            kwargs["frozen"] = self.frozen_dataclasses
+        return data_model_class(**kwargs)
+
     def _parse_object_common_part(  # noqa: PLR0913, PLR0917
         self,
         name: str,
@@ -733,7 +755,8 @@ class JsonSchemaParser(Parser):
             name = obj.title
         reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
         self.set_additional_properties(reference.name, obj)
-        data_model_type = self.data_model_type(
+
+        data_model_type = self._create_data_model(
             reference=reference,
             fields=fields,
             base_classes=base_classes,
@@ -874,7 +897,7 @@ class JsonSchemaParser(Parser):
         exclude_field_names: set[str] = set()
         for original_field_name, field in properties.items():
             field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
-                original_field_name, exclude_field_names
+                original_field_name, excludes=exclude_field_names
             )
             modular_name = f"{module_name}.{field_name}" if module_name else field_name
 
@@ -972,7 +995,9 @@ class JsonSchemaParser(Parser):
             data_model_type_class = self.data_model_root_type
 
         self.set_additional_properties(class_name, obj)
-        data_model_type = data_model_type_class(
+
+        data_model_type = self._create_data_model(
+            model_type=data_model_type_class,
             reference=reference,
             fields=fields,
             custom_base_class=obj.custom_base_path or self.base_class,
--- a/datamodel_code_generator/parser/openapi.py
+++ b/datamodel_code_generator/parser/openapi.py
@@ -42,6 +42,7 @@ if TYPE_CHECKING:
     from pathlib import Path
     from urllib.parse import ParseResult
 
+
 RE_APPLICATION_JSON_PATTERN: Pattern[str] = re.compile(r"^application/.*json$")
 
 OPERATION_NAMES: list[str] = [
@@ -161,6 +162,7 @@ class OpenAPIParser(JsonSchemaParser):
         aliases: Mapping[str, str] | None = None,
         allow_population_by_field_name: bool = False,
         allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
         apply_default_values_for_required_fields: bool = False,
         force_optional_for_required_fields: bool = False,
         class_name: str | None = None,
@@ -214,6 +216,7 @@ class OpenAPIParser(JsonSchemaParser):
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
         no_alias: bool = False,
         formatters: list[Formatter] = DEFAULT_FORMATTERS,
         parent_scoped_naming: bool = False,
@@ -237,6 +240,7 @@ class OpenAPIParser(JsonSchemaParser):
             aliases=aliases,
             allow_population_by_field_name=allow_population_by_field_name,
             allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
             apply_default_values_for_required_fields=apply_default_values_for_required_fields,
             force_optional_for_required_fields=force_optional_for_required_fields,
             class_name=class_name,
@@ -289,6 +293,7 @@ class OpenAPIParser(JsonSchemaParser):
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
             no_alias=no_alias,
             formatters=formatters,
             parent_scoped_naming=parent_scoped_naming,
@@ -483,8 +488,10 @@ class OpenAPIParser(JsonSchemaParser):
             )
 
         if OpenAPIScope.Parameters in self.open_api_scopes and fields:
+            # Using _create_data_model from parent class JsonSchemaParser
+            # This method automatically adds frozen=True for DataClass types
             self.results.append(
-                self.data_model_type(
+                self._create_data_model(
                     fields=fields,
                     reference=reference,
                     custom_base_class=self.base_class,
--- a/datamodel_code_generator/types.py
+++ b/datamodel_code_generator/types.py
@@ -6,13 +6,27 @@ from enum import Enum, auto
 from functools import lru_cache
 from itertools import chain
 from re import Pattern
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Protocol, TypeVar, Union, runtime_checkable
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Optional,
+    Protocol,
+    TypeVar,
+    Union,
+    runtime_checkable,
+)
 
 import pydantic
 from packaging import version
 from pydantic import StrictBool, StrictInt, StrictStr, create_model
 
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.format import (
+    DatetimeClassType,
+    PythonVersion,
+    PythonVersionMin,
+)
 from datamodel_code_generator.imports import (
     IMPORT_ABC_MAPPING,
     IMPORT_ABC_SEQUENCE,
@@ -167,53 +181,57 @@ def _remove_none_from_type(type_: str, split_pattern: Pattern[str], delimiter: s
     return types
 
 
-def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str:  # noqa: PLR0911, PLR0912
+def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str:  # noqa: PLR0912
     if use_union_operator:
         if " | " not in type_:
             return type_
+        separator = "|"
+        inner_text = type_
+    else:
+        if not type_.startswith(UNION_PREFIX):
+            return type_
+        separator = ","
+        inner_text = type_[len(UNION_PREFIX) : -1]
 
-        # Process each part of the union
-        parts = UNION_OPERATOR_PATTERN.split(type_)
-        processed_parts = []
-        for part in parts:
-            if part == NONE:
-                continue
-
-            # Check if this part contains a nested union
-            processed_part = _remove_none_from_union(part, use_union_operator=True) if " | " in part else part
-            processed_parts.append(processed_part)
-
-        if not processed_parts:
-            return NONE
-
-        return UNION_OPERATOR_DELIMITER.join(processed_parts)
-    if not type_.startswith(UNION_PREFIX):
-        return type_
-
-    inner_text = type_[len(UNION_PREFIX) : -1]
     parts = []
     inner_count = 0
     current_part = ""
 
+    # With this variable we count any non-escaped round bracket, whenever we are inside a
+    # constraint string expression. Once found a part starting with `constr(`, we increment
+    # this counter for each non-escaped opening round bracket and decrement it for each
+    # non-escaped closing round bracket.
+    in_constr = 0
+
     # Parse union parts carefully to handle nested structures
     for char in inner_text:
         current_part += char
-        if char == "[":
+        if char == "[" and in_constr == 0:
             inner_count += 1
-        elif char == "]":
+        elif char == "]" and in_constr == 0:
             inner_count -= 1
-        elif char == "," and inner_count == 0:
+        elif char == "(":
+            if current_part.strip().startswith("constr(") and current_part[-2] != "\\":
+                # non-escaped opening round bracket found inside constraint string expression
+                in_constr += 1
+        elif char == ")":
+            if in_constr > 0 and current_part[-2] != "\\":
+                # non-escaped closing round bracket found inside constraint string expression
+                in_constr -= 1
+        elif char == separator and inner_count == 0 and in_constr == 0:
             part = current_part[:-1].strip()
             if part != NONE:
                 # Process nested unions recursively
-                if part.startswith(UNION_PREFIX):
+                # only UNION_PREFIX might be nested but not union_operator
+                if not use_union_operator and part.startswith(UNION_PREFIX):
                     part = _remove_none_from_union(part, use_union_operator=False)
                 parts.append(part)
             current_part = ""
 
     part = current_part.strip()
     if current_part and part != NONE:
-        if part.startswith(UNION_PREFIX):
+        # only UNION_PREFIX might be nested but not union_operator
+        if not use_union_operator and part.startswith(UNION_PREFIX):
            part = _remove_none_from_union(part, use_union_operator=False)
        parts.append(part)
 
@@ -222,6 +240,9 @@ def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str:  #
     if len(parts) == 1:
         return parts[0]
 
+    if use_union_operator:
+        return UNION_OPERATOR_DELIMITER.join(parts)
+
     return f"{UNION_PREFIX}{UNION_DELIMITER.join(parts)}]"
 
 
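The rewrite routes both the `X | Y` and `Union[...]` spellings through one character-level scanner, and the new `in_constr` counter keeps commas inside `constr(...)` from being treated as union separators. A doctest-style sketch of the intended behaviour (a private helper, assumed importable as shown; expected outputs noted in comments):

from datamodel_code_generator.types import _remove_none_from_union

# Commas inside constr(...) are no longer treated as union separators.
print(_remove_none_from_union("Union[constr(min_length=1, max_length=8), None]", use_union_operator=False))
# expected: constr(min_length=1, max_length=8)

# The union-operator spelling now goes through the same scanner.
print(_remove_none_from_union("int | None | str", use_union_operator=True))
# expected: int | str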
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datamodel-code-generator
3
- Version: 0.30.1
3
+ Version: 0.31.0
4
4
  Summary: Datamodel Code Generator
5
5
  Project-URL: Homepage, https://github.com/koxudaxi/datamodel-code-generator
6
6
  Project-URL: Source, https://github.com/koxudaxi/datamodel-code-generator
@@ -489,8 +489,9 @@ Field customization:
                         Use schema description to populate field docstring
 
 Model customization:
-  --allow-extra-fields  Allow passing extra fields, if this flag is not passed, extra fields
-                        are forbidden.
+  --allow-extra-fields  Deprecated: Allow passing extra fields, if this flag is not passed,
+                        extra fields are forbidden. This flag is deprecated. Use `--extra-
+                        fields=allow` instead.
   --allow-population-by-field-name
                         Allow population by field name
   --class-name CLASS_NAME
@@ -505,6 +506,10 @@ Model customization:
                         Enable faux immutability
   --enable-version-header
                         Enable package version on file headers
+  --extra-fields {allow,ignore,forbid}
+                        Set the generated models to allow, forbid, or ignore extra fields.
+  --frozen-dataclasses  Generate frozen dataclasses (dataclass(frozen=True)). Only applies
+                        to dataclass output.
   --keep-model-order    Keep generated models' order
   --keyword-only        Defined models as keyword only (for example
                         dataclass(kw_only=True)).
--- a/datamodel_code_generator-0.30.1.dist-info/RECORD
+++ b/datamodel_code_generator-0.31.0.dist-info/RECORD
@@ -1,17 +1,17 @@
-datamodel_code_generator/__init__.py,sha256=2h8FA8O_x1VGRVlXKJHt3jwuC34MIEnmHwHv-zGuF_I,20445
-datamodel_code_generator/__main__.py,sha256=rF1Se8R0rfqDmJ4_INBdOYlU7G4XIthPCe3ldDwtgQI,22608
-datamodel_code_generator/arguments.py,sha256=xPAnid2-dC_Hj4o1hY9fL13NMJl5wCPphTsCHZ3QeNQ,16637
-datamodel_code_generator/format.py,sha256=zvX0KH1uWwGnTYoVM4KhAuKZn5erjkH5eyi4t3leirw,8962
+datamodel_code_generator/__init__.py,sha256=H6ZnGum3Xw1u1o2e7a6gLjA09Vm2DlXo_Vm8GRrTUlY,20602
+datamodel_code_generator/__main__.py,sha256=Z9g_NGrnxV63MRuqtUUpPW48felXmVxwGBtWWkgNlj4,22803
+datamodel_code_generator/arguments.py,sha256=vGNwaq0B3Lchi30Uh1Lt-MBdkEW2nW2PwDQrmrpLKxA,17210
+datamodel_code_generator/format.py,sha256=ZlnTCAl1H4og685smvCBSzexgpYbZtyYLIrt7lwUNcY,8934
 datamodel_code_generator/http.py,sha256=LE94GC7I9D8lWIg_YAGWedfy0XNxOXTmiYKuNMTwouo,887
 datamodel_code_generator/imports.py,sha256=Nq83WbEGCegntg3WX4VbKfzAIs84alZ7IrYyNPrlUbc,5517
 datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamodel_code_generator/pydantic_patch.py,sha256=co1IUDvZqQ-xEZ3C9gbV-BVm2Cin1vfyZNr2Dr0LdHY,718
 datamodel_code_generator/reference.py,sha256=OobfjN5hWaKzv4ECdCPc9Q3ODkoG93B4qaKlzDEcDrY,26748
-datamodel_code_generator/types.py,sha256=fobUZnNTOGpzF4qZMraoLogVkAU7zBdFVG-8SOFoDD4,21163
+datamodel_code_generator/types.py,sha256=1g4RZZ1e5oC4EHaNyyDmsIfF4o1WAjQ2KaLjwc9mYss,21883
 datamodel_code_generator/util.py,sha256=mZW8-6CbFe6T4IY5OM9Av6cH-0VknQGe2eIKjTM6Jzo,2729
 datamodel_code_generator/model/__init__.py,sha256=pJlJ1juQ-Gv17ZKXy6OAfJSSoOAmYQ7QCbdneu1BENU,3594
-datamodel_code_generator/model/base.py,sha256=Ma28Vx1p5zsFt23BMCs1UNDXuBq_USEhuPUvaeFLkS4,14959
-datamodel_code_generator/model/dataclass.py,sha256=t4NtgVhopTtEDUJCqijJheVlewSt8IJzQhQb8gXspfs,6252
+datamodel_code_generator/model/base.py,sha256=ZQ3Xy4Fs_I8M01tk1ps0EhWJM9vR-n7umPkz5NmHTjw,15050
+datamodel_code_generator/model/dataclass.py,sha256=8Z02XY3S6byNe9Pb46LisE5opQcvpx8FVvPjUrlAacE,6309
 datamodel_code_generator/model/enum.py,sha256=yriQslY1hag_Qk-Xv3vl_LkPnbmMZ3iRTAGiiyMN0Io,4003
 datamodel_code_generator/model/imports.py,sha256=PTc09UzIBSsa5yAPoieb6hCGIohU2T1Y7igNy_pYarg,820
 datamodel_code_generator/model/msgspec.py,sha256=qL2DIEwBfpn-vd8p8KEmUViMUce6RgI4Ql-drOmPR7M,11845
@@ -20,13 +20,13 @@ datamodel_code_generator/model/scalar.py,sha256=xfONEK30eYJ2mSL9PK9zXqEG5-xApYMI
 datamodel_code_generator/model/typed_dict.py,sha256=FJi_fEZWuFe3nvidfl-jqr9PMRFTvfusoEFQkx1BqHI,4685
 datamodel_code_generator/model/types.py,sha256=ZyEwi76EBI5RS5JfoNUoRHmjOAZDup8oNFvQQDizfwQ,3502
 datamodel_code_generator/model/union.py,sha256=zwq1ayGFW3KbI4SxPCcdZcrM7X4Px25IdujDedtwgOw,1929
-datamodel_code_generator/model/pydantic/__init__.py,sha256=CtyzSriGEYGp1yfHapjD5lrS2vkSNe8AqKSYO-XaRWc,1095
-datamodel_code_generator/model/pydantic/base_model.py,sha256=qlGTxWYPS3XsHYwrRKjJDHvCixAxpWMEXTPtyAVXb6g,12182
+datamodel_code_generator/model/pydantic/__init__.py,sha256=ggJNv7_6Vv-BgY50O-0Pa6IHGavkxGAjSa9lLEmFOnE,1149
+datamodel_code_generator/model/pydantic/base_model.py,sha256=0ZsoQfjhhRTMlvsThGEsXJgYMubjrakkuOHUt5-f6HY,12492
 datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=VJpEAmGFe3TzMKrR5YvR7PJ3pfGHcYytO1zhQrWyoWg,299
 datamodel_code_generator/model/pydantic/dataclass.py,sha256=jgjkqQk71CQP4RbTcPGSEOQDNqjTQnzFavvl5LjWTBw,455
 datamodel_code_generator/model/pydantic/imports.py,sha256=nWPiLgDeYNPHcAs8M-gaUUZg1daQRHdBPpjYuX3b5u4,2225
 datamodel_code_generator/model/pydantic/types.py,sha256=ttTiDsQ6FV3h4C_NTEhvPUmUpeqxBNQt-DJJFpKZS8s,13356
-datamodel_code_generator/model/pydantic_v2/__init__.py,sha256=xsfYcIUA2S8XzPIsYQSzDuBYZ1XRicfhGLHlQBlZwsg,1226
+datamodel_code_generator/model/pydantic_v2/__init__.py,sha256=YHfOs8prPBsrQmYm0kJATSyU0wafQTqKNd24e2-KGJE,1344
 datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=J_DxR6Auw0L-zHn0F5l9K8XtSmfEvDT26Bj-VZxihiE,8353
 datamodel_code_generator/model/pydantic_v2/imports.py,sha256=K3XD2kF9YCKmo5_7b2ipV5bGUrjz0avS-SiyDMVIpF0,299
 datamodel_code_generator/model/pydantic_v2/root_model.py,sha256=H4rwtg56N65-I3QHlPvlNhDcSPV0m56KSAgfGmxYXAQ,888
@@ -37,7 +37,7 @@ datamodel_code_generator/model/template/TypedDict.jinja2,sha256=J_Pe_CiuvTOb-EUC
 datamodel_code_generator/model/template/TypedDictClass.jinja2,sha256=URwp5__WyR8G21Hoyc17aMzoast-NppXnXe19VFi5wQ,377
 datamodel_code_generator/model/template/TypedDictFunction.jinja2,sha256=KjSij5_w4ow4a12SR3orYOndmXGkIvJBBUN735bQ6G0,321
 datamodel_code_generator/model/template/Union.jinja2,sha256=sq7o--2ESUSfIL4kCfgnr5ZXPFa_VeioqbATTY-N-5I,258
-datamodel_code_generator/model/template/dataclass.jinja2,sha256=wRSy2g11Dr1GN9YUl13OZt2xg37bQyFwKn2wEsQIndE,865
+datamodel_code_generator/model/template/dataclass.jinja2,sha256=c3gs1ZwDEwLpmZ2PpOEWjHjfdl6kPP64xm18mt9lZMk,1007
 datamodel_code_generator/model/template/msgspec.jinja2,sha256=qMuFOH6SFFh558wImdI6uIjG4Mtam3J_ox8Hmgqkv0g,1174
 datamodel_code_generator/model/template/root.jinja2,sha256=3OTtibxLcGA-FMdR0QDCJUJQgf_kRW0OafeCTPFSFFo,162
 datamodel_code_generator/model/template/pydantic/BaseModel.jinja2,sha256=sYZa-47YAXqZrd5cYKVnPrsbDvLkHEJOUd7M0nAosP8,1084
@@ -48,12 +48,12 @@ datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2,sha256=i1Wg
 datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2,sha256=xHvBYrh__32O1xRCSl6_u5zbyYIjB8a5k8fZiTo0spY,149
 datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2,sha256=XQBlML7Hm5hN6_AExENNvVc_yxNWijcIfTTbbmegCpE,1223
 datamodel_code_generator/parser/__init__.py,sha256=3XtFcDPocaetfjmWFqj_CubqNCDipb7vXZHsYKdJXXU,851
-datamodel_code_generator/parser/base.py,sha256=h2YD0aRtGiW3jBBbJ-bx3YjBb9NTF1TBj51JxZnZcxQ,61947
-datamodel_code_generator/parser/graphql.py,sha256=scotG-q8zTS40i5rP9HfhriSVhXVnxEKtuBoXvbzECg,22684
-datamodel_code_generator/parser/jsonschema.py,sha256=bwXNvXjG8tiUPCNZsEXbo2TlCYJVKIzeYWEKfvlPzo8,70011
-datamodel_code_generator/parser/openapi.py,sha256=3IWF40DK0a710rVqXCod7Hi3Fh9u7neD4YR_kn_6VD4,27279
-datamodel_code_generator-0.30.1.dist-info/METADATA,sha256=9YcS5FD4-A9Dk96eQsagXfsuPAQs0qidE_szFAPChKs,25294
-datamodel_code_generator-0.30.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-datamodel_code_generator-0.30.1.dist-info/entry_points.txt,sha256=cJVcHiEViQMANaoM5C1xR5hzmyCqH6hHHMpV8W00in8,77
-datamodel_code_generator-0.30.1.dist-info/licenses/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
-datamodel_code_generator-0.30.1.dist-info/RECORD,,
+datamodel_code_generator/parser/base.py,sha256=4B4UeEjCfl_IdRak_qi5Wx8sUx5bX8Xt-Z0CmfsWsks,62940
+datamodel_code_generator/parser/graphql.py,sha256=TNLxy-0wWJSpCm6HK5fKooZdaJodcwQqVr0J29ao3J8,23330
+datamodel_code_generator/parser/jsonschema.py,sha256=oFSZOyLL3K0tneH09t2lNKdWcbmR_r5OjCyWKPq2Jxc,71114
+datamodel_code_generator/parser/openapi.py,sha256=kI4kqVBG96rdAVb8z0l4gF6QpcylgVoGjlK_-KToZRg,27607
+datamodel_code_generator-0.31.0.dist-info/METADATA,sha256=4pJIEOpNG7M2YhWpZi8OjNBSD0KO6UFIelQNRjIkSg0,25658
+datamodel_code_generator-0.31.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+datamodel_code_generator-0.31.0.dist-info/entry_points.txt,sha256=cJVcHiEViQMANaoM5C1xR5hzmyCqH6hHHMpV8W00in8,77
+datamodel_code_generator-0.31.0.dist-info/licenses/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
+datamodel_code_generator-0.31.0.dist-info/RECORD,,