datamodel-code-generator 0.27.2__py3-none-any.whl → 0.27.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datamodel-code-generator might be problematic.

Files changed (43)
  1. datamodel_code_generator/__init__.py +159 -190
  2. datamodel_code_generator/__main__.py +151 -173
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -99
  5. datamodel_code_generator/http.py +9 -10
  6. datamodel_code_generator/imports.py +57 -64
  7. datamodel_code_generator/model/__init__.py +26 -31
  8. datamodel_code_generator/model/base.py +94 -127
  9. datamodel_code_generator/model/dataclass.py +58 -59
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +112 -126
  13. datamodel_code_generator/model/pydantic/__init__.py +14 -27
  14. datamodel_code_generator/model/pydantic/base_model.py +120 -139
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +86 -117
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +17 -17
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -119
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +8 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +42 -41
  27. datamodel_code_generator/model/types.py +19 -17
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +12 -11
  30. datamodel_code_generator/parser/base.py +320 -492
  31. datamodel_code_generator/parser/graphql.py +80 -111
  32. datamodel_code_generator/parser/jsonschema.py +422 -580
  33. datamodel_code_generator/parser/openapi.py +175 -204
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +192 -274
  36. datamodel_code_generator/types.py +147 -182
  37. datamodel_code_generator/util.py +22 -26
  38. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/METADATA +7 -1
  39. datamodel_code_generator-0.27.3.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/licenses/LICENSE +0 -0
datamodel_code_generator/__main__.py

@@ -1,5 +1,3 @@
- #! /usr/bin/env python
-
  """
  Main function.
  """
@@ -17,8 +15,6 @@ from pathlib import Path
  from typing import (
  TYPE_CHECKING,
  Any,
- DefaultDict,
- Dict,
  List,
  Optional,
  Sequence,
@@ -33,8 +29,6 @@ import argcomplete
  import black
  from pydantic import BaseModel

- from datamodel_code_generator.model.pydantic_v2 import UnionMode
-
  if TYPE_CHECKING:
  from argparse import Namespace

@@ -55,9 +49,10 @@ from datamodel_code_generator.format import (
  PythonVersion,
  is_supported_in_black,
  )
- from datamodel_code_generator.parser import LiteralType
+ from datamodel_code_generator.model.pydantic_v2 import UnionMode # noqa: TC001 # needed for pydantic
+ from datamodel_code_generator.parser import LiteralType # noqa: TC001 # needed for pydantic
  from datamodel_code_generator.reference import is_url
- from datamodel_code_generator.types import StrictTypes
+ from datamodel_code_generator.types import StrictTypes # noqa: TC001 # needed for pydantic
  from datamodel_code_generator.util import (
  PYDANTIC_V2,
  ConfigDict,
@@ -77,7 +72,7 @@ class Exit(IntEnum):


  def sig_int_handler(_: int, __: Any) -> None: # pragma: no cover
- exit(Exit.OK)
+ sys.exit(Exit.OK)


  signal.signal(signal.SIGINT, sig_int_handler)
@@ -85,7 +80,7 @@ signal.signal(signal.SIGINT, sig_int_handler)

  class Config(BaseModel):
  if PYDANTIC_V2:
- model_config = ConfigDict(arbitrary_types_allowed=True) # pyright: ignore [reportAssignmentType]
+ model_config = ConfigDict(arbitrary_types_allowed=True) # pyright: ignore[reportAssignmentType]

  def get(self, item: str) -> Any:
  return getattr(self, item)
@@ -96,7 +91,7 @@ class Config(BaseModel):
  if TYPE_CHECKING:

  @classmethod
- def get_fields(cls) -> Dict[str, Any]: ...
+ def get_fields(cls) -> dict[str, Any]: ...

  else:

@@ -105,164 +100,153 @@ class Config(BaseModel):
  return cls.model_validate(obj)

  @classmethod
- def get_fields(cls) -> Dict[str, Any]:
+ def get_fields(cls) -> dict[str, Any]:
  return cls.model_fields

  else:

  class Config:
- # validate_assignment = True
  # Pydantic 1.5.1 doesn't support validate_assignment correctly
  arbitrary_types_allowed = (TextIOBase,)

  if not TYPE_CHECKING:

  @classmethod
- def get_fields(cls) -> Dict[str, Any]:
+ def get_fields(cls) -> dict[str, Any]:
  return cls.__fields__

- @field_validator(
- 'aliases', 'extra_template_data', 'custom_formatters_kwargs', mode='before'
- )
- def validate_file(cls, value: Any) -> Optional[TextIOBase]:
+ @field_validator("aliases", "extra_template_data", "custom_formatters_kwargs", mode="before")
+ def validate_file(cls, value: Any) -> TextIOBase | None: # noqa: N805
  if value is None or isinstance(value, TextIOBase):
  return value
- return cast(TextIOBase, Path(value).expanduser().resolve().open('rt'))
+ return cast("TextIOBase", Path(value).expanduser().resolve().open("rt"))

  @field_validator(
- 'input',
- 'output',
- 'custom_template_dir',
- 'custom_file_header_path',
- mode='before',
+ "input",
+ "output",
+ "custom_template_dir",
+ "custom_file_header_path",
+ mode="before",
  )
- def validate_path(cls, value: Any) -> Optional[Path]:
+ def validate_path(cls, value: Any) -> Path | None: # noqa: N805
  if value is None or isinstance(value, Path):
  return value # pragma: no cover
  return Path(value).expanduser().resolve()

- @field_validator('url', mode='before')
- def validate_url(cls, value: Any) -> Optional[ParseResult]:
+ @field_validator("url", mode="before")
+ def validate_url(cls, value: Any) -> ParseResult | None: # noqa: N805
  if isinstance(value, str) and is_url(value): # pragma: no cover
  return urlparse(value)
- elif value is None: # pragma: no cover
+ if value is None: # pragma: no cover
  return None
- raise Error(
- f"This protocol doesn't support only http/https. --input={value}"
- ) # pragma: no cover
-
- @model_validator(mode='after')
- def validate_use_generic_container_types(
- cls, values: Dict[str, Any]
- ) -> Dict[str, Any]:
- if values.get('use_generic_container_types'):
- target_python_version: PythonVersion = values['target_python_version']
+ msg = f"This protocol doesn't support only http/https. --input={value}"
+ raise Error(msg) # pragma: no cover
+
+ @model_validator(mode="after")
+ def validate_use_generic_container_types(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
+ if values.get("use_generic_container_types"):
+ target_python_version: PythonVersion = values["target_python_version"]
  if target_python_version == target_python_version.PY_36:
- raise Error(
- f'`--use-generic-container-types` can not be used with `--target-python-version` {target_python_version.PY_36.value}.\n'
- ' The version will be not supported in a future version'
+ msg = (
+ f"`--use-generic-container-types` can not be used with `--target-python-version` "
+ f"{target_python_version.PY_36.value}.\n"
+ " The version will be not supported in a future version"
  )
+ raise Error(msg)
  return values

- @model_validator(mode='after')
- def validate_original_field_name_delimiter(
- cls, values: Dict[str, Any]
- ) -> Dict[str, Any]:
- if values.get('original_field_name_delimiter') is not None:
- if not values.get('snake_case_field'):
- raise Error(
- '`--original-field-name-delimiter` can not be used without `--snake-case-field`.'
- )
+ @model_validator(mode="after")
+ def validate_original_field_name_delimiter(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
+ if values.get("original_field_name_delimiter") is not None and not values.get("snake_case_field"):
+ msg = "`--original-field-name-delimiter` can not be used without `--snake-case-field`."
+ raise Error(msg)
  return values

- @model_validator(mode='after')
- def validate_custom_file_header(cls, values: Dict[str, Any]) -> Dict[str, Any]:
- if values.get('custom_file_header') and values.get('custom_file_header_path'):
- raise Error(
- '`--custom_file_header_path` can not be used with `--custom_file_header`.'
- ) # pragma: no cover
+ @model_validator(mode="after")
+ def validate_custom_file_header(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
+ if values.get("custom_file_header") and values.get("custom_file_header_path"):
+ msg = "`--custom_file_header_path` can not be used with `--custom_file_header`."
+ raise Error(msg) # pragma: no cover
  return values

- @model_validator(mode='after')
- def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
- output_model_type: DataModelType = values.get('output_model_type') # pyright: ignore [reportAssignmentType]
- python_target: PythonVersion = values.get('target_python_version') # pyright: ignore [reportAssignmentType]
+ @model_validator(mode="after")
+ def validate_keyword_only(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
+ output_model_type: DataModelType = values.get("output_model_type") # pyright: ignore[reportAssignmentType]
+ python_target: PythonVersion = values.get("target_python_version") # pyright: ignore[reportAssignmentType]
  if (
- values.get('keyword_only')
+ values.get("keyword_only")
  and output_model_type == DataModelType.DataclassesDataclass
  and not python_target.has_kw_only_dataclass
  ):
- raise Error(
- f'`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher.'
- )
+ msg = f"`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher."
+ raise Error(msg)
  return values

- @model_validator(mode='after')
- def validate_output_datetime_class(cls, values: Dict[str, Any]) -> Dict[str, Any]:
- datetime_class_type: Optional[DatetimeClassType] = values.get(
- 'output_datetime_class'
- )
+ @model_validator(mode="after")
+ def validate_output_datetime_class(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
+ datetime_class_type: DatetimeClassType | None = values.get("output_datetime_class")
  if (
  datetime_class_type
  and datetime_class_type is not DatetimeClassType.Datetime
- and values.get('output_model_type') == DataModelType.DataclassesDataclass
+ and values.get("output_model_type") == DataModelType.DataclassesDataclass
  ):
- raise Error(
+ msg = (
  '`--output-datetime-class` only allows "datetime" for '
- f'`--output-model-type` {DataModelType.DataclassesDataclass.value}'
+ f"`--output-model-type` {DataModelType.DataclassesDataclass.value}"
  )
+ raise Error(msg)
  return values

  # Pydantic 1.5.1 doesn't support each_item=True correctly
- @field_validator('http_headers', mode='before')
- def validate_http_headers(cls, value: Any) -> Optional[List[Tuple[str, str]]]:
- def validate_each_item(each_item: Any) -> Tuple[str, str]:
+ @field_validator("http_headers", mode="before")
+ def validate_http_headers(cls, value: Any) -> list[tuple[str, str]] | None: # noqa: N805
+ def validate_each_item(each_item: Any) -> tuple[str, str]:
  if isinstance(each_item, str): # pragma: no cover
  try:
- field_name, field_value = each_item.split(':', maxsplit=1)
+ field_name, field_value = each_item.split(":", maxsplit=1)
  return field_name, field_value.lstrip()
- except ValueError:
- raise Error(f'Invalid http header: {each_item!r}')
+ except ValueError as exc:
+ msg = f"Invalid http header: {each_item!r}"
+ raise Error(msg) from exc
  return each_item # pragma: no cover

  if isinstance(value, list):
  return [validate_each_item(each_item) for each_item in value]
  return value # pragma: no cover

- @field_validator('http_query_parameters', mode='before')
- def validate_http_query_parameters(
- cls, value: Any
- ) -> Optional[List[Tuple[str, str]]]:
- def validate_each_item(each_item: Any) -> Tuple[str, str]:
+ @field_validator("http_query_parameters", mode="before")
+ def validate_http_query_parameters(cls, value: Any) -> list[tuple[str, str]] | None: # noqa: N805
+ def validate_each_item(each_item: Any) -> tuple[str, str]:
  if isinstance(each_item, str): # pragma: no cover
  try:
- field_name, field_value = each_item.split('=', maxsplit=1)
+ field_name, field_value = each_item.split("=", maxsplit=1)
  return field_name, field_value.lstrip()
- except ValueError:
- raise Error(f'Invalid http query parameter: {each_item!r}')
+ except ValueError as exc:
+ msg = f"Invalid http query parameter: {each_item!r}"
+ raise Error(msg) from exc
  return each_item # pragma: no cover

  if isinstance(value, list):
  return [validate_each_item(each_item) for each_item in value]
  return value # pragma: no cover

- @model_validator(mode='before')
- def validate_additional_imports(cls, values: Dict[str, Any]) -> Dict[str, Any]:
- additional_imports = values.get('additional_imports')
+ @model_validator(mode="before")
+ def validate_additional_imports(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
+ additional_imports = values.get("additional_imports")
  if additional_imports is not None:
- values['additional_imports'] = additional_imports.split(',')
+ values["additional_imports"] = additional_imports.split(",")
  return values

- @model_validator(mode='before')
- def validate_custom_formatters(cls, values: Dict[str, Any]) -> Dict[str, Any]:
- custom_formatters = values.get('custom_formatters')
+ @model_validator(mode="before")
+ def validate_custom_formatters(cls, values: dict[str, Any]) -> dict[str, Any]: # noqa: N805
+ custom_formatters = values.get("custom_formatters")
  if custom_formatters is not None:
- values['custom_formatters'] = custom_formatters.split(',')
+ values["custom_formatters"] = custom_formatters.split(",")
  return values

  if PYDANTIC_V2:

- @model_validator(mode='after') # type: ignore
+ @model_validator(mode="after") # pyright: ignore[reportArgumentType]
  def validate_root(self: Self) -> Self:
  if self.use_annotated:
  self.field_constraints = True
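
The header and query-parameter validators in the hunk above keep the same `name: value` / `name=value` splitting behaviour; the change only moves the error text into a local `msg` and chains the original `ValueError`. A standalone sketch of that splitting rule, outside the Config model (the helper name and sample values below are illustrative, not from the package):

    # Sketch of the splitting rule used by validate_http_headers /
    # validate_http_query_parameters above; names and sample data are illustrative.
    def split_pair(raw: str, separator: str) -> tuple[str, str]:
        try:
            name, value = raw.split(separator, maxsplit=1)
        except ValueError as exc:
            msg = f"Invalid pair: {raw!r}"
            raise ValueError(msg) from exc
        return name, value.lstrip()


    print(split_pair("Authorization: Bearer abc123", ":"))  # ('Authorization', 'Bearer abc123')
    print(split_pair("page=1", "="))                        # ('page', '1')
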
@@ -270,42 +254,42 @@ class Config(BaseModel):

  else:

- @model_validator(mode='after')
- def validate_root(cls, values: Any) -> Any:
- if values.get('use_annotated'):
- values['field_constraints'] = True
+ @model_validator(mode="after")
+ def validate_root(cls, values: Any) -> Any: # noqa: N805
+ if values.get("use_annotated"):
+ values["field_constraints"] = True
  return values

- input: Optional[Union[Path, str]] = None
+ input: Optional[Union[Path, str]] = None # noqa: UP007, UP045
  input_file_type: InputFileType = InputFileType.Auto
  output_model_type: DataModelType = DataModelType.PydanticBaseModel
- output: Optional[Path] = None
+ output: Optional[Path] = None # noqa: UP045
  debug: bool = False
  disable_warnings: bool = False
  target_python_version: PythonVersion = PythonVersion.PY_38
- base_class: str = ''
- additional_imports: Optional[List[str]] = None
- custom_template_dir: Optional[Path] = None
- extra_template_data: Optional[TextIOBase] = None
+ base_class: str = ""
+ additional_imports: Optional[List[str]] = None # noqa: UP006, UP045
+ custom_template_dir: Optional[Path] = None # noqa: UP045
+ extra_template_data: Optional[TextIOBase] = None # noqa: UP045
  validation: bool = False
  field_constraints: bool = False
  snake_case_field: bool = False
  strip_default_none: bool = False
- aliases: Optional[TextIOBase] = None
+ aliases: Optional[TextIOBase] = None # noqa: UP045
  disable_timestamp: bool = False
  enable_version_header: bool = False
  allow_population_by_field_name: bool = False
  allow_extra_fields: bool = False
  use_default: bool = False
  force_optional: bool = False
- class_name: Optional[str] = None
+ class_name: Optional[str] = None # noqa: UP045
  use_standard_collections: bool = False
  use_schema_description: bool = False
  use_field_description: bool = False
  use_default_kwarg: bool = False
  reuse_model: bool = False
  encoding: str = DEFAULT_ENCODING
- enum_field_as_literal: Optional[LiteralType] = None
+ enum_field_as_literal: Optional[LiteralType] = None # noqa: UP045
  use_one_literal_as_default: bool = False
  set_default_enum_member: bool = False
  use_subclass_enum: bool = False
@@ -313,73 +297,69 @@ class Config(BaseModel):
  use_generic_container_types: bool = False
  use_union_operator: bool = False
  enable_faux_immutability: bool = False
- url: Optional[ParseResult] = None
+ url: Optional[ParseResult] = None # noqa: UP045
  disable_appending_item_suffix: bool = False
- strict_types: List[StrictTypes] = []
- empty_enum_field_name: Optional[str] = None
- field_extra_keys: Optional[Set[str]] = None
+ strict_types: List[StrictTypes] = [] # noqa: UP006
+ empty_enum_field_name: Optional[str] = None # noqa: UP045
+ field_extra_keys: Optional[Set[str]] = None # noqa: UP006, UP045
  field_include_all_keys: bool = False
- field_extra_keys_without_x_prefix: Optional[Set[str]] = None
- openapi_scopes: Optional[List[OpenAPIScope]] = [OpenAPIScope.Schemas]
- wrap_string_literal: Optional[bool] = None
+ field_extra_keys_without_x_prefix: Optional[Set[str]] = None # noqa: UP006, UP045
+ openapi_scopes: Optional[List[OpenAPIScope]] = [OpenAPIScope.Schemas] # noqa: UP006, UP045
+ wrap_string_literal: Optional[bool] = None # noqa: UP045
  use_title_as_name: bool = False
  use_operation_id_as_name: bool = False
  use_unique_items_as_set: bool = False
- http_headers: Optional[Sequence[Tuple[str, str]]] = None
+ http_headers: Optional[Sequence[Tuple[str, str]]] = None # noqa: UP006, UP045
  http_ignore_tls: bool = False
  use_annotated: bool = False
  use_non_positive_negative_number_constrained_types: bool = False
- original_field_name_delimiter: Optional[str] = None
+ original_field_name_delimiter: Optional[str] = None # noqa: UP045
  use_double_quotes: bool = False
  collapse_root_models: bool = False
- special_field_name_prefix: Optional[str] = None
+ special_field_name_prefix: Optional[str] = None # noqa: UP045
  remove_special_field_name_prefix: bool = False
  capitalise_enum_members: bool = False
  keep_model_order: bool = False
- custom_file_header: Optional[str] = None
- custom_file_header_path: Optional[Path] = None
- custom_formatters: Optional[List[str]] = None
- custom_formatters_kwargs: Optional[TextIOBase] = None
+ custom_file_header: Optional[str] = None # noqa: UP045
+ custom_file_header_path: Optional[Path] = None # noqa: UP045
+ custom_formatters: Optional[List[str]] = None # noqa: UP006, UP045
+ custom_formatters_kwargs: Optional[TextIOBase] = None # noqa: UP045
  use_pendulum: bool = False
- http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None
+ http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None # noqa: UP006, UP045
  treat_dot_as_module: bool = False
  use_exact_imports: bool = False
- union_mode: Optional[UnionMode] = None
- output_datetime_class: Optional[DatetimeClassType] = None
+ union_mode: Optional[UnionMode] = None # noqa: UP045
+ output_datetime_class: Optional[DatetimeClassType] = None # noqa: UP045
  keyword_only: bool = False
  no_alias: bool = False

  def merge_args(self, args: Namespace) -> None:
- set_args = {
- f: getattr(args, f)
- for f in self.get_fields()
- if getattr(args, f) is not None
- }
+ set_args = {f: getattr(args, f) for f in self.get_fields() if getattr(args, f) is not None}

- if set_args.get('output_model_type') == DataModelType.MsgspecStruct.value:
- set_args['use_annotated'] = True
+ if set_args.get("output_model_type") == DataModelType.MsgspecStruct.value:
+ set_args["use_annotated"] = True

- if set_args.get('use_annotated'):
- set_args['field_constraints'] = True
+ if set_args.get("use_annotated"):
+ set_args["field_constraints"] = True

  parsed_args = Config.parse_obj(set_args)
  for field_name in set_args:
  setattr(self, field_name, getattr(parsed_args, field_name))


- def _get_pyproject_toml_config(source: Path) -> Optional[Dict[str, Any]]:
+ def _get_pyproject_toml_config(source: Path) -> dict[str, Any] | None:
  """Find and return the [tool.datamodel-codgen] section of the closest
  pyproject.toml if it exists.
  """

  current_path = source
  while current_path != current_path.parent:
- if (current_path / 'pyproject.toml').is_file():
- pyproject_toml = load_toml(current_path / 'pyproject.toml')
- if 'datamodel-codegen' in pyproject_toml.get('tool', {}):
- return pyproject_toml['tool']['datamodel-codegen']
+ if (current_path / "pyproject.toml").is_file():
+ pyproject_toml = load_toml(current_path / "pyproject.toml")
+ if "datamodel-codegen" in pyproject_toml.get("tool", {}):
+ return pyproject_toml["tool"]["datamodel-codegen"]

- if (current_path / '.git').exists():
+ if (current_path / ".git").exists():
  # Stop early if we see a git repository root.
  return None

@@ -387,7 +367,7 @@ def _get_pyproject_toml_config(source: Path) -> Optional[Dict[str, Any]]:
  return None


- def main(args: Optional[Sequence[str]] = None) -> Exit:
+ def main(args: Sequence[str] | None = None) -> Exit: # noqa: PLR0911, PLR0912, PLR0915
  """Main function."""

  # add cli completion support
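
For context on the `_get_pyproject_toml_config` hunks above: the section it returns uses dashed keys under `[tool.datamodel-codegen]`, and `main()` rewrites them to the underscore names used by `Config` before building the model. A rough sketch of that lookup and normalization, using the standard-library `tomllib` instead of the package's internal `load_toml` helper (path and keys here are illustrative):

    # Sketch of the pyproject.toml lookup/normalization shown above; uses tomllib
    # (Python 3.11+) rather than the package's own load_toml helper.
    import tomllib
    from pathlib import Path

    pyproject = tomllib.loads(Path("pyproject.toml").read_text(encoding="utf-8"))
    section = pyproject.get("tool", {}).get("datamodel-codegen", {})

    # main() rewrites dashed TOML keys to the underscore names used by Config,
    # e.g. "target-python-version" -> "target_python_version".
    config_kwargs = {key.replace("-", "_"): value for key, value in section.items()}
    print(config_kwargs)
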
@@ -399,14 +379,14 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
  arg_parser.parse_args(args, namespace=namespace)

  if namespace.version:
- from datamodel_code_generator import get_version
+ from datamodel_code_generator import get_version # noqa: PLC0415

- print(get_version())
- exit(0)
+ print(get_version()) # noqa: T201
+ sys.exit(0)

- pyproject_config = _get_pyproject_toml_config(Path().resolve())
+ pyproject_config = _get_pyproject_toml_config(Path.cwd())
  if pyproject_config is not None:
- pyproject_toml = {k.replace('-', '_'): v for k, v in pyproject_config.items()}
+ pyproject_toml = {k.replace("-", "_"): v for k, v in pyproject_config.items()}
  else:
  pyproject_toml = {}

@@ -414,22 +394,22 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
  config = Config.parse_obj(pyproject_toml)
  config.merge_args(namespace)
  except Error as e:
- print(e.message, file=sys.stderr)
+ print(e.message, file=sys.stderr) # noqa: T201
  return Exit.ERROR

  if not config.input and not config.url and sys.stdin.isatty():
- print(
- 'Not Found Input: require `stdin` or arguments `--input` or `--url`',
+ print( # noqa: T201
+ "Not Found Input: require `stdin` or arguments `--input` or `--url`",
  file=sys.stderr,
  )
  arg_parser.print_help()
  return Exit.ERROR

  if not is_supported_in_black(config.target_python_version): # pragma: no cover
- print(
- f"Installed black doesn't support Python version {config.target_python_version.value}.\n" # type: ignore
- f'You have to install a newer black.\n'
- f'Installed black version: {black.__version__}',
+ print( # noqa: T201
+ f"Installed black doesn't support Python version {config.target_python_version.value}.\n"
+ f"You have to install a newer black.\n"
+ f"Installed black version: {black.__version__}",
  file=sys.stderr,
  )
  return Exit.ERROR
@@ -438,18 +418,16 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
  enable_debug_message()

  if config.disable_warnings:
- warnings.simplefilter('ignore')
- extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]]
+ warnings.simplefilter("ignore")
+ extra_template_data: defaultdict[str, dict[str, Any]] | None
  if config.extra_template_data is None:
  extra_template_data = None
  else:
  with config.extra_template_data as data:
  try:
- extra_template_data = json.load(
- data, object_hook=lambda d: defaultdict(dict, **d)
- )
+ extra_template_data = json.load(data, object_hook=lambda d: defaultdict(dict, **d))
  except json.JSONDecodeError as e:
- print(f'Unable to load extra template data: {e}', file=sys.stderr)
+ print(f"Unable to load extra template data: {e}", file=sys.stderr) # noqa: T201
  return Exit.ERROR

  if config.aliases is None:
@@ -459,12 +437,12 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
  try:
  aliases = json.load(data)
  except json.JSONDecodeError as e:
- print(f'Unable to load alias mapping: {e}', file=sys.stderr)
+ print(f"Unable to load alias mapping: {e}", file=sys.stderr) # noqa: T201
  return Exit.ERROR
  if not isinstance(aliases, dict) or not all(
  isinstance(k, str) and isinstance(v, str) for k, v in aliases.items()
  ):
- print(
+ print( # noqa: T201
  'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
  file=sys.stderr,
  )
@@ -477,16 +455,15 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
  try:
  custom_formatters_kwargs = json.load(data)
  except json.JSONDecodeError as e: # pragma: no cover
- print(
- f'Unable to load custom_formatters_kwargs mapping: {e}',
+ print( # noqa: T201
+ f"Unable to load custom_formatters_kwargs mapping: {e}",
  file=sys.stderr,
  )
  return Exit.ERROR
  if not isinstance(custom_formatters_kwargs, dict) or not all(
- isinstance(k, str) and isinstance(v, str)
- for k, v in custom_formatters_kwargs.items()
+ isinstance(k, str) and isinstance(v, str) for k, v in custom_formatters_kwargs.items()
  ): # pragma: no cover
- print(
+ print( # noqa: T201
  'Custom formatters kwargs mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
  file=sys.stderr,
  )
@@ -564,19 +541,20 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
  keyword_only=config.keyword_only,
  no_alias=config.no_alias,
  )
- return Exit.OK
  except InvalidClassNameError as e:
- print(f'{e} You have to set `--class-name` option', file=sys.stderr)
+ print(f"{e} You have to set `--class-name` option", file=sys.stderr) # noqa: T201
  return Exit.ERROR
  except Error as e:
- print(str(e), file=sys.stderr)
+ print(str(e), file=sys.stderr) # noqa: T201
  return Exit.ERROR
- except Exception:
- import traceback
+ except Exception: # noqa: BLE001
+ import traceback # noqa: PLC0415

- print(traceback.format_exc(), file=sys.stderr)
+ print(traceback.format_exc(), file=sys.stderr) # noqa: T201
  return Exit.ERROR
+ else:
+ return Exit.OK


- if __name__ == '__main__':
+ if __name__ == "__main__":
  sys.exit(main())
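
Since `main()` now takes `Sequence[str] | None` and reports success through the trailing `else: return Exit.OK` branch, it can still be driven programmatically with an explicit argument list and its `Exit` code inspected. A hedged sketch (the import path and CLI flags shown are assumptions based on this diff, not verified against the released wheel):

    # Sketch: invoking the CLI entry point with an explicit argv list and checking
    # the Exit code it returns. Import path and flags are assumptions from this diff.
    import sys

    from datamodel_code_generator.__main__ import Exit, main

    exit_code = main(["--input", "schema.json", "--output", "model.py"])
    if exit_code is not Exit.OK:
        sys.exit(exit_code)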