datamodel-code-generator 0.27.2__py3-none-any.whl → 0.28.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator might be problematic; review the changes below for details.

Files changed (43)
  1. datamodel_code_generator/__init__.py +168 -196
  2. datamodel_code_generator/__main__.py +146 -189
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -129
  5. datamodel_code_generator/http.py +12 -10
  6. datamodel_code_generator/imports.py +59 -65
  7. datamodel_code_generator/model/__init__.py +28 -31
  8. datamodel_code_generator/model/base.py +100 -144
  9. datamodel_code_generator/model/dataclass.py +62 -70
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +116 -138
  13. datamodel_code_generator/model/pydantic/__init__.py +18 -28
  14. datamodel_code_generator/model/pydantic/base_model.py +121 -140
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +91 -119
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +21 -18
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -127
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +11 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +41 -51
  27. datamodel_code_generator/model/types.py +24 -19
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +16 -12
  30. datamodel_code_generator/parser/base.py +327 -515
  31. datamodel_code_generator/parser/graphql.py +87 -119
  32. datamodel_code_generator/parser/jsonschema.py +438 -607
  33. datamodel_code_generator/parser/openapi.py +180 -220
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +199 -297
  36. datamodel_code_generator/types.py +149 -215
  37. datamodel_code_generator/util.py +23 -36
  38. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/METADATA +10 -5
  39. datamodel_code_generator-0.28.0.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/licenses/LICENSE +0 -0
@@ -3,6 +3,7 @@ from __future__ import annotations
3
3
  import contextlib
4
4
  import os
5
5
  import sys
6
+ from collections.abc import Iterator, Mapping, Sequence
6
7
  from datetime import datetime, timezone
7
8
  from enum import Enum
8
9
  from pathlib import Path
@@ -11,33 +12,24 @@ from typing import (
11
12
  TYPE_CHECKING,
12
13
  Any,
13
14
  Callable,
14
- DefaultDict,
15
- Dict,
16
- Iterator,
17
- List,
18
- Mapping,
19
- Optional,
20
- Sequence,
21
- Set,
15
+ Final,
22
16
  TextIO,
23
- Tuple,
24
- Type,
25
17
  TypeVar,
26
- Union,
18
+ cast,
27
19
  )
28
20
  from urllib.parse import ParseResult
29
21
 
30
22
  import yaml
31
23
 
32
24
  import datamodel_code_generator.pydantic_patch # noqa: F401
33
- from datamodel_code_generator.format import DatetimeClassType, PythonVersion
34
- from datamodel_code_generator.model.pydantic_v2 import UnionMode
25
+ from datamodel_code_generator.format import DatetimeClassType, PythonVersion, PythonVersionMin
35
26
  from datamodel_code_generator.parser import DefaultPutDict, LiteralType
36
- from datamodel_code_generator.parser.base import Parser
37
- from datamodel_code_generator.types import StrictTypes
38
- from datamodel_code_generator.util import SafeLoader # type: ignore
27
+ from datamodel_code_generator.util import SafeLoader
39
28
 
40
- T = TypeVar('T')
29
+ MIN_VERSION: Final[int] = 9
30
+ MAX_VERSION: Final[int] = 13
31
+
32
+ T = TypeVar("T")
41
33
 
42
34
  try:
43
35
  import pysnooper
@@ -46,11 +38,11 @@ try:
46
38
  except ImportError: # pragma: no cover
47
39
  pysnooper = None
48
40
 
49
- DEFAULT_BASE_CLASS: str = 'pydantic.BaseModel'
41
+ DEFAULT_BASE_CLASS: str = "pydantic.BaseModel"
50
42
 
51
43
 
52
- def load_yaml(stream: Union[str, TextIO]) -> Any:
53
- return yaml.load(stream, Loader=SafeLoader)
44
+ def load_yaml(stream: str | TextIO) -> Any:
45
+ return yaml.load(stream, Loader=SafeLoader) # noqa: S506
54
46
 
55
47
 
56
48
  def load_yaml_from_path(path: Path, encoding: str) -> Any:
@@ -59,24 +51,28 @@ def load_yaml_from_path(path: Path, encoding: str) -> Any:
59
51
 
60
52
 
61
53
  if TYPE_CHECKING:
54
+ from collections import defaultdict
55
+
56
+ from datamodel_code_generator.model.pydantic_v2 import UnionMode
57
+ from datamodel_code_generator.parser.base import Parser
58
+ from datamodel_code_generator.types import StrictTypes
62
59
 
63
60
  def get_version() -> str: ...
64
61
 
65
62
  else:
66
63
 
67
64
  def get_version() -> str:
68
- package = 'datamodel-code-generator'
65
+ package = "datamodel-code-generator"
69
66
 
70
- from importlib.metadata import version
67
+ from importlib.metadata import version # noqa: PLC0415
71
68
 
72
69
  return version(package)
73
70
 
74
71
 
75
72
  def enable_debug_message() -> None: # pragma: no cover
76
73
  if not pysnooper:
77
- raise Exception(
78
- "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
79
- )
74
+ msg = "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
75
+ raise Exception(msg) # noqa: TRY002
80
76
 
81
77
  pysnooper.tracer.DISABLED = False
82
78
 
@@ -84,37 +80,15 @@ def enable_debug_message() -> None: # pragma: no cover
84
80
  DEFAULT_MAX_VARIABLE_LENGTH: int = 100
85
81
 
86
82
 
87
- def snooper_to_methods( # type: ignore
88
- output=None,
89
- watch=(),
90
- watch_explode=(),
91
- depth=1,
92
- prefix='',
93
- overwrite=False,
94
- thread_info=False,
95
- custom_repr=(),
96
- max_variable_length: Optional[int] = DEFAULT_MAX_VARIABLE_LENGTH,
97
- ) -> Callable[..., Any]:
98
- def inner(cls: Type[T]) -> Type[T]:
83
+ def snooper_to_methods() -> Callable[..., Any]:
84
+ def inner(cls: type[T]) -> type[T]:
99
85
  if not pysnooper:
100
86
  return cls
101
- import inspect
87
+ import inspect # noqa: PLC0415
102
88
 
103
89
  methods = inspect.getmembers(cls, predicate=inspect.isfunction)
104
90
  for name, method in methods:
105
- snooper_method = pysnooper.snoop(
106
- output,
107
- watch,
108
- watch_explode,
109
- depth,
110
- prefix,
111
- overwrite,
112
- thread_info,
113
- custom_repr,
114
- max_variable_length
115
- if max_variable_length is not None
116
- else DEFAULT_MAX_VARIABLE_LENGTH,
117
- )(method)
91
+ snooper_method = pysnooper.snoop(max_variable_length=DEFAULT_MAX_VARIABLE_LENGTH)(method)
118
92
  setattr(cls, name, snooper_method)
119
93
  return cls
120
94
 
@@ -122,7 +96,7 @@ def snooper_to_methods( # type: ignore
122
96
 
123
97
 
124
98
  @contextlib.contextmanager
125
- def chdir(path: Optional[Path]) -> Iterator[None]:
99
+ def chdir(path: Path | None) -> Iterator[None]:
126
100
  """Changes working directory and returns to previous on exit."""
127
101
 
128
102
  if path is None:
@@ -137,12 +111,12 @@ def chdir(path: Optional[Path]) -> Iterator[None]:
137
111
 
138
112
 
139
113
  def is_openapi(text: str) -> bool:
140
- return 'openapi' in load_yaml(text)
114
+ return "openapi" in load_yaml(text)
141
115
 
142
116
 
143
- JSON_SCHEMA_URLS: Tuple[str, ...] = (
144
- 'http://json-schema.org/',
145
- 'https://json-schema.org/',
117
+ JSON_SCHEMA_URLS: tuple[str, ...] = (
118
+ "http://json-schema.org/",
119
+ "https://json-schema.org/",
146
120
  )
147
121
 
148
122
 
@@ -150,39 +124,35 @@ def is_schema(text: str) -> bool:
150
124
  data = load_yaml(text)
151
125
  if not isinstance(data, dict):
152
126
  return False
153
- schema = data.get('$schema')
154
- if isinstance(schema, str) and any(
155
- schema.startswith(u) for u in JSON_SCHEMA_URLS
156
- ): # pragma: no cover
127
+ schema = data.get("$schema")
128
+ if isinstance(schema, str) and any(schema.startswith(u) for u in JSON_SCHEMA_URLS): # pragma: no cover
157
129
  return True
158
- if isinstance(data.get('type'), str):
130
+ if isinstance(data.get("type"), str):
159
131
  return True
160
132
  if any(
161
133
  isinstance(data.get(o), list)
162
134
  for o in (
163
- 'allOf',
164
- 'anyOf',
165
- 'oneOf',
135
+ "allOf",
136
+ "anyOf",
137
+ "oneOf",
166
138
  )
167
139
  ):
168
140
  return True
169
- if isinstance(data.get('properties'), dict):
170
- return True
171
- return False
141
+ return bool(isinstance(data.get("properties"), dict))
172
142
 
173
143
 
174
144
  class InputFileType(Enum):
175
- Auto = 'auto'
176
- OpenAPI = 'openapi'
177
- JsonSchema = 'jsonschema'
178
- Json = 'json'
179
- Yaml = 'yaml'
180
- Dict = 'dict'
181
- CSV = 'csv'
182
- GraphQL = 'graphql'
145
+ Auto = "auto"
146
+ OpenAPI = "openapi"
147
+ JsonSchema = "jsonschema"
148
+ Json = "json"
149
+ Yaml = "yaml"
150
+ Dict = "dict"
151
+ CSV = "csv"
152
+ GraphQL = "graphql"
183
153
 
184
154
 
185
- RAW_DATA_TYPES: List[InputFileType] = [
155
+ RAW_DATA_TYPES: list[InputFileType] = [
186
156
  InputFileType.Json,
187
157
  InputFileType.Yaml,
188
158
  InputFileType.Dict,
@@ -192,22 +162,22 @@ RAW_DATA_TYPES: List[InputFileType] = [
192
162
 
193
163
 
194
164
  class DataModelType(Enum):
195
- PydanticBaseModel = 'pydantic.BaseModel'
196
- PydanticV2BaseModel = 'pydantic_v2.BaseModel'
197
- DataclassesDataclass = 'dataclasses.dataclass'
198
- TypingTypedDict = 'typing.TypedDict'
199
- MsgspecStruct = 'msgspec.Struct'
165
+ PydanticBaseModel = "pydantic.BaseModel"
166
+ PydanticV2BaseModel = "pydantic_v2.BaseModel"
167
+ DataclassesDataclass = "dataclasses.dataclass"
168
+ TypingTypedDict = "typing.TypedDict"
169
+ MsgspecStruct = "msgspec.Struct"
200
170
 
201
171
 
202
172
  class OpenAPIScope(Enum):
203
- Schemas = 'schemas'
204
- Paths = 'paths'
205
- Tags = 'tags'
206
- Parameters = 'parameters'
173
+ Schemas = "schemas"
174
+ Paths = "paths"
175
+ Tags = "tags"
176
+ Parameters = "parameters"
207
177
 
208
178
 
209
179
  class GraphQLScope(Enum):
210
- Schema = 'schema'
180
+ Schema = "schema"
211
181
 
212
182
 
213
183
  class Error(Exception):
@@ -221,51 +191,52 @@ class Error(Exception):
221
191
  class InvalidClassNameError(Error):
222
192
  def __init__(self, class_name: str) -> None:
223
193
  self.class_name = class_name
224
- message = f'title={repr(class_name)} is invalid class name.'
194
+ message = f"title={class_name!r} is invalid class name."
225
195
  super().__init__(message=message)
226
196
 
227
197
 
228
198
  def get_first_file(path: Path) -> Path: # pragma: no cover
229
199
  if path.is_file():
230
200
  return path
231
- elif path.is_dir():
232
- for child in path.rglob('*'):
201
+ if path.is_dir():
202
+ for child in path.rglob("*"):
233
203
  if child.is_file():
234
204
  return child
235
- raise Error('File not found')
205
+ msg = "File not found"
206
+ raise Error(msg)
236
207
 
237
208
 
238
- def generate(
239
- input_: Union[Path, str, ParseResult, Mapping[str, Any]],
209
+ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
210
+ input_: Path | str | ParseResult | Mapping[str, Any],
240
211
  *,
241
- input_filename: Optional[str] = None,
212
+ input_filename: str | None = None,
242
213
  input_file_type: InputFileType = InputFileType.Auto,
243
- output: Optional[Path] = None,
214
+ output: Path | None = None,
244
215
  output_model_type: DataModelType = DataModelType.PydanticBaseModel,
245
- target_python_version: PythonVersion = PythonVersion.PY_38,
246
- base_class: str = '',
247
- additional_imports: Optional[List[str]] = None,
248
- custom_template_dir: Optional[Path] = None,
249
- extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
216
+ target_python_version: PythonVersion = PythonVersionMin,
217
+ base_class: str = "",
218
+ additional_imports: list[str] | None = None,
219
+ custom_template_dir: Path | None = None,
220
+ extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
250
221
  validation: bool = False,
251
222
  field_constraints: bool = False,
252
223
  snake_case_field: bool = False,
253
224
  strip_default_none: bool = False,
254
- aliases: Optional[Mapping[str, str]] = None,
225
+ aliases: Mapping[str, str] | None = None,
255
226
  disable_timestamp: bool = False,
256
227
  enable_version_header: bool = False,
257
228
  allow_population_by_field_name: bool = False,
258
229
  allow_extra_fields: bool = False,
259
230
  apply_default_values_for_required_fields: bool = False,
260
231
  force_optional_for_required_fields: bool = False,
261
- class_name: Optional[str] = None,
232
+ class_name: str | None = None,
262
233
  use_standard_collections: bool = False,
263
234
  use_schema_description: bool = False,
264
235
  use_field_description: bool = False,
265
236
  use_default_kwarg: bool = False,
266
237
  reuse_model: bool = False,
267
- encoding: str = 'utf-8',
268
- enum_field_as_literal: Optional[LiteralType] = None,
238
+ encoding: str = "utf-8",
239
+ enum_field_as_literal: LiteralType | None = None,
269
240
  use_one_literal_as_default: bool = False,
270
241
  set_default_enum_member: bool = False,
271
242
  use_subclass_enum: bool = False,
@@ -273,54 +244,52 @@ def generate(
273
244
  use_generic_container_types: bool = False,
274
245
  enable_faux_immutability: bool = False,
275
246
  disable_appending_item_suffix: bool = False,
276
- strict_types: Optional[Sequence[StrictTypes]] = None,
277
- empty_enum_field_name: Optional[str] = None,
278
- custom_class_name_generator: Optional[Callable[[str], str]] = None,
279
- field_extra_keys: Optional[Set[str]] = None,
247
+ strict_types: Sequence[StrictTypes] | None = None,
248
+ empty_enum_field_name: str | None = None,
249
+ custom_class_name_generator: Callable[[str], str] | None = None,
250
+ field_extra_keys: set[str] | None = None,
280
251
  field_include_all_keys: bool = False,
281
- field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
282
- openapi_scopes: Optional[List[OpenAPIScope]] = None,
283
- graphql_scopes: Optional[List[GraphQLScope]] = None,
284
- wrap_string_literal: Optional[bool] = None,
252
+ field_extra_keys_without_x_prefix: set[str] | None = None,
253
+ openapi_scopes: list[OpenAPIScope] | None = None,
254
+ graphql_scopes: list[GraphQLScope] | None = None, # noqa: ARG001
255
+ wrap_string_literal: bool | None = None,
285
256
  use_title_as_name: bool = False,
286
257
  use_operation_id_as_name: bool = False,
287
258
  use_unique_items_as_set: bool = False,
288
- http_headers: Optional[Sequence[Tuple[str, str]]] = None,
259
+ http_headers: Sequence[tuple[str, str]] | None = None,
289
260
  http_ignore_tls: bool = False,
290
261
  use_annotated: bool = False,
291
262
  use_non_positive_negative_number_constrained_types: bool = False,
292
- original_field_name_delimiter: Optional[str] = None,
263
+ original_field_name_delimiter: str | None = None,
293
264
  use_double_quotes: bool = False,
294
265
  use_union_operator: bool = False,
295
266
  collapse_root_models: bool = False,
296
- special_field_name_prefix: Optional[str] = None,
267
+ special_field_name_prefix: str | None = None,
297
268
  remove_special_field_name_prefix: bool = False,
298
269
  capitalise_enum_members: bool = False,
299
270
  keep_model_order: bool = False,
300
- custom_file_header: Optional[str] = None,
301
- custom_file_header_path: Optional[Path] = None,
302
- custom_formatters: Optional[List[str]] = None,
303
- custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
271
+ custom_file_header: str | None = None,
272
+ custom_file_header_path: Path | None = None,
273
+ custom_formatters: list[str] | None = None,
274
+ custom_formatters_kwargs: dict[str, Any] | None = None,
304
275
  use_pendulum: bool = False,
305
- http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
276
+ http_query_parameters: Sequence[tuple[str, str]] | None = None,
306
277
  treat_dots_as_module: bool = False,
307
278
  use_exact_imports: bool = False,
308
- union_mode: Optional[UnionMode] = None,
309
- output_datetime_class: Optional[DatetimeClassType] = None,
279
+ union_mode: UnionMode | None = None,
280
+ output_datetime_class: DatetimeClassType | None = None,
310
281
  keyword_only: bool = False,
311
282
  no_alias: bool = False,
312
283
  ) -> None:
313
284
  remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
314
285
  if isinstance(input_, str):
315
- input_text: Optional[str] = input_
286
+ input_text: str | None = input_
316
287
  elif isinstance(input_, ParseResult):
317
- from datamodel_code_generator.http import get_body
288
+ from datamodel_code_generator.http import get_body # noqa: PLC0415
318
289
 
319
290
  input_text = remote_text_cache.get_or_put(
320
291
  input_.geturl(),
321
- default_factory=lambda url: get_body(
322
- url, http_headers, http_ignore_tls, http_query_parameters
323
- ),
292
+ default_factory=lambda url: get_body(url, http_headers, http_ignore_tls, http_query_parameters),
324
293
  )
325
294
  else:
326
295
  input_text = None
@@ -330,84 +299,85 @@ def generate(
330
299
  if input_file_type == InputFileType.Auto:
331
300
  try:
332
301
  input_text_ = (
333
- get_first_file(input_).read_text(encoding=encoding)
334
- if isinstance(input_, Path)
335
- else input_text
302
+ get_first_file(input_).read_text(encoding=encoding) if isinstance(input_, Path) else input_text
336
303
  )
337
304
  assert isinstance(input_text_, str)
338
305
  input_file_type = infer_input_type(input_text_)
339
- print(
306
+ print( # noqa: T201
340
307
  inferred_message.format(input_file_type.value),
341
308
  file=sys.stderr,
342
309
  )
343
- except: # noqa
344
- raise Error('Invalid file format')
310
+ except Exception as exc:
311
+ msg = "Invalid file format"
312
+ raise Error(msg) from exc
345
313
 
346
- kwargs: Dict[str, Any] = {}
347
- if input_file_type == InputFileType.OpenAPI:
348
- from datamodel_code_generator.parser.openapi import OpenAPIParser
314
+ kwargs: dict[str, Any] = {}
315
+ if input_file_type == InputFileType.OpenAPI: # noqa: PLR1702
316
+ from datamodel_code_generator.parser.openapi import OpenAPIParser # noqa: PLC0415
349
317
 
350
- parser_class: Type[Parser] = OpenAPIParser
351
- kwargs['openapi_scopes'] = openapi_scopes
318
+ parser_class: type[Parser] = OpenAPIParser
319
+ kwargs["openapi_scopes"] = openapi_scopes
352
320
  elif input_file_type == InputFileType.GraphQL:
353
- from datamodel_code_generator.parser.graphql import GraphQLParser
321
+ from datamodel_code_generator.parser.graphql import GraphQLParser # noqa: PLC0415
354
322
 
355
- parser_class: Type[Parser] = GraphQLParser
323
+ parser_class: type[Parser] = GraphQLParser
356
324
  else:
357
- from datamodel_code_generator.parser.jsonschema import JsonSchemaParser
325
+ from datamodel_code_generator.parser.jsonschema import JsonSchemaParser # noqa: PLC0415
358
326
 
359
327
  parser_class = JsonSchemaParser
360
328
 
361
329
  if input_file_type in RAW_DATA_TYPES:
362
- import json
330
+ import json # noqa: PLC0415
363
331
 
364
332
  try:
365
333
  if isinstance(input_, Path) and input_.is_dir(): # pragma: no cover
366
- raise Error(f'Input must be a file for {input_file_type}')
367
- obj: Dict[Any, Any]
334
+ msg = f"Input must be a file for {input_file_type}"
335
+ raise Error(msg) # noqa: TRY301
336
+ obj: dict[Any, Any]
368
337
  if input_file_type == InputFileType.CSV:
369
- import csv
338
+ import csv # noqa: PLC0415
370
339
 
371
- def get_header_and_first_line(csv_file: IO[str]) -> Dict[str, Any]:
340
+ def get_header_and_first_line(csv_file: IO[str]) -> dict[str, Any]:
372
341
  csv_reader = csv.DictReader(csv_file)
373
- return dict(zip(csv_reader.fieldnames, next(csv_reader))) # type: ignore
342
+ assert csv_reader.fieldnames is not None
343
+ return dict(zip(csv_reader.fieldnames, next(csv_reader)))
374
344
 
375
345
  if isinstance(input_, Path):
376
346
  with input_.open(encoding=encoding) as f:
377
347
  obj = get_header_and_first_line(f)
378
348
  else:
379
- import io
349
+ import io # noqa: PLC0415
380
350
 
381
351
  obj = get_header_and_first_line(io.StringIO(input_text))
382
352
  elif input_file_type == InputFileType.Yaml:
383
- obj = load_yaml(
384
- input_.read_text(encoding=encoding) # type: ignore
385
- if isinstance(input_, Path)
386
- else input_text
387
- )
353
+ if isinstance(input_, Path):
354
+ obj = load_yaml(input_.read_text(encoding=encoding))
355
+ else:
356
+ assert input_text is not None
357
+ obj = load_yaml(input_text)
388
358
  elif input_file_type == InputFileType.Json:
389
- obj = json.loads(
390
- input_.read_text(encoding=encoding) # type: ignore
391
- if isinstance(input_, Path)
392
- else input_text
393
- )
359
+ if isinstance(input_, Path):
360
+ obj = json.loads(input_.read_text(encoding=encoding))
361
+ else:
362
+ assert input_text is not None
363
+ obj = json.loads(input_text)
394
364
  elif input_file_type == InputFileType.Dict:
395
- import ast
365
+ import ast # noqa: PLC0415
396
366
 
397
367
  # Input can be a dict object stored in a python file
398
368
  obj = (
399
- ast.literal_eval(
400
- input_.read_text(encoding=encoding) # type: ignore
401
- )
369
+ ast.literal_eval(input_.read_text(encoding=encoding))
402
370
  if isinstance(input_, Path)
403
- else input_
371
+ else cast("dict[Any, Any]", input_)
404
372
  )
405
373
  else: # pragma: no cover
406
- raise Error(f'Unsupported input file type: {input_file_type}')
407
- except: # noqa
408
- raise Error('Invalid file format')
374
+ msg = f"Unsupported input file type: {input_file_type}"
375
+ raise Error(msg) # noqa: TRY301
376
+ except Exception as exc:
377
+ msg = "Invalid file format"
378
+ raise Error(msg) from exc
409
379
 
410
- from genson import SchemaBuilder
380
+ from genson import SchemaBuilder # noqa: PLC0415
411
381
 
412
382
  builder = SchemaBuilder()
413
383
  builder.add_object(obj)
@@ -418,17 +388,16 @@ def generate(
418
388
 
419
389
  if union_mode is not None:
420
390
  if output_model_type == DataModelType.PydanticV2BaseModel:
421
- default_field_extras = {'union_mode': union_mode}
391
+ default_field_extras = {"union_mode": union_mode}
422
392
  else: # pragma: no cover
423
- raise Error('union_mode is only supported for pydantic_v2.BaseModel')
393
+ msg = "union_mode is only supported for pydantic_v2.BaseModel"
394
+ raise Error(msg)
424
395
  else:
425
396
  default_field_extras = None
426
397
 
427
- from datamodel_code_generator.model import get_data_model_types
398
+ from datamodel_code_generator.model import get_data_model_types # noqa: PLC0415
428
399
 
429
- data_model_types = get_data_model_types(
430
- output_model_type, target_python_version, output_datetime_class
431
- )
400
+ data_model_types = get_data_model_types(output_model_type, target_python_version, output_datetime_class)
432
401
  source = input_text or input_
433
402
  assert not isinstance(source, Mapping)
434
403
  parser = parser_class(
@@ -454,9 +423,7 @@ def generate(
454
423
  force_optional_for_required_fields=force_optional_for_required_fields,
455
424
  class_name=class_name,
456
425
  use_standard_collections=use_standard_collections,
457
- base_path=input_.parent
458
- if isinstance(input_, Path) and input_.is_file()
459
- else None,
426
+ base_path=input_.parent if isinstance(input_, Path) and input_.is_file() else None,
460
427
  use_schema_description=use_schema_description,
461
428
  use_field_description=use_field_description,
462
429
  use_default_kwarg=use_default_kwarg,
@@ -514,24 +481,27 @@ def generate(
514
481
  results = parser.parse()
515
482
  if not input_filename: # pragma: no cover
516
483
  if isinstance(input_, str):
517
- input_filename = '<stdin>'
484
+ input_filename = "<stdin>"
518
485
  elif isinstance(input_, ParseResult):
519
486
  input_filename = input_.geturl()
520
487
  elif input_file_type == InputFileType.Dict:
521
488
  # input_ might be a dict object provided directly, and missing a name field
522
- input_filename = getattr(input_, 'name', '<dict>')
489
+ input_filename = getattr(input_, "name", "<dict>")
523
490
  else:
524
491
  assert isinstance(input_, Path)
525
492
  input_filename = input_.name
526
493
  if not results:
527
- raise Error('Models not found in the input data')
528
- elif isinstance(results, str):
494
+ msg = "Models not found in the input data"
495
+ raise Error(msg)
496
+ if isinstance(results, str):
529
497
  modules = {output: (results, input_filename)}
530
498
  else:
531
499
  if output is None:
532
- raise Error('Modular references require an output directory')
500
+ msg = "Modular references require an output directory"
501
+ raise Error(msg)
533
502
  if output.suffix:
534
- raise Error('Modular references require an output directory, not a file')
503
+ msg = "Modular references require an output directory, not a file"
504
+ raise Error(msg)
535
505
  modules = {
536
506
  output.joinpath(*name): (
537
507
  result.body,
@@ -549,22 +519,22 @@ def generate(
549
519
  # generated by datamodel-codegen:
550
520
  # filename: {}"""
551
521
  if not disable_timestamp:
552
- header += f'\n# timestamp: {timestamp}'
522
+ header += f"\n# timestamp: {timestamp}"
553
523
  if enable_version_header:
554
- header += f'\n# version: {get_version()}'
524
+ header += f"\n# version: {get_version()}"
555
525
 
556
- file: Optional[IO[Any]]
526
+ file: IO[Any] | None
557
527
  for path, (body, filename) in modules.items():
558
528
  if path is None:
559
529
  file = None
560
530
  else:
561
531
  if not path.parent.exists():
562
532
  path.parent.mkdir(parents=True)
563
- file = path.open('wt', encoding=encoding)
533
+ file = path.open("wt", encoding=encoding)
564
534
 
565
535
  print(custom_file_header or header.format(filename), file=file)
566
536
  if body:
567
- print('', file=file)
537
+ print(file=file)
568
538
  print(body.rstrip(), file=file)
569
539
 
570
540
  if file is not None:
@@ -574,22 +544,24 @@ def generate(
574
544
  def infer_input_type(text: str) -> InputFileType:
575
545
  if is_openapi(text):
576
546
  return InputFileType.OpenAPI
577
- elif is_schema(text):
547
+ if is_schema(text):
578
548
  return InputFileType.JsonSchema
579
549
  return InputFileType.Json
580
550
 
581
551
 
582
552
  inferred_message = (
583
- 'The input file type was determined to be: {}\nThis can be specified explicitly with the '
584
- '`--input-file-type` option.'
553
+ "The input file type was determined to be: {}\nThis can be specified explicitly with the "
554
+ "`--input-file-type` option."
585
555
  )
586
556
 
587
557
  __all__ = [
588
- 'DefaultPutDict',
589
- 'Error',
590
- 'InputFileType',
591
- 'InvalidClassNameError',
592
- 'LiteralType',
593
- 'PythonVersion',
594
- 'generate',
558
+ "MAX_VERSION",
559
+ "MIN_VERSION",
560
+ "DefaultPutDict",
561
+ "Error",
562
+ "InputFileType",
563
+ "InvalidClassNameError",
564
+ "LiteralType",
565
+ "PythonVersion",
566
+ "generate",
595
567
  ]