datamodel-code-generator 0.27.2__py3-none-any.whl → 0.27.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (43)
  1. datamodel_code_generator/__init__.py +159 -190
  2. datamodel_code_generator/__main__.py +151 -173
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -99
  5. datamodel_code_generator/http.py +9 -10
  6. datamodel_code_generator/imports.py +57 -64
  7. datamodel_code_generator/model/__init__.py +26 -31
  8. datamodel_code_generator/model/base.py +94 -127
  9. datamodel_code_generator/model/dataclass.py +58 -59
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +112 -126
  13. datamodel_code_generator/model/pydantic/__init__.py +14 -27
  14. datamodel_code_generator/model/pydantic/base_model.py +120 -139
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +86 -117
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +17 -17
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -119
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +8 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +42 -41
  27. datamodel_code_generator/model/types.py +19 -17
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +12 -11
  30. datamodel_code_generator/parser/base.py +320 -492
  31. datamodel_code_generator/parser/graphql.py +80 -111
  32. datamodel_code_generator/parser/jsonschema.py +422 -580
  33. datamodel_code_generator/parser/openapi.py +175 -204
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +192 -274
  36. datamodel_code_generator/types.py +147 -182
  37. datamodel_code_generator/util.py +22 -26
  38. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/METADATA +7 -1
  39. datamodel_code_generator-0.27.3.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/licenses/LICENSE +0 -0
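
The two file diffs reproduced below (model/base.py and model/dataclass.py) are representative of the release as a whole: most of the visible churn is a mechanical style and typing migration rather than a behavioral change. Modules gain "from __future__ import annotations", typing aliases (List, Dict, Optional, Union) give way to builtin generics and PEP 604 unions in annotations, string literals move to double quotes, and Ruff-style noqa codes (UP006, UP045, TC001, PLR0913, and similar) mark spots intentionally left on the old spelling, presumably because pydantic must still resolve those annotations at runtime. A minimal sketch of the pattern, using a hypothetical ExampleModel rather than code from the package:

# A minimal sketch of the migration (hypothetical ExampleModel, not code from the
# package): with postponed evaluation of annotations, builtin generics and
# "X | None" unions are legal in annotations even on interpreters that predate
# PEP 585/604 syntax support, so most typing aliases can be dropped.
from __future__ import annotations

from typing import Any


class ExampleModel:
    TEMPLATE_FILE_PATH: str = "example.jinja2"  # 0.27.2 style: 'example.jinja2'

    def __init__(
        self,
        names: list[str] | None = None,  # 0.27.2 style: Optional[List[str]] = None
        extras: dict[str, Any] | None = None,  # 0.27.2 style: Optional[Dict[str, Any]] = None
    ) -> None:
        self.names: list[str] = names or []
        self.extras: dict[str, Any] = extras or {}


print(ExampleModel(names=["a"]).names)  # -> ['a']
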
datamodel_code_generator/model/base.py

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from abc import ABC, abstractmethod
 from collections import defaultdict
 from copy import deepcopy
@@ -7,16 +9,12 @@ from typing import (
     TYPE_CHECKING,
     Any,
     ClassVar,
-    DefaultDict,
     Dict,
-    FrozenSet,
     Iterator,
-    List,
     Optional,
     Set,
     Tuple,
     TypeVar,
-    Union,
 )
 from warnings import warn
 
@@ -42,18 +40,18 @@ from datamodel_code_generator.types import (
 )
 from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict, cached_property
 
-TEMPLATE_DIR: Path = Path(__file__).parents[0] / 'template'
+TEMPLATE_DIR: Path = Path(__file__).parents[0] / "template"
 
-ALL_MODEL: str = '#all#'
+ALL_MODEL: str = "#all#"
 
-ConstraintsBaseT = TypeVar('ConstraintsBaseT', bound='ConstraintsBase')
+ConstraintsBaseT = TypeVar("ConstraintsBaseT", bound="ConstraintsBase")
 
 
 class ConstraintsBase(_BaseModel):
-    unique_items: Optional[bool] = Field(None, alias='uniqueItems')
-    _exclude_fields: ClassVar[Set[str]] = {'has_constraints'}
+    unique_items: Optional[bool] = Field(None, alias="uniqueItems")  # noqa: UP045
+    _exclude_fields: ClassVar[Set[str]] = {"has_constraints"}  # noqa: UP006
     if PYDANTIC_V2:
-        model_config = ConfigDict(  # pyright: ignore [reportAssignmentType]
+        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
             arbitrary_types_allowed=True, ignored_types=(cached_property,)
         )
     else:
@@ -67,61 +65,51 @@ class ConstraintsBase(_BaseModel):
         return any(v is not None for v in self.dict().values())
 
     @staticmethod
-    def merge_constraints(
-        a: ConstraintsBaseT, b: ConstraintsBaseT
-    ) -> Optional[ConstraintsBaseT]:
+    def merge_constraints(a: ConstraintsBaseT, b: ConstraintsBaseT) -> ConstraintsBaseT | None:
         constraints_class = None
         if isinstance(a, ConstraintsBase):  # pragma: no cover
-            root_type_field_constraints = {
-                k: v for k, v in a.dict(by_alias=True).items() if v is not None
-            }
+            root_type_field_constraints = {k: v for k, v in a.dict(by_alias=True).items() if v is not None}
             constraints_class = a.__class__
         else:
             root_type_field_constraints = {}  # pragma: no cover
 
         if isinstance(b, ConstraintsBase):  # pragma: no cover
-            model_field_constraints = {
-                k: v for k, v in b.dict(by_alias=True).items() if v is not None
-            }
+            model_field_constraints = {k: v for k, v in b.dict(by_alias=True).items() if v is not None}
             constraints_class = constraints_class or b.__class__
         else:
             model_field_constraints = {}
 
-        if constraints_class is None or not issubclass(
-            constraints_class, ConstraintsBase
-        ):  # pragma: no cover
+        if constraints_class is None or not issubclass(constraints_class, ConstraintsBase):  # pragma: no cover
             return None
 
-        return constraints_class.parse_obj(
-            {
-                **root_type_field_constraints,
-                **model_field_constraints,
-            }
-        )
+        return constraints_class.parse_obj({
+            **root_type_field_constraints,
+            **model_field_constraints,
+        })
 
 
 class DataModelFieldBase(_BaseModel):
-    name: Optional[str] = None
-    default: Optional[Any] = None
+    name: Optional[str] = None  # noqa: UP045
+    default: Optional[Any] = None  # noqa: UP045
     required: bool = False
-    alias: Optional[str] = None
+    alias: Optional[str] = None  # noqa: UP045
     data_type: DataType
     constraints: Any = None
     strip_default_none: bool = False
-    nullable: Optional[bool] = None
-    parent: Optional[Any] = None
-    extras: Dict[str, Any] = {}
+    nullable: Optional[bool] = None  # noqa: UP045
+    parent: Optional[Any] = None  # noqa: UP045
+    extras: Dict[str, Any] = {}  # noqa: RUF012, UP006
     use_annotated: bool = False
     has_default: bool = False
     use_field_description: bool = False
     const: bool = False
-    original_name: Optional[str] = None
+    original_name: Optional[str] = None  # noqa: UP045
     use_default_kwarg: bool = False
     use_one_literal_as_default: bool = False
-    _exclude_fields: ClassVar[Set[str]] = {'parent'}
-    _pass_fields: ClassVar[Set[str]] = {'parent', 'data_type'}
+    _exclude_fields: ClassVar[Set[str]] = {"parent"}  # noqa: UP006
+    _pass_fields: ClassVar[Set[str]] = {"parent", "data_type"}  # noqa: UP006
     can_have_extra_keys: ClassVar[bool] = True
-    type_has_null: Optional[bool] = None
+    type_has_null: Optional[bool] = None  # noqa: UP045
 
     if not TYPE_CHECKING:
 
@@ -132,46 +120,39 @@ class DataModelFieldBase(_BaseModel):
             self.process_const()
 
     def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
-        self.default = self.extras['const']
+        if "const" not in self.extras:
+            return
+        self.default = self.extras["const"]
         self.const = True
         self.required = False
         self.nullable = False
 
     @property
-    def type_hint(self) -> str:
+    def type_hint(self) -> str:  # noqa: PLR0911
         type_hint = self.data_type.type_hint
 
         if not type_hint:
             return NONE
-        elif self.has_default_factory:
-            return type_hint
-        elif self.data_type.is_optional and self.data_type.type != ANY:
+        if self.has_default_factory or (self.data_type.is_optional and self.data_type.type != ANY):
            return type_hint
-        elif self.nullable is not None:
+        if self.nullable is not None:
            if self.nullable:
                return get_optional_type(type_hint, self.data_type.use_union_operator)
            return type_hint
-        elif self.required:
+        if self.required:
            if self.type_has_null:
                return get_optional_type(type_hint, self.data_type.use_union_operator)
            return type_hint
-        elif self.fall_back_to_nullable:
+        if self.fall_back_to_nullable:
            return get_optional_type(type_hint, self.data_type.use_union_operator)
-        else:
-            return type_hint
+        return type_hint
 
     @property
-    def imports(self) -> Tuple[Import, ...]:
+    def imports(self) -> tuple[Import, ...]:
         type_hint = self.type_hint
         has_union = not self.data_type.use_union_operator and UNION_PREFIX in type_hint
-        imports: List[Union[Tuple[Import], Iterator[Import]]] = [
-            iter(
-                i
-                for i in self.data_type.all_imports
-                if not (not has_union and i == IMPORT_UNION)
-            )
+        imports: list[tuple[Import] | Iterator[Import]] = [
+            iter(i for i in self.data_type.all_imports if not (not has_union and i == IMPORT_UNION))
         ]
 
         if self.fall_back_to_nullable:
@@ -179,39 +160,34 @@ class DataModelFieldBase(_BaseModel):
                 self.nullable or (self.nullable is None and not self.required)
             ) and not self.data_type.use_union_operator:
                 imports.append((IMPORT_OPTIONAL,))
-        else:
-            if (
-                self.nullable and not self.data_type.use_union_operator
-            ):  # pragma: no cover
-                imports.append((IMPORT_OPTIONAL,))
+        elif self.nullable and not self.data_type.use_union_operator:  # pragma: no cover
+            imports.append((IMPORT_OPTIONAL,))
         if self.use_annotated and self.annotated:
             import_annotated = (
-                IMPORT_ANNOTATED
-                if self.data_type.python_version.has_annotated_type
-                else IMPORT_ANNOTATED_BACKPORT
+                IMPORT_ANNOTATED if self.data_type.python_version.has_annotated_type else IMPORT_ANNOTATED_BACKPORT
             )
             imports.append((import_annotated,))
         return chain_as_tuple(*imports)
 
     @property
-    def docstring(self) -> Optional[str]:
+    def docstring(self) -> str | None:
         if self.use_field_description:
-            description = self.extras.get('description', None)
+            description = self.extras.get("description", None)
             if description is not None:
-                return f'{description}'
+                return f"{description}"
         return None
 
     @property
-    def unresolved_types(self) -> FrozenSet[str]:
+    def unresolved_types(self) -> frozenset[str]:
         return self.data_type.unresolved_types
 
     @property
-    def field(self) -> Optional[str]:
+    def field(self) -> str | None:
         """for backwards compatibility"""
         return None
 
     @property
-    def method(self) -> Optional[str]:
+    def method(self) -> str | None:
         return None
 
     @property
@@ -219,12 +195,12 @@ class DataModelFieldBase(_BaseModel):
         return repr(self.default)
 
     @property
-    def annotated(self) -> Optional[str]:
+    def annotated(self) -> str | None:
         return None
 
     @property
     def has_default_factory(self) -> bool:
-        return 'default_factory' in self.extras
+        return "default_factory" in self.extras
 
     @property
     def fall_back_to_nullable(self) -> bool:
@@ -234,22 +210,22 @@
 @lru_cache
 def get_template(template_file_path: Path) -> Template:
     loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
-    environment: Environment = Environment(loader=loader)
+    environment: Environment = Environment(loader=loader)  # noqa: S701
     return environment.get_template(template_file_path.name)
 
 
-def get_module_path(name: str, file_path: Optional[Path]) -> List[str]:
+def get_module_path(name: str, file_path: Path | None) -> list[str]:
     if file_path:
         return [
             *file_path.parts[:-1],
             file_path.stem,
-            *name.split('.')[:-1],
+            *name.split(".")[:-1],
         ]
-    return name.split('.')[:-1]
+    return name.split(".")[:-1]
 
 
-def get_module_name(name: str, file_path: Optional[Path]) -> str:
-    return '.'.join(get_module_path(name, file_path))
+def get_module_name(name: str, file_path: Path | None) -> str:
+    return ".".join(get_module_path(name, file_path))
 
 
 class TemplateBase(ABC):
@@ -280,43 +256,42 @@ UNDEFINED: Any = object()
 
 
 class DataModel(TemplateBase, Nullable, ABC):
-    TEMPLATE_FILE_PATH: ClassVar[str] = ''
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
+    TEMPLATE_FILE_PATH: ClassVar[str] = ""
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()  # noqa: UP006
 
-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
     ) -> None:
         self.keyword_only = keyword_only
         if not self.TEMPLATE_FILE_PATH:
-            raise Exception('TEMPLATE_FILE_PATH is undefined')
+            msg = "TEMPLATE_FILE_PATH is undefined"
+            raise Exception(msg)  # noqa: TRY002
 
-        self._custom_template_dir: Optional[Path] = custom_template_dir
-        self.decorators: List[str] = decorators or []
-        self._additional_imports: List[Import] = []
+        self._custom_template_dir: Path | None = custom_template_dir
+        self.decorators: list[str] = decorators or []
+        self._additional_imports: list[Import] = []
         self.custom_base_class = custom_base_class
         if base_classes:
-            self.base_classes: List[BaseClassDataType] = [
-                BaseClassDataType(reference=b) for b in base_classes
-            ]
+            self.base_classes: list[BaseClassDataType] = [BaseClassDataType(reference=b) for b in base_classes]
         else:
             self.set_base_class()
 
-        self.file_path: Optional[Path] = path
+        self.file_path: Path | None = path
         self.reference: Reference = reference
 
         self.reference.source = self
@@ -324,9 +299,7 @@ class DataModel(TemplateBase, Nullable, ABC):
         self.extra_template_data = (
             # The supplied defaultdict will either create a new entry,
             # or already contain a predefined entry for this type
-            extra_template_data[self.name]
-            if extra_template_data is not None
-            else defaultdict(dict)
+            extra_template_data[self.name] if extra_template_data is not None else defaultdict(dict)
         )
 
         self.fields = self._validate_fields(fields) if fields else []
@@ -342,7 +315,7 @@
             # end up inadvertently sharing state (such as "base_class_kwargs")
             self.extra_template_data.update(deepcopy(all_model_extra_template_data))
 
-        self.methods: List[str] = methods or []
+        self.methods: list[str] = methods or []
 
         self.description = description
         for field in self.fields:
@@ -352,18 +325,15 @@
         self.default: Any = default
         self._nullable: bool = nullable
 
-    def _validate_fields(
-        self, fields: List[DataModelFieldBase]
-    ) -> List[DataModelFieldBase]:
-        names: Set[str] = set()
-        unique_fields: List[DataModelFieldBase] = []
+    def _validate_fields(self, fields: list[DataModelFieldBase]) -> list[DataModelFieldBase]:
+        names: set[str] = set()
+        unique_fields: list[DataModelFieldBase] = []
         for field in fields:
             if field.name:
                 if field.name in names:
-                    warn(f'Field name `{field.name}` is duplicated on {self.name}')
+                    warn(f"Field name `{field.name}` is duplicated on {self.name}", stacklevel=2)
                     continue
-                else:
-                    names.add(field.name)
+                names.add(field.name)
             unique_fields.append(field)
         return unique_fields
 
@@ -371,7 +341,7 @@
         base_class = self.custom_base_class or self.BASE_CLASS
         if not base_class:
             self.base_classes = []
-            return None
+            return
         base_class_import = Import.from_full_path(base_class)
         self._additional_imports.append(base_class_import)
         self.base_classes = [BaseClassDataType.from_import(base_class_import)]
@@ -386,14 +356,14 @@
         return template_file_path
 
     @property
-    def imports(self) -> Tuple[Import, ...]:
+    def imports(self) -> tuple[Import, ...]:
         return chain_as_tuple(
             (i for f in self.fields for i in f.imports),
             self._additional_imports,
         )
 
     @property
-    def reference_classes(self) -> FrozenSet[str]:
+    def reference_classes(self) -> frozenset[str]:
         return frozenset(
             {r.reference.path for r in self.base_classes if r.reference}
             | {t for f in self.fields for t in f.unresolved_types}
@@ -405,16 +375,16 @@
 
     @property
     def duplicate_name(self) -> str:
-        return self.reference.duplicate_name or ''
+        return self.reference.duplicate_name or ""
 
     @property
     def base_class(self) -> str:
-        return ', '.join(b.type_hint for b in self.base_classes)
+        return ", ".join(b.type_hint for b in self.base_classes)
 
     @staticmethod
     def _get_class_name(name: str) -> str:
-        if '.' in name:
-            return name.rsplit('.', 1)[-1]
+        if "." in name:
+            return name.rsplit(".", 1)[-1]
         return name
 
     @property
@@ -423,10 +393,8 @@
 
     @class_name.setter
     def class_name(self, class_name: str) -> None:
-        if '.' in self.reference.name:
-            self.reference.name = (
-                f'{self.reference.name.rsplit(".", 1)[0]}.{class_name}'
-            )
+        if "." in self.reference.name:
+            self.reference.name = f"{self.reference.name.rsplit('.', 1)[0]}.{class_name}"
         else:
             self.reference.name = class_name
 
@@ -435,7 +403,7 @@
         return self._get_class_name(self.duplicate_name)
 
     @property
-    def module_path(self) -> List[str]:
+    def module_path(self) -> list[str]:
         return get_module_path(self.name, self.file_path)
 
     @property
@@ -456,8 +424,8 @@
     def path(self) -> str:
         return self.reference.path
 
-    def render(self, *, class_name: Optional[str] = None) -> str:
-        response = self._render(
+    def render(self, *, class_name: str | None = None) -> str:
+        return self._render(
             class_name=class_name or self.class_name,
             fields=self.fields,
             decorators=self.decorators,
@@ -467,4 +435,3 @@
             keyword_only=self.keyword_only,
             **self.extra_template_data,
         )
-        return response
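
Beyond re-quoting and annotation changes, the most visible structural rewrite in base.py above is DataModelFieldBase.type_hint, which replaces the elif/else ladder with early returns and folds the has_default_factory and is_optional branches together. The sketch below mirrors only the branch order of the refactored property; the names are stand-ins, not the package API (wrap_optional approximates what get_optional_type is assumed to produce), and the real property additionally special-cases the Any type:

from __future__ import annotations


def wrap_optional(hint: str, use_union_operator: bool) -> str:
    # Stand-in for get_optional_type: assumed to wrap the hint as Optional[...]
    # or append "| None" depending on the union-operator setting.
    return f"{hint} | None" if use_union_operator else f"Optional[{hint}]"


def resolve_type_hint(  # mirrors the early-return order of the refactored property
    hint: str,
    *,
    has_default_factory: bool = False,
    is_optional: bool = False,
    nullable: bool | None = None,
    required: bool = False,
    type_has_null: bool = False,
    fall_back_to_nullable: bool = True,
    use_union_operator: bool = False,
) -> str:
    if not hint:
        return "None"
    if has_default_factory or is_optional:
        return hint
    if nullable is not None:
        return wrap_optional(hint, use_union_operator) if nullable else hint
    if required:
        return wrap_optional(hint, use_union_operator) if type_has_null else hint
    if fall_back_to_nullable:
        return wrap_optional(hint, use_union_operator)
    return hint


print(resolve_type_hint("str", required=True))           # str
print(resolve_type_hint("str", nullable=True))            # Optional[str]
print(resolve_type_hint("int", use_union_operator=True))  # int | None
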
datamodel_code_generator/model/dataclass.py

@@ -1,10 +1,9 @@
-from pathlib import Path
+from __future__ import annotations
+
 from typing import (
+    TYPE_CHECKING,
     Any,
     ClassVar,
-    DefaultDict,
-    Dict,
-    List,
     Optional,
     Sequence,
     Set,
@@ -22,37 +21,42 @@ from datamodel_code_generator.imports import (
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model.base import UNDEFINED
 from datamodel_code_generator.model.imports import IMPORT_DATACLASS, IMPORT_FIELD
-from datamodel_code_generator.model.pydantic.base_model import Constraints
 from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
 from datamodel_code_generator.model.types import type_map_factory
-from datamodel_code_generator.reference import Reference
 from datamodel_code_generator.types import DataType, StrictTypes, Types, chain_as_tuple
 
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+from datamodel_code_generator.model.pydantic.base_model import Constraints  # noqa: TC001
+
 
 def _has_field_assignment(field: DataModelFieldBase) -> bool:
     return bool(field.field) or not (
-        field.required
-        or (field.represented_default == 'None' and field.strip_default_none)
+        field.required or (field.represented_default == "None" and field.strip_default_none)
     )
 
 
 class DataClass(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'dataclass.jinja2'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)
+    TEMPLATE_FILE_PATH: ClassVar[str] = "dataclass.jinja2"
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)  # noqa: UP006
 
-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
@@ -75,21 +79,21 @@ class DataClass(DataModel):
 
 
 class DataModelField(DataModelFieldBase):
-    _FIELD_KEYS: ClassVar[Set[str]] = {
-        'default_factory',
-        'init',
-        'repr',
-        'hash',
-        'compare',
-        'metadata',
-        'kw_only',
+    _FIELD_KEYS: ClassVar[Set[str]] = {  # noqa: UP006
+        "default_factory",
+        "init",
+        "repr",
+        "hash",
+        "compare",
+        "metadata",
+        "kw_only",
     }
-    constraints: Optional[Constraints] = None
+    constraints: Optional[Constraints] = None  # noqa: UP045
 
     @property
-    def imports(self) -> Tuple[Import, ...]:
+    def imports(self) -> tuple[Import, ...]:
         field = self.field
-        if field and field.startswith('field('):
+        if field and field.startswith("field("):
             return chain_as_tuple(super().imports, (IMPORT_FIELD,))
         return super().imports
 
@@ -99,60 +103,55 @@ class DataModelField(DataModelFieldBase):
         }
 
     @property
-    def field(self) -> Optional[str]:
+    def field(self) -> str | None:
         """for backwards compatibility"""
         result = str(self)
-        if result == '':
+        if not result:
             return None
-
         return result
 
     def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._FIELD_KEYS
-        }
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
 
         if self.default != UNDEFINED and self.default is not None:
-            data['default'] = self.default
+            data["default"] = self.default
 
         if self.required:
             data = {
                 k: v
                 for k, v in data.items()
                 if k
-                not in (
-                    'default',
-                    'default_factory',
-                )
+                not in {
+                    "default",
+                    "default_factory",
+                }
             }
 
         if not data:
-            return ''
+            return ""
 
-        if len(data) == 1 and 'default' in data:
-            default = data['default']
+        if len(data) == 1 and "default" in data:
+            default = data["default"]
 
             if isinstance(default, (list, dict)):
-                return f'field(default_factory=lambda :{repr(default)})'
+                return f"field(default_factory=lambda :{default!r})"
             return repr(default)
-        kwargs = [
-            f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
-        ]
-        return f'field({", ".join(kwargs)})'
+        kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]
+        return f"field({', '.join(kwargs)})"
 
 
 class DataTypeManager(_DataTypeManager):
-    def __init__(
+    def __init__(  # noqa: PLR0913, PLR0917
         self,
         python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
+    ) -> None:
         super().__init__(
             python_version,
             use_standard_collections,
@@ -175,7 +174,7 @@ class DataTypeManager(_DataTypeManager):
             else {}
         )
 
-        self.type_map: Dict[Types, DataType] = {
+        self.type_map: dict[Types, DataType] = {
             **type_map_factory(self.data_type),
             **datetime_map,
        }
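
One behavior worth noting in the dataclass.py hunks above: DataModelField.__str__ renders a field's default, and mutable defaults (list or dict) are emitted as a default_factory because the standard dataclasses module rejects mutable default values. A small stand-alone mirror of just that branch (hypothetical helper, not the package API):

from __future__ import annotations

from typing import Any


def render_default(default: Any) -> str:
    # Mirrors the branch in DataModelField.__str__: mutable defaults become a
    # default_factory so generated dataclass fields do not share one instance.
    if isinstance(default, (list, dict)):
        return f"field(default_factory=lambda :{default!r})"
    return repr(default)


print(render_default([]))        # field(default_factory=lambda :[])
print(render_default({"a": 1}))  # field(default_factory=lambda :{'a': 1})
print(render_default(3))         # 3
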