datamodel-code-generator 0.27.2__py3-none-any.whl → 0.27.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of datamodel-code-generator might be problematic.

Files changed (43)
  1. datamodel_code_generator/__init__.py +159 -190
  2. datamodel_code_generator/__main__.py +151 -173
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -99
  5. datamodel_code_generator/http.py +9 -10
  6. datamodel_code_generator/imports.py +57 -64
  7. datamodel_code_generator/model/__init__.py +26 -31
  8. datamodel_code_generator/model/base.py +94 -127
  9. datamodel_code_generator/model/dataclass.py +58 -59
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +112 -126
  13. datamodel_code_generator/model/pydantic/__init__.py +14 -27
  14. datamodel_code_generator/model/pydantic/base_model.py +120 -139
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +86 -117
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +17 -17
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -119
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +8 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +42 -41
  27. datamodel_code_generator/model/types.py +19 -17
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +12 -11
  30. datamodel_code_generator/parser/base.py +320 -492
  31. datamodel_code_generator/parser/graphql.py +80 -111
  32. datamodel_code_generator/parser/jsonschema.py +422 -580
  33. datamodel_code_generator/parser/openapi.py +175 -204
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +192 -274
  36. datamodel_code_generator/types.py +147 -182
  37. datamodel_code_generator/util.py +22 -26
  38. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/METADATA +7 -1
  39. datamodel_code_generator-0.27.3.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.27.3.dist-info}/licenses/LICENSE +0 -0
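
The hunks below appear to come from datamodel_code_generator/types.py (entry 36 above). Most of the churn is a formatting migration (double quotes, joined lines, PEP 604 annotations behind `from __future__ import annotations`) plus added noqa markers, so the observable behaviour of the string-building helpers looks unchanged. As a quick orientation, a minimal usage sketch (not part of the diff; the import path and printed outputs are assumptions drawn from the code shown below):

from datamodel_code_generator.types import UnionIntFloat, get_optional_type

# get_optional_type wraps a type string in Optional[...] or, when the
# PEP 604 union-operator style is requested, appends "| None".
print(get_optional_type("str", False))               # Optional[str]
print(get_optional_type("Union[str, None]", False))  # Optional[str]
print(get_optional_type("str", True))                # str | None

# UnionIntFloat wraps an int/float value and exposes it via __int__/__str__.
value = UnionIntFloat.validate(3)
print(int(value), str(value))                        # 3 3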
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import re
  from abc import ABC, abstractmethod
  from enum import Enum, auto
@@ -9,7 +11,6 @@ from typing import (
  Callable,
  ClassVar,
  Dict,
- FrozenSet,
  Iterable,
  Iterator,
  List,
@@ -17,8 +18,6 @@ from typing import (
  Pattern,
  Sequence,
  Set,
- Tuple,
- Type,
  TypeVar,
  Union,
  )
@@ -52,50 +51,53 @@ from datamodel_code_generator.util import (
  runtime_checkable,
  )

+ if TYPE_CHECKING:
+ import builtins
+
  if PYDANTIC_V2:
  from pydantic import GetCoreSchemaHandler
  from pydantic_core import core_schema

- T = TypeVar('T')
-
- OPTIONAL = 'Optional'
- OPTIONAL_PREFIX = f'{OPTIONAL}['
-
- UNION = 'Union'
- UNION_PREFIX = f'{UNION}['
- UNION_DELIMITER = ', '
- UNION_PATTERN: Pattern[str] = re.compile(r'\s*,\s*')
- UNION_OPERATOR_DELIMITER = ' | '
- UNION_OPERATOR_PATTERN: Pattern[str] = re.compile(r'\s*\|\s*')
- NONE = 'None'
- ANY = 'Any'
- LITERAL = 'Literal'
- SEQUENCE = 'Sequence'
- FROZEN_SET = 'FrozenSet'
- MAPPING = 'Mapping'
- DICT = 'Dict'
- SET = 'Set'
- LIST = 'List'
- STANDARD_DICT = 'dict'
- STANDARD_LIST = 'list'
- STANDARD_SET = 'set'
- STR = 'str'
-
- NOT_REQUIRED = 'NotRequired'
- NOT_REQUIRED_PREFIX = f'{NOT_REQUIRED}['
+ T = TypeVar("T")
+
+ OPTIONAL = "Optional"
+ OPTIONAL_PREFIX = f"{OPTIONAL}["
+
+ UNION = "Union"
+ UNION_PREFIX = f"{UNION}["
+ UNION_DELIMITER = ", "
+ UNION_PATTERN: Pattern[str] = re.compile(r"\s*,\s*")
+ UNION_OPERATOR_DELIMITER = " | "
+ UNION_OPERATOR_PATTERN: Pattern[str] = re.compile(r"\s*\|\s*")
+ NONE = "None"
+ ANY = "Any"
+ LITERAL = "Literal"
+ SEQUENCE = "Sequence"
+ FROZEN_SET = "FrozenSet"
+ MAPPING = "Mapping"
+ DICT = "Dict"
+ SET = "Set"
+ LIST = "List"
+ STANDARD_DICT = "dict"
+ STANDARD_LIST = "list"
+ STANDARD_SET = "set"
+ STR = "str"
+
+ NOT_REQUIRED = "NotRequired"
+ NOT_REQUIRED_PREFIX = f"{NOT_REQUIRED}["


  class StrictTypes(Enum):
- str = 'str'
- bytes = 'bytes'
- int = 'int'
- float = 'float'
- bool = 'bool'
+ str = "str"
+ bytes = "bytes"
+ int = "int"
+ float = "float"
+ bool = "bool"


  class UnionIntFloat:
- def __init__(self, value: Union[int, float]) -> None:
- self.value: Union[int, float] = value
+ def __init__(self, value: float) -> None:
+ self.value: int | float = value

  def __int__(self) -> int:
  return int(self.value)
@@ -107,41 +109,41 @@ class UnionIntFloat:
  return str(self.value)

  @classmethod
- def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]:
+ def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]: # noqa: PLW3201
  yield cls.validate

  @classmethod
- def __get_pydantic_core_schema__(
- cls, _source_type: Any, _handler: 'GetCoreSchemaHandler'
- ) -> 'core_schema.CoreSchema':
- from_int_schema = core_schema.chain_schema( # pyright: ignore [reportPossiblyUnboundVariable]
+ def __get_pydantic_core_schema__( # noqa: PLW3201
+ cls, _source_type: Any, _handler: GetCoreSchemaHandler
+ ) -> core_schema.CoreSchema:
+ from_int_schema = core_schema.chain_schema( # pyright: ignore[reportPossiblyUnboundVariable]
  [
- core_schema.union_schema( # pyright: ignore [reportPossiblyUnboundVariable]
- [core_schema.int_schema(), core_schema.float_schema()] # pyright: ignore [reportPossiblyUnboundVariable]
+ core_schema.union_schema( # pyright: ignore[reportPossiblyUnboundVariable]
+ [core_schema.int_schema(), core_schema.float_schema()] # pyright: ignore[reportPossiblyUnboundVariable]
  ),
- core_schema.no_info_plain_validator_function(cls.validate), # pyright: ignore [reportPossiblyUnboundVariable]
+ core_schema.no_info_plain_validator_function(cls.validate), # pyright: ignore[reportPossiblyUnboundVariable]
  ]
  )

- return core_schema.json_or_python_schema( # pyright: ignore [reportPossiblyUnboundVariable]
+ return core_schema.json_or_python_schema( # pyright: ignore[reportPossiblyUnboundVariable]
  json_schema=from_int_schema,
- python_schema=core_schema.union_schema( # pyright: ignore [reportPossiblyUnboundVariable]
+ python_schema=core_schema.union_schema( # pyright: ignore[reportPossiblyUnboundVariable]
  [
  # check if it's an instance first before doing any further work
- core_schema.is_instance_schema(UnionIntFloat), # pyright: ignore [reportPossiblyUnboundVariable]
+ core_schema.is_instance_schema(UnionIntFloat), # pyright: ignore[reportPossiblyUnboundVariable]
  from_int_schema,
  ]
  ),
- serialization=core_schema.plain_serializer_function_ser_schema( # pyright: ignore [reportPossiblyUnboundVariable]
+ serialization=core_schema.plain_serializer_function_ser_schema( # pyright: ignore[reportPossiblyUnboundVariable]
  lambda instance: instance.value
  ),
  )

  @classmethod
- def validate(cls, v: Any) -> 'UnionIntFloat':
+ def validate(cls, v: Any) -> UnionIntFloat:
  if isinstance(v, UnionIntFloat):
  return v
- elif not isinstance(v, (int, float)): # pragma: no cover
+ if not isinstance(v, (int, float)): # pragma: no cover
  try:
  int(v)
  return cls(v)
@@ -153,69 +155,63 @@ class UnionIntFloat:
  except (TypeError, ValueError):
  pass

- raise TypeError(f'{v} is not int or float')
+ msg = f"{v} is not int or float"
+ raise TypeError(msg)
  return cls(v)


- def chain_as_tuple(*iterables: Iterable[T]) -> Tuple[T, ...]:
+ def chain_as_tuple(*iterables: Iterable[T]) -> tuple[T, ...]:
  return tuple(chain(*iterables))


  @lru_cache
- def _remove_none_from_type(
- type_: str, split_pattern: Pattern[str], delimiter: str
- ) -> List[str]:
- types: List[str] = []
- split_type: str = ''
+ def _remove_none_from_type(type_: str, split_pattern: Pattern[str], delimiter: str) -> list[str]:
+ types: list[str] = []
+ split_type: str = ""
  inner_count: int = 0
  for part in re.split(split_pattern, type_):
  if part == NONE:
  continue
- inner_count += part.count('[') - part.count(']')
+ inner_count += part.count("[") - part.count("]")
  if split_type:
  split_type += delimiter
  if inner_count == 0:
  if split_type:
- types.append(f'{split_type}{part}')
+ types.append(f"{split_type}{part}")
  else:
  types.append(part)
- split_type = ''
+ split_type = ""
  continue
- else:
- split_type += part
+ split_type += part
  return types


- def _remove_none_from_union(type_: str, use_union_operator: bool) -> str:
+ def _remove_none_from_union(type_: str, use_union_operator: bool) -> str: # noqa: FBT001
  if use_union_operator:
- if not re.match(r'^\w+ | ', type_):
+ if not re.match(r"^\w+ | ", type_):
  return type_
  return UNION_OPERATOR_DELIMITER.join(
- _remove_none_from_type(
- type_, UNION_OPERATOR_PATTERN, UNION_OPERATOR_DELIMITER
- )
+ _remove_none_from_type(type_, UNION_OPERATOR_PATTERN, UNION_OPERATOR_DELIMITER)
  )

  if not type_.startswith(UNION_PREFIX):
  return type_
- inner_types = _remove_none_from_type(
- type_[len(UNION_PREFIX) :][:-1], UNION_PATTERN, UNION_DELIMITER
- )
+ inner_types = _remove_none_from_type(type_[len(UNION_PREFIX) :][:-1], UNION_PATTERN, UNION_DELIMITER)

  if len(inner_types) == 1:
  return inner_types[0]
- return f'{UNION_PREFIX}{UNION_DELIMITER.join(inner_types)}]'
+ return f"{UNION_PREFIX}{UNION_DELIMITER.join(inner_types)}]"


  @lru_cache
- def get_optional_type(type_: str, use_union_operator: bool) -> str:
+ def get_optional_type(type_: str, use_union_operator: bool) -> str: # noqa: FBT001
  type_ = _remove_none_from_union(type_, use_union_operator)

  if not type_ or type_ == NONE:
  return NONE
  if use_union_operator:
- return f'{type_} | {NONE}'
- return f'{OPTIONAL_PREFIX}{type_}]'
+ return f"{type_} | {NONE}"
+ return f"{OPTIONAL_PREFIX}{type_}]"


  @runtime_checkable
@@ -236,9 +232,9 @@ class DataType(_BaseModel):
  if PYDANTIC_V2:
  # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
  # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
- model_config = ConfigDict( # pyright: ignore [reportAssignmentType]
- extra='forbid',
- revalidate_instances='never',
+ model_config = ConfigDict( # pyright: ignore[reportAssignmentType]
+ extra="forbid",
+ revalidate_instances="never",
  )
  else:
  if not TYPE_CHECKING:
@@ -248,41 +244,37 @@ class DataType(_BaseModel):
  cls.update_forward_refs()

  class Config:
- extra = 'forbid'
- copy_on_model_validation = (
- False
- if version.parse(pydantic.VERSION) < version.parse('1.9.2')
- else 'none'
- )
+ extra = "forbid"
+ copy_on_model_validation = False if version.parse(pydantic.VERSION) < version.parse("1.9.2") else "none"

- type: Optional[str] = None
- reference: Optional[Reference] = None
- data_types: List['DataType'] = []
+ type: Optional[str] = None # noqa: UP045
+ reference: Optional[Reference] = None # noqa: UP045
+ data_types: List[DataType] = [] # noqa: RUF012, UP006
  is_func: bool = False
- kwargs: Optional[Dict[str, Any]] = None
- import_: Optional[Import] = None
+ kwargs: Optional[Dict[str, Any]] = None # noqa: UP006, UP045
+ import_: Optional[Import] = None # noqa: UP045
  python_version: PythonVersion = PythonVersion.PY_38
  is_optional: bool = False
  is_dict: bool = False
  is_list: bool = False
  is_set: bool = False
  is_custom_type: bool = False
- literals: List[Union[StrictBool, StrictInt, StrictStr]] = []
+ literals: List[Union[StrictBool, StrictInt, StrictStr]] = [] # noqa: RUF012, UP006, UP007
  use_standard_collections: bool = False
  use_generic_container: bool = False
  use_union_operator: bool = False
- alias: Optional[str] = None
- parent: Optional[Any] = None
- children: List[Any] = []
+ alias: Optional[str] = None # noqa: UP045
+ parent: Optional[Any] = None # noqa: UP045
+ children: List[Any] = [] # noqa: RUF012, UP006
  strict: bool = False
- dict_key: Optional['DataType'] = None
+ dict_key: Optional[DataType] = None # noqa: UP045

- _exclude_fields: ClassVar[Set[str]] = {'parent', 'children'}
- _pass_fields: ClassVar[Set[str]] = {'parent', 'children', 'data_types', 'reference'}
+ _exclude_fields: ClassVar[Set[str]] = {"parent", "children"} # noqa: UP006
+ _pass_fields: ClassVar[Set[str]] = {"parent", "children", "data_types", "reference"} # noqa: UP006

  @classmethod
- def from_import(
- cls: Type['DataTypeT'],
+ def from_import( # noqa: PLR0913
+ cls: builtins.type[DataTypeT],
  import_: Import,
  *,
  is_optional: bool = False,
@@ -291,8 +283,8 @@ class DataType(_BaseModel):
  is_set: bool = False,
  is_custom_type: bool = False,
  strict: bool = False,
- kwargs: Optional[Dict[str, Any]] = None,
- ) -> 'DataTypeT':
+ kwargs: dict[str, Any] | None = None,
+ ) -> DataTypeT:
  return cls(
  type=import_.import_,
  import_=import_,
@@ -300,34 +292,25 @@ class DataType(_BaseModel):
  is_dict=is_dict,
  is_list=is_list,
  is_set=is_set,
- is_func=True if kwargs else False,
+ is_func=bool(kwargs),
  is_custom_type=is_custom_type,
  strict=strict,
  kwargs=kwargs,
  )

  @property
- def unresolved_types(self) -> FrozenSet[str]:
+ def unresolved_types(self) -> frozenset[str]:
  return frozenset(
- {
- t.reference.path
- for data_types in self.data_types
- for t in data_types.all_data_types
- if t.reference
- }
+ {t.reference.path for data_types in self.data_types for t in data_types.all_data_types if t.reference}
  | ({self.reference.path} if self.reference else set())
  )

- def replace_reference(self, reference: Optional[Reference]) -> None:
+ def replace_reference(self, reference: Reference | None) -> None:
  if not self.reference: # pragma: no cover
- raise Exception(
- f"`{self.__class__.__name__}.replace_reference()` can't be called"
- f' when `reference` field is empty.'
- )
+ msg = f"`{self.__class__.__name__}.replace_reference()` can't be called when `reference` field is empty."
+ raise Exception(msg) # noqa: TRY002
  self_id = id(self)
- self.reference.children = [
- c for c in self.reference.children if id(c) != self_id
- ]
+ self.reference.children = [c for c in self.reference.children if id(c) != self_id]
  self.reference = reference
  if reference:
  reference.children.append(self)
@@ -336,7 +319,7 @@ class DataType(_BaseModel):
  self.replace_reference(None)

  @property
- def module_name(self) -> Optional[str]:
+ def module_name(self) -> str | None:
  if self.reference and isinstance(self.reference.source, Modular):
  return self.reference.source.module_name
  return None # pragma: no cover
@@ -345,11 +328,11 @@ class DataType(_BaseModel):
  def full_name(self) -> str:
  module_name = self.module_name
  if module_name:
- return f'{module_name}.{self.reference.short_name}' # type: ignore
- return self.reference.short_name # type: ignore
+ return f"{module_name}.{self.reference.short_name if self.reference else ''}"
+ return self.reference.short_name if self.reference else ""

  @property
- def all_data_types(self) -> Iterator['DataType']:
+ def all_data_types(self) -> Iterator[DataType]:
  for data_type in self.data_types:
  yield from data_type.all_data_types
  yield self
@@ -367,14 +350,12 @@ class DataType(_BaseModel):
  yield self.import_

  # Define required imports based on type features and conditions
- imports: Tuple[Tuple[bool, Import], ...] = (
+ imports: tuple[tuple[bool, Import], ...] = (
  (self.is_optional and not self.use_union_operator, IMPORT_OPTIONAL),
  (len(self.data_types) > 1 and not self.use_union_operator, IMPORT_UNION),
  (
  bool(self.literals),
- IMPORT_LITERAL
- if self.python_version.has_literal_type
- else IMPORT_LITERAL_BACKPORT,
+ IMPORT_LITERAL if self.python_version.has_literal_type else IMPORT_LITERAL_BACKPORT,
  ),
  )

@@ -418,11 +399,7 @@ class DataType(_BaseModel):
  if type_.type == ANY and type_.is_optional:
  if any(t for t in self.data_types if t.type != ANY): # pragma: no cover
  self.is_optional = True
- self.data_types = [
- t
- for t in self.data_types
- if not (t.type == ANY and t.is_optional)
- ]
+ self.data_types = [t for t in self.data_types if not (t.type == ANY and t.is_optional)]
  break # pragma: no cover

  for data_type in self.data_types:
@@ -433,23 +410,21 @@ class DataType(_BaseModel):
  self.reference.children.append(self)

  @property
- def type_hint(self) -> str:
- type_: Optional[str] = self.alias or self.type
+ def type_hint(self) -> str: # noqa: PLR0912, PLR0915
+ type_: str | None = self.alias or self.type
  if not type_:
  if self.is_union:
- data_types: List[str] = []
+ data_types: list[str] = []
  for data_type in self.data_types:
  data_type_type = data_type.type_hint
  if data_type_type in data_types: # pragma: no cover
  continue

- if NONE == data_type_type:
+ if data_type_type == NONE:
  self.is_optional = True
  continue

- non_optional_data_type_type = _remove_none_from_union(
- data_type_type, self.use_union_operator
- )
+ non_optional_data_type_type = _remove_none_from_union(data_type_type, self.use_union_operator)

  if non_optional_data_type_type != data_type_type:
  self.is_optional = True
@@ -457,22 +432,19 @@ class DataType(_BaseModel):
  data_types.append(non_optional_data_type_type)
  if len(data_types) == 1:
  type_ = data_types[0]
+ elif self.use_union_operator:
+ type_ = UNION_OPERATOR_DELIMITER.join(data_types)
  else:
- if self.use_union_operator:
- type_ = UNION_OPERATOR_DELIMITER.join(data_types)
- else:
- type_ = f'{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]'
+ type_ = f"{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]"
  elif len(self.data_types) == 1:
  type_ = self.data_types[0].type_hint
  elif self.literals:
- type_ = f'{LITERAL}[{", ".join(repr(literal) for literal in self.literals)}]'
+ type_ = f"{LITERAL}[{', '.join(repr(literal) for literal in self.literals)}]"
+ elif self.reference:
+ type_ = self.reference.short_name
  else:
- if self.reference:
- type_ = self.reference.short_name
- else:
- # TODO support strict Any
- # type_ = 'Any'
- type_ = ''
+ # TODO support strict Any
+ type_ = ""
  if self.reference:
  source = self.reference.source
  if isinstance(source, Nullable) and source.nullable:
@@ -486,7 +458,7 @@ class DataType(_BaseModel):
  list_ = STANDARD_LIST
  else:
  list_ = LIST
- type_ = f'{list_}[{type_}]' if type_ else list_
+ type_ = f"{list_}[{type_}]" if type_ else list_
  elif self.is_set:
  if self.use_generic_container:
  set_ = FROZEN_SET
@@ -494,7 +466,7 @@ class DataType(_BaseModel):
  set_ = STANDARD_SET
  else:
  set_ = SET
- type_ = f'{set_}[{type_}]' if type_ else set_
+ type_ = f"{set_}[{type_}]" if type_ else set_
  elif self.is_dict:
  if self.use_generic_container:
  dict_ = MAPPING
@@ -504,16 +476,16 @@ class DataType(_BaseModel):
  dict_ = DICT
  if self.dict_key or type_:
  key = self.dict_key.type_hint if self.dict_key else STR
- type_ = f'{dict_}[{key}, {type_ or ANY}]'
+ type_ = f"{dict_}[{key}, {type_ or ANY}]"
  else: # pragma: no cover
  type_ = dict_
  if self.is_optional and type_ != ANY:
  return get_optional_type(type_, self.use_union_operator)
- elif self.is_func:
+ if self.is_func:
  if self.kwargs:
- kwargs: str = ', '.join(f'{k}={v}' for k, v in self.kwargs.items())
- return f'{type_}({kwargs})'
- return f'{type_}()'
+ kwargs: str = ", ".join(f"{k}={v}" for k, v in self.kwargs.items())
+ return f"{type_}({kwargs})"
+ return f"{type_}()"
  return type_

  @property
@@ -523,7 +495,7 @@

  DataType.model_rebuild()

- DataTypeT = TypeVar('DataTypeT', bound=DataType)
+ DataTypeT = TypeVar("DataTypeT", bound=DataType)


  class EmptyDataType(DataType):
@@ -568,16 +540,16 @@ class Types(Enum):


  class DataTypeManager(ABC):
- def __init__(
+ def __init__( # noqa: PLR0913, PLR0917
  self,
  python_version: PythonVersion = PythonVersion.PY_38,
- use_standard_collections: bool = False,
- use_generic_container_types: bool = False,
- strict_types: Optional[Sequence[StrictTypes]] = None,
- use_non_positive_negative_number_constrained_types: bool = False,
- use_union_operator: bool = False,
- use_pendulum: bool = False,
- target_datetime_class: Optional[DatetimeClassType] = None,
+ use_standard_collections: bool = False, # noqa: FBT001, FBT002
+ use_generic_container_types: bool = False, # noqa: FBT001, FBT002
+ strict_types: Sequence[StrictTypes] | None = None,
+ use_non_positive_negative_number_constrained_types: bool = False, # noqa: FBT001, FBT002
+ use_union_operator: bool = False, # noqa: FBT001, FBT002
+ use_pendulum: bool = False, # noqa: FBT001, FBT002
+ target_datetime_class: DatetimeClassType | None = None,
  ) -> None:
  self.python_version = python_version
  self.use_standard_collections: bool = use_standard_collections
@@ -588,23 +560,20 @@ class DataTypeManager(ABC):
  )
  self.use_union_operator: bool = use_union_operator
  self.use_pendulum: bool = use_pendulum
- self.target_datetime_class: DatetimeClassType = (
- target_datetime_class or DatetimeClassType.Datetime
- )
+ self.target_datetime_class: DatetimeClassType = target_datetime_class or DatetimeClassType.Datetime

- if (
- use_generic_container_types and python_version == PythonVersion.PY_36
- ): # pragma: no cover
- raise Exception(
- 'use_generic_container_types can not be used with target_python_version 3.6.\n'
- ' The version will be not supported in a future version'
+ if use_generic_container_types and python_version == PythonVersion.PY_36: # pragma: no cover
+ msg = (
+ "use_generic_container_types can not be used with target_python_version 3.6.\n"
+ " The version will be not supported in a future version"
  )
+ raise Exception(msg) # noqa: TRY002

  if TYPE_CHECKING:
- self.data_type: Type[DataType]
+ self.data_type: type[DataType]
  else:
- self.data_type: Type[DataType] = create_model(
- 'ContextDataType',
+ self.data_type: type[DataType] = create_model(
+ "ContextDataType",
  python_version=(PythonVersion, python_version),
  use_standard_collections=(bool, use_standard_collections),
  use_generic_container=(bool, use_generic_container_types),
@@ -616,15 +585,11 @@ class DataTypeManager(ABC):
  def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
  raise NotImplementedError

- def get_data_type_from_full_path(
- self, full_path: str, is_custom_type: bool
- ) -> DataType:
- return self.data_type.from_import(
- Import.from_full_path(full_path), is_custom_type=is_custom_type
- )
+ def get_data_type_from_full_path(self, full_path: str, is_custom_type: bool) -> DataType: # noqa: FBT001
+ return self.data_type.from_import(Import.from_full_path(full_path), is_custom_type=is_custom_type)

  def get_data_type_from_value(self, value: Any) -> DataType:
- type_: Optional[Types] = None
+ type_: Types | None = None
  if isinstance(value, str):
  type_ = Types.string
  elif isinstance(value, bool):
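
For readers following the DataType changes above, a small sketch of how the refactored API is typically exercised (the import paths come from the hunks themselves; the rendered annotation is an assumption based on the type_hint logic shown above, not part of the release):

from datamodel_code_generator.imports import Import
from datamodel_code_generator.types import DataType

# from_import() builds a DataType that carries its own Import; type_hint then
# renders the annotation, delegating to get_optional_type for the Optional wrapper.
decimal_type = DataType.from_import(Import.from_full_path("decimal.Decimal"), is_optional=True)
print(decimal_type.type_hint)  # Optional[Decimal]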