datamodel-code-generator 0.27.1-py3-none-any.whl → 0.27.3-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of datamodel-code-generator might be problematic.

Files changed (43)
  1. datamodel_code_generator/__init__.py +159 -190
  2. datamodel_code_generator/__main__.py +151 -173
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -99
  5. datamodel_code_generator/http.py +9 -10
  6. datamodel_code_generator/imports.py +57 -64
  7. datamodel_code_generator/model/__init__.py +26 -31
  8. datamodel_code_generator/model/base.py +94 -127
  9. datamodel_code_generator/model/dataclass.py +58 -59
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +112 -126
  13. datamodel_code_generator/model/pydantic/__init__.py +14 -27
  14. datamodel_code_generator/model/pydantic/base_model.py +120 -139
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +86 -117
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +17 -17
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -119
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +8 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +42 -41
  27. datamodel_code_generator/model/types.py +19 -17
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +12 -11
  30. datamodel_code_generator/parser/base.py +320 -492
  31. datamodel_code_generator/parser/graphql.py +80 -111
  32. datamodel_code_generator/parser/jsonschema.py +422 -580
  33. datamodel_code_generator/parser/openapi.py +175 -204
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +192 -274
  36. datamodel_code_generator/types.py +147 -182
  37. datamodel_code_generator/util.py +22 -26
  38. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/METADATA +12 -11
  39. datamodel_code_generator-0.27.3.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.1.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/licenses/LICENSE +0 -0

datamodel_code_generator/model/enum.py

@@ -1,20 +1,24 @@
 from __future__ import annotations

-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, Tuple

 from datamodel_code_generator.imports import IMPORT_ANY, IMPORT_ENUM, Import
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model.base import UNDEFINED, BaseClassDataType
-from datamodel_code_generator.reference import Reference
 from datamodel_code_generator.types import DataType, Types

-_INT: str = 'int'
-_FLOAT: str = 'float'
-_BYTES: str = 'bytes'
-_STR: str = 'str'
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path

-SUBCLASS_BASE_CLASSES: Dict[Types, str] = {
+    from datamodel_code_generator.reference import Reference
+
+_INT: str = "int"
+_FLOAT: str = "float"
+_BYTES: str = "bytes"
+_STR: str = "str"
+
+SUBCLASS_BASE_CLASSES: dict[Types, str] = {
     Types.int32: _INT,
     Types.int64: _INT,
     Types.integer: _INT,
@@ -27,28 +31,28 @@ SUBCLASS_BASE_CLASSES: Dict[Types, str] = {


 class Enum(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Enum.jinja2'
-    BASE_CLASS: ClassVar[str] = 'enum.Enum'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_ENUM,)
+    TEMPLATE_FILE_PATH: ClassVar[str] = "Enum.jinja2"
+    BASE_CLASS: ClassVar[str] = "enum.Enum"
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_ENUM,)  # noqa: UP006

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        type_: Optional[Types] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        type_: Types | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
-    ):
+    ) -> None:
         super().__init__(
             reference=reference,
             fields=fields,
@@ -68,7 +72,7 @@ class Enum(DataModel):
         if not base_classes and type_:
             base_class = SUBCLASS_BASE_CLASSES.get(type_)
             if base_class:
-                self.base_classes: List[BaseClassDataType] = [
+                self.base_classes: list[BaseClassDataType] = [
                     BaseClassDataType(type=base_class),
                     *self.base_classes,
                 ]
@@ -80,14 +84,14 @@ class Enum(DataModel):
     def get_member(self, field: DataModelFieldBase) -> Member:
         return Member(self, field)

-    def find_member(self, value: Any) -> Optional[Member]:
+    def find_member(self, value: Any) -> Member | None:
         repr_value = repr(value)
         # Remove surrounding quotes from the string representation
-        str_value = str(value).strip('\'"')
+        str_value = str(value).strip("'\"")

         for field in self.fields:
             # Remove surrounding quotes from field default value
-            field_default = (field.default or '').strip('\'"')
+            field_default = (field.default or "").strip("'\"")

             # Compare values after removing quotes
             if field_default == str_value:
@@ -100,7 +104,7 @@ class Enum(DataModel):
         return None

     @property
-    def imports(self) -> Tuple[Import, ...]:
+    def imports(self) -> tuple[Import, ...]:
         return tuple(i for i in super().imports if i != IMPORT_ANY)


@@ -108,7 +112,7 @@ class Member:
     def __init__(self, enum: Enum, field: DataModelFieldBase) -> None:
         self.enum: Enum = enum
         self.field: DataModelFieldBase = field
-        self.alias: Optional[str] = None
+        self.alias: Optional[str] = None  # noqa: UP045

     def __repr__(self) -> str:
-        return f'{self.alias or self.enum.name}.{self.field.name}'
+        return f"{self.alias or self.enum.name}.{self.field.name}"

datamodel_code_generator/model/imports.py

@@ -1,13 +1,15 @@
+from __future__ import annotations
+
 from datamodel_code_generator.imports import Import

-IMPORT_DATACLASS = Import.from_full_path('dataclasses.dataclass')
-IMPORT_FIELD = Import.from_full_path('dataclasses.field')
-IMPORT_CLASSVAR = Import.from_full_path('typing.ClassVar')
-IMPORT_TYPED_DICT = Import.from_full_path('typing.TypedDict')
-IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path('typing_extensions.TypedDict')
-IMPORT_NOT_REQUIRED = Import.from_full_path('typing.NotRequired')
-IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path('typing_extensions.NotRequired')
-IMPORT_MSGSPEC_STRUCT = Import.from_full_path('msgspec.Struct')
-IMPORT_MSGSPEC_FIELD = Import.from_full_path('msgspec.field')
-IMPORT_MSGSPEC_META = Import.from_full_path('msgspec.Meta')
-IMPORT_MSGSPEC_CONVERT = Import.from_full_path('msgspec.convert')
+IMPORT_DATACLASS = Import.from_full_path("dataclasses.dataclass")
+IMPORT_FIELD = Import.from_full_path("dataclasses.field")
+IMPORT_CLASSVAR = Import.from_full_path("typing.ClassVar")
+IMPORT_TYPED_DICT = Import.from_full_path("typing.TypedDict")
+IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path("typing_extensions.TypedDict")
+IMPORT_NOT_REQUIRED = Import.from_full_path("typing.NotRequired")
+IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path("typing_extensions.NotRequired")
+IMPORT_MSGSPEC_STRUCT = Import.from_full_path("msgspec.Struct")
+IMPORT_MSGSPEC_FIELD = Import.from_full_path("msgspec.field")
+IMPORT_MSGSPEC_META = Import.from_full_path("msgspec.Meta")
+IMPORT_MSGSPEC_CONVERT = Import.from_full_path("msgspec.convert")

datamodel_code_generator/model/msgspec.py

@@ -1,16 +1,14 @@
+from __future__ import annotations
+
 from functools import wraps
-from pathlib import Path
 from typing import (
+    TYPE_CHECKING,
     Any,
     ClassVar,
-    DefaultDict,
-    Dict,
-    List,
     Optional,
     Sequence,
     Set,
     Tuple,
-    Type,
     TypeVar,
 )

@@ -38,7 +36,6 @@ from datamodel_code_generator.model.pydantic.base_model import (
 from datamodel_code_generator.model.rootmodel import RootModel as _RootModel
 from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
 from datamodel_code_generator.model.types import type_map_factory
-from datamodel_code_generator.reference import Reference
 from datamodel_code_generator.types import (
     DataType,
     StrictTypes,
@@ -47,36 +44,39 @@ from datamodel_code_generator.types import (
     get_optional_type,
 )

+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+

 def _has_field_assignment(field: DataModelFieldBase) -> bool:
-    return not (
-        field.required
-        or (field.represented_default == 'None' and field.strip_default_none)
-    )
+    return not (field.required or (field.represented_default == "None" and field.strip_default_none))


-DataModelFieldBaseT = TypeVar('DataModelFieldBaseT', bound=DataModelFieldBase)
+DataModelFieldBaseT = TypeVar("DataModelFieldBaseT", bound=DataModelFieldBase)


-def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]:
-    original_imports: property = getattr(cls, 'imports', None)  # type: ignore
+def import_extender(cls: type[DataModelFieldBaseT]) -> type[DataModelFieldBaseT]:
+    original_imports: property = cls.imports

-    @wraps(original_imports.fget)  # type: ignore
-    def new_imports(self: DataModelFieldBaseT) -> Tuple[Import, ...]:
+    @wraps(original_imports.fget)  # pyright: ignore[reportArgumentType]
+    def new_imports(self: DataModelFieldBaseT) -> tuple[Import, ...]:
         extra_imports = []
         field = self.field
         # TODO: Improve field detection
-        if field and field.startswith('field('):
+        if field and field.startswith("field("):
             extra_imports.append(IMPORT_MSGSPEC_FIELD)
-        if self.field and 'lambda: convert' in self.field:
+        if self.field and "lambda: convert" in self.field:
             extra_imports.append(IMPORT_MSGSPEC_CONVERT)
         if self.annotated:
             extra_imports.append(IMPORT_MSGSPEC_META)
-        if self.extras.get('is_classvar'):
+        if self.extras.get("is_classvar"):
             extra_imports.append(IMPORT_CLASSVAR)
-        return chain_as_tuple(original_imports.fget(self), extra_imports)  # type: ignore
+        return chain_as_tuple(original_imports.fget(self), extra_imports)  # pyright: ignore[reportOptionalCall]

-    setattr(cls, 'imports', property(new_imports))
+    cls.imports = property(new_imports)  # pyright: ignore[reportAttributeAccessIssue]
     return cls


@@ -85,23 +85,23 @@ class RootModel(_RootModel):


 class Struct(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'msgspec.jinja2'
-    BASE_CLASS: ClassVar[str] = 'msgspec.Struct'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
+    TEMPLATE_FILE_PATH: ClassVar[str] = "msgspec.jinja2"
+    BASE_CLASS: ClassVar[str] = "msgspec.Struct"
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()  # noqa: UP006

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
@@ -121,45 +121,45 @@ class Struct(DataModel):
             nullable=nullable,
             keyword_only=keyword_only,
         )
-        self.extra_template_data.setdefault('base_class_kwargs', {})
+        self.extra_template_data.setdefault("base_class_kwargs", {})
         if self.keyword_only:
-            self.add_base_class_kwarg('kw_only', 'True')
+            self.add_base_class_kwarg("kw_only", "True")

-    def add_base_class_kwarg(self, name: str, value):
-        self.extra_template_data['base_class_kwargs'][name] = value
+    def add_base_class_kwarg(self, name: str, value: str) -> None:
+        self.extra_template_data["base_class_kwargs"][name] = value


 class Constraints(_Constraints):
     # To override existing pattern alias
-    regex: Optional[str] = Field(None, alias='regex')
-    pattern: Optional[str] = Field(None, alias='pattern')
+    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
+    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045


 @import_extender
 class DataModelField(DataModelFieldBase):
-    _FIELD_KEYS: ClassVar[Set[str]] = {
-        'default',
-        'default_factory',
+    _FIELD_KEYS: ClassVar[Set[str]] = {  # noqa: UP006
+        "default",
+        "default_factory",
     }
-    _META_FIELD_KEYS: ClassVar[Set[str]] = {
-        'title',
-        'description',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
+    _META_FIELD_KEYS: ClassVar[Set[str]] = {  # noqa: UP006
+        "title",
+        "description",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
         # 'min_items', # not supported by msgspec
         # 'max_items', # not supported by msgspec
-        'min_length',
-        'max_length',
-        'pattern',
-        'examples',
+        "min_length",
+        "max_length",
+        "pattern",
+        "examples",
         # 'unique_items', # not supported by msgspec
     }
-    _PARSE_METHOD = 'convert'
-    _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {'gt', 'ge', 'lt', 'le', 'multiple_of'}
-    constraints: Optional[Constraints] = None
+    _PARSE_METHOD = "convert"
+    _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {"gt", "ge", "lt", "le", "multiple_of"}  # noqa: UP006
+    constraints: Optional[Constraints] = None  # noqa: UP045

     def self_reference(self) -> bool:  # pragma: no cover
         return isinstance(self.parent, Struct) and self.parent.reference.path in {
@@ -167,87 +167,72 @@ class DataModelField(DataModelFieldBase):
         }

     def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
+        if "const" not in self.extras:
+            return
         self.const = True
         self.nullable = False
-        const = self.extras['const']
-        if self.data_type.type == 'str' and isinstance(
-            const, str
-        ):  # pragma: no cover # Literal supports only str
+        const = self.extras["const"]
+        if self.data_type.type == "str" and isinstance(const, str):  # pragma: no cover # Literal supports only str
             self.data_type = self.data_type.__class__(literals=[const])

     def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
         if value is None or constraint not in self._COMPARE_EXPRESSIONS:
             return value

-        if any(
-            data_type.type == 'float' for data_type in self.data_type.all_data_types
-        ):
+        if any(data_type.type == "float" for data_type in self.data_type.all_data_types):
             return float(value)
         return int(value)

     @property
-    def field(self) -> Optional[str]:
+    def field(self) -> str | None:
         """for backwards compatibility"""
         result = str(self)
-        if result == '':
+        if not result:
             return None
-
         return result

     def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._FIELD_KEYS
-        }
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
         if self.alias:
-            data['name'] = self.alias
+            data["name"] = self.alias

         if self.default != UNDEFINED and self.default is not None:
-            data['default'] = self.default
+            data["default"] = self.default
         elif not self.required:
-            data['default'] = None
+            data["default"] = None

         if self.required:
             data = {
                 k: v
                 for k, v in data.items()
                 if k
-                not in (
-                    'default',
-                    'default_factory',
-                )
+                not in {
+                    "default",
+                    "default_factory",
+                }
             }
-        elif self.default and 'default_factory' not in data:
+        elif self.default and "default_factory" not in data:
             default_factory = self._get_default_as_struct_model()
             if default_factory is not None:
-                data.pop('default')
-                data['default_factory'] = default_factory
+                data.pop("default")
+                data["default_factory"] = default_factory

         if not data:
-            return ''
+            return ""

-        if len(data) == 1 and 'default' in data:
-            return repr(data['default'])
+        if len(data) == 1 and "default" in data:
+            return repr(data["default"])

-        kwargs = [
-            f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
-        ]
-        return f'field({", ".join(kwargs)})'
+        kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]
+        return f"field({', '.join(kwargs)})"

     @property
-    def annotated(self) -> Optional[str]:
+    def annotated(self) -> str | None:
         if not self.use_annotated:  # pragma: no cover
             return None

-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS
-        }
-        if (
-            self.constraints is not None
-            and not self.self_reference()
-            and not self.data_type.strict
-        ):
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS}
+        if self.constraints is not None and not self.self_reference() and not self.data_type.strict:
             data = {
                 **data,
                 **{
@@ -257,59 +242,60 @@ class DataModelField(DataModelFieldBase):
                 },
             }

-        meta_arguments = sorted(
-            f'{k}={repr(v)}' for k, v in data.items() if v is not None
-        )
+        meta_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
         if not meta_arguments:
             return None

-        meta = f'Meta({", ".join(meta_arguments)})'
+        meta = f"Meta({', '.join(meta_arguments)})"

-        if not self.required and not self.extras.get('is_classvar'):
+        if not self.required and not self.extras.get("is_classvar"):
             type_hint = self.data_type.type_hint
-            annotated_type = f'Annotated[{type_hint}, {meta}]'
+            annotated_type = f"Annotated[{type_hint}, {meta}]"
             return get_optional_type(annotated_type, self.data_type.use_union_operator)

-        annotated_type = f'Annotated[{self.type_hint}, {meta}]'
-        if self.extras.get('is_classvar'):
-            annotated_type = f'ClassVar[{annotated_type}]'
+        annotated_type = f"Annotated[{self.type_hint}, {meta}]"
+        if self.extras.get("is_classvar"):
+            annotated_type = f"ClassVar[{annotated_type}]"

         return annotated_type

-    def _get_default_as_struct_model(self) -> Optional[str]:
+    def _get_default_as_struct_model(self) -> str | None:
         for data_type in self.data_type.data_types or (self.data_type,):
             # TODO: Check nested data_types
             if data_type.is_dict or self.data_type.is_union:
                 # TODO: Parse Union and dict model for default
                 continue  # pragma: no cover
-            elif data_type.is_list and len(data_type.data_types) == 1:
-                data_type = data_type.data_types[0]
+            if data_type.is_list and len(data_type.data_types) == 1:
+                data_type_child = data_type.data_types[0]
                 if (  # pragma: no cover
-                    data_type.reference
-                    and (
-                        isinstance(data_type.reference.source, Struct)
-                        or isinstance(data_type.reference.source, RootModel)
-                    )
+                    data_type_child.reference
+                    and (isinstance(data_type_child.reference.source, (Struct, RootModel)))
                     and isinstance(self.default, list)
                 ):
-                    return f'lambda: {self._PARSE_METHOD}({repr(self.default)}, type=list[{data_type.alias or data_type.reference.source.class_name}])'
+                    return (
+                        f"lambda: {self._PARSE_METHOD}({self.default!r}, "
+                        f"type=list[{data_type_child.alias or data_type_child.reference.source.class_name}])"
+                    )
             elif data_type.reference and isinstance(data_type.reference.source, Struct):
-                return f'lambda: {self._PARSE_METHOD}({repr(self.default)}, type={data_type.alias or data_type.reference.source.class_name})'
+                return (
+                    f"lambda: {self._PARSE_METHOD}({self.default!r}, "
+                    f"type={data_type.alias or data_type.reference.source.class_name})"
+                )
         return None


 class DataTypeManager(_DataTypeManager):
-    def __init__(
+    def __init__(  # noqa: PLR0913, PLR0917
         self,
         python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
+    ) -> None:
         super().__init__(
             python_version,
             use_standard_collections,
@@ -332,7 +318,7 @@ class DataTypeManager(_DataTypeManager):
             else {}
         )

-        self.type_map: Dict[Types, DataType] = {
+        self.type_map: dict[Types, DataType] = {
             **type_map_factory(self.data_type),
             **datetime_map,
         }
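
For orientation, the Struct and DataModelField classes in this module are what render msgspec output: the Meta keywords come from _META_FIELD_KEYS, and a mutable default becomes a default_factory that re-parses a literal through msgspec.convert. The snippet below is a hand-written approximation of that style of generated model, not output copied from the package; the class and field names are invented:

    from typing import Annotated, Optional

    import msgspec
    from msgspec import Meta, convert, field


    class Item(msgspec.Struct):
        name: Annotated[str, Meta(min_length=1, max_length=64)]
        price: Annotated[float, Meta(ge=0.0)]


    class Cart(msgspec.Struct, kw_only=True):
        # A list default is emitted as a factory that converts the literal into typed Structs.
        items: list[Item] = field(
            default_factory=lambda: convert([{"name": "pen", "price": 1.5}], type=list[Item])
        )
        note: Optional[str] = None


    cart = Cart()
    print(cart.items[0].name)  # pen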

datamodel_code_generator/model/pydantic/__init__.py

@@ -11,37 +11,24 @@ from .types import DataTypeManager


 def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
-    return '\n'.join(
-        f'{class_name}.update_forward_refs()' for class_name in class_names
-    )
+    return "\n".join(f"{class_name}.update_forward_refs()" for class_name in class_names)


 class Config(_BaseModel):
-    extra: Optional[str] = None
-    title: Optional[str] = None
-    allow_population_by_field_name: Optional[bool] = None
-    allow_extra_fields: Optional[bool] = None
-    allow_mutation: Optional[bool] = None
-    arbitrary_types_allowed: Optional[bool] = None
-    orm_mode: Optional[bool] = None
-
-
-# def get_validator_template() -> Template:
-#     template_file_path: Path = Path('pydantic') / 'one_of_validator.jinja2'
-#     loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
-#     environment: Environment = Environment(loader=loader, autoescape=True)
-#     return environment.get_template(template_file_path.name)
-#
-#
-# VALIDATOR_TEMPLATE: Template = get_validator_template()
+    extra: Optional[str] = None  # noqa: UP045
+    title: Optional[str] = None  # noqa: UP045
+    allow_population_by_field_name: Optional[bool] = None  # noqa: UP045
+    allow_extra_fields: Optional[bool] = None  # noqa: UP045
+    allow_mutation: Optional[bool] = None  # noqa: UP045
+    arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
+    orm_mode: Optional[bool] = None  # noqa: UP045


 __all__ = [
-    'BaseModel',
-    'DataModelField',
-    'CustomRootType',
-    'DataClass',
-    'dump_resolve_reference_action',
-    'DataTypeManager',
-    # 'VALIDATOR_TEMPLATE',
+    "BaseModel",
+    "CustomRootType",
+    "DataClass",
+    "DataModelField",
+    "DataTypeManager",
+    "dump_resolve_reference_action",
 ]
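
The dump_resolve_reference_action helper above still targets the pydantic v1 API: it emits one update_forward_refs() call per generated class so that string forward references between models resolve once every class has been defined. A small hand-rolled illustration of why generated code needs that footer (the Node model is hypothetical, not generator output):

    from typing import List, Optional

    from pydantic import BaseModel  # pydantic v1


    class Node(BaseModel):
        value: int
        children: Optional[List["Node"]] = None


    # This is the kind of line the generated footer contains:
    Node.update_forward_refs()

    print(Node(value=1, children=[{"value": 2}]))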