datamodel-code-generator 0.27.1__py3-none-any.whl → 0.27.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (43)
  1. datamodel_code_generator/__init__.py +159 -190
  2. datamodel_code_generator/__main__.py +151 -173
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -99
  5. datamodel_code_generator/http.py +9 -10
  6. datamodel_code_generator/imports.py +57 -64
  7. datamodel_code_generator/model/__init__.py +26 -31
  8. datamodel_code_generator/model/base.py +94 -127
  9. datamodel_code_generator/model/dataclass.py +58 -59
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +112 -126
  13. datamodel_code_generator/model/pydantic/__init__.py +14 -27
  14. datamodel_code_generator/model/pydantic/base_model.py +120 -139
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +86 -117
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +17 -17
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -119
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +8 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +42 -41
  27. datamodel_code_generator/model/types.py +19 -17
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +12 -11
  30. datamodel_code_generator/parser/base.py +320 -492
  31. datamodel_code_generator/parser/graphql.py +80 -111
  32. datamodel_code_generator/parser/jsonschema.py +422 -580
  33. datamodel_code_generator/parser/openapi.py +175 -204
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +192 -274
  36. datamodel_code_generator/types.py +147 -182
  37. datamodel_code_generator/util.py +22 -26
  38. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/METADATA +12 -11
  39. datamodel_code_generator-0.27.3.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.1.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/licenses/LICENSE +0 -0
datamodel_code_generator/model/pydantic_v2/base_model.py

@@ -1,11 +1,11 @@
+from __future__ import annotations
+
 import re
 from enum import Enum
-from pathlib import Path
 from typing import (
+    TYPE_CHECKING,
     Any,
     ClassVar,
-    DefaultDict,
-    Dict,
     List,
     NamedTuple,
     Optional,
@@ -26,130 +26,133 @@ from datamodel_code_generator.model.pydantic.base_model import (
     DataModelField as DataModelFieldV1,
 )
 from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_CONFIG_DICT
-from datamodel_code_generator.reference import Reference
 from datamodel_code_generator.util import field_validator, model_validator

+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+

 class UnionMode(Enum):
-    smart = 'smart'
-    left_to_right = 'left_to_right'
+    smart = "smart"
+    left_to_right = "left_to_right"


 class Constraints(_Constraints):
     # To override existing pattern alias
-    regex: Optional[str] = Field(None, alias='regex')
-    pattern: Optional[str] = Field(None, alias='pattern')
+    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
+    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045

-    @model_validator(mode='before')
-    def validate_min_max_items(cls, values: Any) -> Dict[str, Any]:
+    @model_validator(mode="before")
+    def validate_min_max_items(cls, values: Any) -> dict[str, Any]:  # noqa: N805
         if not isinstance(values, dict):  # pragma: no cover
             return values
-        min_items = values.pop('minItems', None)
+        min_items = values.pop("minItems", None)
         if min_items is not None:
-            values['minLength'] = min_items
-        max_items = values.pop('maxItems', None)
+            values["minLength"] = min_items
+        max_items = values.pop("maxItems", None)
         if max_items is not None:
-            values['maxLength'] = max_items
+            values["maxLength"] = max_items
         return values


 class DataModelField(DataModelFieldV1):
-    _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = {
-        'alias',
-        'default',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'min_length',
-        'max_length',
-        'pattern',
+    _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = {  # noqa: UP006
+        "alias",
+        "default",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "min_length",
+        "max_length",
+        "pattern",
     }
-    _DEFAULT_FIELD_KEYS: ClassVar[Set[str]] = {
-        'default',
-        'default_factory',
-        'alias',
-        'alias_priority',
-        'validation_alias',
-        'serialization_alias',
-        'title',
-        'description',
-        'examples',
-        'exclude',
-        'discriminator',
-        'json_schema_extra',
-        'frozen',
-        'validate_default',
-        'repr',
-        'init_var',
-        'kw_only',
-        'pattern',
-        'strict',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'allow_inf_nan',
-        'max_digits',
-        'decimal_places',
-        'min_length',
-        'max_length',
-        'union_mode',
+    _DEFAULT_FIELD_KEYS: ClassVar[Set[str]] = {  # noqa: UP006
+        "default",
+        "default_factory",
+        "alias",
+        "alias_priority",
+        "validation_alias",
+        "serialization_alias",
+        "title",
+        "description",
+        "examples",
+        "exclude",
+        "discriminator",
+        "json_schema_extra",
+        "frozen",
+        "validate_default",
+        "repr",
+        "init_var",
+        "kw_only",
+        "pattern",
+        "strict",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "allow_inf_nan",
+        "max_digits",
+        "decimal_places",
+        "min_length",
+        "max_length",
+        "union_mode",
     }
-    constraints: Optional[Constraints] = None  # pyright: ignore [reportIncompatibleVariableOverride]
-    _PARSE_METHOD: ClassVar[str] = 'model_validate'
+    constraints: Optional[Constraints] = None  # pyright: ignore[reportIncompatibleVariableOverride] # noqa: UP045
+    _PARSE_METHOD: ClassVar[str] = "model_validate"
     can_have_extra_keys: ClassVar[bool] = False

-    @field_validator('extras')
-    def validate_extras(cls, values: Any) -> Dict[str, Any]:
+    @field_validator("extras")
+    def validate_extras(cls, values: Any) -> dict[str, Any]:  # noqa: N805
         if not isinstance(values, dict):  # pragma: no cover
             return values
-        if 'examples' in values:
+        if "examples" in values:
             return values

-        if 'example' in values:
-            values['examples'] = [values.pop('example')]
+        if "example" in values:
+            values["examples"] = [values.pop("example")]
         return values

     def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
+        if "const" not in self.extras:
+            return
         self.const = True
         self.nullable = False
-        const = self.extras['const']
+        const = self.extras["const"]
         self.data_type = self.data_type.__class__(literals=[const])
         if not self.default:
             self.default = const

-    def _process_data_in_str(self, data: Dict[str, Any]) -> None:
+    def _process_data_in_str(self, data: dict[str, Any]) -> None:
         if self.const:
             # const is removed in pydantic 2.0
-            data.pop('const')
+            data.pop("const")

         # unique_items is not supported in pydantic 2.0
-        data.pop('unique_items', None)
+        data.pop("unique_items", None)

-        if 'union_mode' in data:
+        if "union_mode" in data:
             if self.data_type.is_union:
-                data['union_mode'] = data.pop('union_mode').value
+                data["union_mode"] = data.pop("union_mode").value
             else:
-                data.pop('union_mode')
+                data.pop("union_mode")

         # **extra is not supported in pydantic 2.0
-        json_schema_extra = {
-            k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS
-        }
+        json_schema_extra = {k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS}
         if json_schema_extra:
-            data['json_schema_extra'] = json_schema_extra
-            for key in json_schema_extra.keys():
+            data["json_schema_extra"] = json_schema_extra
+            for key in json_schema_extra:
                 data.pop(key)

-    def _process_annotated_field_arguments(
+    def _process_annotated_field_arguments(  # noqa: PLR6301
         self,
-        field_arguments: List[str],
-    ) -> List[str]:
+        field_arguments: list[str],
+    ) -> list[str]:
         return field_arguments


@@ -160,27 +163,27 @@ class ConfigAttribute(NamedTuple):


 class BaseModel(BaseModelBase):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/BaseModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
-    CONFIG_ATTRIBUTES: ClassVar[List[ConfigAttribute]] = [
-        ConfigAttribute('allow_population_by_field_name', 'populate_by_name', False),
-        ConfigAttribute('populate_by_name', 'populate_by_name', False),
-        ConfigAttribute('allow_mutation', 'frozen', True),
-        ConfigAttribute('frozen', 'frozen', False),
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/BaseModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
+    CONFIG_ATTRIBUTES: ClassVar[List[ConfigAttribute]] = [  # noqa: UP006
+        ConfigAttribute("allow_population_by_field_name", "populate_by_name", False),  # noqa: FBT003
+        ConfigAttribute("populate_by_name", "populate_by_name", False),  # noqa: FBT003
+        ConfigAttribute("allow_mutation", "frozen", True),  # noqa: FBT003
+        ConfigAttribute("frozen", "frozen", False),  # noqa: FBT003
     ]

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
@@ -199,49 +202,45 @@ class BaseModel(BaseModelBase):
             nullable=nullable,
             keyword_only=keyword_only,
         )
-        config_parameters: Dict[str, Any] = {}
+        config_parameters: dict[str, Any] = {}

         extra = self._get_config_extra()
         if extra:
-            config_parameters['extra'] = extra
+            config_parameters["extra"] = extra

         for from_, to, invert in self.CONFIG_ATTRIBUTES:
             if from_ in self.extra_template_data:
                 config_parameters[to] = (
-                    not self.extra_template_data[from_]
-                    if invert
-                    else self.extra_template_data[from_]
+                    not self.extra_template_data[from_] if invert else self.extra_template_data[from_]
                 )
         for data_type in self.all_data_types:
             if data_type.is_custom_type:  # pragma: no cover
-                config_parameters['arbitrary_types_allowed'] = True
+                config_parameters["arbitrary_types_allowed"] = True
                 break

         for field in self.fields:
             # Check if a regex pattern uses lookarounds.
             # Depending on the generation configuration, the pattern may end up in two different places.
-            pattern = (
-                isinstance(field.constraints, Constraints) and field.constraints.pattern
-            ) or (field.data_type.kwargs or {}).get('pattern')
-            if pattern and re.search(r'\(\?<?[=!]', pattern):
-                config_parameters['regex_engine'] = '"python-re"'
+            pattern = (isinstance(field.constraints, Constraints) and field.constraints.pattern) or (
+                field.data_type.kwargs or {}
+            ).get("pattern")
+            if pattern and re.search(r"\(\?<?[=!]", pattern):
+                config_parameters["regex_engine"] = '"python-re"'
                 break

-        if isinstance(self.extra_template_data.get('config'), dict):
-            for key, value in self.extra_template_data['config'].items():
-                config_parameters[key] = value
+        if isinstance(self.extra_template_data.get("config"), dict):
+            for key, value in self.extra_template_data["config"].items():
+                config_parameters[key] = value  # noqa: PERF403

         if config_parameters:
-            from datamodel_code_generator.model.pydantic_v2 import ConfigDict
+            from datamodel_code_generator.model.pydantic_v2 import ConfigDict  # noqa: PLC0415

-            self.extra_template_data['config'] = ConfigDict.parse_obj(config_parameters)  # pyright: ignore [reportArgumentType]
+            self.extra_template_data["config"] = ConfigDict.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
             self._additional_imports.append(IMPORT_CONFIG_DICT)

-    def _get_config_extra(self) -> Optional[Literal["'allow'", "'forbid'"]]:
-        additionalProperties = self.extra_template_data.get('additionalProperties')
-        allow_extra_fields = self.extra_template_data.get('allow_extra_fields')
-        if additionalProperties is not None or allow_extra_fields:
-            return (
-                "'allow'" if additionalProperties or allow_extra_fields else "'forbid'"
-            )
+    def _get_config_extra(self) -> Literal["'allow'", "'forbid'"] | None:
+        additional_properties = self.extra_template_data.get("additionalProperties")
+        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
+        if additional_properties is not None or allow_extra_fields:
+            return "'allow'" if additional_properties or allow_extra_fields else "'forbid'"
         return None
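
Note on the lookaround check in BaseModel.__init__ above: the reshaped pattern expression is only reformatted, and the re.search(r"\(\?<?[=!]", pattern) test is unchanged. A minimal standalone sketch (not part of the package; the example patterns are made up) of what that test detects and why it flips regex_engine:

    import re

    # Same test as in the diff: matches the opening of "(?=", "(?!", "(?<=", "(?<!".
    LOOKAROUND = re.compile(r"\(\?<?[=!]")

    for pattern in (r"^(?!forbidden).*$", r"(?<=@)[a-z]+", r"^[a-z0-9-]{1,63}$"):
        needs_python_re = bool(LOOKAROUND.search(pattern))
        # pydantic v2's default Rust-based regex engine does not support lookarounds,
        # which is why the generated ConfigDict gets regex_engine='"python-re"' in that case.
        print(pattern, needs_python_re)

Running this prints True for the two lookaround patterns and False for the plain character-class pattern.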
datamodel_code_generator/model/pydantic_v2/imports.py

@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 from datamodel_code_generator.imports import Import

-IMPORT_CONFIG_DICT = Import.from_full_path('pydantic.ConfigDict')
-IMPORT_AWARE_DATETIME = Import.from_full_path('pydantic.AwareDatetime')
-IMPORT_NAIVE_DATETIME = Import.from_full_path('pydantic.NaiveDatetime')
+IMPORT_CONFIG_DICT = Import.from_full_path("pydantic.ConfigDict")
+IMPORT_AWARE_DATETIME = Import.from_full_path("pydantic.AwareDatetime")
+IMPORT_NAIVE_DATETIME = Import.from_full_path("pydantic.NaiveDatetime")
datamodel_code_generator/model/pydantic_v2/root_model.py

@@ -1,13 +1,13 @@
 from __future__ import annotations

-from typing import Any, ClassVar, Literal, Optional
+from typing import Any, ClassVar, Literal

 from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel


 class RootModel(BaseModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/RootModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.RootModel"

     def __init__(
         self,
@@ -15,11 +15,11 @@ class RootModel(BaseModel):
     ) -> None:
         # Remove custom_base_class for Pydantic V2 models; behaviour is different from Pydantic V1 as it will not
         # be treated as a root model. custom_base_class cannot both implement BaseModel and RootModel!
-        if 'custom_base_class' in kwargs:
-            kwargs.pop('custom_base_class')
+        if "custom_base_class" in kwargs:
+            kwargs.pop("custom_base_class")

         super().__init__(**kwargs)

-    def _get_config_extra(self) -> Optional[Literal["'allow'", "'forbid'"]]:
+    def _get_config_extra(self) -> Literal["'allow'", "'forbid'"] | None:  # noqa: PLR6301
         # PydanticV2 RootModels cannot have extra fields
         return None
datamodel_code_generator/model/pydantic_v2/types.py

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import ClassVar, Dict, Optional, Sequence, Type
+from typing import ClassVar, Sequence

 from datamodel_code_generator.format import DatetimeClassType
 from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
@@ -13,15 +13,15 @@ from datamodel_code_generator.types import DataType, StrictTypes, Types


 class DataTypeManager(_DataTypeManager):
-    PATTERN_KEY: ClassVar[str] = 'pattern'
+    PATTERN_KEY: ClassVar[str] = "pattern"

     def type_map_factory(
         self,
-        data_type: Type[DataType],
+        data_type: type[DataType],
         strict_types: Sequence[StrictTypes],
         pattern_key: str,
-        target_datetime_class: Optional[DatetimeClassType] = None,
-    ) -> Dict[Types, DataType]:
+        target_datetime_class: DatetimeClassType | None = None,
+    ) -> dict[Types, DataType]:
         result = {
             **super().type_map_factory(
                 data_type,
@@ -34,8 +34,9 @@ class DataTypeManager(_DataTypeManager):
                 strict=StrictTypes.str in strict_types,
                 # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
                 kwargs={
-                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
-                    **({'strict': True} if StrictTypes.str in strict_types else {}),
+                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
+                    r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
+                    **({"strict": True} if StrictTypes.str in strict_types else {}),
                 },
             ),
         }
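
The only behavioural question in the types.py hunk above is whether splitting the hostname pattern across two raw-string literals changes its value; it does not, because Python joins adjacent string literals at compile time. A quick standalone check (illustrative, not part of the diff):

    split = (
        r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
        r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'"
    )
    original = r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'"
    # Adjacent literals are concatenated by the parser, so the emitted pattern is identical.
    assert split == original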
datamodel_code_generator/model/rootmodel.py

@@ -6,4 +6,4 @@ from datamodel_code_generator.model import DataModel


 class RootModel(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'root.jinja2'
+    TEMPLATE_FILE_PATH: ClassVar[str] = "root.jinja2"
datamodel_code_generator/model/scalar.py

@@ -1,53 +1,56 @@
 from __future__ import annotations

 from collections import defaultdict
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING, Any, ClassVar, Tuple

 from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, Import
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.reference import Reference

-_INT: str = 'int'
-_FLOAT: str = 'float'
-_BOOLEAN: str = 'bool'
-_STR: str = 'str'
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+_INT: str = "int"
+_FLOAT: str = "float"
+_BOOLEAN: str = "bool"
+_STR: str = "str"

 # default graphql scalar types
 DEFAULT_GRAPHQL_SCALAR_TYPE = _STR

-DEFAULT_GRAPHQL_SCALAR_TYPES: Dict[str, str] = {
-    'Boolean': _BOOLEAN,
-    'String': _STR,
-    'ID': _STR,
-    'Int': _INT,
-    'Float': _FLOAT,
+DEFAULT_GRAPHQL_SCALAR_TYPES: dict[str, str] = {
+    "Boolean": _BOOLEAN,
+    "String": _STR,
+    "ID": _STR,
+    "Int": _INT,
+    "Float": _FLOAT,
 }


 class DataTypeScalar(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Scalar.jinja2'
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)
+    TEMPLATE_FILE_PATH: ClassVar[str] = "Scalar.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)  # noqa: UP006

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
-    ):
+    ) -> None:
         extra_template_data = extra_template_data or defaultdict(dict)

         scalar_name = reference.name
@@ -56,12 +59,10 @@ class DataTypeScalar(DataModel):

         # py_type
         py_type = extra_template_data[scalar_name].get(
-            'py_type',
-            DEFAULT_GRAPHQL_SCALAR_TYPES.get(
-                reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE
-            ),
+            "py_type",
+            DEFAULT_GRAPHQL_SCALAR_TYPES.get(reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE),
         )
-        extra_template_data[scalar_name]['py_type'] = py_type
+        extra_template_data[scalar_name]["py_type"] = py_type

         super().__init__(
             reference=reference,
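
The change that recurs throughout this release is the combination of from __future__ import annotations with a TYPE_CHECKING block, which keeps type-only imports (Path, Reference, defaultdict) out of the runtime import graph while still allowing PEP 604 unions such as "Path | None" in signatures. A minimal standalone sketch of the idiom (module and names are illustrative, not from the package):

    from __future__ import annotations  # annotations are stored as strings, never evaluated at runtime

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Visible to static type checkers only; no runtime import cost.
        from pathlib import Path


    def describe(path: Path | None = None) -> str:
        # "Path | None" is accepted even on interpreters without native X | Y support,
        # because the deferred annotation is never evaluated at runtime.
        return "<no path>" if path is None else str(path)


    print(describe())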