datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. datamodel_code_generator/__init__.py +654 -185
  2. datamodel_code_generator/__main__.py +872 -388
  3. datamodel_code_generator/arguments.py +798 -0
  4. datamodel_code_generator/cli_options.py +295 -0
  5. datamodel_code_generator/format.py +292 -54
  6. datamodel_code_generator/http.py +85 -10
  7. datamodel_code_generator/imports.py +152 -43
  8. datamodel_code_generator/model/__init__.py +138 -1
  9. datamodel_code_generator/model/base.py +531 -120
  10. datamodel_code_generator/model/dataclass.py +211 -0
  11. datamodel_code_generator/model/enum.py +133 -12
  12. datamodel_code_generator/model/imports.py +22 -0
  13. datamodel_code_generator/model/msgspec.py +462 -0
  14. datamodel_code_generator/model/pydantic/__init__.py +30 -25
  15. datamodel_code_generator/model/pydantic/base_model.py +304 -100
  16. datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
  17. datamodel_code_generator/model/pydantic/dataclass.py +15 -4
  18. datamodel_code_generator/model/pydantic/imports.py +40 -27
  19. datamodel_code_generator/model/pydantic/types.py +188 -96
  20. datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
  21. datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
  22. datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
  23. datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
  24. datamodel_code_generator/model/pydantic_v2/types.py +143 -0
  25. datamodel_code_generator/model/scalar.py +124 -0
  26. datamodel_code_generator/model/template/Enum.jinja2 +15 -2
  27. datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
  28. datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
  29. datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
  30. datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
  31. datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
  32. datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
  33. datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
  34. datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
  35. datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
  36. datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
  37. datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
  38. datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
  39. datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
  40. datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
  41. datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
  42. datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
  43. datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
  44. datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
  45. datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
  46. datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
  47. datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
  48. datamodel_code_generator/model/type_alias.py +70 -0
  49. datamodel_code_generator/model/typed_dict.py +161 -0
  50. datamodel_code_generator/model/types.py +106 -0
  51. datamodel_code_generator/model/union.py +105 -0
  52. datamodel_code_generator/parser/__init__.py +30 -12
  53. datamodel_code_generator/parser/_graph.py +67 -0
  54. datamodel_code_generator/parser/_scc.py +171 -0
  55. datamodel_code_generator/parser/base.py +2426 -380
  56. datamodel_code_generator/parser/graphql.py +652 -0
  57. datamodel_code_generator/parser/jsonschema.py +2518 -647
  58. datamodel_code_generator/parser/openapi.py +631 -222
  59. datamodel_code_generator/py.typed +0 -0
  60. datamodel_code_generator/pydantic_patch.py +28 -0
  61. datamodel_code_generator/reference.py +672 -290
  62. datamodel_code_generator/types.py +521 -145
  63. datamodel_code_generator/util.py +155 -0
  64. datamodel_code_generator/watch.py +65 -0
  65. datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
  66. datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
  67. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
  68. datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
  69. datamodel_code_generator/version.py +0 -1
  70. datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
  71. datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
  72. datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
  73. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,268 @@
1
+ """Pydantic v2 BaseModel implementation.
2
+
3
+ Provides Constraints, DataModelField, and BaseModel for Pydantic v2
4
+ with support for Field() constraints and ConfigDict.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import re
10
+ from enum import Enum
11
+ from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, Optional
12
+
13
+ from pydantic import Field
14
+ from typing_extensions import Literal
15
+
16
+ from datamodel_code_generator.model.base import UNDEFINED, DataModelFieldBase
17
+ from datamodel_code_generator.model.pydantic.base_model import (
18
+ BaseModelBase,
19
+ )
20
+ from datamodel_code_generator.model.pydantic.base_model import (
21
+ Constraints as _Constraints,
22
+ )
23
+ from datamodel_code_generator.model.pydantic.base_model import (
24
+ DataModelField as DataModelFieldV1,
25
+ )
26
+ from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_CONFIG_DICT
27
+ from datamodel_code_generator.util import field_validator, model_validator
28
+
29
+ if TYPE_CHECKING:
30
+ from collections import defaultdict
31
+ from pathlib import Path
32
+
33
+ from datamodel_code_generator.reference import Reference
34
+
35
+
36
class UnionMode(Enum):
    """Union discriminator mode for Pydantic v2.

    Mirrors pydantic v2's ``Field(union_mode=...)`` options: ``smart`` lets
    pydantic pick the best-matching member, ``left_to_right`` validates
    members in declaration order.
    """

    smart = "smart"  # pydantic selects the best-matching union member
    left_to_right = "left_to_right"  # members are tried in declaration order
41
+
42
+
43
class Constraints(_Constraints):
    """Field constraints for Pydantic v2, where ``pattern`` supersedes v1's ``regex``."""

    # Redeclared so the v2 attribute names take precedence over any alias
    # configuration inherited from the v1 Constraints base.
    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045

    @model_validator(mode="before")
    def validate_min_max_items(cls, values: Any) -> dict[str, Any]:  # noqa: N805
        """Rename JSON Schema ``minItems``/``maxItems`` to ``minLength``/``maxLength``."""
        if not isinstance(values, dict):  # pragma: no cover
            return values
        # Translate each array-length key to its string-length counterpart.
        for source_key, target_key in (("minItems", "minLength"), ("maxItems", "maxLength")):
            bound = values.pop(source_key, None)
            if bound is not None:
                values[target_key] = bound
        return values
62
+
63
+
64
class DataModelField(DataModelFieldV1):
    """Pydantic v2 field with Field() constraints and json_schema_extra support."""

    # Constraint keys rendered as part of the annotated type rather than as
    # plain Field() keyword arguments.
    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
        "alias",
        "default",
        "gt",
        "ge",
        "lt",
        "le",
        "multiple_of",
        "min_length",
        "max_length",
        "pattern",
    }
    # Keyword arguments natively understood by pydantic v2's Field(); any
    # other key found in the data is funneled into json_schema_extra (see
    # _process_data_in_str below).
    _DEFAULT_FIELD_KEYS: ClassVar[set[str]] = {
        "default",
        "default_factory",
        "alias",
        "alias_priority",
        "validation_alias",
        "serialization_alias",
        "title",
        "description",
        "examples",
        "exclude",
        "discriminator",
        "json_schema_extra",
        "frozen",
        "validate_default",
        "repr",
        "init_var",
        "kw_only",
        "pattern",
        "strict",
        "gt",
        "ge",
        "lt",
        "le",
        "multiple_of",
        "allow_inf_nan",
        "max_digits",
        "decimal_places",
        "min_length",
        "max_length",
        "union_mode",
    }
    # Override the v1 field type with the v2 Constraints (pattern, not regex).
    constraints: Optional[Constraints] = None  # pyright: ignore[reportIncompatibleVariableOverride] # noqa: UP045
    # pydantic v2 renamed parse_obj() to model_validate().
    _PARSE_METHOD: ClassVar[str] = "model_validate"
    can_have_extra_keys: ClassVar[bool] = False

    @field_validator("extras")
    def validate_extras(cls, values: Any) -> dict[str, Any]:  # noqa: N805
        """Validate and convert example to examples list.

        pydantic v2 only accepts the plural ``examples`` keyword; an existing
        ``examples`` entry wins over a singular ``example``.
        """
        if not isinstance(values, dict):  # pragma: no cover
            return values
        if "examples" in values:
            return values

        if "example" in values:
            values["examples"] = [values.pop("example")]
        return values

    def process_const(self) -> None:
        """Process const field constraint using literal type."""
        self._process_const_as_literal()

    def _process_data_in_str(self, data: dict[str, Any]) -> None:
        """Adapt v1-style Field() keyword data in place for pydantic v2."""
        if self.const:
            # const is removed in pydantic 2.0
            data.pop("const")

        # unique_items is not supported in pydantic 2.0
        data.pop("unique_items", None)

        if self.use_frozen_field and self.read_only:
            data["frozen"] = True

        if "union_mode" in data:
            if self.data_type.is_union:
                # Render the UnionMode enum member as its string value.
                data["union_mode"] = data.pop("union_mode").value
            else:
                # union_mode is meaningless on non-union types; drop it.
                data.pop("union_mode")

        # **extra is not supported in pydantic 2.0; move unrecognised keys
        # into the json_schema_extra dict instead.
        json_schema_extra = {k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS}
        if json_schema_extra:
            data["json_schema_extra"] = json_schema_extra
            for key in json_schema_extra:
                data.pop(key)

    def _process_annotated_field_arguments(  # noqa: PLR6301
        self,
        field_arguments: list[str],
    ) -> list[str]:
        # Unlike the v1 implementation, annotated fields keep their Field()
        # arguments unchanged.
        return field_arguments
160
+
161
+
162
class ConfigAttribute(NamedTuple):
    """Configuration attribute mapping for ConfigDict conversion.

    Maps a template-data key ``from_`` (v1 or v2 spelling) to the pydantic
    v2 ``ConfigDict`` parameter ``to``; ``invert`` negates the boolean value
    during translation (e.g. ``allow_mutation`` -> ``frozen``).
    """

    from_: str  # key looked up in extra_template_data
    to: str  # target ConfigDict parameter name
    invert: bool  # negate the value while translating
168
+
169
+
170
class BaseModel(BaseModelBase):
    """Pydantic v2 BaseModel with ConfigDict and pattern-based regex_engine support."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/BaseModel.jinja2"
    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
    # Translation table from (v1 or v2) template-data keys to ConfigDict
    # parameters; the third element negates the boolean (allow_mutation is
    # the logical inverse of frozen).
    CONFIG_ATTRIBUTES: ClassVar[list[ConfigAttribute]] = [
        ConfigAttribute("allow_population_by_field_name", "populate_by_name", False),  # noqa: FBT003
        ConfigAttribute("populate_by_name", "populate_by_name", False),  # noqa: FBT003
        ConfigAttribute("allow_mutation", "frozen", True),  # noqa: FBT003
        ConfigAttribute("frozen", "frozen", False),  # noqa: FBT003
        ConfigAttribute("use_attribute_docstrings", "use_attribute_docstrings", False),  # noqa: FBT003
    ]

    def __init__(  # noqa: PLR0913
        self,
        *,
        reference: Reference,
        fields: list[DataModelFieldBase],
        decorators: list[str] | None = None,
        base_classes: list[Reference] | None = None,
        custom_base_class: str | None = None,
        custom_template_dir: Path | None = None,
        extra_template_data: defaultdict[str, Any] | None = None,
        path: Path | None = None,
        description: str | None = None,
        default: Any = UNDEFINED,
        nullable: bool = False,
        keyword_only: bool = False,
        treat_dot_as_module: bool = False,
    ) -> None:
        """Initialize BaseModel with ConfigDict generation from template data.

        After base initialisation, config parameters are gathered from, in
        order: the ``extra`` policy, the CONFIG_ATTRIBUTES translation table,
        custom field types, regex lookaround detection, and raw "config"
        template data. If any were found, a ConfigDict is stored in the
        template data and its import registered.
        """
        super().__init__(
            reference=reference,
            fields=fields,
            decorators=decorators,
            base_classes=base_classes,
            custom_base_class=custom_base_class,
            custom_template_dir=custom_template_dir,
            extra_template_data=extra_template_data,
            path=path,
            description=description,
            default=default,
            nullable=nullable,
            keyword_only=keyword_only,
            treat_dot_as_module=treat_dot_as_module,
        )
        config_parameters: dict[str, Any] = {}

        extra = self._get_config_extra()
        if extra:
            config_parameters["extra"] = extra

        for from_, to, invert in self.CONFIG_ATTRIBUTES:
            if from_ in self.extra_template_data:
                config_parameters[to] = (
                    not self.extra_template_data[from_] if invert else self.extra_template_data[from_]
                )
        # Any custom (non-pydantic) field type requires arbitrary_types_allowed.
        for data_type in self.all_data_types:
            if data_type.is_custom_type:  # pragma: no cover
                config_parameters["arbitrary_types_allowed"] = True
                break

        for field in self.fields:
            # Check if a regex pattern uses lookarounds.
            # Depending on the generation configuration, the pattern may end up in two different places.
            pattern = (isinstance(field.constraints, Constraints) and field.constraints.pattern) or (
                field.data_type.kwargs or {}
            ).get("pattern")
            # pydantic v2's default (Rust) regex engine rejects lookarounds,
            # so switch the model to Python's re engine when one is found.
            if pattern and re.search(r"\(\?<?[=!]", pattern):
                config_parameters["regex_engine"] = '"python-re"'
                break

        # Raw "config" template data (a plain dict) overrides everything above.
        if isinstance(self.extra_template_data.get("config"), dict):
            for key, value in self.extra_template_data["config"].items():
                config_parameters[key] = value  # noqa: PERF403

        if config_parameters:
            from datamodel_code_generator.model.pydantic_v2 import ConfigDict  # noqa: PLC0415

            self.extra_template_data["config"] = ConfigDict.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
            self._additional_imports.append(IMPORT_CONFIG_DICT)

    def _get_config_extra(self) -> Literal["'allow'", "'forbid'", "'ignore'"] | None:
        """Resolve the ConfigDict ``extra`` value from template data.

        Explicit extra_fields / allow_extra_fields options take precedence
        over the schema's additionalProperties flag; returns None when no
        source specifies a policy.
        """
        additional_properties = self.extra_template_data.get("additionalProperties")
        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
        extra_fields = self.extra_template_data.get("extra_fields")

        config_extra = None
        if allow_extra_fields or extra_fields == "allow":
            config_extra = "'allow'"
        elif extra_fields == "forbid":
            config_extra = "'forbid'"
        elif extra_fields == "ignore":
            config_extra = "'ignore'"
        elif additional_properties is True:
            config_extra = "'allow'"
        elif additional_properties is False:
            config_extra = "'forbid'"
        return config_extra
@@ -0,0 +1,15 @@
1
+ """Import definitions for Pydantic v2 types.
2
+
3
+ Provides pre-defined Import objects for Pydantic v2 types (ConfigDict, AwareDatetime, etc.).
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ from datamodel_code_generator.imports import Import
9
+
10
+ IMPORT_CONFIG_DICT = Import.from_full_path("pydantic.ConfigDict")
11
+ IMPORT_AWARE_DATETIME = Import.from_full_path("pydantic.AwareDatetime")
12
+ IMPORT_NAIVE_DATETIME = Import.from_full_path("pydantic.NaiveDatetime")
13
+ IMPORT_BASE64STR = Import.from_full_path("pydantic.Base64Str")
14
+ # IMPORT_BASE64STR: Used for OpenAPI strings with format "byte" (base64 encoded characters).
15
+ IMPORT_SERIALIZE_AS_ANY = Import.from_full_path("pydantic.SerializeAsAny")
@@ -0,0 +1,35 @@
1
+ """Pydantic v2 RootModel implementation.
2
+
3
+ Generates models inheriting from pydantic.RootModel for wrapping single types.
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ from typing import Any, ClassVar, Literal
9
+
10
+ from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel
11
+
12
+
13
class RootModel(BaseModel):
    """DataModel for Pydantic v2 RootModel."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/RootModel.jinja2"
    BASE_CLASS: ClassVar[str] = "pydantic.RootModel"

    def __init__(
        self,
        **kwargs: Any,
    ) -> None:
        """Initialize RootModel, discarding any custom_base_class argument.

        Pydantic V2 handles a custom base differently from V1: it would not
        be treated as a root model, and a class cannot implement both
        BaseModel and RootModel, so the option is dropped here instead of
        being forwarded.
        """
        # Silently discard; this model must inherit pydantic.RootModel.
        kwargs.pop("custom_base_class", None)
        super().__init__(**kwargs)

    def _get_config_extra(self) -> Literal["'allow'", "'forbid'"] | None:  # noqa: PLR6301
        """Return None: Pydantic v2 RootModels cannot have extra fields."""
        return None
@@ -0,0 +1,143 @@
1
+ """Pydantic v2 type manager.
2
+
3
+ Maps schema types to Pydantic v2 specific types with AwareDatetime, NaiveDatetime, etc.
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ from typing import TYPE_CHECKING, ClassVar
9
+
10
+ from datamodel_code_generator.format import DatetimeClassType
11
+ from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
12
+ from datamodel_code_generator.model.pydantic.imports import IMPORT_CONSTR
13
+ from datamodel_code_generator.model.pydantic_v2.imports import (
14
+ IMPORT_AWARE_DATETIME,
15
+ IMPORT_BASE64STR,
16
+ IMPORT_NAIVE_DATETIME,
17
+ IMPORT_SERIALIZE_AS_ANY,
18
+ )
19
+ from datamodel_code_generator.types import (
20
+ DataType,
21
+ PythonVersion,
22
+ PythonVersionMin,
23
+ StrictTypes,
24
+ Types,
25
+ )
26
+
27
+ if TYPE_CHECKING:
28
+ from collections.abc import Iterator, Sequence
29
+
30
+ from datamodel_code_generator.imports import Import
31
+
32
+
33
class PydanticV2DataType(DataType):
    """Pydantic v2-specific DataType with SerializeAsAny support."""

    def _should_wrap_with_serialize_as_any(self) -> bool:
        # Wrapping is opt-in via the use_serialize_as_any flag.
        if not self.use_serialize_as_any:
            return False

        assert self.reference is not None

        from datamodel_code_generator.model.base import DataModel  # noqa: PLC0415

        # Wrap only when the reference resolves to at least one model that
        # actually declares fields.
        for child in self.reference.children:
            if isinstance(child, DataModel) and child.fields:
                return True
        return False

    def _get_wrapped_reference_type_hint(self, type_: str) -> str:
        # Leave the hint untouched unless wrapping is required.
        if not self._should_wrap_with_serialize_as_any():
            return type_
        return f"SerializeAsAny[{type_}]"

    @property
    def imports(self) -> Iterator[Import]:
        """Yield imports, adding SerializeAsAny when the type hint uses it."""
        yield from super().imports

        if "SerializeAsAny" in self.type_hint:
            yield IMPORT_SERIALIZE_AS_ANY
59
+
60
+
61
class DataTypeManager(_DataTypeManager):
    """Type manager for Pydantic v2 with pattern key support."""

    # pydantic v2 renamed the constrained-string keyword "regex" to "pattern".
    PATTERN_KEY: ClassVar[str] = "pattern"

    def __init__(  # noqa: PLR0913, PLR0917
        self,
        python_version: PythonVersion = PythonVersionMin,
        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
        strict_types: Sequence[StrictTypes] | None = None,
        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
        use_union_operator: bool = False,  # noqa: FBT001, FBT002
        use_pendulum: bool = False,  # noqa: FBT001, FBT002
        target_datetime_class: DatetimeClassType | None = None,
        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
    ) -> None:
        """Initialize with pydantic v2-specific DataType."""
        super().__init__(
            python_version=python_version,
            use_standard_collections=use_standard_collections,
            use_generic_container_types=use_generic_container_types,
            strict_types=strict_types,
            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
            use_decimal_for_multiple_of=use_decimal_for_multiple_of,
            use_union_operator=use_union_operator,
            use_pendulum=use_pendulum,
            target_datetime_class=target_datetime_class,
            treat_dot_as_module=treat_dot_as_module,
            use_serialize_as_any=use_serialize_as_any,
        )

        # Override the data_type with our pydantic v2 version
        from pydantic import create_model  # noqa: PLC0415

        # Build a DataType subclass whose field defaults capture this
        # manager's configuration, so instances need not receive the flags.
        self.data_type: type[DataType] = create_model(
            "PydanticV2ContextDataType",
            python_version=(PythonVersion, python_version),
            use_standard_collections=(bool, use_standard_collections),
            use_generic_container=(bool, use_generic_container_types),
            use_union_operator=(bool, use_union_operator),
            treat_dot_as_module=(bool, treat_dot_as_module),
            use_serialize_as_any=(bool, use_serialize_as_any),
            __base__=PydanticV2DataType,
        )

    def type_map_factory(
        self,
        data_type: type[DataType],
        strict_types: Sequence[StrictTypes],
        pattern_key: str,
        target_datetime_class: DatetimeClassType | None = None,
    ) -> dict[Types, DataType]:
        """Create type mapping with Pydantic v2 specific types and datetime classes."""
        result = {
            **super().type_map_factory(
                data_type,
                strict_types,
                pattern_key,
                target_datetime_class or DatetimeClassType.Datetime,
            ),
            # hostname: constrained string with the fastjsonschema hostname regex.
            Types.hostname: self.data_type.from_import(
                IMPORT_CONSTR,
                strict=StrictTypes.str in strict_types,
                # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
                kwargs={
                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
                    r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
                    **({"strict": True} if StrictTypes.str in strict_types else {}),
                },
            ),
            # byte: OpenAPI format "byte" maps to pydantic's Base64Str.
            Types.byte: self.data_type.from_import(
                IMPORT_BASE64STR,
                strict=StrictTypes.str in strict_types,
            ),
        }
        # Swap the date_time type when an aware/naive datetime class is requested.
        if target_datetime_class == DatetimeClassType.Awaredatetime:
            result[Types.date_time] = data_type.from_import(IMPORT_AWARE_DATETIME)
        elif target_datetime_class == DatetimeClassType.Naivedatetime:
            result[Types.date_time] = data_type.from_import(IMPORT_NAIVE_DATETIME)
        return result
@@ -0,0 +1,124 @@
1
+ """Scalar type model generator.
2
+
3
+ Generates type aliases for GraphQL scalar types.
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ from collections import defaultdict
9
+ from typing import TYPE_CHECKING, Any, ClassVar
10
+
11
+ from datamodel_code_generator.imports import (
12
+ IMPORT_TYPE_ALIAS,
13
+ IMPORT_TYPE_ALIAS_BACKPORT,
14
+ IMPORT_TYPE_ALIAS_TYPE,
15
+ Import,
16
+ )
17
+ from datamodel_code_generator.model import DataModel, DataModelFieldBase
18
+ from datamodel_code_generator.model.base import UNDEFINED
19
+
20
+ if TYPE_CHECKING:
21
+ from pathlib import Path
22
+
23
+ from datamodel_code_generator.reference import Reference
24
+
25
+ _INT: str = "int"
26
+ _FLOAT: str = "float"
27
+ _BOOLEAN: str = "bool"
28
+ _STR: str = "str"
29
+
30
+ # default graphql scalar types
31
+ DEFAULT_GRAPHQL_SCALAR_TYPE = _STR
32
+
33
+ DEFAULT_GRAPHQL_SCALAR_TYPES: dict[str, str] = {
34
+ "Boolean": _BOOLEAN,
35
+ "String": _STR,
36
+ "ID": _STR,
37
+ "Int": _INT,
38
+ "Float": _FLOAT,
39
+ }
40
+
41
+
42
class _DataTypeScalarBase(DataModel):
    """Base class for GraphQL scalar types with shared __init__ logic."""

    def __init__(  # noqa: PLR0913
        self,
        *,
        reference: Reference,
        fields: list[DataModelFieldBase],
        decorators: list[str] | None = None,
        base_classes: list[Reference] | None = None,
        custom_base_class: str | None = None,
        custom_template_dir: Path | None = None,
        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
        methods: list[str] | None = None,
        path: Path | None = None,
        description: str | None = None,
        default: Any = UNDEFINED,
        nullable: bool = False,
        keyword_only: bool = False,
        treat_dot_as_module: bool = False,
    ) -> None:
        """Resolve the scalar's Python type and record it in the template data."""
        extra_template_data = extra_template_data or defaultdict(dict)

        scalar_name = reference.name
        if scalar_name not in extra_template_data:
            extra_template_data[scalar_name] = defaultdict(dict)

        # Keep a caller-supplied py_type; otherwise map known GraphQL
        # scalars, falling back to str for custom scalars.
        extra_template_data[scalar_name].setdefault(
            "py_type",
            DEFAULT_GRAPHQL_SCALAR_TYPES.get(scalar_name, DEFAULT_GRAPHQL_SCALAR_TYPE),
        )

        super().__init__(
            reference=reference,
            fields=fields,
            decorators=decorators,
            base_classes=base_classes,
            custom_base_class=custom_base_class,
            custom_template_dir=custom_template_dir,
            extra_template_data=extra_template_data,
            methods=methods,
            path=path,
            description=description,
            default=default,
            nullable=nullable,
            keyword_only=keyword_only,
            treat_dot_as_module=treat_dot_as_module,
        )
93
+
94
+
95
class DataTypeScalar(_DataTypeScalarBase):
    """GraphQL scalar using TypeAlias annotation for Python 3.10+ (Name: TypeAlias = type)."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeAliasAnnotation.jinja2"
    BASE_CLASS: ClassVar[str] = ""  # a type alias has no base class
    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)


class DataTypeScalarBackport(_DataTypeScalarBase):
    """GraphQL scalar using TypeAlias annotation for Python 3.9 (Name: TypeAlias = type)."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeAliasAnnotation.jinja2"
    BASE_CLASS: ClassVar[str] = ""
    # Same template as DataTypeScalar, but with the backport import.
    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS_BACKPORT,)


class DataTypeScalarTypeBackport(_DataTypeScalarBase):
    """GraphQL scalar using TypeAliasType for Python 3.9-3.11 (Name = TypeAliasType("Name", type))."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeAliasType.jinja2"
    BASE_CLASS: ClassVar[str] = ""
    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS_TYPE,)


class DataTypeScalarTypeStatement(_DataTypeScalarBase):
    """GraphQL scalar using type statement for Python 3.12+ (type Name = type)."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeStatement.jinja2"
    BASE_CLASS: ClassVar[str] = ""
    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()  # the `type` statement needs no import
@@ -1,12 +1,25 @@
1
1
{#- Enum model template: renders `class Name(base_class):` with an optional
    class docstring and per-member docstrings. Trim markers keep the output
    identical whether or not the optional blocks render. -#}
{% for decorator in decorators -%}
{{ decorator }}
{% endfor -%}
class {{ class_name }}({{ base_class }}):
{%- if description %}
    """
    {{ description | indent(4) }}
    """
{%- endif %}
{%- for field in fields %}
    {{ field.name }} = {{ field.default }}
    {%- if field.docstring %}
    """
    {{ field.docstring | indent(4) }}
    """
    {%- if field.use_inline_field_description and not loop.last %}

    {% endif %}
    {%- elif field.inline_field_docstring %}
    {{ field.inline_field_docstring }}
    {%- if not loop.last %}

    {% endif %}
    {%- endif %}
{%- endfor -%}
@@ -0,0 +1,6 @@
1
{#- Scalar alias in PEP 613 annotation form: `Name: TypeAlias = py_type`. -#}
{{ class_name }}: TypeAlias = {{ py_type }}
{%- if description %}
"""
{{ description }}
"""
{%- endif %}
@@ -0,0 +1,6 @@
1
{#- Scalar alias via TypeAliasType: `Name = TypeAliasType("Name", py_type)`. -#}
{{ class_name }} = TypeAliasType("{{ class_name }}", {{ py_type }})
{%- if description %}
"""
{{ description }}
"""
{%- endif %}
@@ -0,0 +1,6 @@
1
{#- Scalar alias via the Python 3.12+ `type` statement (PEP 695). -#}
type {{ class_name }} = {{ py_type }}
{%- if description %}
"""
{{ description }}
"""
{%- endif %}
@@ -0,0 +1,20 @@
1
{#- Type alias in annotation form. The macro renders the aliased type:
    a precomputed `annotated` string, else Annotated[hint, Field(...)] when
    Field metadata exists, else the bare type hint. -#}
{%- macro get_type_annotation(_field) -%}
{%- if _field.annotated -%}
{{ _field.annotated }}
{%- elif _field.field -%}
Annotated[{{ _field.type_hint }}, {{ _field.field }}]
{%- else -%}
{{ _field.type_hint }}
{%- endif -%}
{%- endmacro -%}

{{ class_name }}: TypeAlias = {{ get_type_annotation(fields[0]) }}{% if comment is defined %} # {{ comment }}{% endif %}
{%- if description %}
"""
{{ description | indent(0) }}
"""
{%- elif fields and fields[0].docstring %}
"""
{{ fields[0].docstring | indent(0) }}
"""
{%- endif %}
@@ -0,0 +1,20 @@
1
{#- Type alias via TypeAliasType. The macro renders the aliased type:
    a precomputed `annotated` string, else Annotated[hint, Field(...)] when
    Field metadata exists, else the bare type hint. -#}
{%- macro get_type_annotation(_field) -%}
{%- if _field.annotated -%}
{{ _field.annotated }}
{%- elif _field.field -%}
Annotated[{{ _field.type_hint }}, {{ _field.field }}]
{%- else -%}
{{ _field.type_hint }}
{%- endif -%}
{%- endmacro -%}

{{ class_name }} = TypeAliasType("{{ class_name }}", {{ get_type_annotation(fields[0]) }}){% if comment is defined %} # {{ comment }}{% endif %}
{%- if description %}
"""
{{ description | indent(0) }}
"""
{%- elif fields and fields[0].docstring %}
"""
{{ fields[0].docstring | indent(0) }}
"""
{%- endif %}