datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datamodel_code_generator/__init__.py +654 -185
- datamodel_code_generator/__main__.py +872 -388
- datamodel_code_generator/arguments.py +798 -0
- datamodel_code_generator/cli_options.py +295 -0
- datamodel_code_generator/format.py +292 -54
- datamodel_code_generator/http.py +85 -10
- datamodel_code_generator/imports.py +152 -43
- datamodel_code_generator/model/__init__.py +138 -1
- datamodel_code_generator/model/base.py +531 -120
- datamodel_code_generator/model/dataclass.py +211 -0
- datamodel_code_generator/model/enum.py +133 -12
- datamodel_code_generator/model/imports.py +22 -0
- datamodel_code_generator/model/msgspec.py +462 -0
- datamodel_code_generator/model/pydantic/__init__.py +30 -25
- datamodel_code_generator/model/pydantic/base_model.py +304 -100
- datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
- datamodel_code_generator/model/pydantic/dataclass.py +15 -4
- datamodel_code_generator/model/pydantic/imports.py +40 -27
- datamodel_code_generator/model/pydantic/types.py +188 -96
- datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
- datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
- datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
- datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
- datamodel_code_generator/model/pydantic_v2/types.py +143 -0
- datamodel_code_generator/model/scalar.py +124 -0
- datamodel_code_generator/model/template/Enum.jinja2 +15 -2
- datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
- datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
- datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
- datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
- datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
- datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
- datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
- datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
- datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
- datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
- datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
- datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
- datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
- datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
- datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
- datamodel_code_generator/model/type_alias.py +70 -0
- datamodel_code_generator/model/typed_dict.py +161 -0
- datamodel_code_generator/model/types.py +106 -0
- datamodel_code_generator/model/union.py +105 -0
- datamodel_code_generator/parser/__init__.py +30 -12
- datamodel_code_generator/parser/_graph.py +67 -0
- datamodel_code_generator/parser/_scc.py +171 -0
- datamodel_code_generator/parser/base.py +2426 -380
- datamodel_code_generator/parser/graphql.py +652 -0
- datamodel_code_generator/parser/jsonschema.py +2518 -647
- datamodel_code_generator/parser/openapi.py +631 -222
- datamodel_code_generator/py.typed +0 -0
- datamodel_code_generator/pydantic_patch.py +28 -0
- datamodel_code_generator/reference.py +672 -290
- datamodel_code_generator/types.py +521 -145
- datamodel_code_generator/util.py +155 -0
- datamodel_code_generator/watch.py +65 -0
- datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
- datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
- datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
- datamodel_code_generator/version.py +0 -1
- datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
- datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
- datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
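The added modules above (model/pydantic_v2/*, model/dataclass.py, model/msgspec.py, model/typed_dict.py, parser/graphql.py) correspond to the output targets and input formats gained between 0.11.12 and 0.45.0. As a minimal sketch of selecting one of the newer targets, the snippet below uses the project's documented Python API; the generate()/InputFileType/DataModelType names are assumed from that documentation, not from this diff.

```python
# Hedged sketch: generate Pydantic v2 models from an inline JSON Schema.
# generate(), InputFileType and DataModelType are assumed from the project's
# documented public API for the 0.45.x line; they are not shown in this diff.
from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate

schema = """
{"title": "Person", "type": "object",
 "properties": {"name": {"type": "string"}, "age": {"type": "integer", "minimum": 0}}}
"""

generate(
    schema,
    input_file_type=InputFileType.JsonSchema,
    input_filename="person.json",
    output=Path("person_model.py"),
    output_model_type=DataModelType.PydanticV2BaseModel,  # new target in this release range
)
```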
datamodel_code_generator/model/pydantic/base_model.py

@@ -1,136 +1,273 @@
+"""Pydantic v1 BaseModel implementation.
+
+Provides Constraints, DataModelField, and BaseModel for Pydantic v1.
+"""
+
+from __future__ import annotations
+
+from abc import ABC
+from functools import cached_property
 from pathlib import Path
-from typing import Any, ClassVar,
+from typing import TYPE_CHECKING, Any, ClassVar, Optional

 from pydantic import Field

-from datamodel_code_generator.imports import Import
 from datamodel_code_generator.model import (
     ConstraintsBase,
     DataModel,
     DataModelFieldBase,
 )
-from datamodel_code_generator.model.
-from datamodel_code_generator.
-
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.pydantic.imports import (
+    IMPORT_ANYURL,
+    IMPORT_EXTRA,
+    IMPORT_FIELD,
+)
+from datamodel_code_generator.types import STANDARD_LIST, UnionIntFloat, chain_as_tuple
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+
+    from datamodel_code_generator.imports import Import
+    from datamodel_code_generator.reference import Reference


 class Constraints(ConstraintsBase):
-
-
-
-
-
-
-
-
-
-
+    """Pydantic v1 field constraints (gt, ge, lt, le, regex, etc.)."""
+
+    gt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMinimum")  # noqa: UP045
+    ge: Optional[UnionIntFloat] = Field(None, alias="minimum")  # noqa: UP045
+    lt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMaximum")  # noqa: UP045
+    le: Optional[UnionIntFloat] = Field(None, alias="maximum")  # noqa: UP045
+    multiple_of: Optional[float] = Field(None, alias="multipleOf")  # noqa: UP045
+    min_items: Optional[int] = Field(None, alias="minItems")  # noqa: UP045
+    max_items: Optional[int] = Field(None, alias="maxItems")  # noqa: UP045
+    min_length: Optional[int] = Field(None, alias="minLength")  # noqa: UP045
+    max_length: Optional[int] = Field(None, alias="maxLength")  # noqa: UP045
+    regex: Optional[str] = Field(None, alias="pattern")  # noqa: UP045


 class DataModelField(DataModelFieldBase):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    """Field implementation for Pydantic v1 models."""
+
+    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
+        "alias",
+        "default",
+        "const",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "min_items",
+        "max_items",
+        "min_length",
+        "max_length",
+        "regex",
     }
-
+    _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le"}
+    constraints: Optional[Constraints] = None  # noqa: UP045
+    _PARSE_METHOD: ClassVar[str] = "parse_obj"

     @property
-    def
+    def has_default_factory_in_field(self) -> bool:
+        """Check if this field has a default_factory in Field() including computed ones."""
+        return "default_factory" in self.extras or self.__dict__.get("_computed_default_factory") is not None
+
+    @property
+    def method(self) -> str | None:
+        """Get the validation method name."""
         return self.validator

     @property
-    def validator(self) ->
+    def validator(self) -> str | None:
+        """Get the validator name."""
         return None
         # TODO refactor this method for other validation logic
-        # from datamodel_code_generator.model.pydantic import VALIDATOR_TEMPLATE
-        #
-        # return VALIDATOR_TEMPLATE.render(
-        #     field_name=self.name, types=','.join([t.type_hint for t in self.data_types])
-        # )

     @property
-    def field(self) ->
-        """
+    def field(self) -> str | None:
+        """For backwards compatibility."""
         result = str(self)
-        if
+        if (
+            self.use_default_kwarg
+            and not result.startswith("Field(...")
+            and not result.startswith("Field(default_factory=")
+        ):
+            # Use `default=` for fields that have a default value so that type
+            # checkers using @dataclass_transform can infer the field as
+            # optional in __init__.
+            result = result.replace("Field(", "Field(default=")
+        if not result:
             return None
-
         return result

-    def
-
-
-
+    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
+        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
+            return value
+
+        is_float_type = any(
+            data_type.type == "float"
+            or (data_type.strict and data_type.import_ and "Float" in data_type.import_.import_)
+            for data_type in self.data_type.all_data_types
+        )
+        if is_float_type:
+            return float(value)
+        str_value = str(value)
+        if "e" in str_value.lower():  # pragma: no cover
+            # Scientific notation like 1e-08 - keep as float
+            return float(value)
+        if isinstance(value, int) and not isinstance(value, bool):  # pragma: no branch
+            return value
+        return int(value)
+
+    def _get_default_as_pydantic_model(self) -> str | None:
+        for data_type in self.data_type.data_types or (self.data_type,):
+            # TODO: Check nested data_types
+            if data_type.is_dict:
+                # TODO: Parse dict model for default
+                continue
+            if data_type.is_list and len(data_type.data_types) == 1:
+                data_type_child = data_type.data_types[0]
+                if (
+                    data_type_child.reference
+                    and isinstance(data_type_child.reference.source, BaseModelBase)
+                    and isinstance(self.default, list)
+                ):  # pragma: no cover
+                    if not self.default:
+                        return STANDARD_LIST
+                    return (
+                        f"lambda :[{data_type_child.alias or data_type_child.reference.source.class_name}."
+                        f"{self._PARSE_METHOD}(v) for v in {self.default!r}]"
+                    )
+            elif data_type.reference and isinstance(data_type.reference.source, BaseModelBase):
+                if self.data_type.is_union:
+                    if not isinstance(self.default, (dict, list)):
+                        continue
+                    if isinstance(self.default, dict) and any(dt.is_dict for dt in self.data_type.data_types):
+                        continue
+                return (
+                    f"lambda :{data_type.alias or data_type.reference.source.class_name}."
+                    f"{self._PARSE_METHOD}({self.default!r})"
+                )
+        return None
+
+    def _process_data_in_str(self, data: dict[str, Any]) -> None:
+        if self.const:
+            data["const"] = True
+
+        if self.use_frozen_field and self.read_only:
+            data["allow_mutation"] = False

-    def
-
-
-
+    def _process_annotated_field_arguments(self, field_arguments: list[str]) -> list[str]:  # noqa: PLR6301
+        return field_arguments
+
+    def __str__(self) -> str:  # noqa: PLR0912
+        """Return Field() call with all constraints and metadata."""
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k not in self._EXCLUDE_FIELD_KEYS}
         if self.alias:
-            data[
+            data["alias"] = self.alias
+        has_type_constraints = self.data_type.kwargs is not None and len(self.data_type.kwargs) > 0
         if (
             self.constraints is not None
             and not self.self_reference()
-            and not self.data_type.strict
+            and not (self.data_type.strict and has_type_constraints)
         ):
-            data = {
+            data = {
+                **data,
+                **(
+                    {}
+                    if any(d.import_ == IMPORT_ANYURL for d in self.data_type.all_data_types)
+                    else {
+                        k: self._get_strict_field_constraint_value(k, v)
+                        for k, v in self.constraints.dict(exclude_unset=True).items()
+                    }
+                ),
+            }

-
-
-
-
+        if self.use_field_description:
+            data.pop("description", None)  # Description is part of field docstring
+
+        self._process_data_in_str(data)
+
+        discriminator = data.pop("discriminator", None)
+        if discriminator:
+            if isinstance(discriminator, str):
+                data["discriminator"] = discriminator
+            elif isinstance(discriminator, dict):  # pragma: no cover
+                data["discriminator"] = discriminator["propertyName"]
+
+        if self.required:
+            default_factory = None
+        elif self.default is not UNDEFINED and self.default is not None and "default_factory" not in data:
+            default_factory = self._get_default_as_pydantic_model()
+        else:
+            default_factory = data.pop("default_factory", None)
+
+        self.__dict__["_computed_default_factory"] = default_factory
+
+        field_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
+
+        if not field_arguments and not default_factory:
             if self.nullable and self.required:
-                return
+                return "Field(...)"  # Field() is for mypy
             return ""

-
+        if default_factory:
+            field_arguments = [f"default_factory={default_factory}", *field_arguments]
+
         if self.use_annotated:
-
-
+            field_arguments = self._process_annotated_field_arguments(field_arguments)
+        elif self.required:
+            field_arguments = ["...", *field_arguments]
+        elif not default_factory:
+            from datamodel_code_generator.model.base import repr_set_sorted  # noqa: PLC0415
+
+            default_repr = repr_set_sorted(self.default) if isinstance(self.default, set) else repr(self.default)
+            field_arguments = [default_repr, *field_arguments]

-        return f
+        return f"Field({', '.join(field_arguments)})"

     @property
-    def annotated(self) ->
+    def annotated(self) -> str | None:
+        """Get the Annotated type hint if use_annotated is enabled."""
         if not self.use_annotated or not str(self):
             return None
-        return f
+        return f"Annotated[{self.type_hint}, {self!s}]"
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get all required imports including Field if needed."""
+        if self.field:
+            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
+        return super().imports


-class
-
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
+class BaseModelBase(DataModel, ABC):
+    """Abstract base class for Pydantic BaseModel implementations."""

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields:
-        decorators:
-        base_classes:
-        custom_base_class:
-        custom_template_dir:
-        extra_template_data:
-        path:
-        description:
-
-
-
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize the BaseModel with fields and configuration."""
+        methods: list[str] = [field.method for field in fields if field.method]

         super().__init__(
-            fields=fields,
+            fields=fields,
             reference=reference,
             decorators=decorators,
             base_classes=base_classes,

@@ -140,34 +277,101 @@ class BaseModel(DataModel):
             methods=methods,
             path=path,
             description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+    @cached_property
+    def template_file_path(self) -> Path:
+        """Get the template file path with backward compatibility support."""
+        # This property is for Backward compatibility
+        # Current version supports '{custom_template_dir}/BaseModel.jinja'
+        # But, Future version will support only '{custom_template_dir}/pydantic/BaseModel.jinja'
+        if self._custom_template_dir is not None:
+            custom_template_file_path = self._custom_template_dir / Path(self.TEMPLATE_FILE_PATH).name
+            if custom_template_file_path.exists():
+                return custom_template_file_path
+        return super().template_file_path
+
+
+class BaseModel(BaseModelBase):
+    """Pydantic v1 BaseModel implementation."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
+
+    def __init__(  # noqa: PLR0912, PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize the BaseModel with Config and extra fields support."""
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )
+        config_parameters: dict[str, Any] = {}

-
+        additional_properties = self.extra_template_data.get("additionalProperties")
+        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
+        extra_fields = self.extra_template_data.get("extra_fields")

-
-        if additionalProperties is not None:
-            config_parameters['extra'] = (
-                'Extra.allow' if additionalProperties else 'Extra.forbid'
-            )
+        if allow_extra_fields or extra_fields or additional_properties is not None:
            self._additional_imports.append(IMPORT_EXTRA)

-
+        if allow_extra_fields:
+            config_parameters["extra"] = "Extra.allow"
+        elif extra_fields:
+            config_parameters["extra"] = f"Extra.{extra_fields}"
+        elif additional_properties is True:
+            config_parameters["extra"] = "Extra.allow"
+        elif additional_properties is False:
+            config_parameters["extra"] = "Extra.forbid"
+
+        for config_attribute in "allow_population_by_field_name", "allow_mutation":
            if config_attribute in self.extra_template_data:
-                config_parameters[config_attribute] = self.extra_template_data[
-
-
+                config_parameters[config_attribute] = self.extra_template_data[config_attribute]
+
+        if "validate_assignment" not in config_parameters and any(
+            field.use_frozen_field and field.read_only for field in self.fields
+        ):
+            config_parameters["validate_assignment"] = True
+
         for data_type in self.all_data_types:
-            if data_type.is_custom_type:
-                config_parameters[
+            if data_type.is_custom_type:  # pragma: no cover
+                config_parameters["arbitrary_types_allowed"] = True
                 break

-        if
-
+        if isinstance(self.extra_template_data.get("config"), dict):
+            for key, value in self.extra_template_data["config"].items():
+                config_parameters[key] = value  # noqa: PERF403

-
+        if config_parameters:
+            from datamodel_code_generator.model.pydantic import Config  # noqa: PLC0415

-
-    def imports(self) -> Tuple[Import, ...]:
-        if any(f for f in self.fields if f.field):
-            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
-        return super().imports
+            self.extra_template_data["config"] = Config.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
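For orientation, the Constraints, DataModelField, and Config handling above is what renders fields and model config in the generated Pydantic v1 output. The snippet below is illustrative only (schema, class, and field names invented), not taken from the diff.

```python
# Illustrative output only: a JSON Schema property with minimum/maximum, an alias,
# and additionalProperties: false renders roughly like this under the v1 path above.
from typing import List

from pydantic import BaseModel, Extra, Field


class Item(BaseModel):
    class Config:
        extra = Extra.forbid
        allow_population_by_field_name = True

    count: int = Field(..., ge=0, le=100, alias="Count")
    tags: List[str] = Field(default_factory=list)
```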
datamodel_code_generator/model/pydantic/custom_root_type.py

@@ -1,8 +1,17 @@
+"""Pydantic v1 custom root type model.
+
+Generates models with __root__ field for wrapping single types.
+"""
+
+from __future__ import annotations
+
 from typing import ClassVar

 from datamodel_code_generator.model.pydantic.base_model import BaseModel


 class CustomRootType(BaseModel):
-
-
+    """DataModel for Pydantic v1 custom root types (__root__ field)."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel_root.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
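CustomRootType keeps the Pydantic v1 `__root__` convention. An illustrative example of the shape it renders (model name invented):

```python
# Illustrative output only: a schema whose top level is an array of strings is
# wrapped in a __root__ model by CustomRootType (pydantic/BaseModel_root.jinja2).
from typing import List

from pydantic import BaseModel


class Tags(BaseModel):
    __root__: List[str]


tags = Tags.parse_obj(["a", "b"])  # the payload is accessed via tags.__root__
```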
datamodel_code_generator/model/pydantic/dataclass.py

@@ -1,10 +1,21 @@
-
+"""Pydantic v1 dataclass model.
+
+Generates pydantic.dataclasses.dataclass decorated classes.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, ClassVar

-from datamodel_code_generator.imports import Import
 from datamodel_code_generator.model import DataModel
 from datamodel_code_generator.model.pydantic.imports import IMPORT_DATACLASS

+if TYPE_CHECKING:
+    from datamodel_code_generator.imports import Import
+

 class DataClass(DataModel):
-
-
+    """DataModel for Pydantic v1 dataclasses."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/dataclass.jinja2"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_DATACLASS,)
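DataClass pairs the pydantic/dataclass.jinja2 template with IMPORT_DATACLASS; illustratively, the generated code takes this form (class and field names invented):

```python
# Illustrative output only: DataClass emits pydantic.dataclasses.dataclass
# decorated classes rather than BaseModel subclasses.
from pydantic.dataclasses import dataclass


@dataclass
class Point:
    x: float
    y: float
```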
datamodel_code_generator/model/pydantic/imports.py

@@ -1,29 +1,42 @@
+"""Import definitions for Pydantic v1 types.
+
+Provides pre-defined Import objects for Pydantic v1 types (constr, AnyUrl, etc.).
+"""
+
+from __future__ import annotations
+
 from datamodel_code_generator.imports import Import

-IMPORT_CONSTR = Import.from_full_path(
-IMPORT_CONINT = Import.from_full_path(
-IMPORT_CONFLOAT = Import.from_full_path(
-IMPORT_CONDECIMAL = Import.from_full_path(
-IMPORT_CONBYTES = Import.from_full_path(
-IMPORT_POSITIVE_INT = Import.from_full_path(
-IMPORT_NEGATIVE_INT = Import.from_full_path(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+IMPORT_CONSTR = Import.from_full_path("pydantic.constr")
+IMPORT_CONINT = Import.from_full_path("pydantic.conint")
+IMPORT_CONFLOAT = Import.from_full_path("pydantic.confloat")
+IMPORT_CONDECIMAL = Import.from_full_path("pydantic.condecimal")
+IMPORT_CONBYTES = Import.from_full_path("pydantic.conbytes")
+IMPORT_POSITIVE_INT = Import.from_full_path("pydantic.PositiveInt")
+IMPORT_NEGATIVE_INT = Import.from_full_path("pydantic.NegativeInt")
+IMPORT_NON_POSITIVE_INT = Import.from_full_path("pydantic.NonPositiveInt")
+IMPORT_NON_NEGATIVE_INT = Import.from_full_path("pydantic.NonNegativeInt")
+IMPORT_POSITIVE_FLOAT = Import.from_full_path("pydantic.PositiveFloat")
+IMPORT_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NegativeFloat")
+IMPORT_NON_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NonNegativeFloat")
+IMPORT_NON_POSITIVE_FLOAT = Import.from_full_path("pydantic.NonPositiveFloat")
+IMPORT_SECRET_STR = Import.from_full_path("pydantic.SecretStr")
+IMPORT_EMAIL_STR = Import.from_full_path("pydantic.EmailStr")
+IMPORT_UUID1 = Import.from_full_path("pydantic.UUID1")
+IMPORT_UUID2 = Import.from_full_path("pydantic.UUID2")
+IMPORT_UUID3 = Import.from_full_path("pydantic.UUID3")
+IMPORT_UUID4 = Import.from_full_path("pydantic.UUID4")
+IMPORT_UUID5 = Import.from_full_path("pydantic.UUID5")
+IMPORT_ANYURL = Import.from_full_path("pydantic.AnyUrl")
+IMPORT_IPV4ADDRESS = Import.from_full_path("ipaddress.IPv4Address")
+IMPORT_IPV6ADDRESS = Import.from_full_path("ipaddress.IPv6Address")
+IMPORT_IPV4NETWORKS = Import.from_full_path("ipaddress.IPv4Network")
+IMPORT_IPV6NETWORKS = Import.from_full_path("ipaddress.IPv6Network")
+IMPORT_EXTRA = Import.from_full_path("pydantic.Extra")
+IMPORT_FIELD = Import.from_full_path("pydantic.Field")
+IMPORT_STRICT_INT = Import.from_full_path("pydantic.StrictInt")
+IMPORT_STRICT_FLOAT = Import.from_full_path("pydantic.StrictFloat")
+IMPORT_STRICT_STR = Import.from_full_path("pydantic.StrictStr")
+IMPORT_STRICT_BOOL = Import.from_full_path("pydantic.StrictBool")
+IMPORT_STRICT_BYTES = Import.from_full_path("pydantic.StrictBytes")
+IMPORT_DATACLASS = Import.from_full_path("pydantic.dataclasses.dataclass")