datamodel-code-generator 0.27.2 → 0.28.0 (py3-none-any.whl)
This diff compares the contents of publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of datamodel-code-generator might be problematic.
- datamodel_code_generator/__init__.py +168 -196
- datamodel_code_generator/__main__.py +146 -189
- datamodel_code_generator/arguments.py +227 -230
- datamodel_code_generator/format.py +77 -129
- datamodel_code_generator/http.py +12 -10
- datamodel_code_generator/imports.py +59 -65
- datamodel_code_generator/model/__init__.py +28 -31
- datamodel_code_generator/model/base.py +100 -144
- datamodel_code_generator/model/dataclass.py +62 -70
- datamodel_code_generator/model/enum.py +34 -30
- datamodel_code_generator/model/imports.py +13 -11
- datamodel_code_generator/model/msgspec.py +116 -138
- datamodel_code_generator/model/pydantic/__init__.py +18 -28
- datamodel_code_generator/model/pydantic/base_model.py +121 -140
- datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
- datamodel_code_generator/model/pydantic/dataclass.py +6 -4
- datamodel_code_generator/model/pydantic/imports.py +35 -33
- datamodel_code_generator/model/pydantic/types.py +91 -119
- datamodel_code_generator/model/pydantic_v2/__init__.py +21 -18
- datamodel_code_generator/model/pydantic_v2/base_model.py +118 -127
- datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
- datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
- datamodel_code_generator/model/pydantic_v2/types.py +11 -7
- datamodel_code_generator/model/rootmodel.py +1 -1
- datamodel_code_generator/model/scalar.py +33 -32
- datamodel_code_generator/model/typed_dict.py +41 -51
- datamodel_code_generator/model/types.py +24 -19
- datamodel_code_generator/model/union.py +21 -17
- datamodel_code_generator/parser/__init__.py +16 -12
- datamodel_code_generator/parser/base.py +327 -515
- datamodel_code_generator/parser/graphql.py +87 -119
- datamodel_code_generator/parser/jsonschema.py +438 -607
- datamodel_code_generator/parser/openapi.py +180 -220
- datamodel_code_generator/pydantic_patch.py +8 -9
- datamodel_code_generator/reference.py +199 -297
- datamodel_code_generator/types.py +149 -215
- datamodel_code_generator/util.py +23 -36
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/METADATA +10 -5
- datamodel_code_generator-0.28.0.dist-info/RECORD +59 -0
- datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/WHEEL +0 -0
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/entry_points.txt +0 -0
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/licenses/LICENSE +0 -0
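
The registry note above only states what this diff represents. For readers who want to verify or regenerate such a comparison locally, the following is a minimal, illustrative sketch using only the standard library; the wheel paths and the `pip download` commands in the docstring are assumptions for the example, not data taken from the registries.

```python
"""Minimal sketch: reproduce a wheel-to-wheel content diff locally.

Assumes both wheels were already fetched, for example:
    pip download datamodel-code-generator==0.27.2 --no-deps -d wheels/
    pip download datamodel-code-generator==0.28.0 --no-deps -d wheels/
The file names below are examples, not part of the registry diff above.
"""

import difflib
import zipfile

OLD = "wheels/datamodel_code_generator-0.27.2-py3-none-any.whl"
NEW = "wheels/datamodel_code_generator-0.28.0-py3-none-any.whl"


def wheel_texts(path: str) -> dict[str, list[str]]:
    """Map each .py member of a wheel (a zip archive) to its decoded lines."""
    with zipfile.ZipFile(path) as wheel:
        return {
            name: wheel.read(name).decode("utf-8", errors="replace").splitlines(keepends=True)
            for name in wheel.namelist()
            if name.endswith(".py")
        }


def main() -> None:
    old_files, new_files = wheel_texts(OLD), wheel_texts(NEW)
    # Union of member names so added and removed files both show up.
    for name in sorted(old_files.keys() | new_files.keys()):
        diff = difflib.unified_diff(
            old_files.get(name, []),
            new_files.get(name, []),
            fromfile=f"0.27.2/{name}",
            tofile=f"0.28.0/{name}",
        )
        print("".join(diff), end="")


if __name__ == "__main__":
    main()
```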
--- 0.27.2/datamodel_code_generator/model/pydantic/base_model.py
+++ 0.28.0/datamodel_code_generator/model/pydantic/base_model.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
 from abc import ABC
+from functools import cached_property
 from pathlib import Path
-from typing import Any, ClassVar,
+from typing import TYPE_CHECKING, Any, ClassVar, Optional

 from pydantic import Field

-from datamodel_code_generator.imports import Import
 from datamodel_code_generator.model import (
     ConstraintsBase,
     DataModel,
@@ -16,80 +18,76 @@ from datamodel_code_generator.model.pydantic.imports import (
     IMPORT_EXTRA,
     IMPORT_FIELD,
 )
-from datamodel_code_generator.reference import Reference
 from datamodel_code_generator.types import UnionIntFloat, chain_as_tuple
-
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+
+    from datamodel_code_generator.imports import Import
+    from datamodel_code_generator.reference import Reference


 class Constraints(ConstraintsBase):
-    gt: Optional[UnionIntFloat] = Field(None, alias='exclusiveMinimum')
-    ge: Optional[UnionIntFloat] = Field(None, alias='minimum')
-    lt: Optional[UnionIntFloat] = Field(None, alias='exclusiveMaximum')
-    le: Optional[UnionIntFloat] = Field(None, alias='maximum')
-    multiple_of: Optional[float] = Field(None, alias='multipleOf')
-    min_items: Optional[int] = Field(None, alias='minItems')
-    max_items: Optional[int] = Field(None, alias='maxItems')
-    min_length: Optional[int] = Field(None, alias='minLength')
-    max_length: Optional[int] = Field(None, alias='maxLength')
-    regex: Optional[str] = Field(None, alias='pattern')
+    gt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMinimum")  # noqa: UP045
+    ge: Optional[UnionIntFloat] = Field(None, alias="minimum")  # noqa: UP045
+    lt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMaximum")  # noqa: UP045
+    le: Optional[UnionIntFloat] = Field(None, alias="maximum")  # noqa: UP045
+    multiple_of: Optional[float] = Field(None, alias="multipleOf")  # noqa: UP045
+    min_items: Optional[int] = Field(None, alias="minItems")  # noqa: UP045
+    max_items: Optional[int] = Field(None, alias="maxItems")  # noqa: UP045
+    min_length: Optional[int] = Field(None, alias="minLength")  # noqa: UP045
+    max_length: Optional[int] = Field(None, alias="maxLength")  # noqa: UP045
+    regex: Optional[str] = Field(None, alias="pattern")  # noqa: UP045


 class DataModelField(DataModelFieldBase):
-    _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = {
-        'alias',
-        'default',
-        'const',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'min_items',
-        'max_items',
-        'min_length',
-        'max_length',
-        'regex',
+    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
+        "alias",
+        "default",
+        "const",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "min_items",
+        "max_items",
+        "min_length",
+        "max_length",
+        "regex",
     }
-    _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {'gt', 'ge', 'lt', 'le'}
-    constraints: Optional[Constraints] = None
-    _PARSE_METHOD: ClassVar[str] = 'parse_obj'
+    _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le"}
+    constraints: Optional[Constraints] = None  # noqa: UP045
+    _PARSE_METHOD: ClassVar[str] = "parse_obj"

     @property
-    def method(self) -> Optional[str]:
+    def method(self) -> str | None:
         return self.validator

     @property
-    def validator(self) -> Optional[str]:
+    def validator(self) -> str | None:
         return None
         # TODO refactor this method for other validation logic
-        # from datamodel_code_generator.model.pydantic import VALIDATOR_TEMPLATE
-        #
-        # return VALIDATOR_TEMPLATE.render(
-        #     field_name=self.name, types=','.join([t.type_hint for t in self.data_types])
-        # )

     @property
-    def field(self) -> Optional[str]:
+    def field(self) -> str | None:
         """for backwards compatibility"""
         result = str(self)
         if (
             self.use_default_kwarg
-            and not result.startswith('Field(...')
-            and not result.startswith('Field(default_factory=')
+            and not result.startswith("Field(...")
+            and not result.startswith("Field(default_factory=")
         ):
             # Use `default=` for fields that have a default value so that type
             # checkers using @dataclass_transform can infer the field as
             # optional in __init__.
-            result = result.replace('Field(', 'Field(default=')
-        if result
+            result = result.replace("Field(", "Field(default=")
+        if not result:
             return None
-
         return result

     def self_reference(self) -> bool:
-        return isinstance(
-            self.parent, BaseModelBase
-        ) and self.parent.reference.path in {
+        return isinstance(self.parent, BaseModelBase) and self.parent.reference.path in {
             d.reference.path for d in self.data_type.all_data_types if d.reference
         }

@@ -97,60 +95,51 @@ class DataModelField(DataModelFieldBase):
         if value is None or constraint not in self._COMPARE_EXPRESSIONS:
             return value

-        if any(
-            data_type.type == 'float' for data_type in self.data_type.all_data_types
-        ):
+        if any(data_type.type == "float" for data_type in self.data_type.all_data_types):
             return float(value)
         return int(value)

-    def _get_default_as_pydantic_model(self) -> Optional[str]:
+    def _get_default_as_pydantic_model(self) -> str | None:
         for data_type in self.data_type.data_types or (self.data_type,):
             # TODO: Check nested data_types
             if data_type.is_dict or self.data_type.is_union:
                 # TODO: Parse Union and dict model for default
                 continue
-
-
+            if data_type.is_list and len(data_type.data_types) == 1:
+                data_type_child = data_type.data_types[0]
                 if (
-
-                    and isinstance(
+                    data_type_child.reference
+                    and isinstance(data_type_child.reference.source, BaseModelBase)
                     and isinstance(self.default, list)
                 ):  # pragma: no cover
-                    return
-
-
-
-
+                    return (
+                        f"lambda :[{data_type_child.alias or data_type_child.reference.source.class_name}."
+                        f"{self._PARSE_METHOD}(v) for v in {self.default!r}]"
+                    )
+            elif data_type.reference and isinstance(data_type.reference.source, BaseModelBase):  # pragma: no cover
+                return (
+                    f"lambda :{data_type.alias or data_type.reference.source.class_name}."
+                    f"{self._PARSE_METHOD}({self.default!r})"
+                )
         return None

-    def _process_data_in_str(self, data: Dict[str, Any]) -> None:
+    def _process_data_in_str(self, data: dict[str, Any]) -> None:
         if self.const:
-            data['const'] = True
+            data["const"] = True

-    def _process_annotated_field_arguments(
-        self, field_arguments: List[str]
-    ) -> List[str]:
+    def _process_annotated_field_arguments(self, field_arguments: list[str]) -> list[str]:  # noqa: PLR6301
         return field_arguments

-    def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k not in self._EXCLUDE_FIELD_KEYS
-        }
+    def __str__(self) -> str:  # noqa: PLR0912
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k not in self._EXCLUDE_FIELD_KEYS}
         if self.alias:
-            data['alias'] = self.alias
-        if (
-            self.constraints is not None
-            and not self.self_reference()
-            and not self.data_type.strict
-        ):
+            data["alias"] = self.alias
+        if self.constraints is not None and not self.self_reference() and not self.data_type.strict:
             data = {
                 **data,
                 **(
                     {}
-                    if any(
-                        d.import_ == IMPORT_ANYURL
-                        for d in self.data_type.all_data_types
-                    )
+                    if any(d.import_ == IMPORT_ANYURL for d in self.data_type.all_data_types)
                     else {
                         k: self._get_strict_field_constraint_value(k, v)
                         for k, v in self.constraints.dict(exclude_unset=True).items()
@@ -159,75 +148,73 @@ class DataModelField(DataModelFieldBase):
             }

         if self.use_field_description:
-            data.pop('description', None)  # Description is part of field docstring
+            data.pop("description", None)  # Description is part of field docstring

         self._process_data_in_str(data)

-        discriminator = data.pop('discriminator', None)
+        discriminator = data.pop("discriminator", None)
         if discriminator:
             if isinstance(discriminator, str):
-                data['discriminator'] = discriminator
+                data["discriminator"] = discriminator
             elif isinstance(discriminator, dict):  # pragma: no cover
-                data['discriminator'] = discriminator['propertyName']
+                data["discriminator"] = discriminator["propertyName"]

         if self.required:
             default_factory = None
-        elif self.default and 'default_factory' not in data:
+        elif self.default and "default_factory" not in data:
             default_factory = self._get_default_as_pydantic_model()
         else:
-            default_factory = data.pop('default_factory', None)
+            default_factory = data.pop("default_factory", None)

-        field_arguments = sorted(
-            f'{k}={repr(v)}' for k, v in data.items() if v is not None
-        )
+        field_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)

         if not field_arguments and not default_factory:
             if self.nullable and self.required:
-                return 'Field(...)'  # Field() is for mypy
-            return ''
+                return "Field(...)"  # Field() is for mypy
+            return ""

         if self.use_annotated:
             field_arguments = self._process_annotated_field_arguments(field_arguments)
         elif self.required:
-            field_arguments = ['...', *field_arguments]
+            field_arguments = ["...", *field_arguments]
         elif default_factory:
-            field_arguments = [f'default_factory={default_factory}', *field_arguments]
+            field_arguments = [f"default_factory={default_factory}", *field_arguments]
         else:
-            field_arguments = [f
+            field_arguments = [f"{self.default!r}", *field_arguments]

-        return f'Field({", ".join(field_arguments)})'
+        return f"Field({', '.join(field_arguments)})"

     @property
-    def annotated(self) -> Optional[str]:
+    def annotated(self) -> str | None:
         if not self.use_annotated or not str(self):
             return None
-        return f
+        return f"Annotated[{self.type_hint}, {self!s}]"

     @property
-    def imports(self) -> Tuple[Import, ...]:
+    def imports(self) -> tuple[Import, ...]:
         if self.field:
             return chain_as_tuple(super().imports, (IMPORT_FIELD,))
         return super().imports


 class BaseModelBase(DataModel, ABC):
-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
     ) -> None:
-        methods: List[str] = [field.method for field in fields if field.method]
+        methods: list[str] = [field.method for field in fields if field.method]

         super().__init__(
             fields=fields,
@@ -251,30 +238,28 @@ class BaseModelBase(DataModel, ABC):
         # Current version supports '{custom_template_dir}/BaseModel.jinja'
         # But, Future version will support only '{custom_template_dir}/pydantic/BaseModel.jinja'
         if self._custom_template_dir is not None:
-            custom_template_file_path = (
-                self._custom_template_dir / Path(self.TEMPLATE_FILE_PATH).name
-            )
+            custom_template_file_path = self._custom_template_dir / Path(self.TEMPLATE_FILE_PATH).name
             if custom_template_file_path.exists():
                 return custom_template_file_path
         return super().template_file_path


 class BaseModel(BaseModelBase):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/BaseModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
@@ -293,33 +278,29 @@ class BaseModel(BaseModelBase):
             nullable=nullable,
             keyword_only=keyword_only,
         )
-        config_parameters: Dict[str, Any] = {}
-
-
-        allow_extra_fields = self.extra_template_data.get('allow_extra_fields')
-        if
-            config_parameters['extra'] = (
-                'Extra.allow'
-                if additionalProperties or allow_extra_fields
-                else 'Extra.forbid'
+        config_parameters: dict[str, Any] = {}
+
+        additional_properties = self.extra_template_data.get("additionalProperties")
+        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
+        if additional_properties is not None or allow_extra_fields:
+            config_parameters["extra"] = (
+                "Extra.allow" if additional_properties or allow_extra_fields else "Extra.forbid"
             )
             self._additional_imports.append(IMPORT_EXTRA)

-        for config_attribute in 'allow_population_by_field_name', 'allow_mutation':
+        for config_attribute in "allow_population_by_field_name", "allow_mutation":
             if config_attribute in self.extra_template_data:
-                config_parameters[config_attribute] = self.extra_template_data[
-                    config_attribute
-                ]
+                config_parameters[config_attribute] = self.extra_template_data[config_attribute]
         for data_type in self.all_data_types:
             if data_type.is_custom_type:
-                config_parameters['arbitrary_types_allowed'] = True
+                config_parameters["arbitrary_types_allowed"] = True
                 break

-        if isinstance(self.extra_template_data.get('config'), dict):
-            for key, value in self.extra_template_data['config'].items():
-                config_parameters[key] = value
+        if isinstance(self.extra_template_data.get("config"), dict):
+            for key, value in self.extra_template_data["config"].items():
+                config_parameters[key] = value  # noqa: PERF403

         if config_parameters:
-            from datamodel_code_generator.model.pydantic import Config
+            from datamodel_code_generator.model.pydantic import Config  # noqa: PLC0415

-            self.extra_template_data['config'] = Config.parse_obj(config_parameters)
+            self.extra_template_data["config"] = Config.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
--- 0.27.2/datamodel_code_generator/model/pydantic/custom_root_type.py
+++ 0.28.0/datamodel_code_generator/model/pydantic/custom_root_type.py
@@ -6,5 +6,5 @@ from datamodel_code_generator.model.pydantic.base_model import BaseModel


 class CustomRootType(BaseModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/BaseModel_root.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel_root.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
--- 0.27.2/datamodel_code_generator/model/pydantic/dataclass.py
+++ 0.28.0/datamodel_code_generator/model/pydantic/dataclass.py
@@ -1,12 +1,14 @@
 from __future__ import annotations

-from typing import
+from typing import TYPE_CHECKING, ClassVar

-from datamodel_code_generator.imports import Import
 from datamodel_code_generator.model import DataModel
 from datamodel_code_generator.model.pydantic.imports import IMPORT_DATACLASS

+if TYPE_CHECKING:
+    from datamodel_code_generator.imports import Import
+

 class DataClass(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/dataclass.jinja2'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/dataclass.jinja2"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_DATACLASS,)
--- 0.27.2/datamodel_code_generator/model/pydantic/imports.py
+++ 0.28.0/datamodel_code_generator/model/pydantic/imports.py
@@ -1,35 +1,37 @@
+from __future__ import annotations
+
 from datamodel_code_generator.imports import Import

-IMPORT_CONSTR = Import.from_full_path('pydantic.constr')
-IMPORT_CONINT = Import.from_full_path('pydantic.conint')
-IMPORT_CONFLOAT = Import.from_full_path('pydantic.confloat')
-IMPORT_CONDECIMAL = Import.from_full_path('pydantic.condecimal')
-IMPORT_CONBYTES = Import.from_full_path('pydantic.conbytes')
-IMPORT_POSITIVE_INT = Import.from_full_path('pydantic.PositiveInt')
-IMPORT_NEGATIVE_INT = Import.from_full_path('pydantic.NegativeInt')
-IMPORT_NON_POSITIVE_INT = Import.from_full_path('pydantic.NonPositiveInt')
-IMPORT_NON_NEGATIVE_INT = Import.from_full_path('pydantic.NonNegativeInt')
-IMPORT_POSITIVE_FLOAT = Import.from_full_path('pydantic.PositiveFloat')
-IMPORT_NEGATIVE_FLOAT = Import.from_full_path('pydantic.NegativeFloat')
-IMPORT_NON_NEGATIVE_FLOAT = Import.from_full_path('pydantic.NonNegativeFloat')
-IMPORT_NON_POSITIVE_FLOAT = Import.from_full_path('pydantic.NonPositiveFloat')
-IMPORT_SECRET_STR = Import.from_full_path('pydantic.SecretStr')
-IMPORT_EMAIL_STR = Import.from_full_path('pydantic.EmailStr')
-IMPORT_UUID1 = Import.from_full_path('pydantic.UUID1')
-IMPORT_UUID2 = Import.from_full_path('pydantic.UUID2')
-IMPORT_UUID3 = Import.from_full_path('pydantic.UUID3')
-IMPORT_UUID4 = Import.from_full_path('pydantic.UUID4')
-IMPORT_UUID5 = Import.from_full_path('pydantic.UUID5')
-IMPORT_ANYURL = Import.from_full_path('pydantic.AnyUrl')
-IMPORT_IPV4ADDRESS = Import.from_full_path('ipaddress.IPv4Address')
-IMPORT_IPV6ADDRESS = Import.from_full_path('ipaddress.IPv6Address')
-IMPORT_IPV4NETWORKS = Import.from_full_path('ipaddress.IPv4Network')
-IMPORT_IPV6NETWORKS = Import.from_full_path('ipaddress.IPv6Network')
-IMPORT_EXTRA = Import.from_full_path('pydantic.Extra')
-IMPORT_FIELD = Import.from_full_path('pydantic.Field')
-IMPORT_STRICT_INT = Import.from_full_path('pydantic.StrictInt')
-IMPORT_STRICT_FLOAT = Import.from_full_path('pydantic.StrictFloat')
-IMPORT_STRICT_STR = Import.from_full_path('pydantic.StrictStr')
-IMPORT_STRICT_BOOL = Import.from_full_path('pydantic.StrictBool')
-IMPORT_STRICT_BYTES = Import.from_full_path('pydantic.StrictBytes')
-IMPORT_DATACLASS = Import.from_full_path('pydantic.dataclasses.dataclass')
+IMPORT_CONSTR = Import.from_full_path("pydantic.constr")
+IMPORT_CONINT = Import.from_full_path("pydantic.conint")
+IMPORT_CONFLOAT = Import.from_full_path("pydantic.confloat")
+IMPORT_CONDECIMAL = Import.from_full_path("pydantic.condecimal")
+IMPORT_CONBYTES = Import.from_full_path("pydantic.conbytes")
+IMPORT_POSITIVE_INT = Import.from_full_path("pydantic.PositiveInt")
+IMPORT_NEGATIVE_INT = Import.from_full_path("pydantic.NegativeInt")
+IMPORT_NON_POSITIVE_INT = Import.from_full_path("pydantic.NonPositiveInt")
+IMPORT_NON_NEGATIVE_INT = Import.from_full_path("pydantic.NonNegativeInt")
+IMPORT_POSITIVE_FLOAT = Import.from_full_path("pydantic.PositiveFloat")
+IMPORT_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NegativeFloat")
+IMPORT_NON_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NonNegativeFloat")
+IMPORT_NON_POSITIVE_FLOAT = Import.from_full_path("pydantic.NonPositiveFloat")
+IMPORT_SECRET_STR = Import.from_full_path("pydantic.SecretStr")
+IMPORT_EMAIL_STR = Import.from_full_path("pydantic.EmailStr")
+IMPORT_UUID1 = Import.from_full_path("pydantic.UUID1")
+IMPORT_UUID2 = Import.from_full_path("pydantic.UUID2")
+IMPORT_UUID3 = Import.from_full_path("pydantic.UUID3")
+IMPORT_UUID4 = Import.from_full_path("pydantic.UUID4")
+IMPORT_UUID5 = Import.from_full_path("pydantic.UUID5")
+IMPORT_ANYURL = Import.from_full_path("pydantic.AnyUrl")
+IMPORT_IPV4ADDRESS = Import.from_full_path("ipaddress.IPv4Address")
+IMPORT_IPV6ADDRESS = Import.from_full_path("ipaddress.IPv6Address")
+IMPORT_IPV4NETWORKS = Import.from_full_path("ipaddress.IPv4Network")
+IMPORT_IPV6NETWORKS = Import.from_full_path("ipaddress.IPv6Network")
+IMPORT_EXTRA = Import.from_full_path("pydantic.Extra")
+IMPORT_FIELD = Import.from_full_path("pydantic.Field")
+IMPORT_STRICT_INT = Import.from_full_path("pydantic.StrictInt")
+IMPORT_STRICT_FLOAT = Import.from_full_path("pydantic.StrictFloat")
+IMPORT_STRICT_STR = Import.from_full_path("pydantic.StrictStr")
+IMPORT_STRICT_BOOL = Import.from_full_path("pydantic.StrictBool")
+IMPORT_STRICT_BYTES = Import.from_full_path("pydantic.StrictBytes")
+IMPORT_DATACLASS = Import.from_full_path("pydantic.dataclasses.dataclass")