datamodel-code-generator 0.27.2__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of datamodel-code-generator has been flagged as possibly problematic.
- datamodel_code_generator/__init__.py +168 -196
- datamodel_code_generator/__main__.py +146 -189
- datamodel_code_generator/arguments.py +227 -230
- datamodel_code_generator/format.py +77 -129
- datamodel_code_generator/http.py +12 -10
- datamodel_code_generator/imports.py +59 -65
- datamodel_code_generator/model/__init__.py +28 -31
- datamodel_code_generator/model/base.py +100 -144
- datamodel_code_generator/model/dataclass.py +62 -70
- datamodel_code_generator/model/enum.py +34 -30
- datamodel_code_generator/model/imports.py +13 -11
- datamodel_code_generator/model/msgspec.py +116 -138
- datamodel_code_generator/model/pydantic/__init__.py +18 -28
- datamodel_code_generator/model/pydantic/base_model.py +121 -140
- datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
- datamodel_code_generator/model/pydantic/dataclass.py +6 -4
- datamodel_code_generator/model/pydantic/imports.py +35 -33
- datamodel_code_generator/model/pydantic/types.py +91 -119
- datamodel_code_generator/model/pydantic_v2/__init__.py +21 -18
- datamodel_code_generator/model/pydantic_v2/base_model.py +118 -127
- datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
- datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
- datamodel_code_generator/model/pydantic_v2/types.py +11 -7
- datamodel_code_generator/model/rootmodel.py +1 -1
- datamodel_code_generator/model/scalar.py +33 -32
- datamodel_code_generator/model/typed_dict.py +41 -51
- datamodel_code_generator/model/types.py +24 -19
- datamodel_code_generator/model/union.py +21 -17
- datamodel_code_generator/parser/__init__.py +16 -12
- datamodel_code_generator/parser/base.py +327 -515
- datamodel_code_generator/parser/graphql.py +87 -119
- datamodel_code_generator/parser/jsonschema.py +438 -607
- datamodel_code_generator/parser/openapi.py +180 -220
- datamodel_code_generator/pydantic_patch.py +8 -9
- datamodel_code_generator/reference.py +199 -297
- datamodel_code_generator/types.py +149 -215
- datamodel_code_generator/util.py +23 -36
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/METADATA +10 -5
- datamodel_code_generator-0.28.0.dist-info/RECORD +59 -0
- datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/WHEEL +0 -0
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/entry_points.txt +0 -0
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/licenses/LICENSE +0 -0
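Most of this release is an internal modernization of the generator itself (PEP 604 unions, TYPE_CHECKING-guarded imports, ruff noqa annotations) plus behavioural refinements in the pydantic v2 and GraphQL code paths shown below. As a quick way to exercise the changed pydantic_v2 models, here is a minimal sketch using the library's public generate() entry point; the schema path, output path, and option values are illustrative assumptions, not taken from this diff.

# Minimal sketch (assumed usage, not part of the diff): regenerate pydantic v2
# models with 0.28.0 so the updated model/pydantic_v2/* code paths are exercised.
from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate

generate(
    Path("schema.json"),  # hypothetical JSON Schema input
    input_file_type=InputFileType.JsonSchema,
    output=Path("models.py"),  # hypothetical output module
    output_model_type=DataModelType.PydanticV2BaseModel,
)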
datamodel_code_generator/model/pydantic_v2/base_model.py
@@ -1,16 +1,8 @@
+from __future__ import annotations
+
 import re
 from enum import Enum
-from
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    List,
-    NamedTuple,
-    Optional,
-    Set,
-)
+from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, Optional

 from pydantic import Field
 from typing_extensions import Literal
@@ -26,130 +18,133 @@ from datamodel_code_generator.model.pydantic.base_model import (
     DataModelField as DataModelFieldV1,
 )
 from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_CONFIG_DICT
-from datamodel_code_generator.reference import Reference
 from datamodel_code_generator.util import field_validator, model_validator

+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+

 class UnionMode(Enum):
-    smart =
-    left_to_right =
+    smart = "smart"
+    left_to_right = "left_to_right"


 class Constraints(_Constraints):
     # To override existing pattern alias
-    regex: Optional[str] = Field(None, alias=
-    pattern: Optional[str] = Field(None, alias=
+    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
+    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045

-    @model_validator(mode=
-    def validate_min_max_items(cls, values: Any) ->
+    @model_validator(mode="before")
+    def validate_min_max_items(cls, values: Any) -> dict[str, Any]:  # noqa: N805
         if not isinstance(values, dict):  # pragma: no cover
             return values
-        min_items = values.pop(
+        min_items = values.pop("minItems", None)
         if min_items is not None:
-            values[
-        max_items = values.pop(
+            values["minLength"] = min_items
+        max_items = values.pop("maxItems", None)
         if max_items is not None:
-            values[
+            values["maxLength"] = max_items
         return values


 class DataModelField(DataModelFieldV1):
-    _EXCLUDE_FIELD_KEYS: ClassVar[
-
-
-
-
-
-
-
-
-
-
+    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
+        "alias",
+        "default",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "min_length",
+        "max_length",
+        "pattern",
     }
-    _DEFAULT_FIELD_KEYS: ClassVar[
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    _DEFAULT_FIELD_KEYS: ClassVar[set[str]] = {
+        "default",
+        "default_factory",
+        "alias",
+        "alias_priority",
+        "validation_alias",
+        "serialization_alias",
+        "title",
+        "description",
+        "examples",
+        "exclude",
+        "discriminator",
+        "json_schema_extra",
+        "frozen",
+        "validate_default",
+        "repr",
+        "init_var",
+        "kw_only",
+        "pattern",
+        "strict",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "allow_inf_nan",
+        "max_digits",
+        "decimal_places",
+        "min_length",
+        "max_length",
+        "union_mode",
     }
-    constraints: Optional[Constraints] = None  # pyright: ignore
-    _PARSE_METHOD: ClassVar[str] =
+    constraints: Optional[Constraints] = None  # pyright: ignore[reportIncompatibleVariableOverride]  # noqa: UP045
+    _PARSE_METHOD: ClassVar[str] = "model_validate"
     can_have_extra_keys: ClassVar[bool] = False

-    @field_validator(
-    def validate_extras(cls, values: Any) ->
+    @field_validator("extras")
+    def validate_extras(cls, values: Any) -> dict[str, Any]:  # noqa: N805
         if not isinstance(values, dict):  # pragma: no cover
             return values
-        if
+        if "examples" in values:
             return values

-        if
-            values[
+        if "example" in values:
+            values["examples"] = [values.pop("example")]
         return values

     def process_const(self) -> None:
-        if
-            return
+        if "const" not in self.extras:
+            return
         self.const = True
         self.nullable = False
-        const = self.extras[
+        const = self.extras["const"]
         self.data_type = self.data_type.__class__(literals=[const])
         if not self.default:
             self.default = const

-    def _process_data_in_str(self, data:
+    def _process_data_in_str(self, data: dict[str, Any]) -> None:
         if self.const:
             # const is removed in pydantic 2.0
-            data.pop(
+            data.pop("const")

         # unique_items is not supported in pydantic 2.0
-        data.pop(
+        data.pop("unique_items", None)

-        if
+        if "union_mode" in data:
             if self.data_type.is_union:
-                data[
+                data["union_mode"] = data.pop("union_mode").value
             else:
-                data.pop(
+                data.pop("union_mode")

         # **extra is not supported in pydantic 2.0
-        json_schema_extra = {
-            k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS
-        }
+        json_schema_extra = {k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS}
         if json_schema_extra:
-            data[
-            for key in json_schema_extra
+            data["json_schema_extra"] = json_schema_extra
+            for key in json_schema_extra:
                 data.pop(key)

-    def _process_annotated_field_arguments(
+    def _process_annotated_field_arguments(  # noqa: PLR6301
         self,
-        field_arguments:
-    ) ->
+        field_arguments: list[str],
+    ) -> list[str]:
         return field_arguments


@@ -160,27 +155,27 @@ class ConfigAttribute(NamedTuple):


 class BaseModel(BaseModelBase):
-    TEMPLATE_FILE_PATH: ClassVar[str] =
-    BASE_CLASS: ClassVar[str] =
-    CONFIG_ATTRIBUTES: ClassVar[
-        ConfigAttribute(
-        ConfigAttribute(
-        ConfigAttribute(
-        ConfigAttribute(
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/BaseModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
+    CONFIG_ATTRIBUTES: ClassVar[list[ConfigAttribute]] = [
+        ConfigAttribute("allow_population_by_field_name", "populate_by_name", False),  # noqa: FBT003
+        ConfigAttribute("populate_by_name", "populate_by_name", False),  # noqa: FBT003
+        ConfigAttribute("allow_mutation", "frozen", True),  # noqa: FBT003
+        ConfigAttribute("frozen", "frozen", False),  # noqa: FBT003
     ]

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields:
-        decorators:
-        base_classes:
-        custom_base_class:
-        custom_template_dir:
-        extra_template_data:
-        path:
-        description:
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
@@ -199,49 +194,45 @@
             nullable=nullable,
             keyword_only=keyword_only,
         )
-        config_parameters:
+        config_parameters: dict[str, Any] = {}

         extra = self._get_config_extra()
         if extra:
-            config_parameters[
+            config_parameters["extra"] = extra

         for from_, to, invert in self.CONFIG_ATTRIBUTES:
             if from_ in self.extra_template_data:
                 config_parameters[to] = (
-                    not self.extra_template_data[from_]
-                    if invert
-                    else self.extra_template_data[from_]
+                    not self.extra_template_data[from_] if invert else self.extra_template_data[from_]
                 )
         for data_type in self.all_data_types:
             if data_type.is_custom_type:  # pragma: no cover
-                config_parameters[
+                config_parameters["arbitrary_types_allowed"] = True
                 break

         for field in self.fields:
             # Check if a regex pattern uses lookarounds.
             # Depending on the generation configuration, the pattern may end up in two different places.
-            pattern = (
-
-            )
-            if pattern and re.search(r
-                config_parameters[
+            pattern = (isinstance(field.constraints, Constraints) and field.constraints.pattern) or (
+                field.data_type.kwargs or {}
+            ).get("pattern")
+            if pattern and re.search(r"\(\?<?[=!]", pattern):
+                config_parameters["regex_engine"] = '"python-re"'
                 break

-        if isinstance(self.extra_template_data.get(
-            for key, value in self.extra_template_data[
-                config_parameters[key] = value
+        if isinstance(self.extra_template_data.get("config"), dict):
+            for key, value in self.extra_template_data["config"].items():
+                config_parameters[key] = value  # noqa: PERF403

         if config_parameters:
-            from datamodel_code_generator.model.pydantic_v2 import ConfigDict
+            from datamodel_code_generator.model.pydantic_v2 import ConfigDict  # noqa: PLC0415

-            self.extra_template_data[
+            self.extra_template_data["config"] = ConfigDict.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
             self._additional_imports.append(IMPORT_CONFIG_DICT)

-    def _get_config_extra(self) ->
-
-        allow_extra_fields = self.extra_template_data.get(
-        if
-            return
-                "'allow'" if additionalProperties or allow_extra_fields else "'forbid'"
-            )
+    def _get_config_extra(self) -> Literal["'allow'", "'forbid'"] | None:
+        additional_properties = self.extra_template_data.get("additionalProperties")
+        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
+        if additional_properties is not None or allow_extra_fields:
+            return "'allow'" if additional_properties or allow_extra_fields else "'forbid'"
         return None
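One behavioural detail worth noting in the hunk above: when any field's regex pattern contains a lookaround ((?=, (?!, (?<=, (?<!), the generated model is switched to Python's re engine, because pydantic v2's default Rust regex engine rejects lookarounds. A rough sketch of the kind of model this produces follows; the class and field names are hypothetical, not taken from the diff.

# Illustrative output shape only; names are hypothetical.
from pydantic import BaseModel, ConfigDict, Field


class Credentials(BaseModel):
    # Emitted because the pattern below uses a lookahead, which the default
    # rust-regex engine in pydantic v2 does not support.
    model_config = ConfigDict(regex_engine="python-re")

    password: str = Field(..., pattern=r"^(?=.*\d).{8,}$")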
datamodel_code_generator/model/pydantic_v2/imports.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 from datamodel_code_generator.imports import Import

-IMPORT_CONFIG_DICT = Import.from_full_path(
-IMPORT_AWARE_DATETIME = Import.from_full_path(
-IMPORT_NAIVE_DATETIME = Import.from_full_path(
+IMPORT_CONFIG_DICT = Import.from_full_path("pydantic.ConfigDict")
+IMPORT_AWARE_DATETIME = Import.from_full_path("pydantic.AwareDatetime")
+IMPORT_NAIVE_DATETIME = Import.from_full_path("pydantic.NaiveDatetime")
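The two new datetime imports resolve to pydantic v2's timezone-aware and naive datetime types, used when the corresponding DatetimeClassType is selected in format.py. An illustrative fragment of generated output is shown below; the model and field names are hypothetical.

# Illustrative only; names are hypothetical.
from pydantic import AwareDatetime, BaseModel


class Event(BaseModel):
    created_at: AwareDatetime  # validation requires a timezone-aware datetime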
datamodel_code_generator/model/pydantic_v2/root_model.py
@@ -1,13 +1,13 @@
 from __future__ import annotations

-from typing import Any, ClassVar, Literal
+from typing import Any, ClassVar, Literal

 from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel


 class RootModel(BaseModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] =
-    BASE_CLASS: ClassVar[str] =
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/RootModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.RootModel"

     def __init__(
         self,
@@ -15,11 +15,11 @@ class RootModel(BaseModel):
     ) -> None:
         # Remove custom_base_class for Pydantic V2 models; behaviour is different from Pydantic V1 as it will not
         # be treated as a root model. custom_base_class cannot both implement BaseModel and RootModel!
-        if
-            kwargs.pop(
+        if "custom_base_class" in kwargs:
+            kwargs.pop("custom_base_class")

         super().__init__(**kwargs)

-    def _get_config_extra(self) ->
+    def _get_config_extra(self) -> Literal["'allow'", "'forbid'"] | None:  # noqa: PLR6301
         # PydanticV2 RootModels cannot have extra fields
         return None
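RootModel now renders against pydantic.RootModel and silently drops any custom_base_class, since a custom base cannot be both a BaseModel and a RootModel. A hedged sketch of the shape such a generated root model takes is shown below; the class name and root type are hypothetical.

# Illustrative output shape only; the name and element type are hypothetical.
from pydantic import RootModel


class TagList(RootModel[list[str]]):
    root: list[str]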
datamodel_code_generator/model/pydantic_v2/types.py
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import
+from typing import TYPE_CHECKING, ClassVar

 from datamodel_code_generator.format import DatetimeClassType
 from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
@@ -11,17 +11,20 @@ from datamodel_code_generator.model.pydantic_v2.imports import (
 )
 from datamodel_code_generator.types import DataType, StrictTypes, Types

+if TYPE_CHECKING:
+    from collections.abc import Sequence
+

 class DataTypeManager(_DataTypeManager):
-    PATTERN_KEY: ClassVar[str] =
+    PATTERN_KEY: ClassVar[str] = "pattern"

     def type_map_factory(
         self,
-        data_type:
+        data_type: type[DataType],
         strict_types: Sequence[StrictTypes],
         pattern_key: str,
-        target_datetime_class:
-    ) ->
+        target_datetime_class: DatetimeClassType | None = None,
+    ) -> dict[Types, DataType]:
         result = {
             **super().type_map_factory(
                 data_type,
@@ -34,8 +37,9 @@ class DataTypeManager(_DataTypeManager):
                 strict=StrictTypes.str in strict_types,
                 # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
                 kwargs={
-                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*
-
+                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
+                    r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
+                    **({"strict": True} if StrictTypes.str in strict_types else {}),
                 },
             ),
         }
datamodel_code_generator/model/scalar.py
@@ -1,53 +1,56 @@
 from __future__ import annotations

 from collections import defaultdict
-from
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING, Any, ClassVar

 from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, Import
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.reference import Reference

-
-
-
-
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+_INT: str = "int"
+_FLOAT: str = "float"
+_BOOLEAN: str = "bool"
+_STR: str = "str"

 # default graphql scalar types
 DEFAULT_GRAPHQL_SCALAR_TYPE = _STR

-DEFAULT_GRAPHQL_SCALAR_TYPES:
-
-
-
-
-
+DEFAULT_GRAPHQL_SCALAR_TYPES: dict[str, str] = {
+    "Boolean": _BOOLEAN,
+    "String": _STR,
+    "ID": _STR,
+    "Int": _INT,
+    "Float": _FLOAT,
 }


 class DataTypeScalar(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] =
-    BASE_CLASS: ClassVar[str] =
-    DEFAULT_IMPORTS: ClassVar[
+    TEMPLATE_FILE_PATH: ClassVar[str] = "Scalar.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
         *,
         reference: Reference,
-        fields:
-        decorators:
-        base_classes:
-        custom_base_class:
-        custom_template_dir:
-        extra_template_data:
-        methods:
-        path:
-        description:
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
-    ):
+    ) -> None:
         extra_template_data = extra_template_data or defaultdict(dict)

         scalar_name = reference.name
@@ -56,12 +59,10 @@ class DataTypeScalar(DataModel):

         # py_type
         py_type = extra_template_data[scalar_name].get(
-
-            DEFAULT_GRAPHQL_SCALAR_TYPES.get(
-                reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE
-            ),
+            "py_type",
+            DEFAULT_GRAPHQL_SCALAR_TYPES.get(reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE),
         )
-        extra_template_data[scalar_name][
+        extra_template_data[scalar_name]["py_type"] = py_type

         super().__init__(
             reference=reference,