datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datamodel_code_generator/__init__.py +654 -185
- datamodel_code_generator/__main__.py +872 -388
- datamodel_code_generator/arguments.py +798 -0
- datamodel_code_generator/cli_options.py +295 -0
- datamodel_code_generator/format.py +292 -54
- datamodel_code_generator/http.py +85 -10
- datamodel_code_generator/imports.py +152 -43
- datamodel_code_generator/model/__init__.py +138 -1
- datamodel_code_generator/model/base.py +531 -120
- datamodel_code_generator/model/dataclass.py +211 -0
- datamodel_code_generator/model/enum.py +133 -12
- datamodel_code_generator/model/imports.py +22 -0
- datamodel_code_generator/model/msgspec.py +462 -0
- datamodel_code_generator/model/pydantic/__init__.py +30 -25
- datamodel_code_generator/model/pydantic/base_model.py +304 -100
- datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
- datamodel_code_generator/model/pydantic/dataclass.py +15 -4
- datamodel_code_generator/model/pydantic/imports.py +40 -27
- datamodel_code_generator/model/pydantic/types.py +188 -96
- datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
- datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
- datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
- datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
- datamodel_code_generator/model/pydantic_v2/types.py +143 -0
- datamodel_code_generator/model/scalar.py +124 -0
- datamodel_code_generator/model/template/Enum.jinja2 +15 -2
- datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
- datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
- datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
- datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
- datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
- datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
- datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
- datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
- datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
- datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
- datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
- datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
- datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
- datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
- datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
- datamodel_code_generator/model/type_alias.py +70 -0
- datamodel_code_generator/model/typed_dict.py +161 -0
- datamodel_code_generator/model/types.py +106 -0
- datamodel_code_generator/model/union.py +105 -0
- datamodel_code_generator/parser/__init__.py +30 -12
- datamodel_code_generator/parser/_graph.py +67 -0
- datamodel_code_generator/parser/_scc.py +171 -0
- datamodel_code_generator/parser/base.py +2426 -380
- datamodel_code_generator/parser/graphql.py +652 -0
- datamodel_code_generator/parser/jsonschema.py +2518 -647
- datamodel_code_generator/parser/openapi.py +631 -222
- datamodel_code_generator/py.typed +0 -0
- datamodel_code_generator/pydantic_patch.py +28 -0
- datamodel_code_generator/reference.py +672 -290
- datamodel_code_generator/types.py +521 -145
- datamodel_code_generator/util.py +155 -0
- datamodel_code_generator/watch.py +65 -0
- datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
- datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
- datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
- datamodel_code_generator/version.py +0 -1
- datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
- datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
- datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
|
@@ -1,221 +1,505 @@
|
|
|
1
|
+
"""Core type system for data model generation.
|
|
2
|
+
|
|
3
|
+
Provides DataType for representing types with references and constraints,
|
|
4
|
+
DataTypeManager as the abstract base for type mappings, and supporting
|
|
5
|
+
utilities for handling unions, optionals, and type hints.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import re
|
|
1
11
|
from abc import ABC, abstractmethod
|
|
2
12
|
from enum import Enum, auto
|
|
13
|
+
from functools import lru_cache
|
|
3
14
|
from itertools import chain
|
|
15
|
+
from re import Pattern
|
|
4
16
|
from typing import (
|
|
5
17
|
TYPE_CHECKING,
|
|
6
18
|
Any,
|
|
19
|
+
Callable,
|
|
7
20
|
ClassVar,
|
|
8
|
-
Dict,
|
|
9
|
-
FrozenSet,
|
|
10
|
-
Iterable,
|
|
11
|
-
Iterator,
|
|
12
|
-
List,
|
|
13
21
|
Optional,
|
|
14
|
-
|
|
15
|
-
Set,
|
|
16
|
-
Tuple,
|
|
17
|
-
Type,
|
|
22
|
+
Protocol,
|
|
18
23
|
TypeVar,
|
|
19
24
|
Union,
|
|
25
|
+
runtime_checkable,
|
|
20
26
|
)
|
|
21
27
|
|
|
22
|
-
|
|
28
|
+
import pydantic
|
|
29
|
+
from packaging import version
|
|
30
|
+
from pydantic import StrictBool, StrictInt, StrictStr, create_model
|
|
31
|
+
from typing_extensions import TypeIs
|
|
23
32
|
|
|
24
|
-
from datamodel_code_generator import
|
|
25
|
-
|
|
33
|
+
from datamodel_code_generator.format import (
|
|
34
|
+
DatetimeClassType,
|
|
35
|
+
PythonVersion,
|
|
36
|
+
PythonVersionMin,
|
|
37
|
+
)
|
|
26
38
|
from datamodel_code_generator.imports import (
|
|
27
39
|
IMPORT_ABC_MAPPING,
|
|
28
40
|
IMPORT_ABC_SEQUENCE,
|
|
41
|
+
IMPORT_ABC_SET,
|
|
42
|
+
IMPORT_ANY,
|
|
29
43
|
IMPORT_DICT,
|
|
44
|
+
IMPORT_FROZEN_SET,
|
|
30
45
|
IMPORT_LIST,
|
|
31
46
|
IMPORT_LITERAL,
|
|
32
|
-
IMPORT_LITERAL_BACKPORT,
|
|
33
47
|
IMPORT_MAPPING,
|
|
34
48
|
IMPORT_OPTIONAL,
|
|
35
49
|
IMPORT_SEQUENCE,
|
|
50
|
+
IMPORT_SET,
|
|
36
51
|
IMPORT_UNION,
|
|
37
52
|
Import,
|
|
38
53
|
)
|
|
39
54
|
from datamodel_code_generator.reference import Reference, _BaseModel
|
|
40
|
-
|
|
41
|
-
|
|
55
|
+
from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict
|
|
56
|
+
|
|
57
|
+
T = TypeVar("T")
|
|
58
|
+
SourceT = TypeVar("SourceT")
|
|
59
|
+
|
|
60
|
+
OPTIONAL = "Optional"
|
|
61
|
+
OPTIONAL_PREFIX = f"{OPTIONAL}["
|
|
62
|
+
|
|
63
|
+
UNION = "Union"
|
|
64
|
+
UNION_PREFIX = f"{UNION}["
|
|
65
|
+
UNION_DELIMITER = ", "
|
|
66
|
+
UNION_PATTERN: Pattern[str] = re.compile(r"\s*,\s*")
|
|
67
|
+
UNION_OPERATOR_DELIMITER = " | "
|
|
68
|
+
UNION_OPERATOR_PATTERN: Pattern[str] = re.compile(r"\s*\|\s*")
|
|
69
|
+
NONE = "None"
|
|
70
|
+
ANY = "Any"
|
|
71
|
+
LITERAL = "Literal"
|
|
72
|
+
SEQUENCE = "Sequence"
|
|
73
|
+
FROZEN_SET = "FrozenSet"
|
|
74
|
+
MAPPING = "Mapping"
|
|
75
|
+
DICT = "Dict"
|
|
76
|
+
SET = "Set"
|
|
77
|
+
LIST = "List"
|
|
78
|
+
STANDARD_DICT = "dict"
|
|
79
|
+
STANDARD_LIST = "list"
|
|
80
|
+
STANDARD_SET = "set"
|
|
81
|
+
STR = "str"
|
|
82
|
+
|
|
83
|
+
NOT_REQUIRED = "NotRequired"
|
|
84
|
+
NOT_REQUIRED_PREFIX = f"{NOT_REQUIRED}["
|
|
85
|
+
|
|
86
|
+
if TYPE_CHECKING:
|
|
87
|
+
import builtins
|
|
88
|
+
from collections.abc import Iterable, Iterator, Sequence
|
|
89
|
+
|
|
90
|
+
from pydantic_core import core_schema
|
|
91
|
+
|
|
92
|
+
from datamodel_code_generator.model.base import DataModelFieldBase
|
|
93
|
+
|
|
94
|
+
if PYDANTIC_V2:
|
|
95
|
+
from pydantic import GetCoreSchemaHandler
|
|
96
|
+
from pydantic_core import core_schema
|
|
42
97
|
|
|
43
98
|
|
|
44
99
|
class StrictTypes(Enum):
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
100
|
+
"""Strict type options for generated models."""
|
|
101
|
+
|
|
102
|
+
str = "str"
|
|
103
|
+
bytes = "bytes"
|
|
104
|
+
int = "int"
|
|
105
|
+
float = "float"
|
|
106
|
+
bool = "bool"
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class UnionIntFloat:
|
|
110
|
+
"""Pydantic-compatible type that accepts both int and float values."""
|
|
50
111
|
|
|
112
|
+
def __init__(self, value: float) -> None:
|
|
113
|
+
"""Initialize with an int or float value."""
|
|
114
|
+
self.value: int | float = value
|
|
115
|
+
|
|
116
|
+
def __int__(self) -> int:
|
|
117
|
+
"""Convert value to int."""
|
|
118
|
+
return int(self.value)
|
|
119
|
+
|
|
120
|
+
def __float__(self) -> float:
|
|
121
|
+
"""Convert value to float."""
|
|
122
|
+
return float(self.value)
|
|
123
|
+
|
|
124
|
+
def __str__(self) -> str:
|
|
125
|
+
"""Convert value to string."""
|
|
126
|
+
return str(self.value)
|
|
127
|
+
|
|
128
|
+
@classmethod
|
|
129
|
+
def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]: # noqa: PLW3201
|
|
130
|
+
"""Return Pydantic v1 validators."""
|
|
131
|
+
yield cls.validate
|
|
132
|
+
|
|
133
|
+
@classmethod
|
|
134
|
+
def __get_pydantic_core_schema__( # noqa: PLW3201
|
|
135
|
+
cls, _source_type: Any, _handler: GetCoreSchemaHandler
|
|
136
|
+
) -> core_schema.CoreSchema:
|
|
137
|
+
"""Return Pydantic v2 core schema."""
|
|
138
|
+
from_int_schema = core_schema.chain_schema([
|
|
139
|
+
core_schema.union_schema([core_schema.int_schema(), core_schema.float_schema()]),
|
|
140
|
+
core_schema.no_info_plain_validator_function(cls.validate),
|
|
141
|
+
])
|
|
142
|
+
|
|
143
|
+
return core_schema.json_or_python_schema(
|
|
144
|
+
json_schema=from_int_schema,
|
|
145
|
+
python_schema=core_schema.union_schema([
|
|
146
|
+
# check if it's an instance first before doing any further work
|
|
147
|
+
core_schema.is_instance_schema(UnionIntFloat),
|
|
148
|
+
from_int_schema,
|
|
149
|
+
]),
|
|
150
|
+
serialization=core_schema.plain_serializer_function_ser_schema(lambda instance: instance.value),
|
|
151
|
+
)
|
|
51
152
|
|
|
52
|
-
|
|
153
|
+
@classmethod
|
|
154
|
+
def validate(cls, v: Any) -> UnionIntFloat:
|
|
155
|
+
"""Validate and convert value to UnionIntFloat."""
|
|
156
|
+
if isinstance(v, UnionIntFloat):
|
|
157
|
+
return v
|
|
158
|
+
if not isinstance(v, (int, float)): # pragma: no cover
|
|
159
|
+
try:
|
|
160
|
+
int(v)
|
|
161
|
+
return cls(v)
|
|
162
|
+
except (TypeError, ValueError):
|
|
163
|
+
pass
|
|
164
|
+
try:
|
|
165
|
+
float(v)
|
|
166
|
+
return cls(v)
|
|
167
|
+
except (TypeError, ValueError):
|
|
168
|
+
pass
|
|
169
|
+
|
|
170
|
+
msg = f"{v} is not int or float"
|
|
171
|
+
raise TypeError(msg)
|
|
172
|
+
return cls(v)
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def chain_as_tuple(*iterables: Iterable[T]) -> tuple[T, ...]:
|
|
176
|
+
"""Chain multiple iterables and return as a tuple."""
|
|
53
177
|
return tuple(chain(*iterables))
|
|
54
178
|
|
|
55
179
|
|
|
180
|
+
def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str: # noqa: PLR0912
|
|
181
|
+
"""Remove None from a Union type string, handling nested unions."""
|
|
182
|
+
if use_union_operator:
|
|
183
|
+
if " | " not in type_:
|
|
184
|
+
return type_
|
|
185
|
+
separator = "|"
|
|
186
|
+
inner_text = type_
|
|
187
|
+
else:
|
|
188
|
+
if not type_.startswith(UNION_PREFIX):
|
|
189
|
+
return type_
|
|
190
|
+
separator = ","
|
|
191
|
+
inner_text = type_[len(UNION_PREFIX) : -1]
|
|
192
|
+
|
|
193
|
+
parts = []
|
|
194
|
+
inner_count = 0
|
|
195
|
+
current_part = ""
|
|
196
|
+
|
|
197
|
+
# With this variable we count any non-escaped round bracket, whenever we are inside a
|
|
198
|
+
# constraint string expression. Once found a part starting with `constr(`, we increment
|
|
199
|
+
# this counter for each non-escaped opening round bracket and decrement it for each
|
|
200
|
+
# non-escaped closing round bracket.
|
|
201
|
+
in_constr = 0
|
|
202
|
+
|
|
203
|
+
# Parse union parts carefully to handle nested structures
|
|
204
|
+
for char in inner_text:
|
|
205
|
+
current_part += char
|
|
206
|
+
if char == "[" and in_constr == 0:
|
|
207
|
+
inner_count += 1
|
|
208
|
+
elif char == "]" and in_constr == 0:
|
|
209
|
+
inner_count -= 1
|
|
210
|
+
elif char == "(":
|
|
211
|
+
if current_part.strip().startswith("constr(") and current_part[-2] != "\\":
|
|
212
|
+
# non-escaped opening round bracket found inside constraint string expression
|
|
213
|
+
in_constr += 1
|
|
214
|
+
elif char == ")":
|
|
215
|
+
if in_constr > 0 and current_part[-2] != "\\":
|
|
216
|
+
# non-escaped closing round bracket found inside constraint string expression
|
|
217
|
+
in_constr -= 1
|
|
218
|
+
elif char == separator and inner_count == 0 and in_constr == 0:
|
|
219
|
+
part = current_part[:-1].strip()
|
|
220
|
+
if part != NONE:
|
|
221
|
+
# Process nested unions recursively
|
|
222
|
+
# only UNION_PREFIX might be nested but not union_operator
|
|
223
|
+
if not use_union_operator and part.startswith(UNION_PREFIX):
|
|
224
|
+
part = _remove_none_from_union(part, use_union_operator=False)
|
|
225
|
+
parts.append(part)
|
|
226
|
+
current_part = ""
|
|
227
|
+
|
|
228
|
+
part = current_part.strip()
|
|
229
|
+
if current_part and part != NONE:
|
|
230
|
+
# only UNION_PREFIX might be nested but not union_operator
|
|
231
|
+
if not use_union_operator and part.startswith(UNION_PREFIX):
|
|
232
|
+
part = _remove_none_from_union(part, use_union_operator=False)
|
|
233
|
+
parts.append(part)
|
|
234
|
+
|
|
235
|
+
if not parts:
|
|
236
|
+
return NONE
|
|
237
|
+
if len(parts) == 1:
|
|
238
|
+
return parts[0]
|
|
239
|
+
|
|
240
|
+
if use_union_operator:
|
|
241
|
+
return UNION_OPERATOR_DELIMITER.join(parts)
|
|
242
|
+
|
|
243
|
+
return f"{UNION_PREFIX}{UNION_DELIMITER.join(parts)}]"
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
@lru_cache
|
|
247
|
+
def get_optional_type(type_: str, use_union_operator: bool) -> str: # noqa: FBT001
|
|
248
|
+
"""Wrap a type string in Optional or add | None suffix."""
|
|
249
|
+
type_ = _remove_none_from_union(type_, use_union_operator=use_union_operator)
|
|
250
|
+
|
|
251
|
+
if not type_ or type_ == NONE:
|
|
252
|
+
return NONE
|
|
253
|
+
if use_union_operator:
|
|
254
|
+
return f"{type_} | {NONE}"
|
|
255
|
+
return f"{OPTIONAL_PREFIX}{type_}]"
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
def is_data_model_field(obj: object) -> TypeIs[DataModelFieldBase]:
|
|
259
|
+
"""Check if an object is a DataModelFieldBase instance."""
|
|
260
|
+
from datamodel_code_generator.model.base import DataModelFieldBase # noqa: PLC0415
|
|
261
|
+
|
|
262
|
+
return isinstance(obj, DataModelFieldBase)
|
|
263
|
+
|
|
264
|
+
|
|
56
265
|
@runtime_checkable
|
|
57
266
|
class Modular(Protocol):
|
|
267
|
+
"""Protocol for objects with a module name property."""
|
|
268
|
+
|
|
58
269
|
@property
|
|
59
270
|
def module_name(self) -> str:
|
|
271
|
+
"""Return the module name."""
|
|
272
|
+
raise NotImplementedError
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
@runtime_checkable
|
|
276
|
+
class Nullable(Protocol):
|
|
277
|
+
"""Protocol for objects with a nullable property."""
|
|
278
|
+
|
|
279
|
+
@property
|
|
280
|
+
def nullable(self) -> bool:
|
|
281
|
+
"""Return whether the type is nullable."""
|
|
60
282
|
raise NotImplementedError
|
|
61
283
|
|
|
62
284
|
|
|
63
|
-
class DataType(_BaseModel):
|
|
64
|
-
type
|
|
65
|
-
|
|
66
|
-
|
|
285
|
+
class DataType(_BaseModel):
|
|
286
|
+
"""Represents a type in generated code with imports and references."""
|
|
287
|
+
|
|
288
|
+
if PYDANTIC_V2:
|
|
289
|
+
# TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
|
|
290
|
+
# Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
|
|
291
|
+
model_config = ConfigDict( # pyright: ignore[reportAssignmentType]
|
|
292
|
+
extra="forbid",
|
|
293
|
+
revalidate_instances="never",
|
|
294
|
+
)
|
|
295
|
+
else:
|
|
296
|
+
if not TYPE_CHECKING:
|
|
297
|
+
|
|
298
|
+
@classmethod
|
|
299
|
+
def model_rebuild(
|
|
300
|
+
cls,
|
|
301
|
+
*,
|
|
302
|
+
_types_namespace: dict[str, type] | None = None,
|
|
303
|
+
) -> None:
|
|
304
|
+
"""Update forward references for Pydantic v1."""
|
|
305
|
+
localns = _types_namespace or {}
|
|
306
|
+
cls.update_forward_refs(**localns)
|
|
307
|
+
|
|
308
|
+
class Config:
|
|
309
|
+
"""Pydantic v1 model configuration."""
|
|
310
|
+
|
|
311
|
+
extra = "forbid"
|
|
312
|
+
copy_on_model_validation = False if version.parse(pydantic.VERSION) < version.parse("1.9.2") else "none"
|
|
313
|
+
|
|
314
|
+
type: Optional[str] = None # noqa: UP045
|
|
315
|
+
reference: Optional[Reference] = None # noqa: UP045
|
|
316
|
+
data_types: list[DataType] = [] # noqa: RUF012
|
|
67
317
|
is_func: bool = False
|
|
68
|
-
kwargs: Optional[
|
|
69
|
-
import_: Optional[Import] = None
|
|
70
|
-
python_version: PythonVersion =
|
|
318
|
+
kwargs: Optional[dict[str, Any]] = None # noqa: UP045
|
|
319
|
+
import_: Optional[Import] = None # noqa: UP045
|
|
320
|
+
python_version: PythonVersion = PythonVersionMin
|
|
71
321
|
is_optional: bool = False
|
|
72
322
|
is_dict: bool = False
|
|
73
323
|
is_list: bool = False
|
|
324
|
+
is_set: bool = False
|
|
74
325
|
is_custom_type: bool = False
|
|
75
|
-
literals:
|
|
326
|
+
literals: list[Union[StrictBool, StrictInt, StrictStr]] = [] # noqa: RUF012, UP007
|
|
327
|
+
enum_member_literals: list[tuple[str, str]] = [] # noqa: RUF012 # [(EnumClassName, member_name), ...]
|
|
76
328
|
use_standard_collections: bool = False
|
|
77
329
|
use_generic_container: bool = False
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
330
|
+
use_union_operator: bool = False
|
|
331
|
+
alias: Optional[str] = None # noqa: UP045
|
|
332
|
+
parent: Union[DataModelFieldBase, DataType, None] = None # noqa: UP007
|
|
333
|
+
children: list[DataType] = [] # noqa: RUF012
|
|
81
334
|
strict: bool = False
|
|
82
|
-
dict_key: Optional[
|
|
335
|
+
dict_key: Optional[DataType] = None # noqa: UP045
|
|
336
|
+
treat_dot_as_module: bool = False
|
|
337
|
+
use_serialize_as_any: bool = False
|
|
83
338
|
|
|
84
|
-
_exclude_fields: ClassVar[
|
|
85
|
-
_pass_fields: ClassVar[
|
|
339
|
+
_exclude_fields: ClassVar[set[str]] = {"parent", "children"}
|
|
340
|
+
_pass_fields: ClassVar[set[str]] = {"parent", "children", "data_types", "reference"}
|
|
86
341
|
|
|
87
342
|
@classmethod
|
|
88
|
-
def from_import(
|
|
89
|
-
cls:
|
|
343
|
+
def from_import( # noqa: PLR0913
|
|
344
|
+
cls: builtins.type[DataTypeT],
|
|
90
345
|
import_: Import,
|
|
91
346
|
*,
|
|
92
347
|
is_optional: bool = False,
|
|
93
348
|
is_dict: bool = False,
|
|
94
349
|
is_list: bool = False,
|
|
350
|
+
is_set: bool = False,
|
|
95
351
|
is_custom_type: bool = False,
|
|
96
352
|
strict: bool = False,
|
|
97
|
-
kwargs:
|
|
98
|
-
) ->
|
|
353
|
+
kwargs: dict[str, Any] | None = None,
|
|
354
|
+
) -> DataTypeT:
|
|
355
|
+
"""Create a DataType from an Import object."""
|
|
99
356
|
return cls(
|
|
100
357
|
type=import_.import_,
|
|
101
358
|
import_=import_,
|
|
102
359
|
is_optional=is_optional,
|
|
103
360
|
is_dict=is_dict,
|
|
104
361
|
is_list=is_list,
|
|
105
|
-
|
|
362
|
+
is_set=is_set,
|
|
363
|
+
is_func=bool(kwargs),
|
|
106
364
|
is_custom_type=is_custom_type,
|
|
107
365
|
strict=strict,
|
|
108
366
|
kwargs=kwargs,
|
|
109
367
|
)
|
|
110
368
|
|
|
111
369
|
@property
|
|
112
|
-
def unresolved_types(self) ->
|
|
370
|
+
def unresolved_types(self) -> frozenset[str]:
|
|
371
|
+
"""Return set of unresolved type reference paths."""
|
|
113
372
|
return frozenset(
|
|
114
|
-
{
|
|
115
|
-
t.reference.path
|
|
116
|
-
for data_types in self.data_types
|
|
117
|
-
for t in data_types.all_data_types
|
|
118
|
-
if t.reference
|
|
119
|
-
}
|
|
373
|
+
{t.reference.path for data_types in self.data_types for t in data_types.all_data_types if t.reference}
|
|
120
374
|
| ({self.reference.path} if self.reference else set())
|
|
121
375
|
)
|
|
122
376
|
|
|
123
|
-
def replace_reference(self, reference: Reference) -> None:
|
|
377
|
+
def replace_reference(self, reference: Reference | None) -> None:
|
|
378
|
+
"""Replace this DataType's reference with a new one."""
|
|
124
379
|
if not self.reference: # pragma: no cover
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
self.reference.children.remove(self)
|
|
380
|
+
msg = f"`{self.__class__.__name__}.replace_reference()` can't be called when `reference` field is empty."
|
|
381
|
+
raise Exception(msg) # noqa: TRY002
|
|
382
|
+
self_id = id(self)
|
|
383
|
+
self.reference.children = [c for c in self.reference.children if id(c) != self_id]
|
|
131
384
|
self.reference = reference
|
|
132
|
-
reference
|
|
385
|
+
if reference:
|
|
386
|
+
reference.children.append(self)
|
|
387
|
+
|
|
388
|
+
def remove_reference(self) -> None:
|
|
389
|
+
"""Remove the reference from this DataType."""
|
|
390
|
+
self.replace_reference(None)
|
|
391
|
+
|
|
392
|
+
def swap_with(self, new_data_type: DataType) -> None:
|
|
393
|
+
"""Detach self and attach new_data_type to the same parent.
|
|
394
|
+
|
|
395
|
+
Replaces this DataType with new_data_type in the parent container.
|
|
396
|
+
Works with both field parents and nested DataType parents.
|
|
397
|
+
"""
|
|
398
|
+
parent = self.parent
|
|
399
|
+
self.parent = None
|
|
400
|
+
if parent is not None: # pragma: no cover
|
|
401
|
+
new_data_type.parent = parent
|
|
402
|
+
if is_data_model_field(parent):
|
|
403
|
+
parent.data_type = new_data_type
|
|
404
|
+
elif isinstance(parent, DataType): # pragma: no cover
|
|
405
|
+
parent.data_types = [new_data_type if d is self else d for d in parent.data_types]
|
|
133
406
|
|
|
134
407
|
@property
|
|
135
|
-
def module_name(self) ->
|
|
408
|
+
def module_name(self) -> str | None:
|
|
409
|
+
"""Return the module name from the reference source."""
|
|
136
410
|
if self.reference and isinstance(self.reference.source, Modular):
|
|
137
411
|
return self.reference.source.module_name
|
|
138
412
|
return None # pragma: no cover
|
|
139
413
|
|
|
140
414
|
@property
|
|
141
415
|
def full_name(self) -> str:
|
|
416
|
+
"""Return the fully qualified name including module."""
|
|
142
417
|
module_name = self.module_name
|
|
143
418
|
if module_name:
|
|
144
|
-
return f
|
|
145
|
-
return self.reference.short_name
|
|
419
|
+
return f"{module_name}.{self.reference.short_name if self.reference else ''}"
|
|
420
|
+
return self.reference.short_name if self.reference else ""
|
|
146
421
|
|
|
147
422
|
@property
|
|
148
|
-
def all_data_types(self) -> Iterator[
|
|
423
|
+
def all_data_types(self) -> Iterator[DataType]:
|
|
424
|
+
"""Recursively yield all nested DataTypes including self."""
|
|
149
425
|
for data_type in self.data_types:
|
|
150
426
|
yield from data_type.all_data_types
|
|
151
427
|
yield self
|
|
152
428
|
|
|
429
|
+
def find_source(self, source_type: type[SourceT]) -> SourceT | None:
|
|
430
|
+
"""Find the first reference source matching the given type from all nested data types."""
|
|
431
|
+
for data_type in self.all_data_types: # pragma: no branch
|
|
432
|
+
if not data_type.reference: # pragma: no cover
|
|
433
|
+
continue
|
|
434
|
+
source = data_type.reference.source
|
|
435
|
+
if isinstance(source, source_type): # pragma: no cover
|
|
436
|
+
return source
|
|
437
|
+
return None # pragma: no cover
|
|
438
|
+
|
|
153
439
|
@property
|
|
154
440
|
def all_imports(self) -> Iterator[Import]:
|
|
441
|
+
"""Recursively yield all imports from nested DataTypes and self."""
|
|
155
442
|
for data_type in self.data_types:
|
|
156
443
|
yield from data_type.all_imports
|
|
157
444
|
yield from self.imports
|
|
158
445
|
|
|
159
446
|
@property
|
|
160
447
|
def imports(self) -> Iterator[Import]:
|
|
448
|
+
"""Yield imports required by this DataType."""
|
|
449
|
+
# Add base import if exists
|
|
161
450
|
if self.import_:
|
|
162
451
|
yield self.import_
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
452
|
+
|
|
453
|
+
# Define required imports based on type features and conditions
|
|
454
|
+
imports: tuple[tuple[bool, Import], ...] = (
|
|
455
|
+
(self.is_optional and not self.use_union_operator, IMPORT_OPTIONAL),
|
|
456
|
+
(len(self.data_types) > 1 and not self.use_union_operator, IMPORT_UNION),
|
|
457
|
+
(bool(self.literals) or bool(self.enum_member_literals), IMPORT_LITERAL),
|
|
166
458
|
)
|
|
167
|
-
if any(self.literals):
|
|
168
|
-
import_literal = (
|
|
169
|
-
IMPORT_LITERAL
|
|
170
|
-
if self.python_version.has_literal_type
|
|
171
|
-
else IMPORT_LITERAL_BACKPORT
|
|
172
|
-
)
|
|
173
|
-
imports = (
|
|
174
|
-
*imports,
|
|
175
|
-
(any(self.literals), import_literal),
|
|
176
|
-
)
|
|
177
459
|
|
|
178
460
|
if self.use_generic_container:
|
|
179
461
|
if self.use_standard_collections:
|
|
180
462
|
imports = (
|
|
181
463
|
*imports,
|
|
182
464
|
(self.is_list, IMPORT_ABC_SEQUENCE),
|
|
465
|
+
(self.is_set, IMPORT_ABC_SET),
|
|
183
466
|
(self.is_dict, IMPORT_ABC_MAPPING),
|
|
184
467
|
)
|
|
185
468
|
else:
|
|
186
469
|
imports = (
|
|
187
470
|
*imports,
|
|
188
471
|
(self.is_list, IMPORT_SEQUENCE),
|
|
472
|
+
(self.is_set, IMPORT_FROZEN_SET),
|
|
189
473
|
(self.is_dict, IMPORT_MAPPING),
|
|
190
474
|
)
|
|
191
475
|
elif not self.use_standard_collections:
|
|
192
476
|
imports = (
|
|
193
477
|
*imports,
|
|
194
478
|
(self.is_list, IMPORT_LIST),
|
|
479
|
+
(self.is_set, IMPORT_SET),
|
|
195
480
|
(self.is_dict, IMPORT_DICT),
|
|
196
481
|
)
|
|
482
|
+
|
|
483
|
+
# Yield imports based on conditions
|
|
197
484
|
for field, import_ in imports:
|
|
198
485
|
if field and import_ != self.import_:
|
|
199
486
|
yield import_
|
|
200
487
|
|
|
488
|
+
# Propagate imports from any dict_key type
|
|
201
489
|
if self.dict_key:
|
|
202
490
|
yield from self.dict_key.imports
|
|
203
491
|
|
|
204
492
|
def __init__(self, **values: Any) -> None:
|
|
205
|
-
|
|
493
|
+
"""Initialize DataType with validation and reference setup."""
|
|
494
|
+
if not TYPE_CHECKING:
|
|
495
|
+
super().__init__(**values)
|
|
206
496
|
|
|
207
497
|
for type_ in self.data_types:
|
|
208
|
-
if type_.type ==
|
|
209
|
-
if any(
|
|
210
|
-
t for t in self.data_types if t.type != 'Any'
|
|
211
|
-
): # pragma: no cover
|
|
498
|
+
if type_.type == ANY and type_.is_optional:
|
|
499
|
+
if any(t for t in self.data_types if t.type != ANY): # pragma: no cover
|
|
212
500
|
self.is_optional = True
|
|
213
|
-
self.data_types = [
|
|
214
|
-
|
|
215
|
-
for t in self.data_types
|
|
216
|
-
if not (t.type == 'Any' and t.is_optional)
|
|
217
|
-
]
|
|
218
|
-
break
|
|
501
|
+
self.data_types = [t for t in self.data_types if not (t.type == ANY and t.is_optional)]
|
|
502
|
+
break # pragma: no cover
|
|
219
503
|
|
|
220
504
|
for data_type in self.data_types:
|
|
221
505
|
if data_type.reference or data_type.data_types:
|
|
@@ -224,63 +508,120 @@ class DataType(_BaseModel): # type: ignore
|
|
|
224
508
|
if self.reference:
|
|
225
509
|
self.reference.children.append(self)
|
|
226
510
|
|
|
511
|
+
def _get_wrapped_reference_type_hint(self, type_: str) -> str: # noqa: PLR6301
|
|
512
|
+
"""Wrap reference type name if needed (override in subclasses, e.g., for SerializeAsAny).
|
|
513
|
+
|
|
514
|
+
Args:
|
|
515
|
+
type_: The reference type name (e.g., "User")
|
|
516
|
+
|
|
517
|
+
Returns:
|
|
518
|
+
The potentially wrapped type name
|
|
519
|
+
"""
|
|
520
|
+
return type_
|
|
521
|
+
|
|
227
522
|
@property
|
|
228
|
-
def type_hint(self) -> str:
|
|
229
|
-
|
|
523
|
+
def type_hint(self) -> str: # noqa: PLR0912, PLR0915
|
|
524
|
+
"""Generate the Python type hint string for this DataType."""
|
|
525
|
+
type_: str | None = self.alias or self.type
|
|
230
526
|
if not type_:
|
|
231
|
-
if
|
|
232
|
-
|
|
527
|
+
if self.is_union:
|
|
528
|
+
data_types: list[str] = []
|
|
529
|
+
for data_type in self.data_types:
|
|
530
|
+
data_type_type = data_type.type_hint
|
|
531
|
+
if not data_type_type or data_type_type in data_types:
|
|
532
|
+
continue
|
|
533
|
+
|
|
534
|
+
if data_type_type == NONE:
|
|
535
|
+
self.is_optional = True
|
|
536
|
+
continue
|
|
537
|
+
|
|
538
|
+
non_optional_data_type_type = _remove_none_from_union(
|
|
539
|
+
data_type_type, use_union_operator=self.use_union_operator
|
|
540
|
+
)
|
|
541
|
+
|
|
542
|
+
if non_optional_data_type_type != data_type_type:
|
|
543
|
+
self.is_optional = True
|
|
544
|
+
|
|
545
|
+
data_types.append(non_optional_data_type_type)
|
|
546
|
+
if not data_types:
|
|
547
|
+
type_ = ANY
|
|
548
|
+
self.import_ = self.import_ or IMPORT_ANY
|
|
549
|
+
elif len(data_types) == 1:
|
|
550
|
+
type_ = data_types[0]
|
|
551
|
+
elif self.use_union_operator:
|
|
552
|
+
type_ = UNION_OPERATOR_DELIMITER.join(data_types)
|
|
553
|
+
else:
|
|
554
|
+
type_ = f"{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]"
|
|
233
555
|
elif len(self.data_types) == 1:
|
|
234
556
|
type_ = self.data_types[0].type_hint
|
|
557
|
+
elif self.enum_member_literals:
|
|
558
|
+
parts = [f"{enum_class}.{member}" for enum_class, member in self.enum_member_literals]
|
|
559
|
+
type_ = f"{LITERAL}[{', '.join(parts)}]"
|
|
235
560
|
elif self.literals:
|
|
236
|
-
type_ = (
|
|
237
|
-
|
|
238
|
-
|
|
561
|
+
type_ = f"{LITERAL}[{', '.join(repr(literal) for literal in self.literals)}]"
|
|
562
|
+
elif self.reference:
|
|
563
|
+
type_ = self.reference.short_name
|
|
564
|
+
type_ = self._get_wrapped_reference_type_hint(type_)
|
|
239
565
|
else:
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
if self.reference and self.python_version == PythonVersion.PY_36:
|
|
247
|
-
type_ = f"'{type_}'"
|
|
566
|
+
# TODO support strict Any
|
|
567
|
+
type_ = ""
|
|
568
|
+
if self.reference:
|
|
569
|
+
source = self.reference.source
|
|
570
|
+
if isinstance(source, Nullable) and source.nullable:
|
|
571
|
+
self.is_optional = True
|
|
248
572
|
if self.is_list:
|
|
249
573
|
if self.use_generic_container:
|
|
250
|
-
list_ =
|
|
574
|
+
list_ = SEQUENCE
|
|
575
|
+
elif self.use_standard_collections:
|
|
576
|
+
list_ = STANDARD_LIST
|
|
577
|
+
else:
|
|
578
|
+
list_ = LIST
|
|
579
|
+
type_ = f"{list_}[{type_}]" if type_ else list_
|
|
580
|
+
elif self.is_set:
|
|
581
|
+
if self.use_generic_container:
|
|
582
|
+
set_ = FROZEN_SET
|
|
251
583
|
elif self.use_standard_collections:
|
|
252
|
-
|
|
584
|
+
set_ = STANDARD_SET
|
|
253
585
|
else:
|
|
254
|
-
|
|
255
|
-
type_ = f
|
|
586
|
+
set_ = SET
|
|
587
|
+
type_ = f"{set_}[{type_}]" if type_ else set_
|
|
256
588
|
elif self.is_dict:
|
|
257
589
|
if self.use_generic_container:
|
|
258
|
-
dict_ =
|
|
590
|
+
dict_ = MAPPING
|
|
259
591
|
elif self.use_standard_collections:
|
|
260
|
-
dict_ =
|
|
592
|
+
dict_ = STANDARD_DICT
|
|
261
593
|
else:
|
|
262
|
-
dict_ =
|
|
594
|
+
dict_ = DICT
|
|
263
595
|
if self.dict_key or type_:
|
|
264
|
-
key = self.dict_key.type_hint if self.dict_key else
|
|
265
|
-
type_ = f
|
|
596
|
+
key = self.dict_key.type_hint if self.dict_key else STR
|
|
597
|
+
type_ = f"{dict_}[{key}, {type_ or ANY}]"
|
|
266
598
|
else: # pragma: no cover
|
|
267
599
|
type_ = dict_
|
|
268
|
-
if self.is_optional and type_ !=
|
|
269
|
-
type_
|
|
270
|
-
|
|
600
|
+
if self.is_optional and type_ != ANY:
|
|
601
|
+
return get_optional_type(type_, self.use_union_operator)
|
|
602
|
+
if self.is_func:
|
|
271
603
|
if self.kwargs:
|
|
272
|
-
kwargs: str =
|
|
273
|
-
return f
|
|
274
|
-
return f
|
|
604
|
+
kwargs: str = ", ".join(f"{k}={v}" for k, v in self.kwargs.items())
|
|
605
|
+
return f"{type_}({kwargs})"
|
|
606
|
+
return f"{type_}()"
|
|
275
607
|
return type_
|
|
276
608
|
|
|
609
|
+
@property
def is_union(self) -> bool:
    """True when more than one inner data type is present, i.e. this is a union."""
    return 1 < len(self.data_types)
|
|
613
|
+
|
|
614
|
+
|
|
615
|
+
# Generic type variable bound to DataType, for helpers that accept and return
# the same DataType subclass they were given.
DataTypeT = TypeVar("DataTypeT", bound=DataType)
|
|
277
616
|
|
|
278
|
-
DataType.update_forward_refs()
|
|
279
617
|
|
|
280
|
-
|
|
618
|
+
class EmptyDataType(DataType):
    """Marker DataType standing in for an empty or not-yet-resolved type."""
|
|
281
620
|
|
|
282
621
|
|
|
283
622
|
class Types(Enum):
|
|
623
|
+
"""Standard type identifiers for schema type mapping."""
|
|
624
|
+
|
|
284
625
|
integer = auto()
|
|
285
626
|
int32 = auto()
|
|
286
627
|
int64 = auto()
|
|
@@ -294,7 +635,9 @@ class Types(Enum):
|
|
|
294
635
|
binary = auto()
|
|
295
636
|
date = auto()
|
|
296
637
|
date_time = auto()
|
|
638
|
+
timedelta = auto()
|
|
297
639
|
password = auto()
|
|
640
|
+
path = auto()
|
|
298
641
|
email = auto()
|
|
299
642
|
uuid = auto()
|
|
300
643
|
uuid1 = auto()
|
|
@@ -305,7 +648,9 @@ class Types(Enum):
|
|
|
305
648
|
uri = auto()
|
|
306
649
|
hostname = auto()
|
|
307
650
|
ipv4 = auto()
|
|
651
|
+
ipv4_network = auto()
|
|
308
652
|
ipv6 = auto()
|
|
653
|
+
ipv6_network = auto()
|
|
309
654
|
boolean = auto()
|
|
310
655
|
object = auto()
|
|
311
656
|
null = auto()
|
|
@@ -314,44 +659,75 @@ class Types(Enum):
|
|
|
314
659
|
|
|
315
660
|
|
|
316
661
|
class DataTypeManager(ABC):
|
|
317
|
-
|
|
662
|
+
"""Abstract base class for managing type mappings in code generation.
|
|
663
|
+
|
|
664
|
+
Subclasses implement get_data_type() to map schema types to DataType objects.
|
|
665
|
+
"""
|
|
666
|
+
|
|
667
|
+
def __init__(  # noqa: PLR0913, PLR0917
    self,
    python_version: PythonVersion = PythonVersionMin,
    use_standard_collections: bool = False,  # noqa: FBT001, FBT002
    use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
    strict_types: Sequence[StrictTypes] | None = None,
    use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
    use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
    use_union_operator: bool = False,  # noqa: FBT001, FBT002
    use_pendulum: bool = False,  # noqa: FBT001, FBT002
    target_datetime_class: DatetimeClassType | None = None,
    treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
    use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
) -> None:
    """Initialize DataTypeManager with code generation options."""
    self.python_version = python_version
    self.use_standard_collections: bool = use_standard_collections
    self.use_generic_container_types: bool = use_generic_container_types
    # Falsy strict_types (None or empty) is normalized to an empty tuple.
    self.strict_types: Sequence[StrictTypes] = strict_types or ()
    self.use_non_positive_negative_number_constrained_types: bool = (
        use_non_positive_negative_number_constrained_types
    )
    self.use_decimal_for_multiple_of: bool = use_decimal_for_multiple_of
    self.use_union_operator: bool = use_union_operator
    self.use_pendulum: bool = use_pendulum
    self.target_datetime_class: DatetimeClassType | None = target_datetime_class
    self.treat_dot_as_module: bool = treat_dot_as_module
    self.use_serialize_as_any: bool = use_serialize_as_any

    # Bake the rendering flags into a DataType subclass as field defaults, so
    # every DataType this manager creates carries the same context
    # (container style, union operator, module-dot handling, ...).
    self.data_type: type[DataType] = create_model(
        "ContextDataType",
        python_version=(PythonVersion, python_version),
        use_standard_collections=(bool, use_standard_collections),
        use_generic_container=(bool, use_generic_container_types),
        use_union_operator=(bool, use_union_operator),
        treat_dot_as_module=(bool, treat_dot_as_module),
        use_serialize_as_any=(bool, use_serialize_as_any),
        __base__=DataType,
    )
|
|
347
706
|
|
|
348
707
|
@abstractmethod
def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
    """Map a Types enum value to a DataType. Must be implemented by subclasses."""
    raise NotImplementedError
|
|
351
711
|
|
|
352
|
-
def get_data_type_from_full_path(self, full_path: str, is_custom_type: bool) -> DataType:  # noqa: FBT001
    """Create a DataType from a fully qualified Python path."""
    import_ = Import.from_full_path(full_path)
    return self.data_type.from_import(import_, is_custom_type=is_custom_type)
|
|
715
|
+
|
|
716
|
+
def get_data_type_from_value(self, value: Any) -> DataType:
    """Infer a DataType from a Python value."""
    # Container values map straight to imported container types.
    if isinstance(value, dict):
        return self.data_type.from_import(IMPORT_DICT)
    if isinstance(value, list):
        return self.data_type.from_import(IMPORT_LIST)
    # Scalar dispatch. bool is checked before int because bool subclasses int.
    for scalar_type, mapped in (
        (str, Types.string),
        (bool, Types.boolean),
        (int, Types.integer),
        (float, Types.float),
    ):
        if isinstance(value, scalar_type):
            return self.get_data_type(mapped)
    # Anything unrecognized falls back to Any.
    return self.get_data_type(Types.any)
|