datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datamodel_code_generator/__init__.py +654 -185
- datamodel_code_generator/__main__.py +872 -388
- datamodel_code_generator/arguments.py +798 -0
- datamodel_code_generator/cli_options.py +295 -0
- datamodel_code_generator/format.py +292 -54
- datamodel_code_generator/http.py +85 -10
- datamodel_code_generator/imports.py +152 -43
- datamodel_code_generator/model/__init__.py +138 -1
- datamodel_code_generator/model/base.py +531 -120
- datamodel_code_generator/model/dataclass.py +211 -0
- datamodel_code_generator/model/enum.py +133 -12
- datamodel_code_generator/model/imports.py +22 -0
- datamodel_code_generator/model/msgspec.py +462 -0
- datamodel_code_generator/model/pydantic/__init__.py +30 -25
- datamodel_code_generator/model/pydantic/base_model.py +304 -100
- datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
- datamodel_code_generator/model/pydantic/dataclass.py +15 -4
- datamodel_code_generator/model/pydantic/imports.py +40 -27
- datamodel_code_generator/model/pydantic/types.py +188 -96
- datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
- datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
- datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
- datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
- datamodel_code_generator/model/pydantic_v2/types.py +143 -0
- datamodel_code_generator/model/scalar.py +124 -0
- datamodel_code_generator/model/template/Enum.jinja2 +15 -2
- datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
- datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
- datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
- datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
- datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
- datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
- datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
- datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
- datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
- datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
- datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
- datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
- datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
- datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
- datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
- datamodel_code_generator/model/type_alias.py +70 -0
- datamodel_code_generator/model/typed_dict.py +161 -0
- datamodel_code_generator/model/types.py +106 -0
- datamodel_code_generator/model/union.py +105 -0
- datamodel_code_generator/parser/__init__.py +30 -12
- datamodel_code_generator/parser/_graph.py +67 -0
- datamodel_code_generator/parser/_scc.py +171 -0
- datamodel_code_generator/parser/base.py +2426 -380
- datamodel_code_generator/parser/graphql.py +652 -0
- datamodel_code_generator/parser/jsonschema.py +2518 -647
- datamodel_code_generator/parser/openapi.py +631 -222
- datamodel_code_generator/py.typed +0 -0
- datamodel_code_generator/pydantic_patch.py +28 -0
- datamodel_code_generator/reference.py +672 -290
- datamodel_code_generator/types.py +521 -145
- datamodel_code_generator/util.py +155 -0
- datamodel_code_generator/watch.py +65 -0
- datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
- datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
- datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
- datamodel_code_generator/version.py +0 -1
- datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
- datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
- datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
|
@@ -1,79 +1,188 @@
|
|
|
1
|
+
"""Import management system for generated code.
|
|
2
|
+
|
|
3
|
+
Provides Import and Imports classes to track, organize, and render
|
|
4
|
+
Python import statements for generated data models.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
1
9
|
from collections import defaultdict
|
|
2
10
|
from functools import lru_cache
|
|
3
|
-
from
|
|
11
|
+
from itertools import starmap
|
|
12
|
+
from typing import TYPE_CHECKING, Optional
|
|
13
|
+
|
|
14
|
+
from datamodel_code_generator.util import BaseModel
|
|
4
15
|
|
|
5
|
-
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from collections.abc import Iterable
|
|
6
18
|
|
|
7
19
|
|
|
8
20
|
class Import(BaseModel):
    """Represents a single Python import statement.

    Models the parts of ``from <from_> import <import_> as <alias>``;
    ``reference_path`` ties the import back to the model reference that
    required it, so it can later be removed via Imports.remove_referenced_imports.
    """

    # Module to import from; None means a plain "import x" statement.
    from_: Optional[str] = None  # noqa: UP045
    # Imported name; may itself be dotted (e.g. "a.b") for module imports.
    import_: str
    # Optional "as" alias for the imported name.
    alias: Optional[str] = None  # noqa: UP045
    # Reference path of the model that needs this import, if any.
    reference_path: Optional[str] = None  # noqa: UP045

    @property
    def is_future(self) -> bool:
        """Check if this is a __future__ import."""
        return self.from_ == "__future__"

    @classmethod
    @lru_cache
    def from_full_path(cls, class_path: str) -> Import:
        """Create an Import from a fully qualified path (e.g., 'typing.Optional')."""
        split_class_path: list[str] = class_path.split(".")
        # Everything before the last dot is the source module; an empty
        # module part (no dot at all) becomes a plain "import name".
        return Import(from_=".".join(split_class_path[:-1]) or None, import_=split_class_path[-1])
|
20
40
|
|
|
41
|
+
class Imports(defaultdict[Optional[str], set[str]]):
    """Collection of imports with reference counting and alias support.

    Maps a source module name (or None for plain ``import x`` statements)
    to the set of names imported from it. A per-(module, name) reference
    counter allows imports to be removed only once the last user is gone.
    """

    def __str__(self) -> str:
        """Return formatted import statements."""
        return self.dump()

    def __init__(self, use_exact: bool = False) -> None:  # noqa: FBT001, FBT002
        """Initialize empty import collection."""
        super().__init__(set)
        # Per-module alias map: {module: {imported_name: alias}}.
        self.alias: defaultdict[str | None, dict[str, str]] = defaultdict(dict)
        # Reference counts keyed by (module, imported_name).
        self.counter: dict[tuple[str | None, str], int] = defaultdict(int)
        # Imports registered under a model reference path, for targeted removal.
        self.reference_paths: dict[str, Import] = {}
        self.use_exact: bool = use_exact
        # Extra names to expose via dump_all() without importing them.
        self._exports: set[str] | None = None

    def _set_alias(self, from_: str | None, imports: set[str]) -> list[str]:
        """Apply aliases to imports and return sorted list."""
        return [
            f"{i} as {self.alias[from_][i]}" if i in self.alias[from_] and i != self.alias[from_][i] else i
            for i in sorted(imports)
        ]

    def create_line(self, from_: str | None, imports: set[str]) -> str:
        """Create a single import line from module and names."""
        if from_:
            return f"from {from_} import {', '.join(self._set_alias(from_, imports))}"
        # No source module: one "import x" statement per name.
        return "\n".join(f"import {i}" for i in self._set_alias(from_, imports))

    def dump(self) -> str:
        """Render all imports as a string."""
        return "\n".join(starmap(self.create_line, self.items()))

    def append(self, imports: Import | Iterable[Import] | None) -> None:
        """Add one or more imports to the collection."""
        if imports:
            if isinstance(imports, Import):
                imports = [imports]
            for import_ in imports:
                if import_.reference_path:
                    self.reference_paths[import_.reference_path] = import_
                if "." in import_.import_:
                    # Dotted names are module imports: "import a.b".
                    self[None].add(import_.import_)
                    self.counter[None, import_.import_] += 1
                else:
                    self[import_.from_].add(import_.import_)
                    self.counter[import_.from_, import_.import_] += 1
                    if import_.alias:
                        self.alias[import_.from_][import_.import_] = import_.alias

    def remove(self, imports: Import | Iterable[Import]) -> None:
        """Remove one or more imports from the collection."""
        if isinstance(imports, Import):  # pragma: no cover
            imports = [imports]
        for import_ in imports:
            if "." in import_.import_:  # pragma: no cover
                self.counter[None, import_.import_] -= 1
                if self.counter[None, import_.import_] == 0:  # pragma: no cover
                    self[None].remove(import_.import_)
                    if not self[None]:
                        del self[None]
            else:
                self.counter[import_.from_, import_.import_] -= 1  # pragma: no cover
                if self.counter[import_.from_, import_.import_] == 0:  # pragma: no cover
                    self[import_.from_].remove(import_.import_)
                    if not self[import_.from_]:
                        del self[import_.from_]
                    if import_.alias:  # pragma: no cover
                        del self.alias[import_.from_][import_.import_]
                        if not self.alias[import_.from_]:
                            del self.alias[import_.from_]

    def remove_referenced_imports(self, reference_path: str) -> None:
        """Remove imports associated with a reference path."""
        if reference_path in self.reference_paths:
            self.remove(self.reference_paths[reference_path])

    def extract_future(self) -> Imports:
        """Extract and remove __future__ imports, returning them as a new Imports."""
        future = Imports(self.use_exact)
        future_key = "__future__"
        if future_key in self:
            future[future_key] = self.pop(future_key)
            # Move the matching reference counts along with the names.
            for key in list(self.counter.keys()):
                if key[0] == future_key:
                    future.counter[key] = self.counter.pop(key)
            if future_key in self.alias:
                future.alias[future_key] = self.alias.pop(future_key)
        return future

    def add_export(self, name: str) -> None:
        """Add a name to export without importing it (for local definitions)."""
        if self._exports is None:
            self._exports = set()
        self._exports.add(name)

    def dump_all(self, *, multiline: bool = False) -> str:
        """Generate __all__ declaration from imported names and added exports.

        Args:
            multiline: If True, format with one name per line

        Returns:
            Formatted __all__ = [...] string

        """
        name_set: set[str] = (self._exports or set()).copy()
        for from_, imports in self.items():
            # Export the alias when one exists, otherwise the plain name.
            name_set.update(self.alias.get(from_, {}).get(import_) or import_ for import_ in imports)
        name_list = sorted(name_set)
        if multiline:
            items = ",\n    ".join(f'"{name}"' for name in name_list)
            return f"__all__ = [\n    {items},\n]"
        items = ", ".join(f'"{name}"' for name in name_list)
        return f"__all__ = [{items}]"
|
60
156
|
|
|
61
|
-
# Commonly used Import instances, shared across the code generator.
# Import.from_full_path is lru_cache'd, so equal paths yield the same object.
IMPORT_ANNOTATED = Import.from_full_path("typing.Annotated")
IMPORT_ANY = Import.from_full_path("typing.Any")
IMPORT_LIST = Import.from_full_path("typing.List")
IMPORT_SET = Import.from_full_path("typing.Set")
IMPORT_UNION = Import.from_full_path("typing.Union")
IMPORT_OPTIONAL = Import.from_full_path("typing.Optional")
IMPORT_LITERAL = Import.from_full_path("typing.Literal")
IMPORT_TYPE_ALIAS = Import.from_full_path("typing.TypeAlias")
IMPORT_TYPE_ALIAS_BACKPORT = Import.from_full_path("typing_extensions.TypeAlias")
IMPORT_TYPE_ALIAS_TYPE = Import.from_full_path("typing_extensions.TypeAliasType")
IMPORT_SEQUENCE = Import.from_full_path("typing.Sequence")
IMPORT_FROZEN_SET = Import.from_full_path("typing.FrozenSet")
IMPORT_MAPPING = Import.from_full_path("typing.Mapping")
IMPORT_ABC_SEQUENCE = Import.from_full_path("collections.abc.Sequence")
IMPORT_ABC_SET = Import.from_full_path("collections.abc.Set")
IMPORT_ABC_MAPPING = Import.from_full_path("collections.abc.Mapping")
IMPORT_ENUM = Import.from_full_path("enum.Enum")
IMPORT_STR_ENUM = Import.from_full_path("enum.StrEnum")
IMPORT_INT_ENUM = Import.from_full_path("enum.IntEnum")
IMPORT_ANNOTATIONS = Import.from_full_path("__future__.annotations")
IMPORT_DICT = Import.from_full_path("typing.Dict")
IMPORT_DECIMAL = Import.from_full_path("decimal.Decimal")
IMPORT_DATE = Import.from_full_path("datetime.date")
IMPORT_DATETIME = Import.from_full_path("datetime.datetime")
IMPORT_TIMEDELTA = Import.from_full_path("datetime.timedelta")
IMPORT_PATH = Import.from_full_path("pathlib.Path")
IMPORT_TIME = Import.from_full_path("datetime.time")
IMPORT_UUID = Import.from_full_path("uuid.UUID")
IMPORT_PENDULUM_DATE = Import.from_full_path("pendulum.Date")
IMPORT_PENDULUM_DATETIME = Import.from_full_path("pendulum.DateTime")
IMPORT_PENDULUM_DURATION = Import.from_full_path("pendulum.Duration")
IMPORT_PENDULUM_TIME = Import.from_full_path("pendulum.Time")
|
@@ -1,3 +1,140 @@
|
|
|
1
|
+
"""Model generation module.
|
|
2
|
+
|
|
3
|
+
Provides factory functions and classes for generating different output formats
|
|
4
|
+
(Pydantic, dataclasses, TypedDict, msgspec) based on configuration.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import sys
|
|
10
|
+
from typing import TYPE_CHECKING, Callable, NamedTuple
|
|
11
|
+
|
|
12
|
+
from datamodel_code_generator import PythonVersion
|
|
13
|
+
|
|
1
14
|
from .base import ConstraintsBase, DataModel, DataModelFieldBase
|
|
2
15
|
|
|
3
|
-
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from collections.abc import Iterable
|
|
18
|
+
|
|
19
|
+
from datamodel_code_generator import DataModelType
|
|
20
|
+
from datamodel_code_generator.types import DataTypeManager as DataTypeManagerABC
|
|
21
|
+
|
|
22
|
+
# Default code-generation target: the Python version running the generator.
DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(f"{sys.version_info.major}.{sys.version_info.minor}")
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class DataModelSet(NamedTuple):
    """Collection of model types needed for a specific output format."""

    # Main model class (e.g. pydantic.BaseModel, dataclass.DataClass).
    data_model: type[DataModel]
    # Model used for root/top-level types (custom root, RootModel, or a type alias).
    root_model: type[DataModel]
    # Field implementation matching the data model.
    field_model: type[DataModelFieldBase]
    # Type resolver/manager for this output format.
    data_type_manager: type[DataTypeManagerABC]
    # Optional post-processing action to resolve forward references.
    dump_resolve_reference_action: Callable[[Iterable[str]], str] | None
    # Model used for scalar type aliases.
    scalar_model: type[DataModel]
    # Model used for union type aliases.
    union_model: type[DataModel]
    # Extra third-party packages the generated code depends on (isort hint).
    known_third_party: list[str] | None = None
|
|
38
|
+
def get_data_model_types(
    data_model_type: DataModelType,
    target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
    use_type_alias: bool = False,  # noqa: FBT001, FBT002
) -> DataModelSet:
    """Get the appropriate model types for the given output format and Python version.

    Args:
        data_model_type: Which output flavor to generate (pydantic v1/v2,
            dataclasses, TypedDict, msgspec).
        target_python_version: Python version the generated code must run on;
            selects backported vs. native type-alias constructs.
        use_type_alias: When True, root models for pydantic outputs are emitted
            as type aliases instead of custom root / RootModel classes.

    Returns:
        DataModelSet bundling the model, field, and type-manager classes.

    Raises:
        ValueError: If ``data_model_type`` is not a supported output format.
    """
    # Deferred imports avoid circular imports between the package root and models.
    from datamodel_code_generator import DataModelType  # noqa: PLC0415

    from . import (  # noqa: PLC0415
        dataclass,
        msgspec,
        pydantic,
        pydantic_v2,
        scalar,
        type_alias,
        typed_dict,
        union,
    )
    from .types import DataTypeManager  # noqa: PLC0415

    # Pydantic v2 requires TypeAliasType; other output types use TypeAlias for better compatibility
    if data_model_type == DataModelType.PydanticV2BaseModel:
        if target_python_version.has_type_statement:
            type_alias_class = type_alias.TypeStatement
            scalar_class = scalar.DataTypeScalarTypeStatement
            union_class = union.DataTypeUnionTypeStatement
        else:
            type_alias_class = type_alias.TypeAliasTypeBackport
            scalar_class = scalar.DataTypeScalarTypeBackport
            union_class = union.DataTypeUnionTypeBackport
    elif target_python_version.has_type_statement:
        type_alias_class = type_alias.TypeStatement
        scalar_class = scalar.DataTypeScalarTypeStatement
        union_class = union.DataTypeUnionTypeStatement
    elif target_python_version.has_type_alias:
        type_alias_class = type_alias.TypeAlias
        scalar_class = scalar.DataTypeScalar
        union_class = union.DataTypeUnion
    else:
        type_alias_class = type_alias.TypeAliasBackport
        scalar_class = scalar.DataTypeScalarBackport
        union_class = union.DataTypeUnionBackport

    if data_model_type == DataModelType.PydanticBaseModel:
        return DataModelSet(
            data_model=pydantic.BaseModel,
            root_model=type_alias_class if use_type_alias else pydantic.CustomRootType,
            field_model=pydantic.DataModelField,
            data_type_manager=pydantic.DataTypeManager,
            dump_resolve_reference_action=pydantic.dump_resolve_reference_action,
            scalar_model=scalar_class,
            union_model=union_class,
        )
    if data_model_type == DataModelType.PydanticV2BaseModel:
        return DataModelSet(
            data_model=pydantic_v2.BaseModel,
            root_model=type_alias_class if use_type_alias else pydantic_v2.RootModel,
            field_model=pydantic_v2.DataModelField,
            data_type_manager=pydantic_v2.DataTypeManager,
            dump_resolve_reference_action=pydantic_v2.dump_resolve_reference_action,
            scalar_model=scalar_class,
            union_model=union_class,
        )
    if data_model_type == DataModelType.DataclassesDataclass:
        return DataModelSet(
            data_model=dataclass.DataClass,
            root_model=type_alias_class,
            field_model=dataclass.DataModelField,
            data_type_manager=dataclass.DataTypeManager,
            dump_resolve_reference_action=None,
            scalar_model=scalar_class,
            union_model=union_class,
        )
    if data_model_type == DataModelType.TypingTypedDict:
        return DataModelSet(
            data_model=typed_dict.TypedDict,
            root_model=type_alias_class,
            # NotRequired needs a backported field implementation on older targets.
            field_model=(
                typed_dict.DataModelField
                if target_python_version.has_typed_dict_non_required
                else typed_dict.DataModelFieldBackport
            ),
            data_type_manager=DataTypeManager,
            dump_resolve_reference_action=None,
            scalar_model=scalar_class,
            union_model=union_class,
        )
    if data_model_type == DataModelType.MsgspecStruct:
        return DataModelSet(
            data_model=msgspec.Struct,
            root_model=type_alias_class,
            field_model=msgspec.DataModelField,
            data_type_manager=msgspec.DataTypeManager,
            dump_resolve_reference_action=None,
            known_third_party=["msgspec"],
            scalar_model=scalar_class,
            union_model=union_class,
        )
    msg = f"{data_model_type} is unsupported data model type"  # pragma: no cover
    raise ValueError(msg)  # pragma: no cover
139
|
+
|
|
140
|
+
# Public API re-exported from .base for convenience.
__all__ = ["ConstraintsBase", "DataModel", "DataModelFieldBase"]