datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datamodel_code_generator/__init__.py +654 -185
- datamodel_code_generator/__main__.py +872 -388
- datamodel_code_generator/arguments.py +798 -0
- datamodel_code_generator/cli_options.py +295 -0
- datamodel_code_generator/format.py +292 -54
- datamodel_code_generator/http.py +85 -10
- datamodel_code_generator/imports.py +152 -43
- datamodel_code_generator/model/__init__.py +138 -1
- datamodel_code_generator/model/base.py +531 -120
- datamodel_code_generator/model/dataclass.py +211 -0
- datamodel_code_generator/model/enum.py +133 -12
- datamodel_code_generator/model/imports.py +22 -0
- datamodel_code_generator/model/msgspec.py +462 -0
- datamodel_code_generator/model/pydantic/__init__.py +30 -25
- datamodel_code_generator/model/pydantic/base_model.py +304 -100
- datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
- datamodel_code_generator/model/pydantic/dataclass.py +15 -4
- datamodel_code_generator/model/pydantic/imports.py +40 -27
- datamodel_code_generator/model/pydantic/types.py +188 -96
- datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
- datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
- datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
- datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
- datamodel_code_generator/model/pydantic_v2/types.py +143 -0
- datamodel_code_generator/model/scalar.py +124 -0
- datamodel_code_generator/model/template/Enum.jinja2 +15 -2
- datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
- datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
- datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
- datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
- datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
- datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
- datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
- datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
- datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
- datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
- datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
- datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
- datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
- datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
- datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
- datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
- datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
- datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
- datamodel_code_generator/model/type_alias.py +70 -0
- datamodel_code_generator/model/typed_dict.py +161 -0
- datamodel_code_generator/model/types.py +106 -0
- datamodel_code_generator/model/union.py +105 -0
- datamodel_code_generator/parser/__init__.py +30 -12
- datamodel_code_generator/parser/_graph.py +67 -0
- datamodel_code_generator/parser/_scc.py +171 -0
- datamodel_code_generator/parser/base.py +2426 -380
- datamodel_code_generator/parser/graphql.py +652 -0
- datamodel_code_generator/parser/jsonschema.py +2518 -647
- datamodel_code_generator/parser/openapi.py +631 -222
- datamodel_code_generator/py.typed +0 -0
- datamodel_code_generator/pydantic_patch.py +28 -0
- datamodel_code_generator/reference.py +672 -290
- datamodel_code_generator/types.py +521 -145
- datamodel_code_generator/util.py +155 -0
- datamodel_code_generator/watch.py +65 -0
- datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
- datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
- datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
- datamodel_code_generator/version.py +0 -1
- datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
- datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
- datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
- {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,462 @@
|
|
|
1
|
+
"""msgspec.Struct model generator.
|
|
2
|
+
|
|
3
|
+
Generates Python models using msgspec.Struct for high-performance serialization.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
from functools import lru_cache, wraps
|
|
9
|
+
from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar
|
|
10
|
+
|
|
11
|
+
from pydantic import Field
|
|
12
|
+
|
|
13
|
+
from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
|
|
14
|
+
from datamodel_code_generator.imports import (
|
|
15
|
+
IMPORT_DATE,
|
|
16
|
+
IMPORT_DATETIME,
|
|
17
|
+
IMPORT_TIME,
|
|
18
|
+
IMPORT_TIMEDELTA,
|
|
19
|
+
IMPORT_UNION,
|
|
20
|
+
Import,
|
|
21
|
+
)
|
|
22
|
+
from datamodel_code_generator.model import DataModel, DataModelFieldBase
|
|
23
|
+
from datamodel_code_generator.model.base import UNDEFINED
|
|
24
|
+
from datamodel_code_generator.model.imports import (
|
|
25
|
+
IMPORT_MSGSPEC_CONVERT,
|
|
26
|
+
IMPORT_MSGSPEC_FIELD,
|
|
27
|
+
IMPORT_MSGSPEC_META,
|
|
28
|
+
IMPORT_MSGSPEC_UNSET,
|
|
29
|
+
IMPORT_MSGSPEC_UNSETTYPE,
|
|
30
|
+
)
|
|
31
|
+
from datamodel_code_generator.model.pydantic.base_model import (
|
|
32
|
+
Constraints as _Constraints,
|
|
33
|
+
)
|
|
34
|
+
from datamodel_code_generator.model.type_alias import TypeAliasBase
|
|
35
|
+
from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
|
|
36
|
+
from datamodel_code_generator.model.types import type_map_factory
|
|
37
|
+
from datamodel_code_generator.types import (
|
|
38
|
+
NONE,
|
|
39
|
+
OPTIONAL_PREFIX,
|
|
40
|
+
UNION_DELIMITER,
|
|
41
|
+
UNION_OPERATOR_DELIMITER,
|
|
42
|
+
UNION_PREFIX,
|
|
43
|
+
DataType,
|
|
44
|
+
StrictTypes,
|
|
45
|
+
Types,
|
|
46
|
+
_remove_none_from_union,
|
|
47
|
+
chain_as_tuple,
|
|
48
|
+
)
|
|
49
|
+
|
|
50
|
+
UNSET_TYPE = "UnsetType"


class _UNSET:
    """Sentinel that renders as the bare name ``UNSET`` in generated code."""

    def __str__(self) -> str:
        return "UNSET"

    def __repr__(self) -> str:
        # repr and str intentionally agree: the sentinel must be emitted
        # verbatim as ``UNSET`` wherever it is interpolated into templates.
        return "UNSET"


UNSET = _UNSET()
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
if TYPE_CHECKING:
|
|
64
|
+
from collections import defaultdict
|
|
65
|
+
from collections.abc import Sequence
|
|
66
|
+
from pathlib import Path
|
|
67
|
+
|
|
68
|
+
from datamodel_code_generator.reference import Reference
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _has_field_assignment(field: DataModelFieldBase) -> bool:
    """Return True when the field needs an ``= ...`` assignment in the struct body."""
    if field.required:
        # Required fields are rendered bare, without a default assignment.
        return False
    stripped_none_default = field.represented_default == "None" and field.strip_default_none
    return not stripped_none_default
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
DataModelFieldBaseT = TypeVar("DataModelFieldBaseT", bound=DataModelFieldBase)


def import_extender(cls: type[DataModelFieldBaseT]) -> type[DataModelFieldBaseT]:
    """Extend imports property with msgspec-specific imports.

    Class decorator: wraps the field class's ``imports`` property so that the
    rendered field also reports the msgspec names (``field``, ``convert``,
    ``Meta``, ``UNSET``, ``UnsetType``) it will reference in generated code.
    """
    original_imports: property = cls.imports

    @wraps(original_imports.fget)  # pyright: ignore[reportArgumentType]
    def new_imports(self: DataModelFieldBaseT) -> tuple[Import, ...]:
        # ClassVar discriminator fields render no field()/Meta machinery,
        # so they contribute no extra imports at all.
        if self.extras.get("is_classvar"):
            return ()
        extra_imports = []
        field = self.field
        # TODO: Improve field detection
        if field and field.startswith("field("):
            extra_imports.append(IMPORT_MSGSPEC_FIELD)
        if self.field and "lambda: convert" in self.field:
            extra_imports.append(IMPORT_MSGSPEC_CONVERT)
        if isinstance(self, DataModelField) and self.needs_meta_import:
            extra_imports.append(IMPORT_MSGSPEC_META)
        # Neither required nor nullable: the type hint gains UnsetType.
        # NOTE(review): nesting of the two inner conditions reconstructed from
        # rendering with lost indentation — confirm against upstream source.
        if not self.required and not self.nullable:
            extra_imports.append(IMPORT_MSGSPEC_UNSETTYPE)
            if not self.data_type.use_union_operator:
                # Union[...] spelling (pre-PEP 604) needs typing.Union.
                extra_imports.append(IMPORT_UNION)
            if self.default is None or self.default is UNDEFINED:
                extra_imports.append(IMPORT_MSGSPEC_UNSET)
        return chain_as_tuple(original_imports.fget(self), extra_imports)  # pyright: ignore[reportOptionalCall]

    cls.imports = property(new_imports)  # pyright: ignore[reportAttributeAccessIssue]
    return cls
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
class Struct(DataModel):
    """DataModel implementation for msgspec.Struct."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "msgspec.jinja2"
    BASE_CLASS: ClassVar[str] = "msgspec.Struct"
    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()

    def __init__(  # noqa: PLR0913
        self,
        *,
        reference: Reference,
        fields: list[DataModelFieldBase],
        decorators: list[str] | None = None,
        base_classes: list[Reference] | None = None,
        custom_base_class: str | None = None,
        custom_template_dir: Path | None = None,
        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
        methods: list[str] | None = None,
        path: Path | None = None,
        description: str | None = None,
        default: Any = UNDEFINED,
        nullable: bool = False,
        keyword_only: bool = False,
        treat_dot_as_module: bool = False,
    ) -> None:
        """Initialize msgspec Struct with fields sorted by field assignment requirement.

        ``sorted`` is stable and False < True, so fields for which
        ``_has_field_assignment`` is False (no ``= ...`` in the body) come
        first — presumably to satisfy the defaults-last ordering that struct
        definitions require (TODO confirm against msgspec docs).
        """
        super().__init__(
            reference=reference,
            fields=sorted(fields, key=_has_field_assignment),
            decorators=decorators,
            base_classes=base_classes,
            custom_base_class=custom_base_class,
            custom_template_dir=custom_template_dir,
            extra_template_data=extra_template_data,
            methods=methods,
            path=path,
            description=description,
            default=default,
            nullable=nullable,
            keyword_only=keyword_only,
            treat_dot_as_module=treat_dot_as_module,
        )
        # Template always expects a (possibly empty) kwargs mapping.
        self.extra_template_data.setdefault("base_class_kwargs", {})
        if self.keyword_only:
            self.add_base_class_kwarg("kw_only", "True")

    def add_base_class_kwarg(self, name: str, value: str) -> None:
        """Add keyword argument to base class constructor."""
        self.extra_template_data["base_class_kwargs"][name] = value
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
class Constraints(_Constraints):
    """Constraint model for msgspec fields."""

    # To override existing pattern alias: msgspec's Meta uses ``pattern``,
    # so both names map straight through instead of aliasing to each other.
    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
@lru_cache
def get_neither_required_nor_nullable_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
    """Get type hint for fields that are neither required nor nullable, using UnsetType."""
    base = _remove_none_from_union(type_, use_union_operator=use_union_operator)
    if base.startswith(OPTIONAL_PREFIX):  # pragma: no cover
        # Unwrap Optional[...] down to its inner type.
        base = base[len(OPTIONAL_PREFIX) : -1]

    if not base or base == NONE:
        # Nothing left but None: the field is purely unset-able.
        return UNSET_TYPE
    if use_union_operator:
        return UNION_OPERATOR_DELIMITER.join((base, UNSET_TYPE))
    if base.startswith(UNION_PREFIX):
        # Splice UnsetType into the existing Union[...] before its closing bracket.
        return f"{base[:-1]}{UNION_DELIMITER}{UNSET_TYPE}]"
    return UNION_PREFIX + UNION_DELIMITER.join((base, UNSET_TYPE)) + "]"
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
@lru_cache
def _add_unset_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
    """Add UnsetType to a type hint without removing None."""
    if use_union_operator:
        return type_ + UNION_OPERATOR_DELIMITER + UNSET_TYPE
    if type_.startswith(UNION_PREFIX):
        # Splice into the existing Union[...] before its closing bracket.
        return f"{type_[:-1]}{UNION_DELIMITER}{UNSET_TYPE}]"
    if type_.startswith(OPTIONAL_PREFIX):  # pragma: no cover
        # Expand Optional[X] to Union[X, None, UnsetType] explicitly.
        inner = type_[len(OPTIONAL_PREFIX) : -1]
        return UNION_PREFIX + UNION_DELIMITER.join((inner, NONE, UNSET_TYPE)) + "]"
    return UNION_PREFIX + UNION_DELIMITER.join((type_, UNSET_TYPE)) + "]"
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
@import_extender
class DataModelField(DataModelFieldBase):
    """Field implementation for msgspec Struct models."""

    # extras keys forwarded into a generated ``field(...)`` call.
    _FIELD_KEYS: ClassVar[set[str]] = {
        "default",
        "default_factory",
    }
    # extras/constraint keys forwarded into a generated ``Meta(...)``.
    _META_FIELD_KEYS: ClassVar[set[str]] = {
        "title",
        "description",
        "gt",
        "ge",
        "lt",
        "le",
        "multiple_of",
        # 'min_items', # not supported by msgspec
        # 'max_items', # not supported by msgspec
        "min_length",
        "max_length",
        "pattern",
        "examples",
        # 'unique_items', # not supported by msgspec
    }
    _PARSE_METHOD = "convert"
    # Numeric-comparison constraints that must be coerced to int/float.
    _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le", "multiple_of"}
    constraints: Optional[Constraints] = None  # noqa: UP045

    def process_const(self) -> None:
        """Process const field constraint."""
        if "const" not in self.extras:
            return
        self.const = True
        self.nullable = False
        const = self.extras["const"]
        if self.data_type.type == "str" and isinstance(const, str):  # pragma: no cover # Literal supports only str
            self.replace_data_type(self.data_type.__class__(literals=[const]), clear_old_parent=False)

    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
        """Get constraint value with appropriate numeric type."""
        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
            return value

        # Prefer float when any member type is float; otherwise int.
        if any(data_type.type == "float" for data_type in self.data_type.all_data_types):
            return float(value)
        return int(value)

    @property
    def field(self) -> str | None:
        """For backwards compatibility."""
        result = str(self)
        if not result:
            return None
        return result

    def __str__(self) -> str:
        """Generate field() call or default value representation."""
        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
        if self.alias:
            data["name"] = self.alias

        if self.default is not UNDEFINED and self.default is not None:
            data["default"] = self.default
        elif self._not_required and "default_factory" not in data:
            # Optional struct fields fall back to None (nullable) or UNSET.
            data["default"] = None if self.nullable else UNSET

        if self.required:
            # Required fields must not carry a default of any kind.
            data = {
                k: v
                for k, v in data.items()
                if k
                not in {
                    "default",
                    "default_factory",
                }
            }
        elif self.default and "default_factory" not in data:
            default_factory = self._get_default_as_struct_model()
            if default_factory is not None:
                data.pop("default")
                data["default_factory"] = default_factory

        # Mutable defaults (list/dict/set) must become default_factory.
        if "default" in data and isinstance(data["default"], (list, dict, set)) and "default_factory" not in data:
            default_value = data.pop("default")
            if default_value:
                from datamodel_code_generator.model.base import repr_set_sorted  # noqa: PLC0415

                # Sets are repr'd sorted for deterministic output.
                default_repr = repr_set_sorted(default_value) if isinstance(default_value, set) else repr(default_value)
                data["default_factory"] = f"lambda: {default_repr}"
            else:
                # Empty container: the bare type name is the factory.
                data["default_factory"] = type(default_value).__name__

        if not data:
            return ""

        if len(data) == 1 and "default" in data:
            # A lone scalar default renders as a plain assignment, no field().
            return repr(data["default"])

        # default_factory is already source text; everything else is repr'd.
        kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]
        return f"field({', '.join(kwargs)})"

    @property
    def type_hint(self) -> str:
        """Return the type hint, using UnsetType for non-required non-nullable fields."""
        type_hint = super().type_hint
        if self._not_required and not self.nullable:
            if self.data_type.is_optional:
                return _add_unset_type(type_hint, self.data_type.use_union_operator)
            return get_neither_required_nor_nullable_type(type_hint, self.data_type.use_union_operator)
        return type_hint

    @property
    def _not_required(self) -> bool:
        # Unset semantics only apply to fields that live on a msgspec Struct.
        return not self.required and isinstance(self.parent, Struct)

    @property
    def fall_back_to_nullable(self) -> bool:
        """Return whether to fall back to nullable type instead of UnsetType."""
        return not self._not_required

    def _get_meta_string(self) -> str | None:
        """Compute Meta(...) string if there are any meta constraints."""
        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS}
        has_type_constraints = self.data_type.kwargs is not None and len(self.data_type.kwargs) > 0
        if (
            self.constraints is not None
            and not self.self_reference()
            and not (self.data_type.strict and has_type_constraints)
        ):
            data = {
                **data,
                **{
                    k: self._get_strict_field_constraint_value(k, v)
                    for k, v in self.constraints.dict().items()
                    if k in self._META_FIELD_KEYS
                },
            }

        # Sorted for deterministic generated output.
        meta_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
        return f"Meta({', '.join(meta_arguments)})" if meta_arguments else None

    @property
    def annotated(self) -> str | None:  # noqa: PLR0911
        """Get Annotated type hint with Meta constraints.

        For ClassVar fields (discriminator tag_field), ClassVar is required
        regardless of use_annotated setting.
        """
        if self.extras.get("is_classvar"):
            meta = self._get_meta_string()
            if self.use_annotated and meta:
                return f"ClassVar[Annotated[{self.type_hint}, {meta}]]"
            return f"ClassVar[{self.type_hint}]"

        if not self.use_annotated:  # pragma: no cover
            return None

        meta = self._get_meta_string()
        if not meta:
            return None

        if self.required:
            return f"Annotated[{self.type_hint}, {meta}]"

        # Annotate the inner type, then re-apply the UnsetType wrapping.
        type_hint = self.data_type.type_hint
        annotated_type = f"Annotated[{type_hint}, {meta}]"
        if self.nullable:  # pragma: no cover
            return annotated_type
        if self.data_type.is_optional:  # pragma: no cover
            return _add_unset_type(annotated_type, self.data_type.use_union_operator)
        return get_neither_required_nor_nullable_type(annotated_type, self.data_type.use_union_operator)

    @property
    def needs_annotated_import(self) -> bool:
        """Check if this field requires the Annotated import.

        ClassVar fields with Meta need Annotated only when use_annotated is True.
        ClassVar fields without Meta don't need Annotated.
        """
        if not self.annotated:
            return False
        if self.extras.get("is_classvar"):
            return self.use_annotated and self._get_meta_string() is not None
        return True

    @property
    def needs_meta_import(self) -> bool:
        """Check if this field requires the Meta import."""
        return self._get_meta_string() is not None

    def _get_default_as_struct_model(self) -> str | None:
        """Convert default value to Struct model using msgspec convert."""
        for data_type in self.data_type.data_types or (self.data_type,):
            # TODO: Check nested data_types
            if data_type.is_dict:
                # TODO: Parse dict model for default
                continue  # pragma: no cover
            if data_type.is_list and len(data_type.data_types) == 1:
                data_type_child = data_type.data_types[0]
                if (  # pragma: no cover
                    data_type_child.reference
                    and (isinstance(data_type_child.reference.source, (Struct, TypeAliasBase)))
                    and isinstance(self.default, list)
                ):
                    return (
                        f"lambda: {self._PARSE_METHOD}({self.default!r}, "
                        f"type=list[{data_type_child.alias or data_type_child.reference.source.class_name}])"
                    )
            elif data_type.reference and isinstance(data_type.reference.source, Struct):
                if self.data_type.is_union:
                    # Only dict/list defaults can be converted to a Struct member.
                    if not isinstance(self.default, (dict, list)):
                        continue
                    if isinstance(self.default, dict) and any(dt.is_dict for dt in self.data_type.data_types):
                        continue
                return (
                    f"lambda: {self._PARSE_METHOD}({self.default!r}, "
                    f"type={data_type.alias or data_type.reference.source.class_name})"
                )
        return None
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
class DataTypeManager(_DataTypeManager):
    """Type manager for msgspec Struct models."""

    def __init__(  # noqa: PLR0913, PLR0917
        self,
        python_version: PythonVersion = PythonVersionMin,
        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
        strict_types: Sequence[StrictTypes] | None = None,
        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
        use_union_operator: bool = False,  # noqa: FBT001, FBT002
        use_pendulum: bool = False,  # noqa: FBT001, FBT002
        target_datetime_class: DatetimeClassType | None = None,
        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
    ) -> None:
        """Initialize type manager with optional datetime type mapping."""
        super().__init__(
            python_version,
            use_standard_collections,
            use_generic_container_types,
            strict_types,
            use_non_positive_negative_number_constrained_types,
            use_decimal_for_multiple_of,
            use_union_operator,
            use_pendulum,
            target_datetime_class,
            treat_dot_as_module,
            use_serialize_as_any,
        )

        # Remap temporal types onto datetime-module classes only when the
        # plain ``datetime`` target is requested; otherwise keep the defaults.
        if target_datetime_class is DatetimeClassType.Datetime:
            datetime_overrides = {
                Types.time: self.data_type.from_import(IMPORT_TIME),
                Types.date: self.data_type.from_import(IMPORT_DATE),
                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
            }
        else:
            datetime_overrides = {}

        self.type_map: dict[Types, DataType] = {**type_map_factory(self.data_type), **datetime_overrides}
|
@@ -1,4 +1,12 @@
|
|
|
1
|
-
|
|
1
|
+
"""Pydantic v1 model generator.
|
|
2
|
+
|
|
3
|
+
Provides BaseModel, CustomRootType, and DataModelField for generating
|
|
4
|
+
Pydantic v1 compatible data models.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from typing import TYPE_CHECKING, Optional
|
|
2
10
|
|
|
3
11
|
from pydantic import BaseModel as _BaseModel
|
|
4
12
|
|
|
@@ -7,37 +15,34 @@ from .custom_root_type import CustomRootType
|
|
|
7
15
|
from .dataclass import DataClass
|
|
8
16
|
from .types import DataTypeManager
|
|
9
17
|
|
|
18
|
+
if TYPE_CHECKING:
|
|
19
|
+
from collections.abc import Iterable
|
|
20
|
+
|
|
10
21
|
|
|
11
22
|
def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
    """Generate update_forward_refs() calls for Pydantic v1 models."""
    calls = [f"{name}.update_forward_refs()" for name in class_names]
    return "\n".join(calls)
|
|
15
25
|
|
|
16
26
|
|
|
17
27
|
class Config(_BaseModel):
    """Pydantic model config options."""

    # Field names mirror pydantic v1 ``Config`` attributes recognized by the
    # generator; ``allow_extra_fields``/``extra_fields`` look generator-specific
    # — NOTE(review): confirm their consumers before documenting semantics.
    extra: Optional[str] = None  # noqa: UP045
    title: Optional[str] = None  # noqa: UP045
    allow_population_by_field_name: Optional[bool] = None  # noqa: UP045
    allow_extra_fields: Optional[bool] = None  # noqa: UP045
    extra_fields: Optional[str] = None  # noqa: UP045
    allow_mutation: Optional[bool] = None  # noqa: UP045
    arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
    orm_mode: Optional[bool] = None  # noqa: UP045
    validate_assignment: Optional[bool] = None  # noqa: UP045
|
|
33
39
|
|
|
34
40
|
|
|
35
41
|
# Public API of the pydantic (v1) model package.
__all__ = [
    "BaseModel",
    "CustomRootType",
    "DataClass",
    "DataModelField",
    "DataTypeManager",
    "dump_resolve_reference_action",
]
|