fastapi-0.128.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- fastapi/__init__.py +25 -0
- fastapi/__main__.py +3 -0
- fastapi/_compat/__init__.py +41 -0
- fastapi/_compat/shared.py +206 -0
- fastapi/_compat/v2.py +568 -0
- fastapi/applications.py +4669 -0
- fastapi/background.py +60 -0
- fastapi/cli.py +13 -0
- fastapi/concurrency.py +41 -0
- fastapi/datastructures.py +183 -0
- fastapi/dependencies/__init__.py +0 -0
- fastapi/dependencies/models.py +193 -0
- fastapi/dependencies/utils.py +1021 -0
- fastapi/encoders.py +346 -0
- fastapi/exception_handlers.py +34 -0
- fastapi/exceptions.py +246 -0
- fastapi/logger.py +3 -0
- fastapi/middleware/__init__.py +1 -0
- fastapi/middleware/asyncexitstack.py +18 -0
- fastapi/middleware/cors.py +1 -0
- fastapi/middleware/gzip.py +1 -0
- fastapi/middleware/httpsredirect.py +3 -0
- fastapi/middleware/trustedhost.py +3 -0
- fastapi/middleware/wsgi.py +1 -0
- fastapi/openapi/__init__.py +0 -0
- fastapi/openapi/constants.py +3 -0
- fastapi/openapi/docs.py +344 -0
- fastapi/openapi/models.py +438 -0
- fastapi/openapi/utils.py +567 -0
- fastapi/param_functions.py +2369 -0
- fastapi/params.py +755 -0
- fastapi/py.typed +0 -0
- fastapi/requests.py +2 -0
- fastapi/responses.py +48 -0
- fastapi/routing.py +4508 -0
- fastapi/security/__init__.py +15 -0
- fastapi/security/api_key.py +318 -0
- fastapi/security/base.py +6 -0
- fastapi/security/http.py +423 -0
- fastapi/security/oauth2.py +663 -0
- fastapi/security/open_id_connect_url.py +94 -0
- fastapi/security/utils.py +10 -0
- fastapi/staticfiles.py +1 -0
- fastapi/templating.py +1 -0
- fastapi/testclient.py +1 -0
- fastapi/types.py +11 -0
- fastapi/utils.py +164 -0
- fastapi/websockets.py +3 -0
- fastapi-0.128.0.dist-info/METADATA +645 -0
- fastapi-0.128.0.dist-info/RECORD +53 -0
- fastapi-0.128.0.dist-info/WHEEL +4 -0
- fastapi-0.128.0.dist-info/entry_points.txt +5 -0
- fastapi-0.128.0.dist-info/licenses/LICENSE +21 -0
fastapi/_compat/v2.py
ADDED
@@ -0,0 +1,568 @@
import re
import warnings
from collections.abc import Sequence
from copy import copy, deepcopy
from dataclasses import dataclass, is_dataclass
from enum import Enum
from functools import lru_cache
from typing import (
    Annotated,
    Any,
    Union,
    cast,
)

from fastapi._compat import shared
from fastapi.openapi.constants import REF_TEMPLATE
from fastapi.types import IncEx, ModelNameMap, UnionType
from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, create_model
from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError
from pydantic import PydanticUndefinedAnnotation as PydanticUndefinedAnnotation
from pydantic import ValidationError as ValidationError
from pydantic._internal._schema_generation_shared import (  # type: ignore[attr-defined]
    GetJsonSchemaHandler as GetJsonSchemaHandler,
)
from pydantic._internal._typing_extra import eval_type_lenient
from pydantic._internal._utils import lenient_issubclass as lenient_issubclass
from pydantic.fields import FieldInfo as FieldInfo
from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema
from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue
from pydantic_core import CoreSchema as CoreSchema
from pydantic_core import PydanticUndefined, PydanticUndefinedType
from pydantic_core import Url as Url
from typing_extensions import Literal, get_args, get_origin

try:
    from pydantic_core.core_schema import (
        with_info_plain_validator_function as with_info_plain_validator_function,
    )
except ImportError:  # pragma: no cover
    from pydantic_core.core_schema import (
        general_plain_validator_function as with_info_plain_validator_function,  # noqa: F401
    )

RequiredParam = PydanticUndefined
Undefined = PydanticUndefined
UndefinedType = PydanticUndefinedType
evaluate_forwardref = eval_type_lenient
Validator = Any

# TODO: remove when dropping support for Pydantic < v2.12.3
_Attrs = {
    "default": ...,
    "default_factory": None,
    "alias": None,
    "alias_priority": None,
    "validation_alias": None,
    "serialization_alias": None,
    "title": None,
    "field_title_generator": None,
    "description": None,
    "examples": None,
    "exclude": None,
    "exclude_if": None,
    "discriminator": None,
    "deprecated": None,
    "json_schema_extra": None,
    "frozen": None,
    "validate_default": None,
    "repr": True,
    "init": None,
    "init_var": None,
    "kw_only": None,
}


# TODO: remove when dropping support for Pydantic < v2.12.3
def asdict(field_info: FieldInfo) -> dict[str, Any]:
    attributes = {}
    for attr in _Attrs:
        value = getattr(field_info, attr, Undefined)
        if value is not Undefined:
            attributes[attr] = value
    return {
        "annotation": field_info.annotation,
        "metadata": field_info.metadata,
        "attributes": attributes,
    }


class BaseConfig:
    pass


class ErrorWrapper(Exception):
    pass


@dataclass
class ModelField:
    field_info: FieldInfo
    name: str
    mode: Literal["validation", "serialization"] = "validation"
    config: Union[ConfigDict, None] = None

    @property
    def alias(self) -> str:
        a = self.field_info.alias
        return a if a is not None else self.name

    @property
    def validation_alias(self) -> Union[str, None]:
        va = self.field_info.validation_alias
        if isinstance(va, str) and va:
            return va
        return None

    @property
    def serialization_alias(self) -> Union[str, None]:
        sa = self.field_info.serialization_alias
        return sa or None

    @property
    def required(self) -> bool:
        return self.field_info.is_required()

    @property
    def default(self) -> Any:
        return self.get_default()

    @property
    def type_(self) -> Any:
        return self.field_info.annotation

    def __post_init__(self) -> None:
        with warnings.catch_warnings():
            # Pydantic >= 2.12.0 warns about field specific metadata that is unused
            # (e.g. `TypeAdapter(Annotated[int, Field(alias='b')])`). In some cases, we
            # end up building the type adapter from a model field annotation so we
            # need to ignore the warning:
            if shared.PYDANTIC_VERSION_MINOR_TUPLE >= (2, 12):
                from pydantic.warnings import UnsupportedFieldAttributeWarning

                warnings.simplefilter(
                    "ignore", category=UnsupportedFieldAttributeWarning
                )
            # TODO: remove after dropping support for Python 3.8 and
            # setting the min Pydantic to v2.12.3 that adds asdict()
            field_dict = asdict(self.field_info)
            annotated_args = (
                field_dict["annotation"],
                *field_dict["metadata"],
                # this FieldInfo needs to be created again so that it doesn't include
                # the old field info metadata and only the rest of the attributes
                Field(**field_dict["attributes"]),
            )
            self._type_adapter: TypeAdapter[Any] = TypeAdapter(
                Annotated[annotated_args],
                config=self.config,
            )

    def get_default(self) -> Any:
        if self.field_info.is_required():
            return Undefined
        return self.field_info.get_default(call_default_factory=True)

    def validate(
        self,
        value: Any,
        values: dict[str, Any] = {},  # noqa: B006
        *,
        loc: tuple[Union[int, str], ...] = (),
    ) -> tuple[Any, Union[list[dict[str, Any]], None]]:
        try:
            return (
                self._type_adapter.validate_python(value, from_attributes=True),
                None,
            )
        except ValidationError as exc:
            return None, _regenerate_error_with_loc(
                errors=exc.errors(include_url=False), loc_prefix=loc
            )

    def serialize(
        self,
        value: Any,
        *,
        mode: Literal["json", "python"] = "json",
        include: Union[IncEx, None] = None,
        exclude: Union[IncEx, None] = None,
        by_alias: bool = True,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
    ) -> Any:
        # What calls this code passes a value that already called
        # self._type_adapter.validate_python(value)
        return self._type_adapter.dump_python(
            value,
            mode=mode,
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )

    def __hash__(self) -> int:
        # Each ModelField is unique for our purposes, to allow making a dict from
        # ModelField to its JSON Schema.
        return id(self)


def _has_computed_fields(field: ModelField) -> bool:
    computed_fields = field._type_adapter.core_schema.get("schema", {}).get(
        "computed_fields", []
    )
    return len(computed_fields) > 0


def get_schema_from_model_field(
    *,
    field: ModelField,
    model_name_map: ModelNameMap,
    field_mapping: dict[
        tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
    ],
    separate_input_output_schemas: bool = True,
) -> dict[str, Any]:
    override_mode: Union[Literal["validation"], None] = (
        None
        if (separate_input_output_schemas or _has_computed_fields(field))
        else "validation"
    )
    field_alias = (
        (field.validation_alias or field.alias)
        if field.mode == "validation"
        else (field.serialization_alias or field.alias)
    )

    # This expects that GenerateJsonSchema was already used to generate the definitions
    json_schema = field_mapping[(field, override_mode or field.mode)]
    if "$ref" not in json_schema:
        # TODO remove when deprecating Pydantic v1
        # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207
        json_schema["title"] = field.field_info.title or field_alias.title().replace(
            "_", " "
        )
    return json_schema


def get_definitions(
    *,
    fields: Sequence[ModelField],
    model_name_map: ModelNameMap,
    separate_input_output_schemas: bool = True,
) -> tuple[
    dict[tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
    dict[str, dict[str, Any]],
]:
    schema_generator = GenerateJsonSchema(ref_template=REF_TEMPLATE)
    validation_fields = [field for field in fields if field.mode == "validation"]
    serialization_fields = [field for field in fields if field.mode == "serialization"]
    flat_validation_models = get_flat_models_from_fields(
        validation_fields, known_models=set()
    )
    flat_serialization_models = get_flat_models_from_fields(
        serialization_fields, known_models=set()
    )
    flat_validation_model_fields = [
        ModelField(
            field_info=FieldInfo(annotation=model),
            name=model.__name__,
            mode="validation",
        )
        for model in flat_validation_models
    ]
    flat_serialization_model_fields = [
        ModelField(
            field_info=FieldInfo(annotation=model),
            name=model.__name__,
            mode="serialization",
        )
        for model in flat_serialization_models
    ]
    flat_model_fields = flat_validation_model_fields + flat_serialization_model_fields
    input_types = {f.type_ for f in fields}
    unique_flat_model_fields = {
        f for f in flat_model_fields if f.type_ not in input_types
    }
    inputs = [
        (
            field,
            (
                field.mode
                if (separate_input_output_schemas or _has_computed_fields(field))
                else "validation"
            ),
            field._type_adapter.core_schema,
        )
        for field in list(fields) + list(unique_flat_model_fields)
    ]
    field_mapping, definitions = schema_generator.generate_definitions(inputs=inputs)
    for item_def in cast(dict[str, dict[str, Any]], definitions).values():
        if "description" in item_def:
            item_description = cast(str, item_def["description"]).split("\f")[0]
            item_def["description"] = item_description
    new_mapping, new_definitions = _remap_definitions_and_field_mappings(
        model_name_map=model_name_map,
        definitions=definitions,  # type: ignore[arg-type]
        field_mapping=field_mapping,
    )
    return new_mapping, new_definitions


def _replace_refs(
    *,
    schema: dict[str, Any],
    old_name_to_new_name_map: dict[str, str],
) -> dict[str, Any]:
    new_schema = deepcopy(schema)
    for key, value in new_schema.items():
        if key == "$ref":
            value = schema["$ref"]
            if isinstance(value, str):
                ref_name = schema["$ref"].split("/")[-1]
                if ref_name in old_name_to_new_name_map:
                    new_name = old_name_to_new_name_map[ref_name]
                    new_schema["$ref"] = REF_TEMPLATE.format(model=new_name)
            continue
        if isinstance(value, dict):
            new_schema[key] = _replace_refs(
                schema=value,
                old_name_to_new_name_map=old_name_to_new_name_map,
            )
        elif isinstance(value, list):
            new_value = []
            for item in value:
                if isinstance(item, dict):
                    new_item = _replace_refs(
                        schema=item,
                        old_name_to_new_name_map=old_name_to_new_name_map,
                    )
                    new_value.append(new_item)

                else:
                    new_value.append(item)
            new_schema[key] = new_value
    return new_schema


def _remap_definitions_and_field_mappings(
    *,
    model_name_map: ModelNameMap,
    definitions: dict[str, Any],
    field_mapping: dict[
        tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
    ],
) -> tuple[
    dict[tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
    dict[str, Any],
]:
    old_name_to_new_name_map = {}
    for field_key, schema in field_mapping.items():
        model = field_key[0].type_
        if model not in model_name_map or "$ref" not in schema:
            continue
        new_name = model_name_map[model]
        old_name = schema["$ref"].split("/")[-1]
        if old_name in {f"{new_name}-Input", f"{new_name}-Output"}:
            continue
        old_name_to_new_name_map[old_name] = new_name

    new_field_mapping: dict[
        tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
    ] = {}
    for field_key, schema in field_mapping.items():
        new_schema = _replace_refs(
            schema=schema,
            old_name_to_new_name_map=old_name_to_new_name_map,
        )
        new_field_mapping[field_key] = new_schema

    new_definitions = {}
    for key, value in definitions.items():
        if key in old_name_to_new_name_map:
            new_key = old_name_to_new_name_map[key]
        else:
            new_key = key
        new_value = _replace_refs(
            schema=value,
            old_name_to_new_name_map=old_name_to_new_name_map,
        )
        new_definitions[new_key] = new_value
    return new_field_mapping, new_definitions


def is_scalar_field(field: ModelField) -> bool:
    from fastapi import params

    return shared.field_annotation_is_scalar(
        field.field_info.annotation
    ) and not isinstance(field.field_info, params.Body)


def is_sequence_field(field: ModelField) -> bool:
    return shared.field_annotation_is_sequence(field.field_info.annotation)


def is_scalar_sequence_field(field: ModelField) -> bool:
    return shared.field_annotation_is_scalar_sequence(field.field_info.annotation)


def is_bytes_field(field: ModelField) -> bool:
    return shared.is_bytes_or_nonable_bytes_annotation(field.type_)


def is_bytes_sequence_field(field: ModelField) -> bool:
    return shared.is_bytes_sequence_annotation(field.type_)


def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
    cls = type(field_info)
    merged_field_info = cls.from_annotation(annotation)
    new_field_info = copy(field_info)
    new_field_info.metadata = merged_field_info.metadata
    new_field_info.annotation = merged_field_info.annotation
    return new_field_info


def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
    origin_type = get_origin(field.field_info.annotation) or field.field_info.annotation
    if origin_type is Union or origin_type is UnionType:  # Handle optional sequences
        union_args = get_args(field.field_info.annotation)
        for union_arg in union_args:
            if union_arg is type(None):
                continue
            origin_type = get_origin(union_arg) or union_arg
            break
    assert issubclass(origin_type, shared.sequence_types)  # type: ignore[arg-type]
    return shared.sequence_annotation_to_type[origin_type](value)  # type: ignore[no-any-return,index]


def get_missing_field_error(loc: tuple[str, ...]) -> dict[str, Any]:
    error = ValidationError.from_exception_data(
        "Field required", [{"type": "missing", "loc": loc, "input": {}}]
    ).errors(include_url=False)[0]
    error["input"] = None
    return error  # type: ignore[return-value]


def create_body_model(
    *, fields: Sequence[ModelField], model_name: str
) -> type[BaseModel]:
    field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields}
    BodyModel: type[BaseModel] = create_model(model_name, **field_params)  # type: ignore[call-overload]
    return BodyModel


def get_model_fields(model: type[BaseModel]) -> list[ModelField]:
    model_fields: list[ModelField] = []
    for name, field_info in model.model_fields.items():
        type_ = field_info.annotation
        if lenient_issubclass(type_, (BaseModel, dict)) or is_dataclass(type_):
            model_config = None
        else:
            model_config = model.model_config
        model_fields.append(
            ModelField(
                field_info=field_info,
                name=name,
                config=model_config,
            )
        )
    return model_fields


@lru_cache
def get_cached_model_fields(model: type[BaseModel]) -> list[ModelField]:
    return get_model_fields(model)  # type: ignore[return-value]


# Duplicate of several schema functions from Pydantic v1 to make them compatible with
# Pydantic v2 and allow mixing the models

TypeModelOrEnum = Union[type["BaseModel"], type[Enum]]
TypeModelSet = set[TypeModelOrEnum]


def normalize_name(name: str) -> str:
    return re.sub(r"[^a-zA-Z0-9.\-_]", "_", name)


def get_model_name_map(unique_models: TypeModelSet) -> dict[TypeModelOrEnum, str]:
    name_model_map = {}
    for model in unique_models:
        model_name = normalize_name(model.__name__)
        name_model_map[model_name] = model
    return {v: k for k, v in name_model_map.items()}


def get_compat_model_name_map(fields: list[ModelField]) -> ModelNameMap:
    all_flat_models = set()

    v2_model_fields = [field for field in fields if isinstance(field, ModelField)]
    v2_flat_models = get_flat_models_from_fields(v2_model_fields, known_models=set())
    all_flat_models = all_flat_models.union(v2_flat_models)  # type: ignore[arg-type]

    model_name_map = get_model_name_map(all_flat_models)  # type: ignore[arg-type]
    return model_name_map


def get_flat_models_from_model(
    model: type["BaseModel"], known_models: Union[TypeModelSet, None] = None
) -> TypeModelSet:
    known_models = known_models or set()
    fields = get_model_fields(model)
    get_flat_models_from_fields(fields, known_models=known_models)
    return known_models


def get_flat_models_from_annotation(
    annotation: Any, known_models: TypeModelSet
) -> TypeModelSet:
    origin = get_origin(annotation)
    if origin is not None:
        for arg in get_args(annotation):
            if lenient_issubclass(arg, (BaseModel, Enum)) and arg not in known_models:
                known_models.add(arg)
                if lenient_issubclass(arg, BaseModel):
                    get_flat_models_from_model(arg, known_models=known_models)
            else:
                get_flat_models_from_annotation(arg, known_models=known_models)
    return known_models


def get_flat_models_from_field(
    field: ModelField, known_models: TypeModelSet
) -> TypeModelSet:
    field_type = field.type_
    if lenient_issubclass(field_type, BaseModel):
        if field_type in known_models:
            return known_models
        known_models.add(field_type)
        get_flat_models_from_model(field_type, known_models=known_models)
    elif lenient_issubclass(field_type, Enum):
        known_models.add(field_type)
    else:
        get_flat_models_from_annotation(field_type, known_models=known_models)
    return known_models


def get_flat_models_from_fields(
    fields: Sequence[ModelField], known_models: TypeModelSet
) -> TypeModelSet:
    for field in fields:
        get_flat_models_from_field(field, known_models=known_models)
    return known_models


def _regenerate_error_with_loc(
    *, errors: Sequence[Any], loc_prefix: tuple[Union[str, int], ...]
) -> list[dict[str, Any]]:
    updated_loc_errors: list[Any] = [
        {**err, "loc": loc_prefix + err.get("loc", ())} for err in errors
    ]

    return updated_loc_errors
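For orientation, the snippet below is a minimal usage sketch of the ModelField wrapper defined in the added file; it is not part of the package. It assumes Pydantic v2 is installed (this module is FastAPI's Pydantic v2 compatibility layer) and imports the module directly by its path for illustration only; the field name item_id and the ("query", "item_id") location are made-up example values.

# Illustrative sketch only -- not part of fastapi/_compat/v2.py.
from pydantic.fields import FieldInfo

from fastapi._compat.v2 import ModelField

# Wrapping a FieldInfo in a ModelField builds a TypeAdapter in __post_init__.
field = ModelField(field_info=FieldInfo(annotation=int), name="item_id")

# On success, validate() returns (coerced_value, None); "123" is coerced to 123.
value, errors = field.validate("123", loc=("query", "item_id"))
assert value == 123 and errors is None

# On failure it returns (None, errors), with each error's loc prefixed by `loc`.
value, errors = field.validate("not-a-number", loc=("query", "item_id"))
assert value is None and errors[0]["loc"] == ("query", "item_id")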