fastapi 0.118.2__py3-none-any.whl → 0.119.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their supported registries. It is provided for informational purposes only.

Note: this release has been flagged as potentially problematic.


This version of fastapi has been flagged as potentially problematic; review the flagged details and the full diff below before upgrading.

fastapi/_compat/v2.py ADDED
@@ -0,0 +1,459 @@
1
+ import re
2
+ import warnings
3
+ from copy import copy, deepcopy
4
+ from dataclasses import dataclass
5
+ from enum import Enum
6
+ from typing import (
7
+ Any,
8
+ Dict,
9
+ List,
10
+ Sequence,
11
+ Set,
12
+ Tuple,
13
+ Type,
14
+ Union,
15
+ cast,
16
+ )
17
+
18
+ from fastapi._compat import shared, v1
19
+ from fastapi.openapi.constants import REF_TEMPLATE
20
+ from fastapi.types import IncEx, ModelNameMap
21
+ from pydantic import BaseModel, TypeAdapter, create_model
22
+ from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError
23
+ from pydantic import PydanticUndefinedAnnotation as PydanticUndefinedAnnotation
24
+ from pydantic import ValidationError as ValidationError
25
+ from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined]
26
+ GetJsonSchemaHandler as GetJsonSchemaHandler,
27
+ )
28
+ from pydantic._internal._typing_extra import eval_type_lenient
29
+ from pydantic._internal._utils import lenient_issubclass as lenient_issubclass
30
+ from pydantic.fields import FieldInfo as FieldInfo
31
+ from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema
32
+ from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue
33
+ from pydantic_core import CoreSchema as CoreSchema
34
+ from pydantic_core import PydanticUndefined, PydanticUndefinedType
35
+ from pydantic_core import Url as Url
36
+ from typing_extensions import Annotated, Literal, get_args, get_origin
37
+
38
+ try:
39
+ from pydantic_core.core_schema import (
40
+ with_info_plain_validator_function as with_info_plain_validator_function,
41
+ )
42
+ except ImportError: # pragma: no cover
43
+ from pydantic_core.core_schema import (
44
+ general_plain_validator_function as with_info_plain_validator_function, # noqa: F401
45
+ )
46
+
47
+ RequiredParam = PydanticUndefined
48
+ Undefined = PydanticUndefined
49
+ UndefinedType = PydanticUndefinedType
50
+ evaluate_forwardref = eval_type_lenient
51
+ Validator = Any
52
+
53
+
54
+ class BaseConfig:
55
+ pass
56
+
57
+
58
+ class ErrorWrapper(Exception):
59
+ pass
60
+
61
+
62
+ @dataclass
63
+ class ModelField:
64
+ field_info: FieldInfo
65
+ name: str
66
+ mode: Literal["validation", "serialization"] = "validation"
67
+
68
+ @property
69
+ def alias(self) -> str:
70
+ a = self.field_info.alias
71
+ return a if a is not None else self.name
72
+
73
+ @property
74
+ def required(self) -> bool:
75
+ return self.field_info.is_required()
76
+
77
+ @property
78
+ def default(self) -> Any:
79
+ return self.get_default()
80
+
81
+ @property
82
+ def type_(self) -> Any:
83
+ return self.field_info.annotation
84
+
85
+ def __post_init__(self) -> None:
86
+ with warnings.catch_warnings():
87
+ # Pydantic >= 2.12.0 warns about field specific metadata that is unused
88
+ # (e.g. `TypeAdapter(Annotated[int, Field(alias='b')])`). In some cases, we
89
+ # end up building the type adapter from a model field annotation so we
90
+ # need to ignore the warning:
91
+ if shared.PYDANTIC_VERSION_MINOR_TUPLE >= (2, 12):
92
+ from pydantic.warnings import UnsupportedFieldAttributeWarning
93
+
94
+ warnings.simplefilter(
95
+ "ignore", category=UnsupportedFieldAttributeWarning
96
+ )
97
+ self._type_adapter: TypeAdapter[Any] = TypeAdapter(
98
+ Annotated[self.field_info.annotation, self.field_info]
99
+ )
100
+
101
+ def get_default(self) -> Any:
102
+ if self.field_info.is_required():
103
+ return Undefined
104
+ return self.field_info.get_default(call_default_factory=True)
105
+
106
+ def validate(
107
+ self,
108
+ value: Any,
109
+ values: Dict[str, Any] = {}, # noqa: B006
110
+ *,
111
+ loc: Tuple[Union[int, str], ...] = (),
112
+ ) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
113
+ try:
114
+ return (
115
+ self._type_adapter.validate_python(value, from_attributes=True),
116
+ None,
117
+ )
118
+ except ValidationError as exc:
119
+ return None, v1._regenerate_error_with_loc(
120
+ errors=exc.errors(include_url=False), loc_prefix=loc
121
+ )
122
+
123
+ def serialize(
124
+ self,
125
+ value: Any,
126
+ *,
127
+ mode: Literal["json", "python"] = "json",
128
+ include: Union[IncEx, None] = None,
129
+ exclude: Union[IncEx, None] = None,
130
+ by_alias: bool = True,
131
+ exclude_unset: bool = False,
132
+ exclude_defaults: bool = False,
133
+ exclude_none: bool = False,
134
+ ) -> Any:
135
+ # What calls this code passes a value that already called
136
+ # self._type_adapter.validate_python(value)
137
+ return self._type_adapter.dump_python(
138
+ value,
139
+ mode=mode,
140
+ include=include,
141
+ exclude=exclude,
142
+ by_alias=by_alias,
143
+ exclude_unset=exclude_unset,
144
+ exclude_defaults=exclude_defaults,
145
+ exclude_none=exclude_none,
146
+ )
147
+
148
+ def __hash__(self) -> int:
149
+ # Each ModelField is unique for our purposes, to allow making a dict from
150
+ # ModelField to its JSON Schema.
151
+ return id(self)
152
+
153
+
154
+ def get_annotation_from_field_info(
155
+ annotation: Any, field_info: FieldInfo, field_name: str
156
+ ) -> Any:
157
+ return annotation
158
+
159
+
160
+ def _model_rebuild(model: Type[BaseModel]) -> None:
161
+ model.model_rebuild()
162
+
163
+
164
+ def _model_dump(
165
+ model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
166
+ ) -> Any:
167
+ return model.model_dump(mode=mode, **kwargs)
168
+
169
+
170
+ def _get_model_config(model: BaseModel) -> Any:
171
+ return model.model_config
172
+
173
+
174
+ def get_schema_from_model_field(
175
+ *,
176
+ field: ModelField,
177
+ model_name_map: ModelNameMap,
178
+ field_mapping: Dict[
179
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
180
+ ],
181
+ separate_input_output_schemas: bool = True,
182
+ ) -> Dict[str, Any]:
183
+ override_mode: Union[Literal["validation"], None] = (
184
+ None if separate_input_output_schemas else "validation"
185
+ )
186
+ # This expects that GenerateJsonSchema was already used to generate the definitions
187
+ json_schema = field_mapping[(field, override_mode or field.mode)]
188
+ if "$ref" not in json_schema:
189
+ # TODO remove when deprecating Pydantic v1
190
+ # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207
191
+ json_schema["title"] = field.field_info.title or field.alias.title().replace(
192
+ "_", " "
193
+ )
194
+ return json_schema
195
+
196
+
197
+ def get_definitions(
198
+ *,
199
+ fields: Sequence[ModelField],
200
+ model_name_map: ModelNameMap,
201
+ separate_input_output_schemas: bool = True,
202
+ ) -> Tuple[
203
+ Dict[Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
204
+ Dict[str, Dict[str, Any]],
205
+ ]:
206
+ schema_generator = GenerateJsonSchema(ref_template=REF_TEMPLATE)
207
+ override_mode: Union[Literal["validation"], None] = (
208
+ None if separate_input_output_schemas else "validation"
209
+ )
210
+ flat_models = get_flat_models_from_fields(fields, known_models=set())
211
+ flat_model_fields = [
212
+ ModelField(field_info=FieldInfo(annotation=model), name=model.__name__)
213
+ for model in flat_models
214
+ ]
215
+ input_types = {f.type_ for f in fields}
216
+ unique_flat_model_fields = {
217
+ f for f in flat_model_fields if f.type_ not in input_types
218
+ }
219
+
220
+ inputs = [
221
+ (field, override_mode or field.mode, field._type_adapter.core_schema)
222
+ for field in list(fields) + list(unique_flat_model_fields)
223
+ ]
224
+ field_mapping, definitions = schema_generator.generate_definitions(inputs=inputs)
225
+ for item_def in cast(Dict[str, Dict[str, Any]], definitions).values():
226
+ if "description" in item_def:
227
+ item_description = cast(str, item_def["description"]).split("\f")[0]
228
+ item_def["description"] = item_description
229
+ new_mapping, new_definitions = _remap_definitions_and_field_mappings(
230
+ model_name_map=model_name_map,
231
+ definitions=definitions, # type: ignore[arg-type]
232
+ field_mapping=field_mapping,
233
+ )
234
+ return new_mapping, new_definitions
235
+
236
+
237
+ def _replace_refs(
238
+ *,
239
+ schema: Dict[str, Any],
240
+ old_name_to_new_name_map: Dict[str, str],
241
+ ) -> Dict[str, Any]:
242
+ new_schema = deepcopy(schema)
243
+ for key, value in new_schema.items():
244
+ if key == "$ref":
245
+ ref_name = schema["$ref"].split("/")[-1]
246
+ if ref_name in old_name_to_new_name_map:
247
+ new_name = old_name_to_new_name_map[ref_name]
248
+ new_schema["$ref"] = REF_TEMPLATE.format(model=new_name)
249
+ else:
250
+ new_schema["$ref"] = schema["$ref"]
251
+ continue
252
+ if isinstance(value, dict):
253
+ new_schema[key] = _replace_refs(
254
+ schema=value,
255
+ old_name_to_new_name_map=old_name_to_new_name_map,
256
+ )
257
+ elif isinstance(value, list):
258
+ new_value = []
259
+ for item in value:
260
+ if isinstance(item, dict):
261
+ new_item = _replace_refs(
262
+ schema=item,
263
+ old_name_to_new_name_map=old_name_to_new_name_map,
264
+ )
265
+ new_value.append(new_item)
266
+
267
+ else:
268
+ new_value.append(item)
269
+ new_schema[key] = new_value
270
+ return new_schema
271
+
272
+
273
+ def _remap_definitions_and_field_mappings(
274
+ *,
275
+ model_name_map: ModelNameMap,
276
+ definitions: Dict[str, Any],
277
+ field_mapping: Dict[
278
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
279
+ ],
280
+ ) -> Tuple[
281
+ Dict[Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue],
282
+ Dict[str, Any],
283
+ ]:
284
+ old_name_to_new_name_map = {}
285
+ for field_key, schema in field_mapping.items():
286
+ model = field_key[0].type_
287
+ if model not in model_name_map:
288
+ continue
289
+ new_name = model_name_map[model]
290
+ old_name = schema["$ref"].split("/")[-1]
291
+ if old_name in {f"{new_name}-Input", f"{new_name}-Output"}:
292
+ continue
293
+ old_name_to_new_name_map[old_name] = new_name
294
+
295
+ new_field_mapping: Dict[
296
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
297
+ ] = {}
298
+ for field_key, schema in field_mapping.items():
299
+ new_schema = _replace_refs(
300
+ schema=schema,
301
+ old_name_to_new_name_map=old_name_to_new_name_map,
302
+ )
303
+ new_field_mapping[field_key] = new_schema
304
+
305
+ new_definitions = {}
306
+ for key, value in definitions.items():
307
+ if key in old_name_to_new_name_map:
308
+ new_key = old_name_to_new_name_map[key]
309
+ else:
310
+ new_key = key
311
+ new_value = _replace_refs(
312
+ schema=value,
313
+ old_name_to_new_name_map=old_name_to_new_name_map,
314
+ )
315
+ new_definitions[new_key] = new_value
316
+ return new_field_mapping, new_definitions
317
+
318
+
319
+ def is_scalar_field(field: ModelField) -> bool:
320
+ from fastapi import params
321
+
322
+ return shared.field_annotation_is_scalar(
323
+ field.field_info.annotation
324
+ ) and not isinstance(field.field_info, params.Body)
325
+
326
+
327
+ def is_sequence_field(field: ModelField) -> bool:
328
+ return shared.field_annotation_is_sequence(field.field_info.annotation)
329
+
330
+
331
+ def is_scalar_sequence_field(field: ModelField) -> bool:
332
+ return shared.field_annotation_is_scalar_sequence(field.field_info.annotation)
333
+
334
+
335
+ def is_bytes_field(field: ModelField) -> bool:
336
+ return shared.is_bytes_or_nonable_bytes_annotation(field.type_)
337
+
338
+
339
+ def is_bytes_sequence_field(field: ModelField) -> bool:
340
+ return shared.is_bytes_sequence_annotation(field.type_)
341
+
342
+
343
+ def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
344
+ cls = type(field_info)
345
+ merged_field_info = cls.from_annotation(annotation)
346
+ new_field_info = copy(field_info)
347
+ new_field_info.metadata = merged_field_info.metadata
348
+ new_field_info.annotation = merged_field_info.annotation
349
+ return new_field_info
350
+
351
+
352
+ def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
353
+ origin_type = get_origin(field.field_info.annotation) or field.field_info.annotation
354
+ assert issubclass(origin_type, shared.sequence_types) # type: ignore[arg-type]
355
+ return shared.sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return]
356
+
357
+
358
+ def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
359
+ error = ValidationError.from_exception_data(
360
+ "Field required", [{"type": "missing", "loc": loc, "input": {}}]
361
+ ).errors(include_url=False)[0]
362
+ error["input"] = None
363
+ return error # type: ignore[return-value]
364
+
365
+
366
+ def create_body_model(
367
+ *, fields: Sequence[ModelField], model_name: str
368
+ ) -> Type[BaseModel]:
369
+ field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields}
370
+ BodyModel: Type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload]
371
+ return BodyModel
372
+
373
+
374
+ def get_model_fields(model: Type[BaseModel]) -> List[ModelField]:
375
+ return [
376
+ ModelField(field_info=field_info, name=name)
377
+ for name, field_info in model.model_fields.items()
378
+ ]
379
+
380
+
381
+ # Duplicate of several schema functions from Pydantic v1 to make them compatible with
382
+ # Pydantic v2 and allow mixing the models
383
+
384
+ TypeModelOrEnum = Union[Type["BaseModel"], Type[Enum]]
385
+ TypeModelSet = Set[TypeModelOrEnum]
386
+
387
+
388
+ def normalize_name(name: str) -> str:
389
+ return re.sub(r"[^a-zA-Z0-9.\-_]", "_", name)
390
+
391
+
392
+ def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]:
393
+ name_model_map = {}
394
+ conflicting_names: Set[str] = set()
395
+ for model in unique_models:
396
+ model_name = normalize_name(model.__name__)
397
+ if model_name in conflicting_names:
398
+ model_name = get_long_model_name(model)
399
+ name_model_map[model_name] = model
400
+ elif model_name in name_model_map:
401
+ conflicting_names.add(model_name)
402
+ conflicting_model = name_model_map.pop(model_name)
403
+ name_model_map[get_long_model_name(conflicting_model)] = conflicting_model
404
+ name_model_map[get_long_model_name(model)] = model
405
+ else:
406
+ name_model_map[model_name] = model
407
+ return {v: k for k, v in name_model_map.items()}
408
+
409
+
410
+ def get_flat_models_from_model(
411
+ model: Type["BaseModel"], known_models: Union[TypeModelSet, None] = None
412
+ ) -> TypeModelSet:
413
+ known_models = known_models or set()
414
+ fields = get_model_fields(model)
415
+ get_flat_models_from_fields(fields, known_models=known_models)
416
+ return known_models
417
+
418
+
419
+ def get_flat_models_from_annotation(
420
+ annotation: Any, known_models: TypeModelSet
421
+ ) -> TypeModelSet:
422
+ origin = get_origin(annotation)
423
+ if origin is not None:
424
+ for arg in get_args(annotation):
425
+ if lenient_issubclass(arg, (BaseModel, Enum)) and arg not in known_models:
426
+ known_models.add(arg)
427
+ if lenient_issubclass(arg, BaseModel):
428
+ get_flat_models_from_model(arg, known_models=known_models)
429
+ else:
430
+ get_flat_models_from_annotation(arg, known_models=known_models)
431
+ return known_models
432
+
433
+
434
+ def get_flat_models_from_field(
435
+ field: ModelField, known_models: TypeModelSet
436
+ ) -> TypeModelSet:
437
+ field_type = field.type_
438
+ if lenient_issubclass(field_type, BaseModel):
439
+ if field_type in known_models:
440
+ return known_models
441
+ known_models.add(field_type)
442
+ get_flat_models_from_model(field_type, known_models=known_models)
443
+ elif lenient_issubclass(field_type, Enum):
444
+ known_models.add(field_type)
445
+ else:
446
+ get_flat_models_from_annotation(field_type, known_models=known_models)
447
+ return known_models
448
+
449
+
450
+ def get_flat_models_from_fields(
451
+ fields: Sequence[ModelField], known_models: TypeModelSet
452
+ ) -> TypeModelSet:
453
+ for field in fields:
454
+ get_flat_models_from_field(field, known_models=known_models)
455
+ return known_models
456
+
457
+
458
+ def get_long_model_name(model: TypeModelOrEnum) -> str:
459
+ return f"{model.__module__}__{model.__qualname__}".replace(".", "__")
fastapi/datastructures.py CHANGED
@@ -11,11 +11,9 @@ from typing import (
11
11
  )
12
12
 
13
13
  from fastapi._compat import (
14
- PYDANTIC_V2,
15
14
  CoreSchema,
16
15
  GetJsonSchemaHandler,
17
16
  JsonSchemaValue,
18
- with_info_plain_validator_function,
19
17
  )
20
18
  from starlette.datastructures import URL as URL # noqa: F401
21
19
  from starlette.datastructures import Address as Address # noqa: F401
@@ -154,11 +152,10 @@ class UploadFile(StarletteUploadFile):
154
152
  raise ValueError(f"Expected UploadFile, received: {type(__input_value)}")
155
153
  return cast(UploadFile, __input_value)
156
154
 
157
- if not PYDANTIC_V2:
158
-
159
- @classmethod
160
- def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
161
- field_schema.update({"type": "string", "format": "binary"})
155
+ # TODO: remove when deprecating Pydantic v1
156
+ @classmethod
157
+ def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
158
+ field_schema.update({"type": "string", "format": "binary"})
162
159
 
163
160
  @classmethod
164
161
  def __get_pydantic_json_schema__(
@@ -170,6 +167,8 @@ class UploadFile(StarletteUploadFile):
170
167
  def __get_pydantic_core_schema__(
171
168
  cls, source: Type[Any], handler: Callable[[Any], CoreSchema]
172
169
  ) -> CoreSchema:
170
+ from ._compat.v2 import with_info_plain_validator_function
171
+
173
172
  return with_info_plain_validator_function(cls._validate)
174
173
 
175
174