fastapi 0.99.0__py3-none-any.whl → 0.100.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of fastapi might be problematic. See the linked advisory on the registry page for more details.

fastapi/__init__.py CHANGED
@@ -1,6 +1,6 @@
1
1
  """FastAPI framework, high performance, easy to learn, fast to code, ready for production"""
2
2
 
3
- __version__ = "0.99.0"
3
+ __version__ = "0.100.0"
4
4
 
5
5
  from starlette import status as status
6
6
 
fastapi/_compat.py ADDED
@@ -0,0 +1,616 @@
1
+ from collections import deque
2
+ from copy import copy
3
+ from dataclasses import dataclass, is_dataclass
4
+ from enum import Enum
5
+ from typing import (
6
+ Any,
7
+ Callable,
8
+ Deque,
9
+ Dict,
10
+ FrozenSet,
11
+ List,
12
+ Mapping,
13
+ Sequence,
14
+ Set,
15
+ Tuple,
16
+ Type,
17
+ Union,
18
+ )
19
+
20
+ from fastapi.exceptions import RequestErrorModel
21
+ from fastapi.types import IncEx, ModelNameMap, UnionType
22
+ from pydantic import BaseModel, create_model
23
+ from pydantic.version import VERSION as PYDANTIC_VERSION
24
+ from starlette.datastructures import UploadFile
25
+ from typing_extensions import Annotated, Literal, get_args, get_origin
26
+
27
+ PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
28
+
29
+
30
+ sequence_annotation_to_type = {
31
+ Sequence: list,
32
+ List: list,
33
+ list: list,
34
+ Tuple: tuple,
35
+ tuple: tuple,
36
+ Set: set,
37
+ set: set,
38
+ FrozenSet: frozenset,
39
+ frozenset: frozenset,
40
+ Deque: deque,
41
+ deque: deque,
42
+ }
43
+
44
+ sequence_types = tuple(sequence_annotation_to_type.keys())
45
+
46
+ if PYDANTIC_V2:
47
+ from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError
48
+ from pydantic import TypeAdapter
49
+ from pydantic import ValidationError as ValidationError
50
+ from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined]
51
+ GetJsonSchemaHandler as GetJsonSchemaHandler,
52
+ )
53
+ from pydantic._internal._typing_extra import eval_type_lenient
54
+ from pydantic._internal._utils import lenient_issubclass as lenient_issubclass
55
+ from pydantic.fields import FieldInfo
56
+ from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema
57
+ from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue
58
+ from pydantic_core import CoreSchema as CoreSchema
59
+ from pydantic_core import MultiHostUrl as MultiHostUrl
60
+ from pydantic_core import PydanticUndefined, PydanticUndefinedType
61
+ from pydantic_core import Url as Url
62
+ from pydantic_core.core_schema import (
63
+ general_plain_validator_function as general_plain_validator_function,
64
+ )
65
+
66
+ Required = PydanticUndefined
67
+ Undefined = PydanticUndefined
68
+ UndefinedType = PydanticUndefinedType
69
+ evaluate_forwardref = eval_type_lenient
70
+ Validator = Any
71
+
72
+ class BaseConfig:
73
+ pass
74
+
75
+ class ErrorWrapper(Exception):
76
+ pass
77
+
78
+ @dataclass
79
+ class ModelField:
80
+ field_info: FieldInfo
81
+ name: str
82
+ mode: Literal["validation", "serialization"] = "validation"
83
+
84
+ @property
85
+ def alias(self) -> str:
86
+ a = self.field_info.alias
87
+ return a if a is not None else self.name
88
+
89
+ @property
90
+ def required(self) -> bool:
91
+ return self.field_info.is_required()
92
+
93
+ @property
94
+ def default(self) -> Any:
95
+ return self.get_default()
96
+
97
+ @property
98
+ def type_(self) -> Any:
99
+ return self.field_info.annotation
100
+
101
+ def __post_init__(self) -> None:
102
+ self._type_adapter: TypeAdapter[Any] = TypeAdapter(
103
+ Annotated[self.field_info.annotation, self.field_info]
104
+ )
105
+
106
+ def get_default(self) -> Any:
107
+ if self.field_info.is_required():
108
+ return Undefined
109
+ return self.field_info.get_default(call_default_factory=True)
110
+
111
+ def validate(
112
+ self,
113
+ value: Any,
114
+ values: Dict[str, Any] = {}, # noqa: B006
115
+ *,
116
+ loc: Tuple[Union[int, str], ...] = (),
117
+ ) -> Tuple[Any, Union[List[Dict[str, Any]], None]]:
118
+ try:
119
+ return (
120
+ self._type_adapter.validate_python(value, from_attributes=True),
121
+ None,
122
+ )
123
+ except ValidationError as exc:
124
+ return None, _regenerate_error_with_loc(
125
+ errors=exc.errors(), loc_prefix=loc
126
+ )
127
+
128
+ def serialize(
129
+ self,
130
+ value: Any,
131
+ *,
132
+ mode: Literal["json", "python"] = "json",
133
+ include: Union[IncEx, None] = None,
134
+ exclude: Union[IncEx, None] = None,
135
+ by_alias: bool = True,
136
+ exclude_unset: bool = False,
137
+ exclude_defaults: bool = False,
138
+ exclude_none: bool = False,
139
+ ) -> Any:
140
+ # What calls this code passes a value that already called
141
+ # self._type_adapter.validate_python(value)
142
+ return self._type_adapter.dump_python(
143
+ value,
144
+ mode=mode,
145
+ include=include,
146
+ exclude=exclude,
147
+ by_alias=by_alias,
148
+ exclude_unset=exclude_unset,
149
+ exclude_defaults=exclude_defaults,
150
+ exclude_none=exclude_none,
151
+ )
152
+
153
+ def __hash__(self) -> int:
154
+ # Each ModelField is unique for our purposes, to allow making a dict from
155
+ # ModelField to its JSON Schema.
156
+ return id(self)
157
+
158
+ def get_annotation_from_field_info(
159
+ annotation: Any, field_info: FieldInfo, field_name: str
160
+ ) -> Any:
161
+ return annotation
162
+
163
+ def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]:
164
+ return errors # type: ignore[return-value]
165
+
166
+ def _model_rebuild(model: Type[BaseModel]) -> None:
167
+ model.model_rebuild()
168
+
169
+ def _model_dump(
170
+ model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
171
+ ) -> Any:
172
+ return model.model_dump(mode=mode, **kwargs)
173
+
174
+ def _get_model_config(model: BaseModel) -> Any:
175
+ return model.model_config
176
+
177
+ def get_schema_from_model_field(
178
+ *,
179
+ field: ModelField,
180
+ schema_generator: GenerateJsonSchema,
181
+ model_name_map: ModelNameMap,
182
+ field_mapping: Dict[
183
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
184
+ ],
185
+ ) -> Dict[str, Any]:
186
+ # This expects that GenerateJsonSchema was already used to generate the definitions
187
+ json_schema = field_mapping[(field, field.mode)]
188
+ if "$ref" not in json_schema:
189
+ # TODO remove when deprecating Pydantic v1
190
+ # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207
191
+ json_schema[
192
+ "title"
193
+ ] = field.field_info.title or field.alias.title().replace("_", " ")
194
+ return json_schema
195
+
196
+ def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
197
+ return {}
198
+
199
+ def get_definitions(
200
+ *,
201
+ fields: List[ModelField],
202
+ schema_generator: GenerateJsonSchema,
203
+ model_name_map: ModelNameMap,
204
+ ) -> Tuple[
205
+ Dict[
206
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
207
+ ],
208
+ Dict[str, Dict[str, Any]],
209
+ ]:
210
+ inputs = [
211
+ (field, field.mode, field._type_adapter.core_schema) for field in fields
212
+ ]
213
+ field_mapping, definitions = schema_generator.generate_definitions(
214
+ inputs=inputs
215
+ )
216
+ return field_mapping, definitions # type: ignore[return-value]
217
+
218
+ def is_scalar_field(field: ModelField) -> bool:
219
+ from fastapi import params
220
+
221
+ return field_annotation_is_scalar(
222
+ field.field_info.annotation
223
+ ) and not isinstance(field.field_info, params.Body)
224
+
225
+ def is_sequence_field(field: ModelField) -> bool:
226
+ return field_annotation_is_sequence(field.field_info.annotation)
227
+
228
+ def is_scalar_sequence_field(field: ModelField) -> bool:
229
+ return field_annotation_is_scalar_sequence(field.field_info.annotation)
230
+
231
+ def is_bytes_field(field: ModelField) -> bool:
232
+ return is_bytes_or_nonable_bytes_annotation(field.type_)
233
+
234
+ def is_bytes_sequence_field(field: ModelField) -> bool:
235
+ return is_bytes_sequence_annotation(field.type_)
236
+
237
+ def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
238
+ return type(field_info).from_annotation(annotation)
239
+
240
+ def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
241
+ origin_type = (
242
+ get_origin(field.field_info.annotation) or field.field_info.annotation
243
+ )
244
+ assert issubclass(origin_type, sequence_types) # type: ignore[arg-type]
245
+ return sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return]
246
+
247
+ def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
248
+ error = ValidationError.from_exception_data(
249
+ "Field required", [{"type": "missing", "loc": loc, "input": {}}]
250
+ ).errors()[0]
251
+ error["input"] = None
252
+ return error # type: ignore[return-value]
253
+
254
+ def create_body_model(
255
+ *, fields: Sequence[ModelField], model_name: str
256
+ ) -> Type[BaseModel]:
257
+ field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields}
258
+ BodyModel: Type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload]
259
+ return BodyModel
260
+
261
+ else:
262
+ from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX
263
+ from pydantic import AnyUrl as Url # noqa: F401
264
+ from pydantic import ( # type: ignore[assignment]
265
+ BaseConfig as BaseConfig, # noqa: F401
266
+ )
267
+ from pydantic import ValidationError as ValidationError # noqa: F401
268
+ from pydantic.class_validators import ( # type: ignore[no-redef]
269
+ Validator as Validator, # noqa: F401
270
+ )
271
+ from pydantic.error_wrappers import ( # type: ignore[no-redef]
272
+ ErrorWrapper as ErrorWrapper, # noqa: F401
273
+ )
274
+ from pydantic.errors import MissingError
275
+ from pydantic.fields import ( # type: ignore[attr-defined]
276
+ SHAPE_FROZENSET,
277
+ SHAPE_LIST,
278
+ SHAPE_SEQUENCE,
279
+ SHAPE_SET,
280
+ SHAPE_SINGLETON,
281
+ SHAPE_TUPLE,
282
+ SHAPE_TUPLE_ELLIPSIS,
283
+ )
284
+ from pydantic.fields import FieldInfo as FieldInfo
285
+ from pydantic.fields import ( # type: ignore[no-redef,attr-defined]
286
+ ModelField as ModelField, # noqa: F401
287
+ )
288
+ from pydantic.fields import ( # type: ignore[no-redef,attr-defined]
289
+ Required as Required, # noqa: F401
290
+ )
291
+ from pydantic.fields import ( # type: ignore[no-redef,attr-defined]
292
+ Undefined as Undefined,
293
+ )
294
+ from pydantic.fields import ( # type: ignore[no-redef, attr-defined]
295
+ UndefinedType as UndefinedType, # noqa: F401
296
+ )
297
+ from pydantic.networks import ( # type: ignore[no-redef]
298
+ MultiHostDsn as MultiHostUrl, # noqa: F401
299
+ )
300
+ from pydantic.schema import (
301
+ field_schema,
302
+ get_flat_models_from_fields,
303
+ get_model_name_map,
304
+ model_process_schema,
305
+ )
306
+ from pydantic.schema import ( # type: ignore[no-redef] # noqa: F401
307
+ get_annotation_from_field_info as get_annotation_from_field_info,
308
+ )
309
+ from pydantic.typing import ( # type: ignore[no-redef]
310
+ evaluate_forwardref as evaluate_forwardref, # noqa: F401
311
+ )
312
+ from pydantic.utils import ( # type: ignore[no-redef]
313
+ lenient_issubclass as lenient_issubclass, # noqa: F401
314
+ )
315
+
316
+ GetJsonSchemaHandler = Any # type: ignore[assignment,misc]
317
+ JsonSchemaValue = Dict[str, Any] # type: ignore[misc]
318
+ CoreSchema = Any # type: ignore[assignment,misc]
319
+
320
+ sequence_shapes = {
321
+ SHAPE_LIST,
322
+ SHAPE_SET,
323
+ SHAPE_FROZENSET,
324
+ SHAPE_TUPLE,
325
+ SHAPE_SEQUENCE,
326
+ SHAPE_TUPLE_ELLIPSIS,
327
+ }
328
+ sequence_shape_to_type = {
329
+ SHAPE_LIST: list,
330
+ SHAPE_SET: set,
331
+ SHAPE_TUPLE: tuple,
332
+ SHAPE_SEQUENCE: list,
333
+ SHAPE_TUPLE_ELLIPSIS: list,
334
+ }
335
+
336
+ @dataclass
337
+ class GenerateJsonSchema: # type: ignore[no-redef]
338
+ ref_template: str
339
+
340
+ class PydanticSchemaGenerationError(Exception): # type: ignore[no-redef]
341
+ pass
342
+
343
+ def general_plain_validator_function( # type: ignore[misc]
344
+ function: Callable[..., Any],
345
+ *,
346
+ ref: Union[str, None] = None,
347
+ metadata: Any = None,
348
+ serialization: Any = None,
349
+ ) -> Any:
350
+ return {}
351
+
352
+ def get_model_definitions(
353
+ *,
354
+ flat_models: Set[Union[Type[BaseModel], Type[Enum]]],
355
+ model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str],
356
+ ) -> Dict[str, Any]:
357
+ definitions: Dict[str, Dict[str, Any]] = {}
358
+ for model in flat_models:
359
+ m_schema, m_definitions, m_nested_models = model_process_schema(
360
+ model, model_name_map=model_name_map, ref_prefix=REF_PREFIX
361
+ )
362
+ definitions.update(m_definitions)
363
+ model_name = model_name_map[model]
364
+ if "description" in m_schema:
365
+ m_schema["description"] = m_schema["description"].split("\f")[0]
366
+ definitions[model_name] = m_schema
367
+ return definitions
368
+
369
+ def is_pv1_scalar_field(field: ModelField) -> bool:
370
+ from fastapi import params
371
+
372
+ field_info = field.field_info
373
+ if not (
374
+ field.shape == SHAPE_SINGLETON # type: ignore[attr-defined]
375
+ and not lenient_issubclass(field.type_, BaseModel)
376
+ and not lenient_issubclass(field.type_, dict)
377
+ and not field_annotation_is_sequence(field.type_)
378
+ and not is_dataclass(field.type_)
379
+ and not isinstance(field_info, params.Body)
380
+ ):
381
+ return False
382
+ if field.sub_fields: # type: ignore[attr-defined]
383
+ if not all(
384
+ is_pv1_scalar_field(f)
385
+ for f in field.sub_fields # type: ignore[attr-defined]
386
+ ):
387
+ return False
388
+ return True
389
+
390
+ def is_pv1_scalar_sequence_field(field: ModelField) -> bool:
391
+ if (field.shape in sequence_shapes) and not lenient_issubclass( # type: ignore[attr-defined]
392
+ field.type_, BaseModel
393
+ ):
394
+ if field.sub_fields is not None: # type: ignore[attr-defined]
395
+ for sub_field in field.sub_fields: # type: ignore[attr-defined]
396
+ if not is_pv1_scalar_field(sub_field):
397
+ return False
398
+ return True
399
+ if _annotation_is_sequence(field.type_):
400
+ return True
401
+ return False
402
+
403
+ def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]:
404
+ use_errors: List[Any] = []
405
+ for error in errors:
406
+ if isinstance(error, ErrorWrapper):
407
+ new_errors = ValidationError( # type: ignore[call-arg]
408
+ errors=[error], model=RequestErrorModel
409
+ ).errors()
410
+ use_errors.extend(new_errors)
411
+ elif isinstance(error, list):
412
+ use_errors.extend(_normalize_errors(error))
413
+ else:
414
+ use_errors.append(error)
415
+ return use_errors
416
+
417
+ def _model_rebuild(model: Type[BaseModel]) -> None:
418
+ model.update_forward_refs()
419
+
420
+ def _model_dump(
421
+ model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any
422
+ ) -> Any:
423
+ return model.dict(**kwargs)
424
+
425
+ def _get_model_config(model: BaseModel) -> Any:
426
+ return model.__config__ # type: ignore[attr-defined]
427
+
428
+ def get_schema_from_model_field(
429
+ *,
430
+ field: ModelField,
431
+ schema_generator: GenerateJsonSchema,
432
+ model_name_map: ModelNameMap,
433
+ field_mapping: Dict[
434
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
435
+ ],
436
+ ) -> Dict[str, Any]:
437
+ # This expects that GenerateJsonSchema was already used to generate the definitions
438
+ return field_schema( # type: ignore[no-any-return]
439
+ field, model_name_map=model_name_map, ref_prefix=REF_PREFIX
440
+ )[0]
441
+
442
+ def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap:
443
+ models = get_flat_models_from_fields(fields, known_models=set())
444
+ return get_model_name_map(models) # type: ignore[no-any-return]
445
+
446
+ def get_definitions(
447
+ *,
448
+ fields: List[ModelField],
449
+ schema_generator: GenerateJsonSchema,
450
+ model_name_map: ModelNameMap,
451
+ ) -> Tuple[
452
+ Dict[
453
+ Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue
454
+ ],
455
+ Dict[str, Dict[str, Any]],
456
+ ]:
457
+ models = get_flat_models_from_fields(fields, known_models=set())
458
+ return {}, get_model_definitions(
459
+ flat_models=models, model_name_map=model_name_map
460
+ )
461
+
462
+ def is_scalar_field(field: ModelField) -> bool:
463
+ return is_pv1_scalar_field(field)
464
+
465
+ def is_sequence_field(field: ModelField) -> bool:
466
+ return field.shape in sequence_shapes or _annotation_is_sequence(field.type_) # type: ignore[attr-defined]
467
+
468
+ def is_scalar_sequence_field(field: ModelField) -> bool:
469
+ return is_pv1_scalar_sequence_field(field)
470
+
471
+ def is_bytes_field(field: ModelField) -> bool:
472
+ return lenient_issubclass(field.type_, bytes)
473
+
474
+ def is_bytes_sequence_field(field: ModelField) -> bool:
475
+ return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes) # type: ignore[attr-defined]
476
+
477
+ def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo:
478
+ return copy(field_info)
479
+
480
+ def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]:
481
+ return sequence_shape_to_type[field.shape](value) # type: ignore[no-any-return,attr-defined]
482
+
483
+ def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]:
484
+ missing_field_error = ErrorWrapper(MissingError(), loc=loc) # type: ignore[call-arg]
485
+ new_error = ValidationError([missing_field_error], RequestErrorModel)
486
+ return new_error.errors()[0] # type: ignore[return-value]
487
+
488
+ def create_body_model(
489
+ *, fields: Sequence[ModelField], model_name: str
490
+ ) -> Type[BaseModel]:
491
+ BodyModel = create_model(model_name)
492
+ for f in fields:
493
+ BodyModel.__fields__[f.name] = f # type: ignore[index]
494
+ return BodyModel
495
+
496
+
497
+ def _regenerate_error_with_loc(
498
+ *, errors: Sequence[Any], loc_prefix: Tuple[Union[str, int], ...]
499
+ ) -> List[Dict[str, Any]]:
500
+ updated_loc_errors: List[Any] = [
501
+ {**err, "loc": loc_prefix + err.get("loc", ())}
502
+ for err in _normalize_errors(errors)
503
+ ]
504
+
505
+ return updated_loc_errors
506
+
507
+
508
+ def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
509
+ if lenient_issubclass(annotation, (str, bytes)):
510
+ return False
511
+ return lenient_issubclass(annotation, sequence_types)
512
+
513
+
514
+ def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool:
515
+ return _annotation_is_sequence(annotation) or _annotation_is_sequence(
516
+ get_origin(annotation)
517
+ )
518
+
519
+
520
+ def value_is_sequence(value: Any) -> bool:
521
+ return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) # type: ignore[arg-type]
522
+
523
+
524
+ def _annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
525
+ return (
526
+ lenient_issubclass(annotation, (BaseModel, Mapping, UploadFile))
527
+ or _annotation_is_sequence(annotation)
528
+ or is_dataclass(annotation)
529
+ )
530
+
531
+
532
+ def field_annotation_is_complex(annotation: Union[Type[Any], None]) -> bool:
533
+ origin = get_origin(annotation)
534
+ if origin is Union or origin is UnionType:
535
+ return any(field_annotation_is_complex(arg) for arg in get_args(annotation))
536
+
537
+ return (
538
+ _annotation_is_complex(annotation)
539
+ or _annotation_is_complex(origin)
540
+ or hasattr(origin, "__pydantic_core_schema__")
541
+ or hasattr(origin, "__get_pydantic_core_schema__")
542
+ )
543
+
544
+
545
+ def field_annotation_is_scalar(annotation: Any) -> bool:
546
+ # handle Ellipsis here to make tuple[int, ...] work nicely
547
+ return annotation is Ellipsis or not field_annotation_is_complex(annotation)
548
+
549
+
550
+ def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool:
551
+ origin = get_origin(annotation)
552
+ if origin is Union or origin is UnionType:
553
+ at_least_one_scalar_sequence = False
554
+ for arg in get_args(annotation):
555
+ if field_annotation_is_scalar_sequence(arg):
556
+ at_least_one_scalar_sequence = True
557
+ continue
558
+ elif not field_annotation_is_scalar(arg):
559
+ return False
560
+ return at_least_one_scalar_sequence
561
+ return field_annotation_is_sequence(annotation) and all(
562
+ field_annotation_is_scalar(sub_annotation)
563
+ for sub_annotation in get_args(annotation)
564
+ )
565
+
566
+
567
+ def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool:
568
+ if lenient_issubclass(annotation, bytes):
569
+ return True
570
+ origin = get_origin(annotation)
571
+ if origin is Union or origin is UnionType:
572
+ for arg in get_args(annotation):
573
+ if lenient_issubclass(arg, bytes):
574
+ return True
575
+ return False
576
+
577
+
578
+ def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool:
579
+ if lenient_issubclass(annotation, UploadFile):
580
+ return True
581
+ origin = get_origin(annotation)
582
+ if origin is Union or origin is UnionType:
583
+ for arg in get_args(annotation):
584
+ if lenient_issubclass(arg, UploadFile):
585
+ return True
586
+ return False
587
+
588
+
589
+ def is_bytes_sequence_annotation(annotation: Any) -> bool:
590
+ origin = get_origin(annotation)
591
+ if origin is Union or origin is UnionType:
592
+ at_least_one = False
593
+ for arg in get_args(annotation):
594
+ if is_bytes_sequence_annotation(arg):
595
+ at_least_one = True
596
+ continue
597
+ return at_least_one
598
+ return field_annotation_is_sequence(annotation) and all(
599
+ is_bytes_or_nonable_bytes_annotation(sub_annotation)
600
+ for sub_annotation in get_args(annotation)
601
+ )
602
+
603
+
604
+ def is_uploadfile_sequence_annotation(annotation: Any) -> bool:
605
+ origin = get_origin(annotation)
606
+ if origin is Union or origin is UnionType:
607
+ at_least_one = False
608
+ for arg in get_args(annotation):
609
+ if is_uploadfile_sequence_annotation(arg):
610
+ at_least_one = True
611
+ continue
612
+ return at_least_one
613
+ return field_annotation_is_sequence(annotation) and all(
614
+ is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation)
615
+ for sub_annotation in get_args(annotation)
616
+ )