mixpeek 0.8.41__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mixpeek/__init__.py +93 -1
- mixpeek/_base_client.py +2041 -0
- mixpeek/_client.py +444 -0
- mixpeek/_compat.py +219 -0
- mixpeek/_constants.py +14 -0
- mixpeek/_exceptions.py +108 -0
- mixpeek/_files.py +123 -0
- mixpeek/_models.py +785 -0
- mixpeek/_qs.py +150 -0
- mixpeek/_resource.py +43 -0
- mixpeek/_response.py +824 -0
- mixpeek/_streaming.py +333 -0
- mixpeek/_types.py +217 -0
- mixpeek/_utils/__init__.py +55 -0
- mixpeek/_utils/_logs.py +25 -0
- mixpeek/_utils/_proxy.py +62 -0
- mixpeek/_utils/_reflection.py +42 -0
- mixpeek/_utils/_streams.py +12 -0
- mixpeek/_utils/_sync.py +81 -0
- mixpeek/_utils/_transform.py +382 -0
- mixpeek/_utils/_typing.py +120 -0
- mixpeek/_utils/_utils.py +397 -0
- mixpeek/_version.py +4 -0
- mixpeek/lib/.keep +4 -0
- mixpeek/resources/__init__.py +159 -0
- mixpeek/resources/accounts/__init__.py +33 -0
- mixpeek/resources/accounts/accounts.py +102 -0
- mixpeek/resources/accounts/private.py +232 -0
- mixpeek/resources/agent/__init__.py +33 -0
- mixpeek/resources/agent/agent.py +225 -0
- mixpeek/resources/agent/task.py +189 -0
- mixpeek/resources/collections/__init__.py +33 -0
- mixpeek/resources/collections/collections.py +459 -0
- mixpeek/resources/collections/files.py +679 -0
- mixpeek/resources/describe.py +338 -0
- mixpeek/resources/embed.py +234 -0
- mixpeek/resources/indexes.py +506 -0
- mixpeek/resources/read.py +183 -0
- mixpeek/resources/recognize.py +183 -0
- mixpeek/resources/search.py +542 -0
- mixpeek/resources/tasks.py +294 -0
- mixpeek/resources/transcribe.py +192 -0
- mixpeek/types/__init__.py +19 -0
- mixpeek/types/accounts/__init__.py +6 -0
- mixpeek/types/accounts/private_update_params.py +25 -0
- mixpeek/types/accounts/user.py +32 -0
- mixpeek/types/agent/__init__.py +3 -0
- mixpeek/types/agent_create_params.py +18 -0
- mixpeek/types/agentresponse.py +11 -0
- mixpeek/types/collection_search_params.py +29 -0
- mixpeek/types/collections/__init__.py +9 -0
- mixpeek/types/collections/file_create_params.py +31 -0
- mixpeek/types/collections/file_full_params.py +22 -0
- mixpeek/types/collections/file_update_params.py +18 -0
- mixpeek/types/collections/fileresponse.py +23 -0
- mixpeek/types/collections/groupedfiledata.py +38 -0
- mixpeek/types/describe_upload_params.py +21 -0
- mixpeek/types/describe_url_params.py +20 -0
- mixpeek/types/embed_create_params.py +29 -0
- mixpeek/types/embeddingresponse.py +15 -0
- mixpeek/types/index_face_params.py +23 -0
- mixpeek/types/index_upload_params.py +27 -0
- mixpeek/types/index_url_params.py +159 -0
- mixpeek/types/search_text_params.py +45 -0
- mixpeek/types/search_upload_params.py +25 -0
- mixpeek/types/search_url_params.py +45 -0
- mixpeek/types/taskresponse.py +15 -0
- mixpeek/types/transcribe_url_params.py +18 -0
- mixpeek-0.10.0.dist-info/METADATA +356 -0
- mixpeek-0.10.0.dist-info/RECORD +73 -0
- {mixpeek-0.8.41.dist-info → mixpeek-0.10.0.dist-info}/WHEEL +1 -2
- mixpeek-0.10.0.dist-info/licenses/LICENSE +201 -0
- mixpeek/client.py +0 -27
- mixpeek/endpoints/collections.py +0 -86
- mixpeek/endpoints/embed.py +0 -66
- mixpeek/endpoints/index.py +0 -51
- mixpeek/endpoints/register.py +0 -34
- mixpeek/endpoints/search.py +0 -67
- mixpeek/endpoints/tasks.py +0 -26
- mixpeek/endpoints/tools.py +0 -138
- mixpeek/exceptions.py +0 -13
- mixpeek-0.8.41.dist-info/METADATA +0 -375
- mixpeek-0.8.41.dist-info/RECORD +0 -15
- mixpeek-0.8.41.dist-info/top_level.txt +0 -1
- /mixpeek/{endpoints/__init__.py → py.typed} +0 -0
@@ -0,0 +1,42 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import inspect
|
4
|
+
from typing import Any, Callable
|
5
|
+
|
6
|
+
|
7
|
+
def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool:
    """Check whether `func` declares a parameter named `arg_name`."""
    parameters = inspect.signature(func).parameters
    return arg_name in parameters
|
11
|
+
|
12
|
+
|
13
|
+
def assert_signatures_in_sync(
    source_func: Callable[..., Any],
    check_func: Callable[..., Any],
    *,
    exclude_params: set[str] = set(),
) -> None:
    """Ensure that the signature of the second function matches the first.

    Every parameter of `source_func` (except those listed in `exclude_params`)
    must exist on `check_func` with an identical type annotation.

    Raises:
        AssertionError: listing every missing or mismatched parameter.
    """
    check_sig = inspect.signature(check_func)
    source_sig = inspect.signature(source_func)

    errors: list[str] = []

    for name, source_param in source_sig.parameters.items():
        if name in exclude_params:
            continue

        custom_param = check_sig.parameters.get(name)
        if not custom_param:
            errors.append(f"the `{name}` param is missing")
            continue

        if custom_param.annotation != source_param.annotation:
            # fixed wording: previously read "are do not match"
            errors.append(
                f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}"
            )

    if errors:
        raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors))
|
@@ -0,0 +1,12 @@
|
|
1
|
+
from typing import Any
|
2
|
+
from typing_extensions import Iterator, AsyncIterator
|
3
|
+
|
4
|
+
|
5
|
+
def consume_sync_iterator(iterator: Iterator[Any]) -> None:
    """Exhaust `iterator`, discarding every value it yields."""
    for _item in iterator:
        pass
|
8
|
+
|
9
|
+
|
10
|
+
async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None:
    """Exhaust `iterator`, discarding every value it yields."""
    async for _item in iterator:
        pass
|
mixpeek/_utils/_sync.py
ADDED
@@ -0,0 +1,81 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import functools
|
4
|
+
from typing import TypeVar, Callable, Awaitable
|
5
|
+
from typing_extensions import ParamSpec
|
6
|
+
|
7
|
+
import anyio
|
8
|
+
import anyio.to_thread
|
9
|
+
|
10
|
+
from ._reflection import function_has_argument
|
11
|
+
|
12
|
+
T_Retval = TypeVar("T_Retval")
|
13
|
+
T_ParamSpec = ParamSpec("T_ParamSpec")
|
14
|
+
|
15
|
+
|
16
|
+
# copied from `asyncer`, https://github.com/tiangolo/asyncer
|
17
|
+
def asyncify(
    function: Callable[T_ParamSpec, T_Retval],
    *,
    cancellable: bool = False,
    limiter: anyio.CapacityLimiter | None = None,
) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
    """
    Take a blocking function and create an async one that receives the same
    positional and keyword arguments, and that when called, calls the original function
    in a worker thread using `anyio.to_thread.run_sync()`. Internally,
    `asyncer.asyncify()` uses the same `anyio.to_thread.run_sync()`, but it supports
    keyword arguments additional to positional arguments and it adds better support for
    autocompletion and inline errors for the arguments of the function called and the
    return value.

    If the `cancellable` option is enabled and the task waiting for its completion is
    cancelled, the thread will still run its course but its return value (or any raised
    exception) will be ignored.

    Use it like this:

    ```Python
    def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str:
        # Do work
        return "Some result"


    result = await to_thread.asyncify(do_work)("spam", "ham", kwarg1="a", kwarg2="b")
    print(result)
    ```

    ## Arguments

    `function`: a blocking regular callable (e.g. a function)
    `cancellable`: `True` to allow cancellation of the operation
    `limiter`: capacity limiter to use to limit the total amount of threads running
        (if omitted, the default limiter is used)

    ## Return

    An async function that takes the same positional and keyword arguments as the
    original one, that when called runs the same original function in a thread worker
    and returns the result.
    """
    # In anyio v4.1.0 the `cancellable` argument was deprecated in favour of
    # `abandon_on_cancel`. Detect which spelling this anyio exposes ONCE, at
    # decoration time, instead of re-inspecting `run_sync`'s signature on every
    # await (signature inspection is comparatively expensive), while still
    # avoiding deprecation warnings on new anyio versions.
    use_abandon_on_cancel = function_has_argument(anyio.to_thread.run_sync, "abandon_on_cancel")

    async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
        # Bind all arguments up front so `run_sync` only needs a 0-arg callable.
        partial_f = functools.partial(function, *args, **kwargs)

        if use_abandon_on_cancel:
            return await anyio.to_thread.run_sync(
                partial_f,
                abandon_on_cancel=cancellable,
                limiter=limiter,
            )

        return await anyio.to_thread.run_sync(
            partial_f,
            cancellable=cancellable,
            limiter=limiter,
        )

    return wrapper
|
@@ -0,0 +1,382 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import io
|
4
|
+
import base64
|
5
|
+
import pathlib
|
6
|
+
from typing import Any, Mapping, TypeVar, cast
|
7
|
+
from datetime import date, datetime
|
8
|
+
from typing_extensions import Literal, get_args, override, get_type_hints
|
9
|
+
|
10
|
+
import anyio
|
11
|
+
import pydantic
|
12
|
+
|
13
|
+
from ._utils import (
|
14
|
+
is_list,
|
15
|
+
is_mapping,
|
16
|
+
is_iterable,
|
17
|
+
)
|
18
|
+
from .._files import is_base64_file_input
|
19
|
+
from ._typing import (
|
20
|
+
is_list_type,
|
21
|
+
is_union_type,
|
22
|
+
extract_type_arg,
|
23
|
+
is_iterable_type,
|
24
|
+
is_required_type,
|
25
|
+
is_annotated_type,
|
26
|
+
strip_annotated_type,
|
27
|
+
)
|
28
|
+
from .._compat import model_dump, is_typeddict
|
29
|
+
|
30
|
+
_T = TypeVar("_T")
|
31
|
+
|
32
|
+
|
33
|
+
# TODO: support for drilling globals() and locals()
|
34
|
+
# TODO: ensure works correctly with forward references in all cases
|
35
|
+
|
36
|
+
|
37
|
+
PropertyFormat = Literal["iso8601", "base64", "custom"]
|
38
|
+
|
39
|
+
|
40
|
+
class PropertyInfo:
|
41
|
+
"""Metadata class to be used in Annotated types to provide information about a given type.
|
42
|
+
|
43
|
+
For example:
|
44
|
+
|
45
|
+
class MyParams(TypedDict):
|
46
|
+
account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')]
|
47
|
+
|
48
|
+
This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
|
49
|
+
"""
|
50
|
+
|
51
|
+
alias: str | None
|
52
|
+
format: PropertyFormat | None
|
53
|
+
format_template: str | None
|
54
|
+
discriminator: str | None
|
55
|
+
|
56
|
+
def __init__(
|
57
|
+
self,
|
58
|
+
*,
|
59
|
+
alias: str | None = None,
|
60
|
+
format: PropertyFormat | None = None,
|
61
|
+
format_template: str | None = None,
|
62
|
+
discriminator: str | None = None,
|
63
|
+
) -> None:
|
64
|
+
self.alias = alias
|
65
|
+
self.format = format
|
66
|
+
self.format_template = format_template
|
67
|
+
self.discriminator = discriminator
|
68
|
+
|
69
|
+
@override
|
70
|
+
def __repr__(self) -> str:
|
71
|
+
return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')"
|
72
|
+
|
73
|
+
|
74
|
+
def maybe_transform(
    data: object,
    expected_type: object,
) -> Any | None:
    """Wrapper over `transform()` that allows `None` to be passed.

    See `transform()` for more details.
    """
    return None if data is None else transform(data, expected_type)
|
85
|
+
|
86
|
+
|
87
|
+
# Wrapper over _transform_recursive providing fake types
|
88
|
+
def transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Transform dictionaries based off of type information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    result = _transform_recursive(data, annotation=cast(type, expected_type))
    return cast(_T, result)
|
109
|
+
|
110
|
+
|
111
|
+
def _get_annotated_type(type_: type) -> type | None:
    """Return `type_` if it is an `Annotated` type, otherwise `None`.

    This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
    """
    if is_required_type(type_):
        # `Required[Annotated[T, ...]]` -> `Annotated[T, ...]`
        type_ = get_args(type_)[0]

    return type_ if is_annotated_type(type_) else None
|
124
|
+
|
125
|
+
|
126
|
+
def _maybe_transform_key(key: str, type_: type) -> str:
    """Return the wire name for `key` based on the annotations provided in `type_`.

    Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata.
    """
    annotated_type = _get_annotated_type(type_)
    if annotated_type is None:
        # not an `Annotated` definition, nothing to rename
        return key

    # skip the first argument as it is the underlying type itself
    for annotation in get_args(annotated_type)[1:]:
        if isinstance(annotation, PropertyInfo) and annotation.alias is not None:
            return annotation.alias

    return key
|
143
|
+
|
144
|
+
|
145
|
+
def _transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    if inner_type is None:
        inner_type = annotation

    # Dispatch on the bare type with `Annotated`/`Required` wrappers removed.
    stripped_type = strip_annotated_type(inner_type)
    if is_typeddict(stripped_type) and is_mapping(data):
        return _transform_typeddict(data, stripped_type)

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
    ):
        # Recurse per element; the outer `annotation` is kept so container-level
        # metadata still applies to each entry via `inner_type`.
        inner_type = extract_type_arg(stripped_type, 0)
        return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = _transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        # Serialise models including only explicitly-set fields.
        return model_dump(data, exclude_unset=True)

    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    # NOTE: `annotation` is intentionally re-bound (shadowed) to each metadata entry here.
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return _format_data(data, annotation.format, annotation.format_template)

    return data
|
202
|
+
|
203
|
+
|
204
|
+
def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
|
205
|
+
if isinstance(data, (date, datetime)):
|
206
|
+
if format_ == "iso8601":
|
207
|
+
return data.isoformat()
|
208
|
+
|
209
|
+
if format_ == "custom" and format_template is not None:
|
210
|
+
return data.strftime(format_template)
|
211
|
+
|
212
|
+
if format_ == "base64" and is_base64_file_input(data):
|
213
|
+
binary: str | bytes | None = None
|
214
|
+
|
215
|
+
if isinstance(data, pathlib.Path):
|
216
|
+
binary = data.read_bytes()
|
217
|
+
elif isinstance(data, io.IOBase):
|
218
|
+
binary = data.read()
|
219
|
+
|
220
|
+
if isinstance(binary, str): # type: ignore[unreachable]
|
221
|
+
binary = binary.encode()
|
222
|
+
|
223
|
+
if not isinstance(binary, bytes):
|
224
|
+
raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")
|
225
|
+
|
226
|
+
return base64.b64encode(binary).decode("ascii")
|
227
|
+
|
228
|
+
return data
|
229
|
+
|
230
|
+
|
231
|
+
def _transform_typeddict(
|
232
|
+
data: Mapping[str, object],
|
233
|
+
expected_type: type,
|
234
|
+
) -> Mapping[str, object]:
|
235
|
+
result: dict[str, object] = {}
|
236
|
+
annotations = get_type_hints(expected_type, include_extras=True)
|
237
|
+
for key, value in data.items():
|
238
|
+
type_ = annotations.get(key)
|
239
|
+
if type_ is None:
|
240
|
+
# we do not have a type annotation for this field, leave it as is
|
241
|
+
result[key] = value
|
242
|
+
else:
|
243
|
+
result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_)
|
244
|
+
return result
|
245
|
+
|
246
|
+
|
247
|
+
async def async_maybe_transform(
    data: object,
    expected_type: object,
) -> Any | None:
    """Wrapper over `async_transform()` that allows `None` to be passed.

    See `async_transform()` for more details.
    """
    return None if data is None else await async_transform(data, expected_type)
|
258
|
+
|
259
|
+
|
260
|
+
async def async_transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Transform dictionaries based off of type information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    result = await _async_transform_recursive(data, annotation=cast(type, expected_type))
    return cast(_T, result)
|
281
|
+
|
282
|
+
|
283
|
+
async def _async_transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Async twin of `_transform_recursive`; the only awaits happen in
    `_async_transform_typeddict` and `_async_format_data`.

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    if inner_type is None:
        inner_type = annotation

    # Dispatch on the bare type with `Annotated`/`Required` wrappers removed.
    stripped_type = strip_annotated_type(inner_type)
    if is_typeddict(stripped_type) and is_mapping(data):
        return await _async_transform_typeddict(data, stripped_type)

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
    ):
        # Recurse per element; the outer `annotation` is kept so container-level
        # metadata still applies to each entry via `inner_type`.
        inner_type = extract_type_arg(stripped_type, 0)
        return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        # Serialise models including only explicitly-set fields.
        return model_dump(data, exclude_unset=True)

    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    # NOTE: `annotation` is intentionally re-bound (shadowed) to each metadata entry here.
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return await _async_format_data(data, annotation.format, annotation.format_template)

    return data
|
340
|
+
|
341
|
+
|
342
|
+
async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
|
343
|
+
if isinstance(data, (date, datetime)):
|
344
|
+
if format_ == "iso8601":
|
345
|
+
return data.isoformat()
|
346
|
+
|
347
|
+
if format_ == "custom" and format_template is not None:
|
348
|
+
return data.strftime(format_template)
|
349
|
+
|
350
|
+
if format_ == "base64" and is_base64_file_input(data):
|
351
|
+
binary: str | bytes | None = None
|
352
|
+
|
353
|
+
if isinstance(data, pathlib.Path):
|
354
|
+
binary = await anyio.Path(data).read_bytes()
|
355
|
+
elif isinstance(data, io.IOBase):
|
356
|
+
binary = data.read()
|
357
|
+
|
358
|
+
if isinstance(binary, str): # type: ignore[unreachable]
|
359
|
+
binary = binary.encode()
|
360
|
+
|
361
|
+
if not isinstance(binary, bytes):
|
362
|
+
raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")
|
363
|
+
|
364
|
+
return base64.b64encode(binary).decode("ascii")
|
365
|
+
|
366
|
+
return data
|
367
|
+
|
368
|
+
|
369
|
+
async def _async_transform_typeddict(
|
370
|
+
data: Mapping[str, object],
|
371
|
+
expected_type: type,
|
372
|
+
) -> Mapping[str, object]:
|
373
|
+
result: dict[str, object] = {}
|
374
|
+
annotations = get_type_hints(expected_type, include_extras=True)
|
375
|
+
for key, value in data.items():
|
376
|
+
type_ = annotations.get(key)
|
377
|
+
if type_ is None:
|
378
|
+
# we do not have a type annotation for this field, leave it as is
|
379
|
+
result[key] = value
|
380
|
+
else:
|
381
|
+
result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_)
|
382
|
+
return result
|
@@ -0,0 +1,120 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from typing import Any, TypeVar, Iterable, cast
|
4
|
+
from collections import abc as _c_abc
|
5
|
+
from typing_extensions import Required, Annotated, get_args, get_origin
|
6
|
+
|
7
|
+
from .._types import InheritsGeneric
|
8
|
+
from .._compat import is_union as _is_union
|
9
|
+
|
10
|
+
|
11
|
+
def is_annotated_type(typ: type) -> bool:
    """Whether `typ` is an `Annotated[...]` alias."""
    origin = get_origin(typ)
    return origin == Annotated
|
13
|
+
|
14
|
+
|
15
|
+
def is_list_type(typ: type) -> bool:
    """Whether `typ` is `list` itself or a parameterised `List[T]` / `list[T]` alias."""
    origin = get_origin(typ) or typ
    return origin == list
|
17
|
+
|
18
|
+
|
19
|
+
def is_iterable_type(typ: type) -> bool:
    """If the given type is `typing.Iterable[T]` (or `collections.abc.Iterable[T]`)"""
    origin = get_origin(typ) or typ
    return origin in (Iterable, _c_abc.Iterable)
|
23
|
+
|
24
|
+
|
25
|
+
def is_union_type(typ: type) -> bool:
    """Whether `typ` is a `Union[...]` (or PEP 604 `X | Y`) alias."""
    origin = get_origin(typ)
    return _is_union(origin)
|
27
|
+
|
28
|
+
|
29
|
+
def is_required_type(typ: type) -> bool:
|
30
|
+
return get_origin(typ) == Required
|
31
|
+
|
32
|
+
|
33
|
+
def is_typevar(typ: type) -> bool:
    """Whether `typ` is itself a `TypeVar` instance.

    Compared via `type(...)` rather than `isinstance` because type checkers
    believe the expression always evaluates to `False`.
    """
    return type(typ) is TypeVar  # type: ignore
|
37
|
+
|
38
|
+
|
39
|
+
# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
|
40
|
+
# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
def strip_annotated_type(typ: type) -> type:
    """Peel off `Required`/`Annotated` wrappers until the bare type remains."""
    while is_required_type(typ) or is_annotated_type(typ):
        typ = cast(type, get_args(typ)[0])
    return typ
|
45
|
+
|
46
|
+
|
47
|
+
def extract_type_arg(typ: type, index: int) -> type:
    """Return the type argument of `typ` at position `index`.

    Raises:
        RuntimeError: if `typ` has no type argument at that position.
    """
    try:
        return cast(type, get_args(typ)[index])
    except IndexError as err:
        raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err
|
53
|
+
|
54
|
+
|
55
|
+
def extract_type_var_from_base(
    typ: type,
    *,
    generic_bases: tuple[type, ...],
    index: int,
    failure_message: str | None = None,
) -> type:
    """Given a type like `Foo[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyResponse(Foo[bytes]):
        ...

    extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes
    ```

    And where a generic subclass is given:
    ```py
    _T = TypeVar('_T')
    class MyResponse(Foo[_T]):
        ...

    extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes
    ```

    Raises:
        RuntimeError: when the type argument cannot be resolved.
    """
    # Normalise `Foo[bytes]` to `Foo` so it can be compared against the bases.
    cls = cast(object, get_origin(typ) or typ)
    if cls in generic_bases:
        # we're given the class directly
        return extract_type_arg(typ, index)

    # if a subclass is given
    # ---
    # this is needed as __orig_bases__ is not present in the typeshed stubs
    # because it is intended to be for internal use only, however there does
    # not seem to be a way to resolve generic TypeVars for inherited subclasses
    # without using it.
    if isinstance(cls, InheritsGeneric):
        target_base_class: Any | None = None
        for base in cls.__orig_bases__:
            if base.__origin__ in generic_bases:
                target_base_class = base
                break

        if target_base_class is None:
            raise RuntimeError(
                "Could not find the generic base class;\n"
                "This should never happen;\n"
                f"Does {cls} inherit from one of {generic_bases} ?"
            )

        extracted = extract_type_arg(target_base_class, index)
        if is_typevar(extracted):
            # If the extracted type argument is itself a type variable
            # then that means the subclass itself is generic, so we have
            # to resolve the type argument from the class itself, not
            # the base class.
            #
            # Note: if there is more than 1 type argument, the subclass could
            # change the ordering of the type arguments, this is not currently
            # supported.
            return extract_type_arg(typ, index)

        return extracted

    raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}")
|