usdk 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
uapi/_streaming.py ADDED
@@ -0,0 +1,331 @@
1
+ # Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import inspect
6
+ from types import TracebackType
7
+ from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast
8
+ from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable
9
+
10
+ import httpx
11
+
12
+ from ._utils import extract_type_var_from_base
13
+
14
+ if TYPE_CHECKING:
15
+ from ._client import AsyncuAPI, uAPI
16
+
17
+
18
+ _T = TypeVar("_T")
19
+
20
+
21
class Stream(Generic[_T]):
    """Synchronous iterator over a server-sent-events response.

    Wraps an ``httpx.Response`` whose body is an SSE stream and yields each
    event's JSON payload processed into ``_T`` by the owning client.
    """

    response: httpx.Response

    _decoder: SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: uAPI,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        # Eagerly create the generator so all __next__/__iter__ calls share it.
        self._iterator = self.__stream__()

    def __next__(self) -> _T:
        return next(self._iterator)

    def __iter__(self) -> Iterator[_T]:
        yield from self._iterator

    def _iter_events(self) -> Iterator[ServerSentEvent]:
        # The decoder regroups the raw byte stream into individual SSE events.
        return self._decoder.iter_bytes(self.response.iter_bytes())

    def __stream__(self) -> Iterator[_T]:
        cast_to = cast(Any, self._cast_to)
        response = self.response
        process = self._client._process_response_data

        for sse in self._iter_events():
            yield process(data=sse.json(), cast_to=cast_to, response=response)

        # As we might not fully consume the response stream, we need to close it explicitly
        response.close()

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.close()

    def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        self.response.close()
81
+
82
+
83
class AsyncStream(Generic[_T]):
    """Asynchronous iterator over a server-sent-events response.

    Wraps an ``httpx.Response`` whose body is an SSE stream and yields each
    event's JSON payload processed into ``_T`` by the owning client.
    """

    response: httpx.Response

    _decoder: SSEDecoder | SSEBytesDecoder

    def __init__(
        self,
        *,
        cast_to: type[_T],
        response: httpx.Response,
        client: AsyncuAPI,
    ) -> None:
        self.response = response
        self._cast_to = cast_to
        self._client = client
        self._decoder = client._make_sse_decoder()
        # Eagerly create the async generator so all __anext__/__aiter__ calls share it.
        self._iterator = self.__stream__()

    async def __anext__(self) -> _T:
        return await self._iterator.__anext__()

    async def __aiter__(self) -> AsyncIterator[_T]:
        async for item in self._iterator:
            yield item

    def _iter_events(self) -> AsyncIterator[ServerSentEvent]:
        # Both decoder variants return an async iterator from `aiter_bytes`.
        return self._decoder.aiter_bytes(self.response.aiter_bytes())

    async def __stream__(self) -> AsyncIterator[_T]:
        cast_to = cast(Any, self._cast_to)
        response = self.response
        process = self._client._process_response_data

        async for sse in self._iter_events():
            yield process(data=sse.json(), cast_to=cast_to, response=response)

        # As we might not fully consume the response stream, we need to close it explicitly
        await response.aclose()

    async def __aenter__(self) -> Self:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        await self.close()

    async def close(self) -> None:
        """
        Close the response and release the connection.

        Automatically called if the response body is read to completion.
        """
        await self.response.aclose()
144
+
145
+
146
class ServerSentEvent:
    """A single decoded event from a ``text/event-stream`` response."""

    def __init__(
        self,
        *,
        event: str | None = None,
        data: str | None = None,
        id: str | None = None,
        retry: int | None = None,
    ) -> None:
        self._id = id
        # Missing data is represented as the empty string.
        self._data = data if data is not None else ""
        # An empty event name is normalized to None.
        self._event = event or None
        self._retry = retry

    @property
    def event(self) -> str | None:
        return self._event

    @property
    def id(self) -> str | None:
        return self._id

    @property
    def retry(self) -> int | None:
        return self._retry

    @property
    def data(self) -> str:
        return self._data

    def json(self) -> Any:
        """Parse the event payload as JSON."""
        return json.loads(self.data)

    @override
    def __repr__(self) -> str:
        return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})"
185
+
186
+
187
class SSEDecoder:
    """Incremental decoder for the ``text/event-stream`` wire format."""

    # Buffered state for the event currently being assembled.
    _data: list[str]
    _event: str | None
    _retry: int | None
    _last_event_id: str | None

    def __init__(self) -> None:
        self._event = None
        self._data = []
        self._last_event_id = None
        self._retry = None

    def _decode_chunk(self, chunk: bytes) -> Iterator[ServerSentEvent]:
        # Split while still bytes so splitlines() only honours \r and \n.
        for raw_line in chunk.splitlines():
            event = self.decode(raw_line.decode("utf-8"))
            if event:
                yield event

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        for chunk in self._iter_chunks(iterator):
            yield from self._decode_chunk(chunk)

    def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        pending = b""
        for piece in iterator:
            for line in piece.splitlines(keepends=True):
                pending += line
                # A chunk ends at a blank line (any of the three newline conventions).
                if pending.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield pending
                    pending = b""
        if pending:
            yield pending

    async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        async for chunk in self._aiter_chunks(iterator):
            for event in self._decode_chunk(chunk):
                yield event

    async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]:
        """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
        pending = b""
        async for piece in iterator:
            for line in piece.splitlines(keepends=True):
                pending += line
                if pending.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
                    yield pending
                    pending = b""
        if pending:
            yield pending

    def decode(self, line: str) -> ServerSentEvent | None:
        # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501

        if not line:
            # A blank line dispatches the buffered event (if any).
            return self._flush()

        if line.startswith(":"):
            return None  # comment line, ignored

        fieldname, _, value = line.partition(":")

        if value.startswith(" "):
            value = value[1:]

        if fieldname == "event":
            self._event = value
        elif fieldname == "data":
            self._data.append(value)
        elif fieldname == "id":
            # IDs containing NUL are discarded.
            if "\0" not in value:
                self._last_event_id = value
        elif fieldname == "retry":
            try:
                self._retry = int(value)
            except (TypeError, ValueError):
                pass  # non-integer retry values are ignored
        # any other field name is ignored

        return None

    def _flush(self) -> ServerSentEvent | None:
        """Build and return the buffered event, or ``None`` if nothing has accumulated."""
        if not self._event and not self._data and not self._last_event_id and self._retry is None:
            return None

        sse = ServerSentEvent(
            event=self._event,
            data="\n".join(self._data),
            id=self._last_event_id,
            retry=self._retry,
        )

        # NOTE: as per the SSE spec, do not reset last_event_id.
        self._event = None
        self._data = []
        self._retry = None

        return sse
290
+
291
+
292
@runtime_checkable
class SSEBytesDecoder(Protocol):
    """Structural interface for SSE decoders accepted by the client (implemented by `SSEDecoder`)."""

    def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
        """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...

    def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
        """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered"""
        ...
301
+
302
+
303
def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]:
    """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`"""
    # Generic aliases like Stream[bytes] are unwrapped to their origin class first.
    origin = get_origin(typ)
    if origin is None:
        origin = typ
    return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream))
307
+
308
+
309
def extract_stream_chunk_type(
    stream_cls: type,
    *,
    failure_message: str | None = None,
) -> type:
    """Given a type like `Stream[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyStream(Stream[bytes]):
        ...

    extract_stream_chunk_type(MyStream) -> bytes
    ```
    """
    # NOTE(review): local import — presumably to avoid an import cycle with
    # `._base_client`; confirm before hoisting to module level.
    from ._base_client import Stream, AsyncStream

    stream_bases = cast("tuple[type, ...]", (Stream, AsyncStream))
    return extract_type_var_from_base(
        stream_cls,
        index=0,
        generic_bases=stream_bases,
        failure_message=failure_message,
    )
uapi/_types.py ADDED
@@ -0,0 +1,260 @@
1
+ from __future__ import annotations
2
+
3
+ from os import PathLike
4
+ from typing import (
5
+ IO,
6
+ TYPE_CHECKING,
7
+ Any,
8
+ Dict,
9
+ List,
10
+ Type,
11
+ Tuple,
12
+ Union,
13
+ Mapping,
14
+ TypeVar,
15
+ Callable,
16
+ Iterator,
17
+ Optional,
18
+ Sequence,
19
+ )
20
+ from typing_extensions import (
21
+ Set,
22
+ Literal,
23
+ Protocol,
24
+ TypeAlias,
25
+ TypedDict,
26
+ SupportsIndex,
27
+ overload,
28
+ override,
29
+ runtime_checkable,
30
+ )
31
+
32
+ import httpx
33
+ import pydantic
34
+ from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport
35
+
36
+ if TYPE_CHECKING:
37
+ from ._models import BaseModel
38
+ from ._response import APIResponse, AsyncAPIResponse
39
+
40
# httpx transport aliases re-exported under SDK-local names.
Transport = BaseTransport
AsyncTransport = AsyncBaseTransport
# Loosely-typed request primitives used throughout the client.
Query = Mapping[str, object]
Body = object
AnyMapping = Mapping[str, object]
# Type variable bound to any pydantic model.
ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
_T = TypeVar("_T")
47
+
48
+
49
# Approximates httpx internal ProxiesTypes and RequestFiles types
# while adding support for `PathLike` instances
ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]]
ProxiesTypes = Union[str, Proxy, ProxiesDict]
if TYPE_CHECKING:
    Base64FileInput = Union[IO[bytes], PathLike[str]]
    FileContent = Union[IO[bytes], bytes, PathLike[str]]
else:
    Base64FileInput = Union[IO[bytes], PathLike]
    FileContent = Union[IO[bytes], bytes, PathLike]  # PathLike is not subscriptable in Python 3.8.
# A file may be given bare, or as a tuple adding filename / content-type / headers.
FileTypes = Union[
    # file (or bytes)
    FileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], FileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], FileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
]
RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]

# duplicate of the above but without our custom file support
HttpxFileContent = Union[IO[bytes], bytes]
HttpxFileTypes = Union[
    # file (or bytes)
    HttpxFileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], HttpxFileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], HttpxFileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]],
]
HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]]
84
+
85
+ # Workaround to support (cast_to: Type[ResponseT]) -> ResponseT
86
+ # where ResponseT includes `None`. In order to support directly
87
+ # passing `None`, overloads would have to be defined for every
88
+ # method that uses `ResponseT` which would lead to an unacceptable
89
+ # amount of code duplication and make it unreadable. See _base_client.py
90
+ # for example usage.
91
+ #
92
+ # This unfortunately means that you will either have
93
+ # to import this type and pass it explicitly:
94
+ #
95
+ # from uapi import NoneType
96
+ # client.get('/foo', cast_to=NoneType)
97
+ #
98
+ # or build it yourself:
99
+ #
100
+ # client.get('/foo', cast_to=type(None))
101
if TYPE_CHECKING:
    # Declared as an annotation only so type-checkers see `Type[None]`.
    NoneType: Type[None]
else:
    # Runtime value: the real NoneType class.
    NoneType = type(None)
105
+
106
+
107
class RequestOptions(TypedDict, total=False):
    """Per-request overrides accepted by the client's request methods (all keys optional)."""

    headers: Headers
    max_retries: int
    # `None` disables the timeout entirely (see the `NotGiven` docs below).
    timeout: float | Timeout | None
    params: Query
    extra_json: AnyMapping
    idempotency_key: str
    follow_redirects: bool
115
+
116
+
117
# Sentinel class used until PEP 0661 is accepted
class NotGiven:
    """
    Distinguishes "argument not supplied" from an explicit ``None``.

    For parameters where ``None`` is meaningful, the default is the
    ``not_given`` sentinel instead of ``None``:

    ```py
    def create(timeout: Timeout | None | NotGiven = not_given): ...


    create(timeout=1)  # 1s timeout
    create(timeout=None)  # No timeout
    create()  # Default timeout behavior
    ```

    User code shouldn't need to use not_given directly.
    """

    def __bool__(self) -> Literal[False]:
        # Always falsy, so `if value:` treats a missing argument like "absent".
        return False

    @override
    def __repr__(self) -> str:
        return "NOT_GIVEN"
144
+
145
+
146
# Module-level singleton used as the default for optional parameters.
not_given = NotGiven()
# for backwards compatibility:
NOT_GIVEN = NotGiven()
149
+
150
+
151
class Omit:
    """Sentinel that removes a default value from an outgoing request.

    Pass ``omit`` where a default header would otherwise be sent:

    ```py
    # as the default `Content-Type` header is `application/json` that will be sent
    client.post("/upload/files", files={"file": b"my raw file content"})

    # you can't explicitly override the header as it has to be dynamically generated
    # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
    client.post(..., headers={"Content-Type": "multipart/form-data"})

    # instead you can remove the default `application/json` header by passing omit
    client.post(..., headers={"Content-Type": omit})
    ```
    """

    def __bool__(self) -> Literal[False]:
        # Always falsy, mirroring `NotGiven`.
        return False
170
+
171
+
172
# Module-level singleton; pass as a header/param value to drop the default.
omit = Omit()
173
+
174
+
175
@runtime_checkable
class ModelBuilderProtocol(Protocol):
    """Structural type for models that construct themselves from a raw response + data."""

    @classmethod
    def build(
        cls: type[_T],
        *,
        response: Response,
        data: object,
    ) -> _T: ...
184
+
185
+
186
# Header values may be `Omit` to strip a default header from the request.
Headers = Mapping[str, Union[str, Omit]]


class HeadersLikeProtocol(Protocol):
    """Anything with a `.get(key)` returning an optional string (e.g. `httpx.Headers`)."""

    def get(self, __key: str) -> str | None: ...


HeadersLike = Union[Headers, HeadersLikeProtocol]
194
+
195
# All shapes a response may be deserialized into; used to type `cast_to=` parameters.
ResponseT = TypeVar(
    "ResponseT",
    bound=Union[
        object,
        str,
        None,
        "BaseModel",
        List[Any],
        Dict[str, Any],
        Response,
        ModelBuilderProtocol,
        "APIResponse[Any]",
        "AsyncAPIResponse[Any]",
    ],
)

StrBytesIntFloat = Union[str, bytes, int, float]

# Note: copied from Pydantic
# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79
IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]]

# Callable applied to a parsed response before it is returned to the caller.
PostParser = Callable[[Any], Any]
218
+
219
+
220
@runtime_checkable
class InheritsGeneric(Protocol):
    """Represents a type that has inherited from `Generic`

    The `__orig_bases__` property can be used to determine the resolved
    type variable for a given base class.
    """

    __orig_bases__: tuple[_GenericAlias]


class _GenericAlias(Protocol):
    """Structural stand-in for `typing`'s private generic-alias objects."""

    __origin__: type[object]
233
+
234
+
235
class HttpxSendArgs(TypedDict, total=False):
    """Keyword arguments forwarded to httpx when sending a request."""

    auth: httpx.Auth
    follow_redirects: bool


# Covariant type variable for read-only containers such as `SequenceNotStr`.
_T_co = TypeVar("_T_co", covariant=True)
241
+
242
+
243
if TYPE_CHECKING:
    # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
    # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
    class SequenceNotStr(Protocol[_T_co]):
        """A `Sequence`-shaped protocol that structurally excludes `str` arguments."""

        @overload
        def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
        @overload
        def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
        def __contains__(self, value: object, /) -> bool: ...
        def __len__(self) -> int: ...
        def __iter__(self) -> Iterator[_T_co]: ...
        def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
        def count(self, value: Any, /) -> int: ...
        def __reversed__(self) -> Iterator[_T_co]: ...
else:
    # just point this to a normal `Sequence` at runtime to avoid having to special case
    # deserializing our custom sequence type
    SequenceNotStr = Sequence
@@ -0,0 +1,64 @@
1
+ from ._sync import asyncify as asyncify
2
+ from ._proxy import LazyProxy as LazyProxy
3
+ from ._utils import (
4
+ flatten as flatten,
5
+ is_dict as is_dict,
6
+ is_list as is_list,
7
+ is_given as is_given,
8
+ is_tuple as is_tuple,
9
+ json_safe as json_safe,
10
+ lru_cache as lru_cache,
11
+ is_mapping as is_mapping,
12
+ is_tuple_t as is_tuple_t,
13
+ is_iterable as is_iterable,
14
+ is_sequence as is_sequence,
15
+ coerce_float as coerce_float,
16
+ is_mapping_t as is_mapping_t,
17
+ removeprefix as removeprefix,
18
+ removesuffix as removesuffix,
19
+ extract_files as extract_files,
20
+ is_sequence_t as is_sequence_t,
21
+ required_args as required_args,
22
+ coerce_boolean as coerce_boolean,
23
+ coerce_integer as coerce_integer,
24
+ file_from_path as file_from_path,
25
+ strip_not_given as strip_not_given,
26
+ deepcopy_minimal as deepcopy_minimal,
27
+ get_async_library as get_async_library,
28
+ maybe_coerce_float as maybe_coerce_float,
29
+ get_required_header as get_required_header,
30
+ maybe_coerce_boolean as maybe_coerce_boolean,
31
+ maybe_coerce_integer as maybe_coerce_integer,
32
+ )
33
+ from ._compat import (
34
+ get_args as get_args,
35
+ is_union as is_union,
36
+ get_origin as get_origin,
37
+ is_typeddict as is_typeddict,
38
+ is_literal_type as is_literal_type,
39
+ )
40
+ from ._typing import (
41
+ is_list_type as is_list_type,
42
+ is_union_type as is_union_type,
43
+ extract_type_arg as extract_type_arg,
44
+ is_iterable_type as is_iterable_type,
45
+ is_required_type as is_required_type,
46
+ is_sequence_type as is_sequence_type,
47
+ is_annotated_type as is_annotated_type,
48
+ is_type_alias_type as is_type_alias_type,
49
+ strip_annotated_type as strip_annotated_type,
50
+ extract_type_var_from_base as extract_type_var_from_base,
51
+ )
52
+ from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator
53
+ from ._transform import (
54
+ PropertyInfo as PropertyInfo,
55
+ transform as transform,
56
+ async_transform as async_transform,
57
+ maybe_transform as maybe_transform,
58
+ async_maybe_transform as async_maybe_transform,
59
+ )
60
+ from ._reflection import (
61
+ function_has_argument as function_has_argument,
62
+ assert_signatures_in_sync as assert_signatures_in_sync,
63
+ )
64
+ from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
uapi/_utils/_compat.py ADDED
@@ -0,0 +1,45 @@
1
+ from __future__ import annotations
2
+
3
+ import sys
4
+ import typing_extensions
5
+ from typing import Any, Type, Union, Literal, Optional
6
+ from datetime import date, datetime
7
+ from typing_extensions import get_args as _get_args, get_origin as _get_origin
8
+
9
+ from .._types import StrBytesIntFloat
10
+ from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
11
+
12
+ _LITERAL_TYPES = {Literal, typing_extensions.Literal}
13
+
14
+
15
+ def get_args(tp: type[Any]) -> tuple[Any, ...]:
16
+ return _get_args(tp)
17
+
18
+
19
+ def get_origin(tp: type[Any]) -> type[Any] | None:
20
+ return _get_origin(tp)
21
+
22
+
23
+ def is_union(tp: Optional[Type[Any]]) -> bool:
24
+ if sys.version_info < (3, 10):
25
+ return tp is Union # type: ignore[comparison-overlap]
26
+ else:
27
+ import types
28
+
29
+ return tp is Union or tp is types.UnionType
30
+
31
+
32
+ def is_typeddict(tp: Type[Any]) -> bool:
33
+ return typing_extensions.is_typeddict(tp)
34
+
35
+
36
+ def is_literal_type(tp: Type[Any]) -> bool:
37
+ return get_origin(tp) in _LITERAL_TYPES
38
+
39
+
40
+ def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
41
+ return _parse_date(value)
42
+
43
+
44
+ def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
45
+ return _parse_datetime(value)