perplexityai-0.22.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. perplexity/__init__.py +102 -0
  2. perplexity/_base_client.py +2001 -0
  3. perplexity/_client.py +529 -0
  4. perplexity/_compat.py +219 -0
  5. perplexity/_constants.py +14 -0
  6. perplexity/_exceptions.py +108 -0
  7. perplexity/_files.py +123 -0
  8. perplexity/_models.py +857 -0
  9. perplexity/_qs.py +150 -0
  10. perplexity/_resource.py +43 -0
  11. perplexity/_response.py +832 -0
  12. perplexity/_streaming.py +371 -0
  13. perplexity/_types.py +261 -0
  14. perplexity/_utils/__init__.py +64 -0
  15. perplexity/_utils/_compat.py +45 -0
  16. perplexity/_utils/_datetime_parse.py +136 -0
  17. perplexity/_utils/_logs.py +25 -0
  18. perplexity/_utils/_proxy.py +65 -0
  19. perplexity/_utils/_reflection.py +42 -0
  20. perplexity/_utils/_resources_proxy.py +24 -0
  21. perplexity/_utils/_streams.py +12 -0
  22. perplexity/_utils/_sync.py +58 -0
  23. perplexity/_utils/_transform.py +457 -0
  24. perplexity/_utils/_typing.py +156 -0
  25. perplexity/_utils/_utils.py +421 -0
  26. perplexity/_version.py +4 -0
  27. perplexity/lib/.keep +4 -0
  28. perplexity/py.typed +0 -0
  29. perplexity/resources/__init__.py +47 -0
  30. perplexity/resources/async_/__init__.py +33 -0
  31. perplexity/resources/async_/async_.py +102 -0
  32. perplexity/resources/async_/chat/__init__.py +33 -0
  33. perplexity/resources/async_/chat/chat.py +102 -0
  34. perplexity/resources/async_/chat/completions.py +359 -0
  35. perplexity/resources/chat/__init__.py +33 -0
  36. perplexity/resources/chat/chat.py +102 -0
  37. perplexity/resources/chat/completions.py +900 -0
  38. perplexity/resources/search.py +228 -0
  39. perplexity/types/__init__.py +14 -0
  40. perplexity/types/async_/__init__.py +3 -0
  41. perplexity/types/async_/chat/__init__.py +9 -0
  42. perplexity/types/async_/chat/completion_create_params.py +242 -0
  43. perplexity/types/async_/chat/completion_create_response.py +30 -0
  44. perplexity/types/async_/chat/completion_get_params.py +25 -0
  45. perplexity/types/async_/chat/completion_get_response.py +30 -0
  46. perplexity/types/async_/chat/completion_list_response.py +31 -0
  47. perplexity/types/chat/__init__.py +5 -0
  48. perplexity/types/chat/completion_create_params.py +244 -0
  49. perplexity/types/search_create_params.py +40 -0
  50. perplexity/types/search_create_response.py +27 -0
  51. perplexity/types/shared/__init__.py +7 -0
  52. perplexity/types/shared/api_public_search_result.py +22 -0
  53. perplexity/types/shared/chat_message_input.py +176 -0
  54. perplexity/types/shared/chat_message_output.py +176 -0
  55. perplexity/types/shared/choice.py +19 -0
  56. perplexity/types/shared/usage_info.py +41 -0
  57. perplexity/types/shared_params/__init__.py +4 -0
  58. perplexity/types/shared_params/api_public_search_result.py +22 -0
  59. perplexity/types/shared_params/chat_message_input.py +178 -0
  60. perplexity/types/stream_chunk.py +33 -0
  61. perplexityai-0.22.3.dist-info/METADATA +548 -0
  62. perplexityai-0.22.3.dist-info/RECORD +64 -0
  63. perplexityai-0.22.3.dist-info/WHEEL +4 -0
  64. perplexityai-0.22.3.dist-info/licenses/LICENSE +201 -0
perplexity/_streaming.py ADDED
@@ -0,0 +1,371 @@
+ # Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py
+ from __future__ import annotations
+
+ import json
+ import inspect
+ from types import TracebackType
+ from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast
+ from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable
+
+ import httpx
+
+ from ._utils import extract_type_var_from_base
+
+ if TYPE_CHECKING:
+     from ._client import Perplexity, AsyncPerplexity
+
+
+ _T = TypeVar("_T")
+
+
+ class Stream(Generic[_T]):
+     """Provides the core interface to iterate over a synchronous stream response."""
+
+     response: httpx.Response
+
+     _decoder: SSEBytesDecoder
+
+     def __init__(
+         self,
+         *,
+         cast_to: type[_T],
+         response: httpx.Response,
+         client: Perplexity,
+     ) -> None:
+         self.response = response
+         self._cast_to = cast_to
+         self._client = client
+         self._decoder = client._make_sse_decoder()
+         self._iterator = self.__stream__()
+
+     def __next__(self) -> _T:
+         return self._iterator.__next__()
+
+     def __iter__(self) -> Iterator[_T]:
+         for item in self._iterator:
+             yield item
+
+     def _iter_events(self) -> Iterator[ServerSentEvent]:
+         yield from self._decoder.iter_bytes(self.response.iter_bytes())
+
+     def __stream__(self) -> Iterator[_T]:
+         cast_to = cast(Any, self._cast_to)
+         response = self.response
+         process_data = self._client._process_response_data
+         iterator = self._iter_events()
+
+         try:
+             for sse in iterator:
+                 if sse.data.startswith("[DONE]"):
+                     break
+
+                 if sse.event == "error":
+                     body = sse.data
+
+                     try:
+                         body = sse.json()
+                         err_msg = f"{body}"
+                     except Exception:
+                         err_msg = sse.data or f"Error code: {response.status_code}"
+
+                     raise self._client._make_status_error(
+                         err_msg,
+                         body=body,
+                         response=self.response,
+                     )
+
+                 if sse.event is None:
+                     yield process_data(data=sse.json(), cast_to=cast_to, response=response)
+         finally:
+             # Ensure the response is closed even if the consumer doesn't read all data
+             response.close()
+
+     def __enter__(self) -> Self:
+         return self
+
+     def __exit__(
+         self,
+         exc_type: type[BaseException] | None,
+         exc: BaseException | None,
+         exc_tb: TracebackType | None,
+     ) -> None:
+         self.close()
+
+     def close(self) -> None:
+         """
+         Close the response and release the connection.
+
+         Automatically called if the response body is read to completion.
+         """
+         self.response.close()
+
+
+ class AsyncStream(Generic[_T]):
+     """Provides the core interface to iterate over an asynchronous stream response."""
+
+     response: httpx.Response
+
+     _decoder: SSEDecoder | SSEBytesDecoder
+
+     def __init__(
+         self,
+         *,
+         cast_to: type[_T],
+         response: httpx.Response,
+         client: AsyncPerplexity,
+     ) -> None:
+         self.response = response
+         self._cast_to = cast_to
+         self._client = client
+         self._decoder = client._make_sse_decoder()
+         self._iterator = self.__stream__()
+
+     async def __anext__(self) -> _T:
+         return await self._iterator.__anext__()
+
+     async def __aiter__(self) -> AsyncIterator[_T]:
+         async for item in self._iterator:
+             yield item
+
+     async def _iter_events(self) -> AsyncIterator[ServerSentEvent]:
+         async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()):
+             yield sse
+
+     async def __stream__(self) -> AsyncIterator[_T]:
+         cast_to = cast(Any, self._cast_to)
+         response = self.response
+         process_data = self._client._process_response_data
+         iterator = self._iter_events()
+
+         try:
+             async for sse in iterator:
+                 if sse.data.startswith("[DONE]"):
+                     break
+
+                 if sse.event == "error":
+                     body = sse.data
+
+                     try:
+                         body = sse.json()
+                         err_msg = f"{body}"
+                     except Exception:
+                         err_msg = sse.data or f"Error code: {response.status_code}"
+
+                     raise self._client._make_status_error(
+                         err_msg,
+                         body=body,
+                         response=self.response,
+                     )
+
+                 if sse.event is None:
+                     yield process_data(data=sse.json(), cast_to=cast_to, response=response)
+         finally:
+             # Ensure the response is closed even if the consumer doesn't read all data
+             await response.aclose()
+
+     async def __aenter__(self) -> Self:
+         return self
+
+     async def __aexit__(
+         self,
+         exc_type: type[BaseException] | None,
+         exc: BaseException | None,
+         exc_tb: TracebackType | None,
+     ) -> None:
+         await self.close()
+
+     async def close(self) -> None:
+         """
+         Close the response and release the connection.
+
+         Automatically called if the response body is read to completion.
+         """
+         await self.response.aclose()
+
+
+ class ServerSentEvent:
+     def __init__(
+         self,
+         *,
+         event: str | None = None,
+         data: str | None = None,
+         id: str | None = None,
+         retry: int | None = None,
+     ) -> None:
+         if data is None:
+             data = ""
+
+         self._id = id
+         self._data = data
+         self._event = event or None
+         self._retry = retry
+
+     @property
+     def event(self) -> str | None:
+         return self._event
+
+     @property
+     def id(self) -> str | None:
+         return self._id
+
+     @property
+     def retry(self) -> int | None:
+         return self._retry
+
+     @property
+     def data(self) -> str:
+         return self._data
+
+     def json(self) -> Any:
+         return json.loads(self.data)
+
+     @override
+     def __repr__(self) -> str:
+         return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})"
+
+
+ class SSEDecoder:
+     _data: list[str]
+     _event: str | None
+     _retry: int | None
+     _last_event_id: str | None
+
+     def __init__(self) -> None:
+         self._event = None
+         self._data = []
+         self._last_event_id = None
+         self._retry = None
+
+     def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
+         """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
+         for chunk in self._iter_chunks(iterator):
+             # Split before decoding so splitlines() only uses \r and \n
+             for raw_line in chunk.splitlines():
+                 line = raw_line.decode("utf-8")
+                 sse = self.decode(line)
+                 if sse:
+                     yield sse
+
+     def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]:
+         """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
+         data = b""
+         for chunk in iterator:
+             for line in chunk.splitlines(keepends=True):
+                 data += line
+                 if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
+                     yield data
+                     data = b""
+         if data:
+             yield data
+
+     async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
+         """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
+         async for chunk in self._aiter_chunks(iterator):
+             # Split before decoding so splitlines() only uses \r and \n
+             for raw_line in chunk.splitlines():
+                 line = raw_line.decode("utf-8")
+                 sse = self.decode(line)
+                 if sse:
+                     yield sse
+
+     async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]:
+         """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks"""
+         data = b""
+         async for chunk in iterator:
+             for line in chunk.splitlines(keepends=True):
+                 data += line
+                 if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")):
+                     yield data
+                     data = b""
+         if data:
+             yield data
+
+     def decode(self, line: str) -> ServerSentEvent | None:
+         # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation  # noqa: E501
+
+         if not line:
+             if not self._event and not self._data and not self._last_event_id and self._retry is None:
+                 return None
+
+             sse = ServerSentEvent(
+                 event=self._event,
+                 data="\n".join(self._data),
+                 id=self._last_event_id,
+                 retry=self._retry,
+             )
+
+             # NOTE: as per the SSE spec, do not reset last_event_id.
+             self._event = None
+             self._data = []
+             self._retry = None
+
+             return sse
+
+         if line.startswith(":"):
+             return None
+
+         fieldname, _, value = line.partition(":")
+
+         if value.startswith(" "):
+             value = value[1:]
+
+         if fieldname == "event":
+             self._event = value
+         elif fieldname == "data":
+             self._data.append(value)
+         elif fieldname == "id":
+             if "\0" in value:
+                 pass
+             else:
+                 self._last_event_id = value
+         elif fieldname == "retry":
+             try:
+                 self._retry = int(value)
+             except (TypeError, ValueError):
+                 pass
+         else:
+             pass  # Field is ignored.
+
+         return None
+
+
+ @runtime_checkable
+ class SSEBytesDecoder(Protocol):
+     def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]:
+         """Given an iterator that yields raw binary data, iterate over it & yield every event encountered"""
+         ...
+
+     def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]:
+         """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered"""
+         ...
+
+
+ def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]:
+     """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`"""
+     origin = get_origin(typ) or typ
+     return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream))
+
+
+ def extract_stream_chunk_type(
+     stream_cls: type,
+     *,
+     failure_message: str | None = None,
+ ) -> type:
+     """Given a type like `Stream[T]`, returns the generic type variable `T`.
+
+     This also handles the case where a concrete subclass is given, e.g.
+     ```py
+     class MyStream(Stream[bytes]):
+         ...
+
+     extract_stream_chunk_type(MyStream) -> bytes
+     ```
+     """
+     from ._base_client import Stream, AsyncStream
+
+     return extract_type_var_from_base(
+         stream_cls,
+         index=0,
+         generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)),
+         failure_message=failure_message,
+     )
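
The `SSEDecoder` above buffers raw bytes into blank-line-delimited SSE chunks and then parses each `field: value` line into a `ServerSentEvent`. The following is a minimal sketch (illustration only, not code shipped in the package; it imports from the private `perplexity._streaming` module) of feeding the decoder a couple of hand-written byte chunks:

```py
from perplexity._streaming import SSEDecoder

# Two complete SSE events, each terminated by a blank line.
raw_chunks = [
    b'event: message\ndata: {"answer": "hi"}\n\n',
    b"data: [DONE]\n\n",
]

decoder = SSEDecoder()
for sse in decoder.iter_bytes(iter(raw_chunks)):
    print(sse.event, sse.data)
# -> message {"answer": "hi"}
# -> None [DONE]
```

Note that the decoder itself yields the `[DONE]` sentinel as an ordinary event; it is `Stream.__stream__` / `AsyncStream.__stream__` above that stop iterating when they see it.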
perplexity/_types.py ADDED
@@ -0,0 +1,261 @@
+ from __future__ import annotations
+
+ from os import PathLike
+ from typing import (
+     IO,
+     TYPE_CHECKING,
+     Any,
+     Dict,
+     List,
+     Type,
+     Tuple,
+     Union,
+     Mapping,
+     TypeVar,
+     Callable,
+     Iterator,
+     Optional,
+     Sequence,
+ )
+ from typing_extensions import (
+     Set,
+     Literal,
+     Protocol,
+     TypeAlias,
+     TypedDict,
+     SupportsIndex,
+     overload,
+     override,
+     runtime_checkable,
+ )
+
+ import httpx
+ import pydantic
+ from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport
+
+ if TYPE_CHECKING:
+     from ._models import BaseModel
+     from ._response import APIResponse, AsyncAPIResponse
+
+ Transport = BaseTransport
+ AsyncTransport = AsyncBaseTransport
+ Query = Mapping[str, object]
+ Body = object
+ AnyMapping = Mapping[str, object]
+ ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
+ _T = TypeVar("_T")
+
+
+ # Approximates httpx internal ProxiesTypes and RequestFiles types
+ # while adding support for `PathLike` instances
+ ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]]
+ ProxiesTypes = Union[str, Proxy, ProxiesDict]
+ if TYPE_CHECKING:
+     Base64FileInput = Union[IO[bytes], PathLike[str]]
+     FileContent = Union[IO[bytes], bytes, PathLike[str]]
+ else:
+     Base64FileInput = Union[IO[bytes], PathLike]
+     FileContent = Union[IO[bytes], bytes, PathLike]  # PathLike is not subscriptable in Python 3.8.
+ FileTypes = Union[
+     # file (or bytes)
+     FileContent,
+     # (filename, file (or bytes))
+     Tuple[Optional[str], FileContent],
+     # (filename, file (or bytes), content_type)
+     Tuple[Optional[str], FileContent, Optional[str]],
+     # (filename, file (or bytes), content_type, headers)
+     Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
+ ]
+ RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]
+
+ # duplicate of the above but without our custom file support
+ HttpxFileContent = Union[IO[bytes], bytes]
+ HttpxFileTypes = Union[
+     # file (or bytes)
+     HttpxFileContent,
+     # (filename, file (or bytes))
+     Tuple[Optional[str], HttpxFileContent],
+     # (filename, file (or bytes), content_type)
+     Tuple[Optional[str], HttpxFileContent, Optional[str]],
+     # (filename, file (or bytes), content_type, headers)
+     Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]],
+ ]
+ HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]]
+
+ # Workaround to support (cast_to: Type[ResponseT]) -> ResponseT
+ # where ResponseT includes `None`. In order to support directly
+ # passing `None`, overloads would have to be defined for every
+ # method that uses `ResponseT` which would lead to an unacceptable
+ # amount of code duplication and make it unreadable. See _base_client.py
+ # for example usage.
+ #
+ # This unfortunately means that you will either have
+ # to import this type and pass it explicitly:
+ #
+ #   from perplexity import NoneType
+ #   client.get('/foo', cast_to=NoneType)
+ #
+ # or build it yourself:
+ #
+ #   client.get('/foo', cast_to=type(None))
+ if TYPE_CHECKING:
+     NoneType: Type[None]
+ else:
+     NoneType = type(None)
+
+
+ class RequestOptions(TypedDict, total=False):
+     headers: Headers
+     max_retries: int
+     timeout: float | Timeout | None
+     params: Query
+     extra_json: AnyMapping
+     idempotency_key: str
+     follow_redirects: bool
+
+
+ # Sentinel class used until PEP 0661 is accepted
+ class NotGiven:
+     """
+     For parameters with a meaningful None value, we need to distinguish between
+     the user explicitly passing None, and the user not passing the parameter at
+     all.
+
+     User code shouldn't need to use not_given directly.
+
+     For example:
+
+     ```py
+     def create(timeout: Timeout | None | NotGiven = not_given): ...
+
+
+     create(timeout=1)  # 1s timeout
+     create(timeout=None)  # No timeout
+     create()  # Default timeout behavior
+     ```
+     """
+
+     def __bool__(self) -> Literal[False]:
+         return False
+
+     @override
+     def __repr__(self) -> str:
+         return "NOT_GIVEN"
+
+
+ not_given = NotGiven()
+ # for backwards compatibility:
+ NOT_GIVEN = NotGiven()
+
+
+ class Omit:
+     """
+     To explicitly omit something from being sent in a request, use `omit`.
+
+     ```py
+     # as the default `Content-Type` header is `application/json` that will be sent
+     client.post("/upload/files", files={"file": b"my raw file content"})
+
+     # you can't explicitly override the header as it has to be dynamically generated
+     # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
+     client.post(..., headers={"Content-Type": "multipart/form-data"})
+
+     # instead you can remove the default `application/json` header by passing omit
+     client.post(..., headers={"Content-Type": omit})
+     ```
+     """
+
+     def __bool__(self) -> Literal[False]:
+         return False
+
+
+ omit = Omit()
+
+
+ @runtime_checkable
+ class ModelBuilderProtocol(Protocol):
+     @classmethod
+     def build(
+         cls: type[_T],
+         *,
+         response: Response,
+         data: object,
+     ) -> _T: ...
+
+
+ Headers = Mapping[str, Union[str, Omit]]
+
+
+ class HeadersLikeProtocol(Protocol):
+     def get(self, __key: str) -> str | None: ...
+
+
+ HeadersLike = Union[Headers, HeadersLikeProtocol]
+
+ ResponseT = TypeVar(
+     "ResponseT",
+     bound=Union[
+         object,
+         str,
+         None,
+         "BaseModel",
+         List[Any],
+         Dict[str, Any],
+         Response,
+         ModelBuilderProtocol,
+         "APIResponse[Any]",
+         "AsyncAPIResponse[Any]",
+     ],
+ )
+
+ StrBytesIntFloat = Union[str, bytes, int, float]
+
+ # Note: copied from Pydantic
+ # https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79
+ IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]]
+
+ PostParser = Callable[[Any], Any]
+
+
+ @runtime_checkable
+ class InheritsGeneric(Protocol):
+     """Represents a type that has inherited from `Generic`
+
+     The `__orig_bases__` property can be used to determine the resolved
+     type variable for a given base class.
+     """
+
+     __orig_bases__: tuple[_GenericAlias]
+
+
+ class _GenericAlias(Protocol):
+     __origin__: type[object]
+
+
+ class HttpxSendArgs(TypedDict, total=False):
+     auth: httpx.Auth
+     follow_redirects: bool
+
+
+ _T_co = TypeVar("_T_co", covariant=True)
+
+
+ if TYPE_CHECKING:
+     # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
+     # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
+     #
+     # Note: index() and count() methods are intentionally omitted to allow pyright to properly
+     # infer TypedDict types when dict literals are used in lists assigned to SequenceNotStr.
+     class SequenceNotStr(Protocol[_T_co]):
+         @overload
+         def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
+         @overload
+         def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
+         def __contains__(self, value: object, /) -> bool: ...
+         def __len__(self) -> int: ...
+         def __iter__(self) -> Iterator[_T_co]: ...
+         def __reversed__(self) -> Iterator[_T_co]: ...
+ else:
+     # just point this to a normal `Sequence` at runtime to avoid having to special case
+     # deserializing our custom sequence type
+     SequenceNotStr = Sequence
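
The `NotGiven` / `Omit` sentinels above exist so the SDK can tell "argument not passed at all" apart from "argument explicitly set to `None`" (or "default header explicitly removed"). A minimal standalone sketch of that pattern, not part of the package (the `create` function here is hypothetical; the import targets the private `perplexity._types` module):

```py
from typing import Union

from perplexity._types import NotGiven, not_given


def create(timeout: Union[float, None, NotGiven] = not_given) -> str:
    # NotGiven means the caller omitted the argument entirely.
    if isinstance(timeout, NotGiven):
        return "use the client's default timeout"
    # An explicit None is a meaningful value: disable the timeout.
    if timeout is None:
        return "disable the timeout"
    return f"use a {timeout}s timeout"


print(create())              # use the client's default timeout
print(create(timeout=None))  # disable the timeout
print(create(timeout=2.5))   # use a 2.5s timeout
```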
perplexity/_utils/__init__.py ADDED
@@ -0,0 +1,64 @@
+ from ._sync import asyncify as asyncify
+ from ._proxy import LazyProxy as LazyProxy
+ from ._utils import (
+     flatten as flatten,
+     is_dict as is_dict,
+     is_list as is_list,
+     is_given as is_given,
+     is_tuple as is_tuple,
+     json_safe as json_safe,
+     lru_cache as lru_cache,
+     is_mapping as is_mapping,
+     is_tuple_t as is_tuple_t,
+     is_iterable as is_iterable,
+     is_sequence as is_sequence,
+     coerce_float as coerce_float,
+     is_mapping_t as is_mapping_t,
+     removeprefix as removeprefix,
+     removesuffix as removesuffix,
+     extract_files as extract_files,
+     is_sequence_t as is_sequence_t,
+     required_args as required_args,
+     coerce_boolean as coerce_boolean,
+     coerce_integer as coerce_integer,
+     file_from_path as file_from_path,
+     strip_not_given as strip_not_given,
+     deepcopy_minimal as deepcopy_minimal,
+     get_async_library as get_async_library,
+     maybe_coerce_float as maybe_coerce_float,
+     get_required_header as get_required_header,
+     maybe_coerce_boolean as maybe_coerce_boolean,
+     maybe_coerce_integer as maybe_coerce_integer,
+ )
+ from ._compat import (
+     get_args as get_args,
+     is_union as is_union,
+     get_origin as get_origin,
+     is_typeddict as is_typeddict,
+     is_literal_type as is_literal_type,
+ )
+ from ._typing import (
+     is_list_type as is_list_type,
+     is_union_type as is_union_type,
+     extract_type_arg as extract_type_arg,
+     is_iterable_type as is_iterable_type,
+     is_required_type as is_required_type,
+     is_sequence_type as is_sequence_type,
+     is_annotated_type as is_annotated_type,
+     is_type_alias_type as is_type_alias_type,
+     strip_annotated_type as strip_annotated_type,
+     extract_type_var_from_base as extract_type_var_from_base,
+ )
+ from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator
+ from ._transform import (
+     PropertyInfo as PropertyInfo,
+     transform as transform,
+     async_transform as async_transform,
+     maybe_transform as maybe_transform,
+     async_maybe_transform as async_maybe_transform,
+ )
+ from ._reflection import (
+     function_has_argument as function_has_argument,
+     assert_signatures_in_sync as assert_signatures_in_sync,
+ )
+ from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
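
One of the helpers re-exported above, `extract_type_var_from_base`, is what `extract_stream_chunk_type` in `perplexity/_streaming.py` uses to recover the chunk type from a `Stream` subclass. A small sketch of the same call shape (illustration only; `BytesStream` is a made-up subclass, and the expected output assumes the helper behaves the way its use and docstring in `_streaming.py` describe):

```py
from perplexity._streaming import Stream
from perplexity._utils import extract_type_var_from_base


class BytesStream(Stream[bytes]):  # hypothetical concrete subclass
    ...


# Find the type argument that BytesStream binds to Stream's _T type variable.
chunk_type = extract_type_var_from_base(
    BytesStream,
    index=0,
    generic_bases=(Stream,),
)
print(chunk_type)  # <class 'bytes'>
```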