perplexityai 0.12.0__py3-none-any.whl → 0.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- perplexity/_client.py +4 -0
- perplexity/_streaming.py +40 -2
- perplexity/_version.py +1 -1
- perplexity/resources/chat/completions.py +472 -11
- perplexity/types/__init__.py +1 -0
- perplexity/types/async_/chat/completion_create_response.py +4 -28
- perplexity/types/async_/chat/completion_get_response.py +4 -28
- perplexity/types/chat/__init__.py +0 -1
- perplexity/types/chat/completion_create_params.py +15 -4
- perplexity/types/{chat/completion_create_response.py → stream_chunk.py} +6 -6
- {perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/METADATA +53 -27
- {perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/RECORD +14 -14
- {perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/WHEEL +0 -0
- {perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/licenses/LICENSE +0 -0
perplexity/_client.py
CHANGED
@@ -105,6 +105,8 @@ class Perplexity(SyncAPIClient):
             _strict_response_validation=_strict_response_validation,
         )

+        self._default_stream_cls = Stream
+
         self.chat = chat.ChatResource(self)
         self.async_ = async_.AsyncResource(self)
         self.search = search.SearchResource(self)
@@ -277,6 +279,8 @@ class AsyncPerplexity(AsyncAPIClient):
             _strict_response_validation=_strict_response_validation,
         )

+        self._default_stream_cls = AsyncStream
+
         self.chat = chat.AsyncChatResource(self)
         self.async_ = async_.AsyncAsyncResource(self)
         self.search = search.AsyncSearchResource(self)
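The two added lines give each client a default stream wrapper that request helpers can fall back to. A minimal sketch of that pattern only, not the SDK's internal base-client plumbing (the `Client`, `Stream`, and `post_streaming` names here are illustrative):

```python
from typing import Generic, Iterator, TypeVar

T = TypeVar("T")


class Stream(Generic[T]):
    def __init__(self, items: Iterator[T]) -> None:
        self._items = items

    def __iter__(self) -> Iterator[T]:
        return self._items


class Client:
    def __init__(self) -> None:
        # Request machinery falls back to this wrapper when a call asks for
        # streaming but does not name a stream class explicitly.
        self._default_stream_cls = Stream

    def post_streaming(self, items):
        return self._default_stream_cls(iter(items))


client = Client()
print(list(client.post_streaming([1, 2, 3])))  # [1, 2, 3]
```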
perplexity/_streaming.py
CHANGED
@@ -55,7 +55,26 @@ class Stream(Generic[_T]):
         iterator = self._iter_events()

         for sse in iterator:
-
+            if sse.data.startswith("[DONE]"):
+                break
+
+            if sse.event == "error":
+                body = sse.data
+
+                try:
+                    body = sse.json()
+                    err_msg = f"{body}"
+                except Exception:
+                    err_msg = sse.data or f"Error code: {response.status_code}"
+
+                raise self._client._make_status_error(
+                    err_msg,
+                    body=body,
+                    response=self.response,
+                )
+
+            if sse.event is None:
+                yield process_data(data=sse.json(), cast_to=cast_to, response=response)

         # Ensure the entire stream is consumed
         for _sse in iterator:
@@ -119,7 +138,26 @@ class AsyncStream(Generic[_T]):
         iterator = self._iter_events()

         async for sse in iterator:
-
+            if sse.data.startswith("[DONE]"):
+                break
+
+            if sse.event == "error":
+                body = sse.data
+
+                try:
+                    body = sse.json()
+                    err_msg = f"{body}"
+                except Exception:
+                    err_msg = sse.data or f"Error code: {response.status_code}"
+
+                raise self._client._make_status_error(
+                    err_msg,
+                    body=body,
+                    response=self.response,
+                )
+
+            if sse.event is None:
+                yield process_data(data=sse.json(), cast_to=cast_to, response=response)

         # Ensure the entire stream is consumed
         async for _sse in iterator:
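The new loop body changes how raw SSE events are interpreted: a `[DONE]` sentinel ends iteration, an `event: error` frame raises, and untyped data frames are decoded as JSON chunks. A minimal sketch of those rules, assuming a simplified `SSEvent` stand-in for the SDK's internal event type (the real code raises the client's typed status errors via `_make_status_error`, not `RuntimeError`):

```python
import json
from dataclasses import dataclass
from typing import Any, Iterable, Iterator, Optional


@dataclass
class SSEvent:
    # Simplified stand-in for the SDK's internal server-sent-event object.
    data: str
    event: Optional[str] = None


def iter_chunks(events: Iterable[SSEvent]) -> Iterator[Any]:
    for sse in events:
        if sse.data.startswith("[DONE]"):  # sentinel chunk ends the stream
            break
        if sse.event == "error":  # mid-stream error events now raise
            raise RuntimeError(sse.data or "stream error")
        if sse.event is None:  # plain data events are decoded as JSON chunks
            yield json.loads(sse.data)


# Example: two data chunks followed by the [DONE] sentinel.
events = [SSEvent('{"id": "c1"}'), SSEvent('{"id": "c2"}'), SSEvent("[DONE]")]
print([chunk["id"] for chunk in iter_chunks(events)])  # ['c1', 'c2']
```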
perplexity/_version.py
CHANGED

perplexity/resources/chat/completions.py
CHANGED
@@ -3,12 +3,12 @@
 from __future__ import annotations

 from typing import Dict, Union, Iterable, Optional
-from typing_extensions import Literal
+from typing_extensions import Literal, overload

 import httpx

 from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
-from ..._utils import maybe_transform, async_maybe_transform
+from ..._utils import required_args, maybe_transform, async_maybe_transform
 from ..._compat import cached_property
 from ..._resource import SyncAPIResource, AsyncAPIResource
 from ..._response import (
@@ -17,9 +17,10 @@ from ..._response import (
     async_to_raw_response_wrapper,
     async_to_streamed_response_wrapper,
 )
+from ..._streaming import Stream, AsyncStream
 from ...types.chat import completion_create_params
 from ..._base_client import make_request_options
-from ...types.
+from ...types.stream_chunk import StreamChunk
 from ...types.shared_params.chat_message_input import ChatMessageInput

 __all__ = ["CompletionsResource", "AsyncCompletionsResource"]
@@ -45,6 +46,7 @@ class CompletionsResource(SyncAPIResource):
         """
         return CompletionsResourceWithStreamingResponse(self)

+    @overload
     def create(
         self,
         *,
@@ -92,7 +94,7 @@ class CompletionsResource(SyncAPIResource):
         search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
         search_tenant: Optional[str] | Omit = omit,
         stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
-        stream: Optional[
+        stream: Optional[Literal[False]] | Omit = omit,
         temperature: Optional[float] | Omit = omit,
         tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
         tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
@@ -108,7 +110,7 @@ class CompletionsResource(SyncAPIResource):
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = not_given,
-    ) ->
+    ) -> StreamChunk:
         """
         FastAPI wrapper around chat completions

@@ -121,6 +123,231 @@ class CompletionsResource(SyncAPIResource):

           timeout: Override the client-level default timeout for this request, in seconds
         """
+        ...
+
+    @overload
+    def create(
+        self,
+        *,
+        messages: Iterable[ChatMessageInput],
+        model: str,
+        stream: Literal[True],
+        _debug_pro_search: bool | Omit = omit,
+        _inputs: Optional[Iterable[int]] | Omit = omit,
+        _is_browser_agent: Optional[bool] | Omit = omit,
+        _prompt_token_length: Optional[int] | Omit = omit,
+        best_of: Optional[int] | Omit = omit,
+        country: Optional[str] | Omit = omit,
+        cum_logprobs: Optional[bool] | Omit = omit,
+        debug_params: Optional[completion_create_params.DebugParams] | Omit = omit,
+        disable_search: Optional[bool] | Omit = omit,
+        diverse_first_token: Optional[bool] | Omit = omit,
+        enable_search_classifier: Optional[bool] | Omit = omit,
+        file_workspace_id: Optional[str] | Omit = omit,
+        frequency_penalty: Optional[float] | Omit = omit,
+        has_image_url: bool | Omit = omit,
+        image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        last_updated_after_filter: Optional[str] | Omit = omit,
+        last_updated_before_filter: Optional[str] | Omit = omit,
+        latitude: Optional[float] | Omit = omit,
+        logprobs: Optional[bool] | Omit = omit,
+        longitude: Optional[float] | Omit = omit,
+        max_tokens: Optional[int] | Omit = omit,
+        n: Optional[int] | Omit = omit,
+        num_images: int | Omit = omit,
+        num_search_results: int | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
+        presence_penalty: Optional[float] | Omit = omit,
+        ranking_model: Optional[str] | Omit = omit,
+        reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
+        response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
+        response_metadata: Optional[Dict[str, object]] | Omit = omit,
+        return_images: Optional[bool] | Omit = omit,
+        return_related_questions: Optional[bool] | Omit = omit,
+        safe_search: Optional[bool] | Omit = omit,
+        search_after_date_filter: Optional[str] | Omit = omit,
+        search_before_date_filter: Optional[str] | Omit = omit,
+        search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
+        search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
+        search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
+        search_tenant: Optional[str] | Omit = omit,
+        stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
+        temperature: Optional[float] | Omit = omit,
+        tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
+        tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
+        top_k: Optional[int] | Omit = omit,
+        top_logprobs: Optional[int] | Omit = omit,
+        top_p: Optional[float] | Omit = omit,
+        updated_after_timestamp: Optional[int] | Omit = omit,
+        updated_before_timestamp: Optional[int] | Omit = omit,
+        web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> Stream[StreamChunk]:
+        """
+        FastAPI wrapper around chat completions
+
+        Args:
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        ...
+
+    @overload
+    def create(
+        self,
+        *,
+        messages: Iterable[ChatMessageInput],
+        model: str,
+        stream: bool,
+        _debug_pro_search: bool | Omit = omit,
+        _inputs: Optional[Iterable[int]] | Omit = omit,
+        _is_browser_agent: Optional[bool] | Omit = omit,
+        _prompt_token_length: Optional[int] | Omit = omit,
+        best_of: Optional[int] | Omit = omit,
+        country: Optional[str] | Omit = omit,
+        cum_logprobs: Optional[bool] | Omit = omit,
+        debug_params: Optional[completion_create_params.DebugParams] | Omit = omit,
+        disable_search: Optional[bool] | Omit = omit,
+        diverse_first_token: Optional[bool] | Omit = omit,
+        enable_search_classifier: Optional[bool] | Omit = omit,
+        file_workspace_id: Optional[str] | Omit = omit,
+        frequency_penalty: Optional[float] | Omit = omit,
+        has_image_url: bool | Omit = omit,
+        image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        last_updated_after_filter: Optional[str] | Omit = omit,
+        last_updated_before_filter: Optional[str] | Omit = omit,
+        latitude: Optional[float] | Omit = omit,
+        logprobs: Optional[bool] | Omit = omit,
+        longitude: Optional[float] | Omit = omit,
+        max_tokens: Optional[int] | Omit = omit,
+        n: Optional[int] | Omit = omit,
+        num_images: int | Omit = omit,
+        num_search_results: int | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
+        presence_penalty: Optional[float] | Omit = omit,
+        ranking_model: Optional[str] | Omit = omit,
+        reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
+        response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
+        response_metadata: Optional[Dict[str, object]] | Omit = omit,
+        return_images: Optional[bool] | Omit = omit,
+        return_related_questions: Optional[bool] | Omit = omit,
+        safe_search: Optional[bool] | Omit = omit,
+        search_after_date_filter: Optional[str] | Omit = omit,
+        search_before_date_filter: Optional[str] | Omit = omit,
+        search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
+        search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
+        search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
+        search_tenant: Optional[str] | Omit = omit,
+        stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
+        temperature: Optional[float] | Omit = omit,
+        tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
+        tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
+        top_k: Optional[int] | Omit = omit,
+        top_logprobs: Optional[int] | Omit = omit,
+        top_p: Optional[float] | Omit = omit,
+        updated_after_timestamp: Optional[int] | Omit = omit,
+        updated_before_timestamp: Optional[int] | Omit = omit,
+        web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> StreamChunk | Stream[StreamChunk]:
+        """
+        FastAPI wrapper around chat completions
+
+        Args:
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        ...
+
+    @required_args(["messages", "model"], ["messages", "model", "stream"])
+    def create(
+        self,
+        *,
+        messages: Iterable[ChatMessageInput],
+        model: str,
+        _debug_pro_search: bool | Omit = omit,
+        _inputs: Optional[Iterable[int]] | Omit = omit,
+        _is_browser_agent: Optional[bool] | Omit = omit,
+        _prompt_token_length: Optional[int] | Omit = omit,
+        best_of: Optional[int] | Omit = omit,
+        country: Optional[str] | Omit = omit,
+        cum_logprobs: Optional[bool] | Omit = omit,
+        debug_params: Optional[completion_create_params.DebugParams] | Omit = omit,
+        disable_search: Optional[bool] | Omit = omit,
+        diverse_first_token: Optional[bool] | Omit = omit,
+        enable_search_classifier: Optional[bool] | Omit = omit,
+        file_workspace_id: Optional[str] | Omit = omit,
+        frequency_penalty: Optional[float] | Omit = omit,
+        has_image_url: bool | Omit = omit,
+        image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        last_updated_after_filter: Optional[str] | Omit = omit,
+        last_updated_before_filter: Optional[str] | Omit = omit,
+        latitude: Optional[float] | Omit = omit,
+        logprobs: Optional[bool] | Omit = omit,
+        longitude: Optional[float] | Omit = omit,
+        max_tokens: Optional[int] | Omit = omit,
+        n: Optional[int] | Omit = omit,
+        num_images: int | Omit = omit,
+        num_search_results: int | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
+        presence_penalty: Optional[float] | Omit = omit,
+        ranking_model: Optional[str] | Omit = omit,
+        reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
+        response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
+        response_metadata: Optional[Dict[str, object]] | Omit = omit,
+        return_images: Optional[bool] | Omit = omit,
+        return_related_questions: Optional[bool] | Omit = omit,
+        safe_search: Optional[bool] | Omit = omit,
+        search_after_date_filter: Optional[str] | Omit = omit,
+        search_before_date_filter: Optional[str] | Omit = omit,
+        search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
+        search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
+        search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
+        search_tenant: Optional[str] | Omit = omit,
+        stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
+        stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
+        temperature: Optional[float] | Omit = omit,
+        tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
+        tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
+        top_k: Optional[int] | Omit = omit,
+        top_logprobs: Optional[int] | Omit = omit,
+        top_p: Optional[float] | Omit = omit,
+        updated_after_timestamp: Optional[int] | Omit = omit,
+        updated_before_timestamp: Optional[int] | Omit = omit,
+        web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> StreamChunk | Stream[StreamChunk]:
         return self._post(
             "/chat/completions",
             body=maybe_transform(
@@ -180,12 +407,16 @@ class CompletionsResource(SyncAPIResource):
                     "updated_before_timestamp": updated_before_timestamp,
                     "web_search_options": web_search_options,
                 },
-                completion_create_params.
+                completion_create_params.CompletionCreateParamsStreaming
+                if stream
+                else completion_create_params.CompletionCreateParamsNonStreaming,
             ),
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
-            cast_to=
+            cast_to=StreamChunk,
+            stream=stream or False,
+            stream_cls=Stream[StreamChunk],
         )


@@ -209,6 +440,7 @@ class AsyncCompletionsResource(AsyncAPIResource):
         """
         return AsyncCompletionsResourceWithStreamingResponse(self)

+    @overload
     async def create(
         self,
         *,
@@ -256,7 +488,7 @@ class AsyncCompletionsResource(AsyncAPIResource):
         search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
         search_tenant: Optional[str] | Omit = omit,
         stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
-        stream: Optional[
+        stream: Optional[Literal[False]] | Omit = omit,
         temperature: Optional[float] | Omit = omit,
         tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
         tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
@@ -272,7 +504,7 @@ class AsyncCompletionsResource(AsyncAPIResource):
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = not_given,
-    ) ->
+    ) -> StreamChunk:
         """
         FastAPI wrapper around chat completions

@@ -285,6 +517,231 @@ class AsyncCompletionsResource(AsyncAPIResource):

           timeout: Override the client-level default timeout for this request, in seconds
         """
+        ...
+
+    @overload
+    async def create(
+        self,
+        *,
+        messages: Iterable[ChatMessageInput],
+        model: str,
+        stream: Literal[True],
+        _debug_pro_search: bool | Omit = omit,
+        _inputs: Optional[Iterable[int]] | Omit = omit,
+        _is_browser_agent: Optional[bool] | Omit = omit,
+        _prompt_token_length: Optional[int] | Omit = omit,
+        best_of: Optional[int] | Omit = omit,
+        country: Optional[str] | Omit = omit,
+        cum_logprobs: Optional[bool] | Omit = omit,
+        debug_params: Optional[completion_create_params.DebugParams] | Omit = omit,
+        disable_search: Optional[bool] | Omit = omit,
+        diverse_first_token: Optional[bool] | Omit = omit,
+        enable_search_classifier: Optional[bool] | Omit = omit,
+        file_workspace_id: Optional[str] | Omit = omit,
+        frequency_penalty: Optional[float] | Omit = omit,
+        has_image_url: bool | Omit = omit,
+        image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        last_updated_after_filter: Optional[str] | Omit = omit,
+        last_updated_before_filter: Optional[str] | Omit = omit,
+        latitude: Optional[float] | Omit = omit,
+        logprobs: Optional[bool] | Omit = omit,
+        longitude: Optional[float] | Omit = omit,
+        max_tokens: Optional[int] | Omit = omit,
+        n: Optional[int] | Omit = omit,
+        num_images: int | Omit = omit,
+        num_search_results: int | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
+        presence_penalty: Optional[float] | Omit = omit,
+        ranking_model: Optional[str] | Omit = omit,
+        reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
+        response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
+        response_metadata: Optional[Dict[str, object]] | Omit = omit,
+        return_images: Optional[bool] | Omit = omit,
+        return_related_questions: Optional[bool] | Omit = omit,
+        safe_search: Optional[bool] | Omit = omit,
+        search_after_date_filter: Optional[str] | Omit = omit,
+        search_before_date_filter: Optional[str] | Omit = omit,
+        search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
+        search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
+        search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
+        search_tenant: Optional[str] | Omit = omit,
+        stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
+        temperature: Optional[float] | Omit = omit,
+        tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
+        tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
+        top_k: Optional[int] | Omit = omit,
+        top_logprobs: Optional[int] | Omit = omit,
+        top_p: Optional[float] | Omit = omit,
+        updated_after_timestamp: Optional[int] | Omit = omit,
+        updated_before_timestamp: Optional[int] | Omit = omit,
+        web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> AsyncStream[StreamChunk]:
+        """
+        FastAPI wrapper around chat completions
+
+        Args:
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        ...
+
+    @overload
+    async def create(
+        self,
+        *,
+        messages: Iterable[ChatMessageInput],
+        model: str,
+        stream: bool,
+        _debug_pro_search: bool | Omit = omit,
+        _inputs: Optional[Iterable[int]] | Omit = omit,
+        _is_browser_agent: Optional[bool] | Omit = omit,
+        _prompt_token_length: Optional[int] | Omit = omit,
+        best_of: Optional[int] | Omit = omit,
+        country: Optional[str] | Omit = omit,
+        cum_logprobs: Optional[bool] | Omit = omit,
+        debug_params: Optional[completion_create_params.DebugParams] | Omit = omit,
+        disable_search: Optional[bool] | Omit = omit,
+        diverse_first_token: Optional[bool] | Omit = omit,
+        enable_search_classifier: Optional[bool] | Omit = omit,
+        file_workspace_id: Optional[str] | Omit = omit,
+        frequency_penalty: Optional[float] | Omit = omit,
+        has_image_url: bool | Omit = omit,
+        image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        last_updated_after_filter: Optional[str] | Omit = omit,
+        last_updated_before_filter: Optional[str] | Omit = omit,
+        latitude: Optional[float] | Omit = omit,
+        logprobs: Optional[bool] | Omit = omit,
+        longitude: Optional[float] | Omit = omit,
+        max_tokens: Optional[int] | Omit = omit,
+        n: Optional[int] | Omit = omit,
+        num_images: int | Omit = omit,
+        num_search_results: int | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
+        presence_penalty: Optional[float] | Omit = omit,
+        ranking_model: Optional[str] | Omit = omit,
+        reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
+        response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
+        response_metadata: Optional[Dict[str, object]] | Omit = omit,
+        return_images: Optional[bool] | Omit = omit,
+        return_related_questions: Optional[bool] | Omit = omit,
+        safe_search: Optional[bool] | Omit = omit,
+        search_after_date_filter: Optional[str] | Omit = omit,
+        search_before_date_filter: Optional[str] | Omit = omit,
+        search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
+        search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
+        search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
+        search_tenant: Optional[str] | Omit = omit,
+        stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
+        temperature: Optional[float] | Omit = omit,
+        tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
+        tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
+        top_k: Optional[int] | Omit = omit,
+        top_logprobs: Optional[int] | Omit = omit,
+        top_p: Optional[float] | Omit = omit,
+        updated_after_timestamp: Optional[int] | Omit = omit,
+        updated_before_timestamp: Optional[int] | Omit = omit,
+        web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> StreamChunk | AsyncStream[StreamChunk]:
+        """
+        FastAPI wrapper around chat completions
+
+        Args:
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        ...
+
+    @required_args(["messages", "model"], ["messages", "model", "stream"])
+    async def create(
+        self,
+        *,
+        messages: Iterable[ChatMessageInput],
+        model: str,
+        _debug_pro_search: bool | Omit = omit,
+        _inputs: Optional[Iterable[int]] | Omit = omit,
+        _is_browser_agent: Optional[bool] | Omit = omit,
+        _prompt_token_length: Optional[int] | Omit = omit,
+        best_of: Optional[int] | Omit = omit,
+        country: Optional[str] | Omit = omit,
+        cum_logprobs: Optional[bool] | Omit = omit,
+        debug_params: Optional[completion_create_params.DebugParams] | Omit = omit,
+        disable_search: Optional[bool] | Omit = omit,
+        diverse_first_token: Optional[bool] | Omit = omit,
+        enable_search_classifier: Optional[bool] | Omit = omit,
+        file_workspace_id: Optional[str] | Omit = omit,
+        frequency_penalty: Optional[float] | Omit = omit,
+        has_image_url: bool | Omit = omit,
+        image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        last_updated_after_filter: Optional[str] | Omit = omit,
+        last_updated_before_filter: Optional[str] | Omit = omit,
+        latitude: Optional[float] | Omit = omit,
+        logprobs: Optional[bool] | Omit = omit,
+        longitude: Optional[float] | Omit = omit,
+        max_tokens: Optional[int] | Omit = omit,
+        n: Optional[int] | Omit = omit,
+        num_images: int | Omit = omit,
+        num_search_results: int | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
+        presence_penalty: Optional[float] | Omit = omit,
+        ranking_model: Optional[str] | Omit = omit,
+        reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
+        response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
+        response_metadata: Optional[Dict[str, object]] | Omit = omit,
+        return_images: Optional[bool] | Omit = omit,
+        return_related_questions: Optional[bool] | Omit = omit,
+        safe_search: Optional[bool] | Omit = omit,
+        search_after_date_filter: Optional[str] | Omit = omit,
+        search_before_date_filter: Optional[str] | Omit = omit,
+        search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
+        search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
+        search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
+        search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
+        search_tenant: Optional[str] | Omit = omit,
+        stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
+        stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
+        temperature: Optional[float] | Omit = omit,
+        tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
+        tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
+        top_k: Optional[int] | Omit = omit,
+        top_logprobs: Optional[int] | Omit = omit,
+        top_p: Optional[float] | Omit = omit,
+        updated_after_timestamp: Optional[int] | Omit = omit,
+        updated_before_timestamp: Optional[int] | Omit = omit,
+        web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = not_given,
+    ) -> StreamChunk | AsyncStream[StreamChunk]:
         return await self._post(
             "/chat/completions",
             body=await async_maybe_transform(
@@ -344,12 +801,16 @@ class AsyncCompletionsResource(AsyncAPIResource):
                     "updated_before_timestamp": updated_before_timestamp,
                     "web_search_options": web_search_options,
                 },
-                completion_create_params.
+                completion_create_params.CompletionCreateParamsStreaming
+                if stream
+                else completion_create_params.CompletionCreateParamsNonStreaming,
             ),
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
-            cast_to=
+            cast_to=StreamChunk,
+            stream=stream or False,
+            stream_cls=AsyncStream[StreamChunk],
         )


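The point of the three new `@overload` signatures is that the static return type of `create` now follows the `stream` argument: omitted or `False` yields a parsed `StreamChunk`, a literal `True` yields a `Stream[StreamChunk]`, and a plain `bool` yields the union. A minimal sketch of the same typing pattern outside the SDK (the `Chunk` and `create` names here are illustrative, not SDK symbols):

```python
from typing import Iterator, Literal, Union, overload


class Chunk:
    ...


@overload
def create(*, stream: Literal[False] = False) -> Chunk: ...
@overload
def create(*, stream: Literal[True]) -> Iterator[Chunk]: ...
@overload
def create(*, stream: bool) -> Union[Chunk, Iterator[Chunk]]: ...


def create(*, stream: bool = False) -> Union[Chunk, Iterator[Chunk]]:
    # Non-streaming: return a single parsed response; streaming: an iterator of chunks.
    if stream:
        return iter([Chunk()])
    return Chunk()


single = create()             # type checkers see Chunk
chunks = create(stream=True)  # type checkers see Iterator[Chunk]
print(type(single).__name__, type(chunks).__name__)
```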
perplexity/types/__init__.py
CHANGED
@@ -9,5 +9,6 @@ from .shared import (
     ChatMessageOutput as ChatMessageOutput,
     APIPublicSearchResult as APIPublicSearchResult,
 )
+from .stream_chunk import StreamChunk as StreamChunk
 from .search_create_params import SearchCreateParams as SearchCreateParams
 from .search_create_response import SearchCreateResponse as SearchCreateResponse
perplexity/types/async_/chat/completion_create_response.py
CHANGED
@@ -1,36 +1,12 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

-from typing import
+from typing import Optional
 from typing_extensions import Literal

 from ...._models import BaseModel
-from ...
-from ...shared.usage_info import UsageInfo
-from ...shared.api_public_search_result import APIPublicSearchResult
+from ...stream_chunk import StreamChunk

-__all__ = ["CompletionCreateResponse"
-
-
-class Response(BaseModel):
-    id: str
-
-    choices: List[Choice]
-
-    created: int
-
-    model: str
-
-    usage: UsageInfo
-
-    citations: Optional[List[str]] = None
-
-    object: Optional[str] = None
-
-    search_results: Optional[List[APIPublicSearchResult]] = None
-
-    status: Optional[Literal["PENDING", "COMPLETED"]] = None
-
-    type: Optional[Literal["message", "info", "end_of_stream"]] = None
+__all__ = ["CompletionCreateResponse"]


 class CompletionCreateResponse(BaseModel):
@@ -49,6 +25,6 @@ class CompletionCreateResponse(BaseModel):

     failed_at: Optional[int] = None

-    response: Optional[
+    response: Optional[StreamChunk] = None

     started_at: Optional[int] = None
perplexity/types/async_/chat/completion_get_response.py
CHANGED
@@ -1,36 +1,12 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

-from typing import
+from typing import Optional
 from typing_extensions import Literal

 from ...._models import BaseModel
-from ...
-from ...shared.usage_info import UsageInfo
-from ...shared.api_public_search_result import APIPublicSearchResult
+from ...stream_chunk import StreamChunk

-__all__ = ["CompletionGetResponse"
-
-
-class Response(BaseModel):
-    id: str
-
-    choices: List[Choice]
-
-    created: int
-
-    model: str
-
-    usage: UsageInfo
-
-    citations: Optional[List[str]] = None
-
-    object: Optional[str] = None
-
-    search_results: Optional[List[APIPublicSearchResult]] = None
-
-    status: Optional[Literal["PENDING", "COMPLETED"]] = None
-
-    type: Optional[Literal["message", "info", "end_of_stream"]] = None
+__all__ = ["CompletionGetResponse"]


 class CompletionGetResponse(BaseModel):
@@ -49,6 +25,6 @@ class CompletionGetResponse(BaseModel):

     failed_at: Optional[int] = None

-    response: Optional[
+    response: Optional[StreamChunk] = None

     started_at: Optional[int] = None
perplexity/types/chat/completion_create_params.py
CHANGED
@@ -9,7 +9,7 @@ from ..._types import SequenceNotStr
 from ..shared_params.chat_message_input import ChatMessageInput

 __all__ = [
-    "
+    "CompletionCreateParamsBase",
     "DebugParams",
     "ResponseFormat",
     "ResponseFormatResponseFormatText",
@@ -22,10 +22,12 @@ __all__ = [
     "ToolFunctionParameters",
     "WebSearchOptions",
     "WebSearchOptionsUserLocation",
+    "CompletionCreateParamsNonStreaming",
+    "CompletionCreateParamsStreaming",
 ]


-class
+class CompletionCreateParamsBase(TypedDict, total=False):
     messages: Required[Iterable[ChatMessageInput]]

     model: Required[str]
@@ -114,8 +116,6 @@ class CompletionCreateParams(TypedDict, total=False):

     stop: Union[str, SequenceNotStr[str], None]

-    stream: Optional[bool]
-
     temperature: Optional[float]

     tool_choice: Optional[Literal["none", "auto", "required"]]
@@ -228,3 +228,14 @@ class WebSearchOptions(TypedDict, total=False):
     search_type: Literal["fast", "pro", "auto"]

     user_location: Optional[WebSearchOptionsUserLocation]
+
+
+class CompletionCreateParamsNonStreaming(CompletionCreateParamsBase, total=False):
+    stream: Optional[Literal[False]]
+
+
+class CompletionCreateParamsStreaming(CompletionCreateParamsBase):
+    stream: Required[Literal[True]]
+
+
+CompletionCreateParams = Union[CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming]
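The params module now mirrors the overloads: a shared base TypedDict plus streaming and non-streaming variants that differ only in how `stream` is typed, unioned back into `CompletionCreateParams`. A condensed sketch of that structure (the `messages` value type is simplified to plain dicts here; the SDK uses `ChatMessageInput`):

```python
from typing import Dict, Iterable, Literal, Optional, Union
from typing_extensions import Required, TypedDict


class CompletionCreateParamsBase(TypedDict, total=False):
    messages: Required[Iterable[Dict[str, str]]]  # simplified message shape
    model: Required[str]


class CompletionCreateParamsNonStreaming(CompletionCreateParamsBase, total=False):
    stream: Optional[Literal[False]]


class CompletionCreateParamsStreaming(CompletionCreateParamsBase):
    stream: Required[Literal[True]]


CompletionCreateParams = Union[CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming]


# The resource method picks the variant matching the request it is building,
# just as the diff's "...Streaming if stream else ...NonStreaming" expression does.
def params_type(stream: bool) -> type:
    return CompletionCreateParamsStreaming if stream else CompletionCreateParamsNonStreaming


print(params_type(True).__name__)   # CompletionCreateParamsStreaming
print(params_type(False).__name__)  # CompletionCreateParamsNonStreaming
```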
perplexity/types/{chat/completion_create_response.py → stream_chunk.py}
CHANGED
@@ -3,15 +3,15 @@
 from typing import List, Optional
 from typing_extensions import Literal

-from
-from
-from
-from
+from .._models import BaseModel
+from .shared.choice import Choice
+from .shared.usage_info import UsageInfo
+from .shared.api_public_search_result import APIPublicSearchResult

-__all__ = ["
+__all__ = ["StreamChunk"]


-class
+class StreamChunk(BaseModel):
     id: str

     choices: List[Choice]
{perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: perplexityai
-Version: 0.
+Version: 0.13.0
 Summary: The official Python library for the perplexity API
 Project-URL: Homepage, https://github.com/perplexityai/perplexity-py
 Project-URL: Repository, https://github.com/perplexityai/perplexity-py
@@ -76,24 +76,6 @@ for result in search.results:
     print(f"{result.title}: {result.url}")
 ```

-## Content API
-
-Extract and process content from URLs:
-
-```python
-from perplexity import Perplexity
-
-client = Perplexity()
-
-content = client.content.create(
-    urls=["https://en.wikipedia.org/wiki/Perplexity_AI"]
-)
-
-for result in content.results:
-    print(f"Title: {result.title}")
-    print(f"Content: {result.content[:200]}...")
-```
-
 ## Chat Completions

 The full API of this library can be found in [api.md](https://github.com/perplexityai/perplexity-py/tree/main/api.md).
@@ -106,7 +88,7 @@ client = Perplexity(
     api_key=os.environ.get("PERPLEXITY_API_KEY"), # This is the default and can be omitted
 )

-
+stream_chunk = client.chat.completions.create(
     messages=[
         {
             "role": "user",
@@ -115,7 +97,7 @@ completion = client.chat.completions.create(
     ],
     model="sonar",
 )
-print(
+print(stream_chunk.id)
 ```

 While you can provide an `api_key` keyword argument,
@@ -138,7 +120,7 @@ client = AsyncPerplexity(


 async def main() -> None:
-
+    stream_chunk = await client.chat.completions.create(
         messages=[
             {
                 "role": "user",
@@ -147,7 +129,7 @@ async def main() -> None:
         ],
         model="sonar",
     )
-    print(
+    print(stream_chunk.id)


 asyncio.run(main())
@@ -179,7 +161,7 @@ async def main() -> None:
         api_key="My API Key",
         http_client=DefaultAioHttpClient(),
     ) as client:
-
+        stream_chunk = await client.chat.completions.create(
             messages=[
                 {
                     "role": "user",
@@ -188,12 +170,56 @@ async def main() -> None:
             ],
             model="sonar",
         )
-        print(
+        print(stream_chunk.id)


 asyncio.run(main())
 ```

+## Streaming responses
+
+We provide support for streaming responses using Server Side Events (SSE).
+
+```python
+from perplexity import Perplexity
+
+client = Perplexity()
+
+stream = client.chat.completions.create(
+    messages=[
+        {
+            "role": "user",
+            "content": "What is the capital of France?",
+        }
+    ],
+    model="sonar",
+    stream=True,
+)
+for stream_chunk in stream:
+    print(stream_chunk.id)
+```
+
+The async client uses the exact same interface.
+
+```python
+from perplexity import AsyncPerplexity
+
+client = AsyncPerplexity()
+
+stream = await client.chat.completions.create(
+    messages=[
+        {
+            "role": "user",
+            "content": "What is the capital of France?",
+        }
+    ],
+    model="sonar",
+    stream=True,
+)
+async for stream_chunk in stream:
+    print(stream_chunk.id)
+```
+
 ## Using types

 Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like:
@@ -212,7 +238,7 @@ from perplexity import Perplexity

 client = Perplexity()

-
+stream_chunk = client.chat.completions.create(
     messages=[
         {
             "content": "string",
@@ -222,7 +248,7 @@ completion = client.chat.completions.create(
     model="model",
     debug_params={},
 )
-print(
+print(stream_chunk.debug_params)
 ```

 ## Handling errors
{perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
 perplexity/__init__.py,sha256=tK4KC0qHjpk-ym2IM9lG56HeBXAHz36gMzb0rmWEa7Y,2701
 perplexity/_base_client.py,sha256=aOipaCx-xU9FMpoqKMCzs6LHxmeGlLUlGiihUUNLGUc,67051
-perplexity/_client.py,sha256=
+perplexity/_client.py,sha256=Zpp0EdtkpVduOGvdXcMINzb102FZ4cDi2bQ7i2XULEA,16154
 perplexity/_compat.py,sha256=DQBVORjFb33zch24jzkhM14msvnzY7mmSmgDLaVFUM8,6562
 perplexity/_constants.py,sha256=S14PFzyN9-I31wiV7SmIlL5Ga0MLHxdvegInGdXH7tM,462
 perplexity/_exceptions.py,sha256=v-hOXWSDTEtXcn_By7pPml3HjEmG5HXpbE-RK_A6_0Q,3228
@@ -9,9 +9,9 @@ perplexity/_models.py,sha256=lKnskYPONAWDvWo8tmbbVk7HmG7UOsI0Nve0vSMmkRc,30452
 perplexity/_qs.py,sha256=craIKyvPktJ94cvf9zn8j8ekG9dWJzhWv0ob34lIOv4,4828
 perplexity/_resource.py,sha256=Pgc8KNBsIc1ltJn94uhDcDl0-3n5RLbe3iC2AiiNRnE,1124
 perplexity/_response.py,sha256=bpqzmVGq6jnivoMkUgt3OI0Rh6xHd6BMcp5PHgSFPb0,28842
-perplexity/_streaming.py,sha256=
+perplexity/_streaming.py,sha256=0IcQb2A1XAarVUZD3Wgll7S8D4ps1qVXSu5-HEI7-vw,11254
 perplexity/_types.py,sha256=BPuUCddonHjZ2AeEModCeb-zAYCRRT5XtxneIQhJxAk,7240
-perplexity/_version.py,sha256=
+perplexity/_version.py,sha256=nIzXJlfQywEDeOjx2PoSEuRVs7EtSZe66R6BQmVWKjs,163
 perplexity/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 perplexity/_utils/__init__.py,sha256=7fch0GT9zpNnErbciSpUNa-SjTxxjY6kxHxKMOM4AGs,2305
 perplexity/_utils/_compat.py,sha256=D8gtAvjJQrDWt9upS0XaG9Rr5l1QhiAx_I_1utT_tt0,1195
@@ -35,20 +35,20 @@ perplexity/resources/async_/chat/chat.py,sha256=M3hN3sfZKAt1C8FhOaWtxFPJAmPyE-GX
 perplexity/resources/async_/chat/completions.py,sha256=xAhxfRXduWZy3JdCSLX7eTStd_rG55qsGqmsDqFI3gM,14007
 perplexity/resources/chat/__init__.py,sha256=BVAfz9TM3DT5W9f_mt0P9YRxL_MsUxKCWAH6u1iogmA,1041
 perplexity/resources/chat/chat.py,sha256=yvQmxxpr7k9-A8UwUukL5QIXHahfsdFjHQqRFWWbkzE,3680
-perplexity/resources/chat/completions.py,sha256=
-perplexity/types/__init__.py,sha256=
+perplexity/resources/chat/completions.py,sha256=cAcNfJSM6xDR6sKBHC930QlZPXkvj6IpLfhxldfdxTo,43442
+perplexity/types/__init__.py,sha256=LpZgQmlRWsFC0_xIzJhxSd2VY-vxDdAplR0Oqmtnu4M,544
 perplexity/types/search_create_params.py,sha256=5cc01O0upHO7-dZrIHEi3KKhFYlCANUlhRSty4jQpHc,556
 perplexity/types/search_create_response.py,sha256=lOteaJs4qpULkx5GLtEs6HhetqIBhM0I1AC1moWTeI8,426
+perplexity/types/stream_chunk.py,sha256=TL8M2JVcxxPYiHcAuKqToUgK88yFpWgTvADR-GoBXUI,765
 perplexity/types/async_/__init__.py,sha256=OKfJYcKb4NObdiRObqJV_dOyDQ8feXekDUge2o_4pXQ,122
 perplexity/types/async_/chat/__init__.py,sha256=xo2Cya_CfjEBRos2yvW_Wrq39PZiXFBT7ukZZBNvIUM,552
 perplexity/types/async_/chat/completion_create_params.py,sha256=qV8XTu_H9NDJmNEL5uiHxTgAaJD5_B6Pwyk1qe5QPss,5541
-perplexity/types/async_/chat/completion_create_response.py,sha256=
+perplexity/types/async_/chat/completion_create_response.py,sha256=XXYz7p2TVGn5iu5TklCm7nNpMjufuUO3YHNGPTpAmWU,676
 perplexity/types/async_/chat/completion_get_params.py,sha256=3nh10bMw1nYn3oriD5CIBPyL7hN25Xz4vbVfxEf33Zw,670
-perplexity/types/async_/chat/completion_get_response.py,sha256=
+perplexity/types/async_/chat/completion_get_response.py,sha256=YTbspM1VcWRpUa5Wxmg6P7oA3S8yLLcY2mPuBXZqECI,670
 perplexity/types/async_/chat/completion_list_response.py,sha256=63QSRV-2YA6gMZhyrmiZuzxasjjwT-kM3MyFuadTnZs,658
-perplexity/types/chat/__init__.py,sha256=
-perplexity/types/chat/completion_create_params.py,sha256=
-perplexity/types/chat/completion_create_response.py,sha256=u4zFkOce7ER_H0j76a67B6RZlErUUI3xBed81iYklF0,795
+perplexity/types/chat/__init__.py,sha256=9VtUhUp2vFpm5fgy2tMc-XF9NfkBXdFZvSzLmNprkqk,210
+perplexity/types/chat/completion_create_params.py,sha256=8Rl9eTRQ_eozRFrcQpcpkA3Fdh9Uh9R_YX38P5cL2dk,5545
 perplexity/types/shared/__init__.py,sha256=-RlflcttJZ_q_lP6YD0mVWbKhU33j91tUVuMO5OqMAM,397
 perplexity/types/shared/api_public_search_result.py,sha256=0Wu4nwHvyQqV6jLvCVdEMWndCZ9DG7lgL3Y174rISX0,364
 perplexity/types/shared/chat_message_input.py,sha256=wgqL1qb6hHU-sZgybgjL3VdAJFHdY0HjjNecey6CHLU,6118
@@ -58,7 +58,7 @@ perplexity/types/shared/usage_info.py,sha256=_jE7Nal9cMxtEpJjT4t2SAs6z3MufrjwPug
 perplexity/types/shared_params/__init__.py,sha256=v5gr6-wq7IWgrQ8un401oApylzh3KnsIF_ilz-roX0s,241
 perplexity/types/shared_params/api_public_search_result.py,sha256=n4VUQnGOFGGWUdwYd8P5o-vEqZKhRuI5R0dBs_ZsHtE,418
 perplexity/types/shared_params/chat_message_input.py,sha256=BsNwhjwOFydvUo2OfrF9AHx--a1uPidSxdDyBGrK-sc,6690
-perplexityai-0.
-perplexityai-0.
-perplexityai-0.
-perplexityai-0.
+perplexityai-0.13.0.dist-info/METADATA,sha256=VFMGNqBhWuLA0axrwdV9oGmrnz2lFN-XNWpuDeAhKcI,16552
+perplexityai-0.13.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+perplexityai-0.13.0.dist-info/licenses/LICENSE,sha256=hkCriG3MT4vBhhc0roAOsrCE7IEDr1ywVEMonVHGmAQ,11340
+perplexityai-0.13.0.dist-info/RECORD,,
{perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/WHEEL
File without changes

{perplexityai-0.12.0.dist-info → perplexityai-0.13.0.dist-info}/licenses/LICENSE
File without changes