perplexityai-0.22.3-py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. perplexity/__init__.py +102 -0
  2. perplexity/_base_client.py +2001 -0
  3. perplexity/_client.py +529 -0
  4. perplexity/_compat.py +219 -0
  5. perplexity/_constants.py +14 -0
  6. perplexity/_exceptions.py +108 -0
  7. perplexity/_files.py +123 -0
  8. perplexity/_models.py +857 -0
  9. perplexity/_qs.py +150 -0
  10. perplexity/_resource.py +43 -0
  11. perplexity/_response.py +832 -0
  12. perplexity/_streaming.py +371 -0
  13. perplexity/_types.py +261 -0
  14. perplexity/_utils/__init__.py +64 -0
  15. perplexity/_utils/_compat.py +45 -0
  16. perplexity/_utils/_datetime_parse.py +136 -0
  17. perplexity/_utils/_logs.py +25 -0
  18. perplexity/_utils/_proxy.py +65 -0
  19. perplexity/_utils/_reflection.py +42 -0
  20. perplexity/_utils/_resources_proxy.py +24 -0
  21. perplexity/_utils/_streams.py +12 -0
  22. perplexity/_utils/_sync.py +58 -0
  23. perplexity/_utils/_transform.py +457 -0
  24. perplexity/_utils/_typing.py +156 -0
  25. perplexity/_utils/_utils.py +421 -0
  26. perplexity/_version.py +4 -0
  27. perplexity/lib/.keep +4 -0
  28. perplexity/py.typed +0 -0
  29. perplexity/resources/__init__.py +47 -0
  30. perplexity/resources/async_/__init__.py +33 -0
  31. perplexity/resources/async_/async_.py +102 -0
  32. perplexity/resources/async_/chat/__init__.py +33 -0
  33. perplexity/resources/async_/chat/chat.py +102 -0
  34. perplexity/resources/async_/chat/completions.py +359 -0
  35. perplexity/resources/chat/__init__.py +33 -0
  36. perplexity/resources/chat/chat.py +102 -0
  37. perplexity/resources/chat/completions.py +900 -0
  38. perplexity/resources/search.py +228 -0
  39. perplexity/types/__init__.py +14 -0
  40. perplexity/types/async_/__init__.py +3 -0
  41. perplexity/types/async_/chat/__init__.py +9 -0
  42. perplexity/types/async_/chat/completion_create_params.py +242 -0
  43. perplexity/types/async_/chat/completion_create_response.py +30 -0
  44. perplexity/types/async_/chat/completion_get_params.py +25 -0
  45. perplexity/types/async_/chat/completion_get_response.py +30 -0
  46. perplexity/types/async_/chat/completion_list_response.py +31 -0
  47. perplexity/types/chat/__init__.py +5 -0
  48. perplexity/types/chat/completion_create_params.py +244 -0
  49. perplexity/types/search_create_params.py +40 -0
  50. perplexity/types/search_create_response.py +27 -0
  51. perplexity/types/shared/__init__.py +7 -0
  52. perplexity/types/shared/api_public_search_result.py +22 -0
  53. perplexity/types/shared/chat_message_input.py +176 -0
  54. perplexity/types/shared/chat_message_output.py +176 -0
  55. perplexity/types/shared/choice.py +19 -0
  56. perplexity/types/shared/usage_info.py +41 -0
  57. perplexity/types/shared_params/__init__.py +4 -0
  58. perplexity/types/shared_params/api_public_search_result.py +22 -0
  59. perplexity/types/shared_params/chat_message_input.py +178 -0
  60. perplexity/types/stream_chunk.py +33 -0
  61. perplexityai-0.22.3.dist-info/METADATA +548 -0
  62. perplexityai-0.22.3.dist-info/RECORD +64 -0
  63. perplexityai-0.22.3.dist-info/WHEEL +4 -0
  64. perplexityai-0.22.3.dist-info/licenses/LICENSE +201 -0
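The diff below adds `perplexity/resources/chat/completions.py`, whose `CompletionsResource.create` method requires `messages` and `model`, accepts the optional search and sampling parameters listed in its signature, and returns either a `StreamChunk` or a `Stream[StreamChunk]` depending on `stream`. A minimal usage sketch, assuming the package exports a `Perplexity` client from `perplexity/_client.py` (the usual Stainless convention), that the API key is read from a `PERPLEXITY_API_KEY` environment variable, and that `"sonar"` stands in for a real model name:

```python
# Illustrative sketch only; the client name, env-var default, model name, and the
# message dict shape are assumptions, not taken verbatim from this diff.
from perplexity import Perplexity

client = Perplexity()  # assumed to read PERPLEXITY_API_KEY from the environment

# Non-streaming: matches the overload that returns a single StreamChunk.
completion = client.chat.completions.create(
    model="sonar",  # placeholder model name
    messages=[{"role": "user", "content": "How many moons does Jupiter have?"}],
)
print(completion)

# Streaming: stream=True selects the overload that returns Stream[StreamChunk].
for chunk in client.chat.completions.create(
    model="sonar",
    messages=[{"role": "user", "content": "How many moons does Jupiter have?"}],
    stream=True,
):
    print(chunk)
```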
perplexity/resources/chat/completions.py
@@ -0,0 +1,900 @@
1
+ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import Dict, Union, Iterable, Optional
6
+ from typing_extensions import Literal, overload
7
+
8
+ import httpx
9
+
10
+ from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
11
+ from ..._utils import required_args, maybe_transform, async_maybe_transform
12
+ from ..._compat import cached_property
13
+ from ..._resource import SyncAPIResource, AsyncAPIResource
14
+ from ..._response import (
15
+ to_raw_response_wrapper,
16
+ to_streamed_response_wrapper,
17
+ async_to_raw_response_wrapper,
18
+ async_to_streamed_response_wrapper,
19
+ )
20
+ from ..._streaming import Stream, AsyncStream
21
+ from ...types.chat import completion_create_params
22
+ from ..._base_client import make_request_options
23
+ from ...types.stream_chunk import StreamChunk
24
+ from ...types.shared_params.chat_message_input import ChatMessageInput
25
+
26
+ __all__ = ["CompletionsResource", "AsyncCompletionsResource"]
27
+
28
+
29
+ class CompletionsResource(SyncAPIResource):
30
+ @cached_property
31
+ def with_raw_response(self) -> CompletionsResourceWithRawResponse:
32
+ """
33
+ This property can be used as a prefix for any HTTP method call to return
34
+ the raw response object instead of the parsed content.
35
+
36
+ For more information, see https://www.github.com/perplexityai/perplexity-py#accessing-raw-response-data-eg-headers
37
+ """
38
+ return CompletionsResourceWithRawResponse(self)
39
+
40
+ @cached_property
41
+ def with_streaming_response(self) -> CompletionsResourceWithStreamingResponse:
42
+ """
43
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
44
+
45
+ For more information, see https://www.github.com/perplexityai/perplexity-py#with_streaming_response
46
+ """
47
+ return CompletionsResourceWithStreamingResponse(self)
48
+
49
+ @overload
50
+ def create(
51
+ self,
52
+ *,
53
+ messages: Iterable[ChatMessageInput],
54
+ model: str,
55
+ _debug_pro_search: bool | Omit = omit,
56
+ _force_new_agent: Optional[bool] | Omit = omit,
57
+ _inputs: Optional[Iterable[int]] | Omit = omit,
58
+ _prompt_token_length: Optional[int] | Omit = omit,
59
+ best_of: Optional[int] | Omit = omit,
60
+ country: Optional[str] | Omit = omit,
61
+ cum_logprobs: Optional[bool] | Omit = omit,
62
+ disable_search: Optional[bool] | Omit = omit,
63
+ diverse_first_token: Optional[bool] | Omit = omit,
64
+ enable_search_classifier: Optional[bool] | Omit = omit,
65
+ file_workspace_id: Optional[str] | Omit = omit,
66
+ frequency_penalty: Optional[float] | Omit = omit,
67
+ has_image_url: bool | Omit = omit,
68
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
69
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
70
+ language_preference: Optional[str] | Omit = omit,
71
+ last_updated_after_filter: Optional[str] | Omit = omit,
72
+ last_updated_before_filter: Optional[str] | Omit = omit,
73
+ latitude: Optional[float] | Omit = omit,
74
+ logprobs: Optional[bool] | Omit = omit,
75
+ longitude: Optional[float] | Omit = omit,
76
+ max_tokens: Optional[int] | Omit = omit,
77
+ n: Optional[int] | Omit = omit,
78
+ num_images: int | Omit = omit,
79
+ num_search_results: int | Omit = omit,
80
+ parallel_tool_calls: Optional[bool] | Omit = omit,
81
+ presence_penalty: Optional[float] | Omit = omit,
82
+ ranking_model: Optional[str] | Omit = omit,
83
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
84
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
85
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
86
+ return_images: Optional[bool] | Omit = omit,
87
+ return_related_questions: Optional[bool] | Omit = omit,
88
+ safe_search: Optional[bool] | Omit = omit,
89
+ search_after_date_filter: Optional[str] | Omit = omit,
90
+ search_before_date_filter: Optional[str] | Omit = omit,
91
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
92
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
93
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
94
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
95
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
96
+ search_tenant: Optional[str] | Omit = omit,
97
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
98
+ stream: Optional[Literal[False]] | Omit = omit,
99
+ stream_mode: Literal["full", "concise"] | Omit = omit,
100
+ temperature: Optional[float] | Omit = omit,
101
+ thread_id: Optional[str] | Omit = omit,
102
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
103
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
104
+ top_k: Optional[int] | Omit = omit,
105
+ top_logprobs: Optional[int] | Omit = omit,
106
+ top_p: Optional[float] | Omit = omit,
107
+ updated_after_timestamp: Optional[int] | Omit = omit,
108
+ updated_before_timestamp: Optional[int] | Omit = omit,
109
+ use_threads: Optional[bool] | Omit = omit,
110
+ user_original_query: Optional[str] | Omit = omit,
111
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
112
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
113
+ # The extra values given here take precedence over values defined on the client or passed to this method.
114
+ extra_headers: Headers | None = None,
115
+ extra_query: Query | None = None,
116
+ extra_body: Body | None = None,
117
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
118
+ ) -> StreamChunk:
119
+ """
120
+ Generate a chat completion response for the given conversation.
121
+
122
+ Args:
123
+ extra_headers: Send extra headers
124
+
125
+ extra_query: Add additional query parameters to the request
126
+
127
+ extra_body: Add additional JSON properties to the request
128
+
129
+ timeout: Override the client-level default timeout for this request, in seconds
130
+ """
131
+ ...
132
+
133
+ @overload
134
+ def create(
135
+ self,
136
+ *,
137
+ messages: Iterable[ChatMessageInput],
138
+ model: str,
139
+ stream: Literal[True],
140
+ _debug_pro_search: bool | Omit = omit,
141
+ _force_new_agent: Optional[bool] | Omit = omit,
142
+ _inputs: Optional[Iterable[int]] | Omit = omit,
143
+ _prompt_token_length: Optional[int] | Omit = omit,
144
+ best_of: Optional[int] | Omit = omit,
145
+ country: Optional[str] | Omit = omit,
146
+ cum_logprobs: Optional[bool] | Omit = omit,
147
+ disable_search: Optional[bool] | Omit = omit,
148
+ diverse_first_token: Optional[bool] | Omit = omit,
149
+ enable_search_classifier: Optional[bool] | Omit = omit,
150
+ file_workspace_id: Optional[str] | Omit = omit,
151
+ frequency_penalty: Optional[float] | Omit = omit,
152
+ has_image_url: bool | Omit = omit,
153
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
154
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
155
+ language_preference: Optional[str] | Omit = omit,
156
+ last_updated_after_filter: Optional[str] | Omit = omit,
157
+ last_updated_before_filter: Optional[str] | Omit = omit,
158
+ latitude: Optional[float] | Omit = omit,
159
+ logprobs: Optional[bool] | Omit = omit,
160
+ longitude: Optional[float] | Omit = omit,
161
+ max_tokens: Optional[int] | Omit = omit,
162
+ n: Optional[int] | Omit = omit,
163
+ num_images: int | Omit = omit,
164
+ num_search_results: int | Omit = omit,
165
+ parallel_tool_calls: Optional[bool] | Omit = omit,
166
+ presence_penalty: Optional[float] | Omit = omit,
167
+ ranking_model: Optional[str] | Omit = omit,
168
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
169
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
170
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
171
+ return_images: Optional[bool] | Omit = omit,
172
+ return_related_questions: Optional[bool] | Omit = omit,
173
+ safe_search: Optional[bool] | Omit = omit,
174
+ search_after_date_filter: Optional[str] | Omit = omit,
175
+ search_before_date_filter: Optional[str] | Omit = omit,
176
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
177
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
178
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
179
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
180
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
181
+ search_tenant: Optional[str] | Omit = omit,
182
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
183
+ stream_mode: Literal["full", "concise"] | Omit = omit,
184
+ temperature: Optional[float] | Omit = omit,
185
+ thread_id: Optional[str] | Omit = omit,
186
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
187
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
188
+ top_k: Optional[int] | Omit = omit,
189
+ top_logprobs: Optional[int] | Omit = omit,
190
+ top_p: Optional[float] | Omit = omit,
191
+ updated_after_timestamp: Optional[int] | Omit = omit,
192
+ updated_before_timestamp: Optional[int] | Omit = omit,
193
+ use_threads: Optional[bool] | Omit = omit,
194
+ user_original_query: Optional[str] | Omit = omit,
195
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
196
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
197
+ # The extra values given here take precedence over values defined on the client or passed to this method.
198
+ extra_headers: Headers | None = None,
199
+ extra_query: Query | None = None,
200
+ extra_body: Body | None = None,
201
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
202
+ ) -> Stream[StreamChunk]:
203
+ """
204
+ Generate a chat completion response for the given conversation.
205
+
206
+ Args:
207
+ extra_headers: Send extra headers
208
+
209
+ extra_query: Add additional query parameters to the request
210
+
211
+ extra_body: Add additional JSON properties to the request
212
+
213
+ timeout: Override the client-level default timeout for this request, in seconds
214
+ """
215
+ ...
216
+
217
+ @overload
218
+ def create(
219
+ self,
220
+ *,
221
+ messages: Iterable[ChatMessageInput],
222
+ model: str,
223
+ stream: bool,
224
+ _debug_pro_search: bool | Omit = omit,
225
+ _force_new_agent: Optional[bool] | Omit = omit,
226
+ _inputs: Optional[Iterable[int]] | Omit = omit,
227
+ _prompt_token_length: Optional[int] | Omit = omit,
228
+ best_of: Optional[int] | Omit = omit,
229
+ country: Optional[str] | Omit = omit,
230
+ cum_logprobs: Optional[bool] | Omit = omit,
231
+ disable_search: Optional[bool] | Omit = omit,
232
+ diverse_first_token: Optional[bool] | Omit = omit,
233
+ enable_search_classifier: Optional[bool] | Omit = omit,
234
+ file_workspace_id: Optional[str] | Omit = omit,
235
+ frequency_penalty: Optional[float] | Omit = omit,
236
+ has_image_url: bool | Omit = omit,
237
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
238
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
239
+ language_preference: Optional[str] | Omit = omit,
240
+ last_updated_after_filter: Optional[str] | Omit = omit,
241
+ last_updated_before_filter: Optional[str] | Omit = omit,
242
+ latitude: Optional[float] | Omit = omit,
243
+ logprobs: Optional[bool] | Omit = omit,
244
+ longitude: Optional[float] | Omit = omit,
245
+ max_tokens: Optional[int] | Omit = omit,
246
+ n: Optional[int] | Omit = omit,
247
+ num_images: int | Omit = omit,
248
+ num_search_results: int | Omit = omit,
249
+ parallel_tool_calls: Optional[bool] | Omit = omit,
250
+ presence_penalty: Optional[float] | Omit = omit,
251
+ ranking_model: Optional[str] | Omit = omit,
252
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
253
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
254
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
255
+ return_images: Optional[bool] | Omit = omit,
256
+ return_related_questions: Optional[bool] | Omit = omit,
257
+ safe_search: Optional[bool] | Omit = omit,
258
+ search_after_date_filter: Optional[str] | Omit = omit,
259
+ search_before_date_filter: Optional[str] | Omit = omit,
260
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
261
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
262
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
263
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
264
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
265
+ search_tenant: Optional[str] | Omit = omit,
266
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
267
+ stream_mode: Literal["full", "concise"] | Omit = omit,
268
+ temperature: Optional[float] | Omit = omit,
269
+ thread_id: Optional[str] | Omit = omit,
270
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
271
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
272
+ top_k: Optional[int] | Omit = omit,
273
+ top_logprobs: Optional[int] | Omit = omit,
274
+ top_p: Optional[float] | Omit = omit,
275
+ updated_after_timestamp: Optional[int] | Omit = omit,
276
+ updated_before_timestamp: Optional[int] | Omit = omit,
277
+ use_threads: Optional[bool] | Omit = omit,
278
+ user_original_query: Optional[str] | Omit = omit,
279
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
280
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
281
+ # The extra values given here take precedence over values defined on the client or passed to this method.
282
+ extra_headers: Headers | None = None,
283
+ extra_query: Query | None = None,
284
+ extra_body: Body | None = None,
285
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
286
+ ) -> StreamChunk | Stream[StreamChunk]:
287
+ """
288
+ Generate a chat completion response for the given conversation.
289
+
290
+ Args:
291
+ extra_headers: Send extra headers
292
+
293
+ extra_query: Add additional query parameters to the request
294
+
295
+ extra_body: Add additional JSON properties to the request
296
+
297
+ timeout: Override the client-level default timeout for this request, in seconds
298
+ """
299
+ ...
300
+
301
+ @required_args(["messages", "model"], ["messages", "model", "stream"])
302
+ def create(
303
+ self,
304
+ *,
305
+ messages: Iterable[ChatMessageInput],
306
+ model: str,
307
+ _debug_pro_search: bool | Omit = omit,
308
+ _force_new_agent: Optional[bool] | Omit = omit,
309
+ _inputs: Optional[Iterable[int]] | Omit = omit,
310
+ _prompt_token_length: Optional[int] | Omit = omit,
311
+ best_of: Optional[int] | Omit = omit,
312
+ country: Optional[str] | Omit = omit,
313
+ cum_logprobs: Optional[bool] | Omit = omit,
314
+ disable_search: Optional[bool] | Omit = omit,
315
+ diverse_first_token: Optional[bool] | Omit = omit,
316
+ enable_search_classifier: Optional[bool] | Omit = omit,
317
+ file_workspace_id: Optional[str] | Omit = omit,
318
+ frequency_penalty: Optional[float] | Omit = omit,
319
+ has_image_url: bool | Omit = omit,
320
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
321
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
322
+ language_preference: Optional[str] | Omit = omit,
323
+ last_updated_after_filter: Optional[str] | Omit = omit,
324
+ last_updated_before_filter: Optional[str] | Omit = omit,
325
+ latitude: Optional[float] | Omit = omit,
326
+ logprobs: Optional[bool] | Omit = omit,
327
+ longitude: Optional[float] | Omit = omit,
328
+ max_tokens: Optional[int] | Omit = omit,
329
+ n: Optional[int] | Omit = omit,
330
+ num_images: int | Omit = omit,
331
+ num_search_results: int | Omit = omit,
332
+ parallel_tool_calls: Optional[bool] | Omit = omit,
333
+ presence_penalty: Optional[float] | Omit = omit,
334
+ ranking_model: Optional[str] | Omit = omit,
335
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
336
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
337
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
338
+ return_images: Optional[bool] | Omit = omit,
339
+ return_related_questions: Optional[bool] | Omit = omit,
340
+ safe_search: Optional[bool] | Omit = omit,
341
+ search_after_date_filter: Optional[str] | Omit = omit,
342
+ search_before_date_filter: Optional[str] | Omit = omit,
343
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
344
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
345
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
346
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
347
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
348
+ search_tenant: Optional[str] | Omit = omit,
349
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
350
+ stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
351
+ stream_mode: Literal["full", "concise"] | Omit = omit,
352
+ temperature: Optional[float] | Omit = omit,
353
+ thread_id: Optional[str] | Omit = omit,
354
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
355
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
356
+ top_k: Optional[int] | Omit = omit,
357
+ top_logprobs: Optional[int] | Omit = omit,
358
+ top_p: Optional[float] | Omit = omit,
359
+ updated_after_timestamp: Optional[int] | Omit = omit,
360
+ updated_before_timestamp: Optional[int] | Omit = omit,
361
+ use_threads: Optional[bool] | Omit = omit,
362
+ user_original_query: Optional[str] | Omit = omit,
363
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
364
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
365
+ # The extra values given here take precedence over values defined on the client or passed to this method.
366
+ extra_headers: Headers | None = None,
367
+ extra_query: Query | None = None,
368
+ extra_body: Body | None = None,
369
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
370
+ ) -> StreamChunk | Stream[StreamChunk]:
371
+ return self._post(
372
+ "/chat/completions",
373
+ body=maybe_transform(
374
+ {
375
+ "messages": messages,
376
+ "model": model,
377
+ "_debug_pro_search": _debug_pro_search,
378
+ "_force_new_agent": _force_new_agent,
379
+ "_inputs": _inputs,
380
+ "_prompt_token_length": _prompt_token_length,
381
+ "best_of": best_of,
382
+ "country": country,
383
+ "cum_logprobs": cum_logprobs,
384
+ "disable_search": disable_search,
385
+ "diverse_first_token": diverse_first_token,
386
+ "enable_search_classifier": enable_search_classifier,
387
+ "file_workspace_id": file_workspace_id,
388
+ "frequency_penalty": frequency_penalty,
389
+ "has_image_url": has_image_url,
390
+ "image_domain_filter": image_domain_filter,
391
+ "image_format_filter": image_format_filter,
392
+ "language_preference": language_preference,
393
+ "last_updated_after_filter": last_updated_after_filter,
394
+ "last_updated_before_filter": last_updated_before_filter,
395
+ "latitude": latitude,
396
+ "logprobs": logprobs,
397
+ "longitude": longitude,
398
+ "max_tokens": max_tokens,
399
+ "n": n,
400
+ "num_images": num_images,
401
+ "num_search_results": num_search_results,
402
+ "parallel_tool_calls": parallel_tool_calls,
403
+ "presence_penalty": presence_penalty,
404
+ "ranking_model": ranking_model,
405
+ "reasoning_effort": reasoning_effort,
406
+ "response_format": response_format,
407
+ "response_metadata": response_metadata,
408
+ "return_images": return_images,
409
+ "return_related_questions": return_related_questions,
410
+ "safe_search": safe_search,
411
+ "search_after_date_filter": search_after_date_filter,
412
+ "search_before_date_filter": search_before_date_filter,
413
+ "search_domain_filter": search_domain_filter,
414
+ "search_internal_properties": search_internal_properties,
415
+ "search_language_filter": search_language_filter,
416
+ "search_mode": search_mode,
417
+ "search_recency_filter": search_recency_filter,
418
+ "search_tenant": search_tenant,
419
+ "stop": stop,
420
+ "stream": stream,
421
+ "stream_mode": stream_mode,
422
+ "temperature": temperature,
423
+ "thread_id": thread_id,
424
+ "tool_choice": tool_choice,
425
+ "tools": tools,
426
+ "top_k": top_k,
427
+ "top_logprobs": top_logprobs,
428
+ "top_p": top_p,
429
+ "updated_after_timestamp": updated_after_timestamp,
430
+ "updated_before_timestamp": updated_before_timestamp,
431
+ "use_threads": use_threads,
432
+ "user_original_query": user_original_query,
433
+ "web_search_options": web_search_options,
434
+ },
435
+ completion_create_params.CompletionCreateParamsStreaming
436
+ if stream
437
+ else completion_create_params.CompletionCreateParamsNonStreaming,
438
+ ),
439
+ options=make_request_options(
440
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
441
+ ),
442
+ cast_to=StreamChunk,
443
+ stream=stream or False,
444
+ stream_cls=Stream[StreamChunk],
445
+ )
446
+
447
+
448
+ class AsyncCompletionsResource(AsyncAPIResource):
449
+ @cached_property
450
+ def with_raw_response(self) -> AsyncCompletionsResourceWithRawResponse:
451
+ """
452
+ This property can be used as a prefix for any HTTP method call to return
453
+ the raw response object instead of the parsed content.
454
+
455
+ For more information, see https://www.github.com/perplexityai/perplexity-py#accessing-raw-response-data-eg-headers
456
+ """
457
+ return AsyncCompletionsResourceWithRawResponse(self)
458
+
459
+ @cached_property
460
+ def with_streaming_response(self) -> AsyncCompletionsResourceWithStreamingResponse:
461
+ """
462
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
463
+
464
+ For more information, see https://www.github.com/perplexityai/perplexity-py#with_streaming_response
465
+ """
466
+ return AsyncCompletionsResourceWithStreamingResponse(self)
467
+
468
+ @overload
469
+ async def create(
470
+ self,
471
+ *,
472
+ messages: Iterable[ChatMessageInput],
473
+ model: str,
474
+ _debug_pro_search: bool | Omit = omit,
475
+ _force_new_agent: Optional[bool] | Omit = omit,
476
+ _inputs: Optional[Iterable[int]] | Omit = omit,
477
+ _prompt_token_length: Optional[int] | Omit = omit,
478
+ best_of: Optional[int] | Omit = omit,
479
+ country: Optional[str] | Omit = omit,
480
+ cum_logprobs: Optional[bool] | Omit = omit,
481
+ disable_search: Optional[bool] | Omit = omit,
482
+ diverse_first_token: Optional[bool] | Omit = omit,
483
+ enable_search_classifier: Optional[bool] | Omit = omit,
484
+ file_workspace_id: Optional[str] | Omit = omit,
485
+ frequency_penalty: Optional[float] | Omit = omit,
486
+ has_image_url: bool | Omit = omit,
487
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
488
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
489
+ language_preference: Optional[str] | Omit = omit,
490
+ last_updated_after_filter: Optional[str] | Omit = omit,
491
+ last_updated_before_filter: Optional[str] | Omit = omit,
492
+ latitude: Optional[float] | Omit = omit,
493
+ logprobs: Optional[bool] | Omit = omit,
494
+ longitude: Optional[float] | Omit = omit,
495
+ max_tokens: Optional[int] | Omit = omit,
496
+ n: Optional[int] | Omit = omit,
497
+ num_images: int | Omit = omit,
498
+ num_search_results: int | Omit = omit,
499
+ parallel_tool_calls: Optional[bool] | Omit = omit,
500
+ presence_penalty: Optional[float] | Omit = omit,
501
+ ranking_model: Optional[str] | Omit = omit,
502
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
503
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
504
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
505
+ return_images: Optional[bool] | Omit = omit,
506
+ return_related_questions: Optional[bool] | Omit = omit,
507
+ safe_search: Optional[bool] | Omit = omit,
508
+ search_after_date_filter: Optional[str] | Omit = omit,
509
+ search_before_date_filter: Optional[str] | Omit = omit,
510
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
511
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
512
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
513
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
514
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
515
+ search_tenant: Optional[str] | Omit = omit,
516
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
517
+ stream: Optional[Literal[False]] | Omit = omit,
518
+ stream_mode: Literal["full", "concise"] | Omit = omit,
519
+ temperature: Optional[float] | Omit = omit,
520
+ thread_id: Optional[str] | Omit = omit,
521
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
522
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
523
+ top_k: Optional[int] | Omit = omit,
524
+ top_logprobs: Optional[int] | Omit = omit,
525
+ top_p: Optional[float] | Omit = omit,
526
+ updated_after_timestamp: Optional[int] | Omit = omit,
527
+ updated_before_timestamp: Optional[int] | Omit = omit,
528
+ use_threads: Optional[bool] | Omit = omit,
529
+ user_original_query: Optional[str] | Omit = omit,
530
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
531
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
532
+ # The extra values given here take precedence over values defined on the client or passed to this method.
533
+ extra_headers: Headers | None = None,
534
+ extra_query: Query | None = None,
535
+ extra_body: Body | None = None,
536
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
537
+ ) -> StreamChunk:
538
+ """
539
+ Generate a chat completion response for the given conversation.
540
+
541
+ Args:
542
+ extra_headers: Send extra headers
543
+
544
+ extra_query: Add additional query parameters to the request
545
+
546
+ extra_body: Add additional JSON properties to the request
547
+
548
+ timeout: Override the client-level default timeout for this request, in seconds
549
+ """
550
+ ...
551
+
552
+ @overload
553
+ async def create(
554
+ self,
555
+ *,
556
+ messages: Iterable[ChatMessageInput],
557
+ model: str,
558
+ stream: Literal[True],
559
+ _debug_pro_search: bool | Omit = omit,
560
+ _force_new_agent: Optional[bool] | Omit = omit,
561
+ _inputs: Optional[Iterable[int]] | Omit = omit,
562
+ _prompt_token_length: Optional[int] | Omit = omit,
563
+ best_of: Optional[int] | Omit = omit,
564
+ country: Optional[str] | Omit = omit,
565
+ cum_logprobs: Optional[bool] | Omit = omit,
566
+ disable_search: Optional[bool] | Omit = omit,
567
+ diverse_first_token: Optional[bool] | Omit = omit,
568
+ enable_search_classifier: Optional[bool] | Omit = omit,
569
+ file_workspace_id: Optional[str] | Omit = omit,
570
+ frequency_penalty: Optional[float] | Omit = omit,
571
+ has_image_url: bool | Omit = omit,
572
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
573
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
574
+ language_preference: Optional[str] | Omit = omit,
575
+ last_updated_after_filter: Optional[str] | Omit = omit,
576
+ last_updated_before_filter: Optional[str] | Omit = omit,
577
+ latitude: Optional[float] | Omit = omit,
578
+ logprobs: Optional[bool] | Omit = omit,
579
+ longitude: Optional[float] | Omit = omit,
580
+ max_tokens: Optional[int] | Omit = omit,
581
+ n: Optional[int] | Omit = omit,
582
+ num_images: int | Omit = omit,
583
+ num_search_results: int | Omit = omit,
584
+ parallel_tool_calls: Optional[bool] | Omit = omit,
585
+ presence_penalty: Optional[float] | Omit = omit,
586
+ ranking_model: Optional[str] | Omit = omit,
587
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
588
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
589
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
590
+ return_images: Optional[bool] | Omit = omit,
591
+ return_related_questions: Optional[bool] | Omit = omit,
592
+ safe_search: Optional[bool] | Omit = omit,
593
+ search_after_date_filter: Optional[str] | Omit = omit,
594
+ search_before_date_filter: Optional[str] | Omit = omit,
595
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
596
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
597
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
598
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
599
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
600
+ search_tenant: Optional[str] | Omit = omit,
601
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
602
+ stream_mode: Literal["full", "concise"] | Omit = omit,
603
+ temperature: Optional[float] | Omit = omit,
604
+ thread_id: Optional[str] | Omit = omit,
605
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
606
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
607
+ top_k: Optional[int] | Omit = omit,
608
+ top_logprobs: Optional[int] | Omit = omit,
609
+ top_p: Optional[float] | Omit = omit,
610
+ updated_after_timestamp: Optional[int] | Omit = omit,
611
+ updated_before_timestamp: Optional[int] | Omit = omit,
612
+ use_threads: Optional[bool] | Omit = omit,
613
+ user_original_query: Optional[str] | Omit = omit,
614
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
615
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
616
+ # The extra values given here take precedence over values defined on the client or passed to this method.
617
+ extra_headers: Headers | None = None,
618
+ extra_query: Query | None = None,
619
+ extra_body: Body | None = None,
620
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
621
+ ) -> AsyncStream[StreamChunk]:
622
+ """
623
+ Generate a chat completion response for the given conversation.
624
+
625
+ Args:
626
+ extra_headers: Send extra headers
627
+
628
+ extra_query: Add additional query parameters to the request
629
+
630
+ extra_body: Add additional JSON properties to the request
631
+
632
+ timeout: Override the client-level default timeout for this request, in seconds
633
+ """
634
+ ...
635
+
636
+ @overload
637
+ async def create(
638
+ self,
639
+ *,
640
+ messages: Iterable[ChatMessageInput],
641
+ model: str,
642
+ stream: bool,
643
+ _debug_pro_search: bool | Omit = omit,
644
+ _force_new_agent: Optional[bool] | Omit = omit,
645
+ _inputs: Optional[Iterable[int]] | Omit = omit,
646
+ _prompt_token_length: Optional[int] | Omit = omit,
647
+ best_of: Optional[int] | Omit = omit,
648
+ country: Optional[str] | Omit = omit,
649
+ cum_logprobs: Optional[bool] | Omit = omit,
650
+ disable_search: Optional[bool] | Omit = omit,
651
+ diverse_first_token: Optional[bool] | Omit = omit,
652
+ enable_search_classifier: Optional[bool] | Omit = omit,
653
+ file_workspace_id: Optional[str] | Omit = omit,
654
+ frequency_penalty: Optional[float] | Omit = omit,
655
+ has_image_url: bool | Omit = omit,
656
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
657
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
658
+ language_preference: Optional[str] | Omit = omit,
659
+ last_updated_after_filter: Optional[str] | Omit = omit,
660
+ last_updated_before_filter: Optional[str] | Omit = omit,
661
+ latitude: Optional[float] | Omit = omit,
662
+ logprobs: Optional[bool] | Omit = omit,
663
+ longitude: Optional[float] | Omit = omit,
664
+ max_tokens: Optional[int] | Omit = omit,
665
+ n: Optional[int] | Omit = omit,
666
+ num_images: int | Omit = omit,
667
+ num_search_results: int | Omit = omit,
668
+ parallel_tool_calls: Optional[bool] | Omit = omit,
669
+ presence_penalty: Optional[float] | Omit = omit,
670
+ ranking_model: Optional[str] | Omit = omit,
671
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
672
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
673
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
674
+ return_images: Optional[bool] | Omit = omit,
675
+ return_related_questions: Optional[bool] | Omit = omit,
676
+ safe_search: Optional[bool] | Omit = omit,
677
+ search_after_date_filter: Optional[str] | Omit = omit,
678
+ search_before_date_filter: Optional[str] | Omit = omit,
679
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
680
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
681
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
682
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
683
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
684
+ search_tenant: Optional[str] | Omit = omit,
685
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
686
+ stream_mode: Literal["full", "concise"] | Omit = omit,
687
+ temperature: Optional[float] | Omit = omit,
688
+ thread_id: Optional[str] | Omit = omit,
689
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
690
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
691
+ top_k: Optional[int] | Omit = omit,
692
+ top_logprobs: Optional[int] | Omit = omit,
693
+ top_p: Optional[float] | Omit = omit,
694
+ updated_after_timestamp: Optional[int] | Omit = omit,
695
+ updated_before_timestamp: Optional[int] | Omit = omit,
696
+ use_threads: Optional[bool] | Omit = omit,
697
+ user_original_query: Optional[str] | Omit = omit,
698
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
699
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
700
+ # The extra values given here take precedence over values defined on the client or passed to this method.
701
+ extra_headers: Headers | None = None,
702
+ extra_query: Query | None = None,
703
+ extra_body: Body | None = None,
704
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
705
+ ) -> StreamChunk | AsyncStream[StreamChunk]:
706
+ """
707
+ Generate a chat completion response for the given conversation.
708
+
709
+ Args:
710
+ extra_headers: Send extra headers
711
+
712
+ extra_query: Add additional query parameters to the request
713
+
714
+ extra_body: Add additional JSON properties to the request
715
+
716
+ timeout: Override the client-level default timeout for this request, in seconds
717
+ """
718
+ ...
719
+
720
+ @required_args(["messages", "model"], ["messages", "model", "stream"])
721
+ async def create(
722
+ self,
723
+ *,
724
+ messages: Iterable[ChatMessageInput],
725
+ model: str,
726
+ _debug_pro_search: bool | Omit = omit,
727
+ _force_new_agent: Optional[bool] | Omit = omit,
728
+ _inputs: Optional[Iterable[int]] | Omit = omit,
729
+ _prompt_token_length: Optional[int] | Omit = omit,
730
+ best_of: Optional[int] | Omit = omit,
731
+ country: Optional[str] | Omit = omit,
732
+ cum_logprobs: Optional[bool] | Omit = omit,
733
+ disable_search: Optional[bool] | Omit = omit,
734
+ diverse_first_token: Optional[bool] | Omit = omit,
735
+ enable_search_classifier: Optional[bool] | Omit = omit,
736
+ file_workspace_id: Optional[str] | Omit = omit,
737
+ frequency_penalty: Optional[float] | Omit = omit,
738
+ has_image_url: bool | Omit = omit,
739
+ image_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
740
+ image_format_filter: Optional[SequenceNotStr[str]] | Omit = omit,
741
+ language_preference: Optional[str] | Omit = omit,
742
+ last_updated_after_filter: Optional[str] | Omit = omit,
743
+ last_updated_before_filter: Optional[str] | Omit = omit,
744
+ latitude: Optional[float] | Omit = omit,
745
+ logprobs: Optional[bool] | Omit = omit,
746
+ longitude: Optional[float] | Omit = omit,
747
+ max_tokens: Optional[int] | Omit = omit,
748
+ n: Optional[int] | Omit = omit,
749
+ num_images: int | Omit = omit,
750
+ num_search_results: int | Omit = omit,
751
+ parallel_tool_calls: Optional[bool] | Omit = omit,
752
+ presence_penalty: Optional[float] | Omit = omit,
753
+ ranking_model: Optional[str] | Omit = omit,
754
+ reasoning_effort: Optional[Literal["minimal", "low", "medium", "high"]] | Omit = omit,
755
+ response_format: Optional[completion_create_params.ResponseFormat] | Omit = omit,
756
+ response_metadata: Optional[Dict[str, object]] | Omit = omit,
757
+ return_images: Optional[bool] | Omit = omit,
758
+ return_related_questions: Optional[bool] | Omit = omit,
759
+ safe_search: Optional[bool] | Omit = omit,
760
+ search_after_date_filter: Optional[str] | Omit = omit,
761
+ search_before_date_filter: Optional[str] | Omit = omit,
762
+ search_domain_filter: Optional[SequenceNotStr[str]] | Omit = omit,
763
+ search_internal_properties: Optional[Dict[str, object]] | Omit = omit,
764
+ search_language_filter: Optional[SequenceNotStr[str]] | Omit = omit,
765
+ search_mode: Optional[Literal["web", "academic", "sec"]] | Omit = omit,
766
+ search_recency_filter: Optional[Literal["hour", "day", "week", "month", "year"]] | Omit = omit,
767
+ search_tenant: Optional[str] | Omit = omit,
768
+ stop: Union[str, SequenceNotStr[str], None] | Omit = omit,
769
+ stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
770
+ stream_mode: Literal["full", "concise"] | Omit = omit,
771
+ temperature: Optional[float] | Omit = omit,
772
+ thread_id: Optional[str] | Omit = omit,
773
+ tool_choice: Optional[Literal["none", "auto", "required"]] | Omit = omit,
774
+ tools: Optional[Iterable[completion_create_params.Tool]] | Omit = omit,
775
+ top_k: Optional[int] | Omit = omit,
776
+ top_logprobs: Optional[int] | Omit = omit,
777
+ top_p: Optional[float] | Omit = omit,
778
+ updated_after_timestamp: Optional[int] | Omit = omit,
779
+ updated_before_timestamp: Optional[int] | Omit = omit,
780
+ use_threads: Optional[bool] | Omit = omit,
781
+ user_original_query: Optional[str] | Omit = omit,
782
+ web_search_options: completion_create_params.WebSearchOptions | Omit = omit,
783
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
784
+ # The extra values given here take precedence over values defined on the client or passed to this method.
785
+ extra_headers: Headers | None = None,
786
+ extra_query: Query | None = None,
787
+ extra_body: Body | None = None,
788
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
789
+ ) -> StreamChunk | AsyncStream[StreamChunk]:
790
+ return await self._post(
791
+ "/chat/completions",
792
+ body=await async_maybe_transform(
793
+ {
794
+ "messages": messages,
795
+ "model": model,
796
+ "_debug_pro_search": _debug_pro_search,
797
+ "_force_new_agent": _force_new_agent,
798
+ "_inputs": _inputs,
799
+ "_prompt_token_length": _prompt_token_length,
800
+ "best_of": best_of,
801
+ "country": country,
802
+ "cum_logprobs": cum_logprobs,
803
+ "disable_search": disable_search,
804
+ "diverse_first_token": diverse_first_token,
805
+ "enable_search_classifier": enable_search_classifier,
806
+ "file_workspace_id": file_workspace_id,
807
+ "frequency_penalty": frequency_penalty,
808
+ "has_image_url": has_image_url,
809
+ "image_domain_filter": image_domain_filter,
810
+ "image_format_filter": image_format_filter,
811
+ "language_preference": language_preference,
812
+ "last_updated_after_filter": last_updated_after_filter,
813
+ "last_updated_before_filter": last_updated_before_filter,
814
+ "latitude": latitude,
815
+ "logprobs": logprobs,
816
+ "longitude": longitude,
817
+ "max_tokens": max_tokens,
818
+ "n": n,
819
+ "num_images": num_images,
820
+ "num_search_results": num_search_results,
821
+ "parallel_tool_calls": parallel_tool_calls,
822
+ "presence_penalty": presence_penalty,
823
+ "ranking_model": ranking_model,
824
+ "reasoning_effort": reasoning_effort,
825
+ "response_format": response_format,
826
+ "response_metadata": response_metadata,
827
+ "return_images": return_images,
828
+ "return_related_questions": return_related_questions,
829
+ "safe_search": safe_search,
830
+ "search_after_date_filter": search_after_date_filter,
831
+ "search_before_date_filter": search_before_date_filter,
832
+ "search_domain_filter": search_domain_filter,
833
+ "search_internal_properties": search_internal_properties,
834
+ "search_language_filter": search_language_filter,
835
+ "search_mode": search_mode,
836
+ "search_recency_filter": search_recency_filter,
837
+ "search_tenant": search_tenant,
838
+ "stop": stop,
839
+ "stream": stream,
840
+ "stream_mode": stream_mode,
841
+ "temperature": temperature,
842
+ "thread_id": thread_id,
843
+ "tool_choice": tool_choice,
844
+ "tools": tools,
845
+ "top_k": top_k,
846
+ "top_logprobs": top_logprobs,
847
+ "top_p": top_p,
848
+ "updated_after_timestamp": updated_after_timestamp,
849
+ "updated_before_timestamp": updated_before_timestamp,
850
+ "use_threads": use_threads,
851
+ "user_original_query": user_original_query,
852
+ "web_search_options": web_search_options,
853
+ },
854
+ completion_create_params.CompletionCreateParamsStreaming
855
+ if stream
856
+ else completion_create_params.CompletionCreateParamsNonStreaming,
857
+ ),
858
+ options=make_request_options(
859
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
860
+ ),
861
+ cast_to=StreamChunk,
862
+ stream=stream or False,
863
+ stream_cls=AsyncStream[StreamChunk],
864
+ )
865
+
866
+
867
+ class CompletionsResourceWithRawResponse:
868
+ def __init__(self, completions: CompletionsResource) -> None:
869
+ self._completions = completions
870
+
871
+ self.create = to_raw_response_wrapper(
872
+ completions.create,
873
+ )
874
+
875
+
876
+ class AsyncCompletionsResourceWithRawResponse:
877
+ def __init__(self, completions: AsyncCompletionsResource) -> None:
878
+ self._completions = completions
879
+
880
+ self.create = async_to_raw_response_wrapper(
881
+ completions.create,
882
+ )
883
+
884
+
885
+ class CompletionsResourceWithStreamingResponse:
886
+ def __init__(self, completions: CompletionsResource) -> None:
887
+ self._completions = completions
888
+
889
+ self.create = to_streamed_response_wrapper(
890
+ completions.create,
891
+ )
892
+
893
+
894
+ class AsyncCompletionsResourceWithStreamingResponse:
895
+ def __init__(self, completions: AsyncCompletionsResource) -> None:
896
+ self._completions = completions
897
+
898
+ self.create = async_to_streamed_response_wrapper(
899
+ completions.create,
900
+ )
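`AsyncCompletionsResource.create` mirrors the synchronous signature but returns `StreamChunk` or `AsyncStream[StreamChunk]`, and the `*WithRawResponse` / `*WithStreamingResponse` classes at the end of the file wrap `create` so the underlying httpx response can be inspected. A sketch of both, under the same assumptions as the example above (client names, env-var key, placeholder model); `raw.headers` and `raw.parse()` follow the conventional Stainless raw-response accessors and are not shown in this diff:

```python
import asyncio

from perplexity import AsyncPerplexity  # assumed export from perplexity/_client.py


async def main() -> None:
    client = AsyncPerplexity()  # assumed to read PERPLEXITY_API_KEY from the environment

    # stream=True selects the AsyncStream[StreamChunk] overload.
    stream = await client.chat.completions.create(
        model="sonar",  # placeholder model name
        messages=[{"role": "user", "content": "Summarize today's top space news."}],
        stream=True,
    )
    async for chunk in stream:
        print(chunk)

    # .with_raw_response corresponds to AsyncCompletionsResourceWithRawResponse above;
    # headers and parse() are assumed from the usual Stainless raw-response API.
    raw = await client.chat.completions.with_raw_response.create(
        model="sonar",
        messages=[{"role": "user", "content": "One sentence: what is Perplexity?"}],
    )
    print(raw.headers.get("content-type"))
    print(raw.parse())


asyncio.run(main())
```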