sarvamai 0.1.22a4__py3-none-any.whl → 0.1.23a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sarvamai/__init__.py +405 -200
- sarvamai/chat/raw_client.py +20 -20
- sarvamai/client.py +186 -34
- sarvamai/core/__init__.py +76 -21
- sarvamai/core/client_wrapper.py +19 -3
- sarvamai/core/force_multipart.py +4 -2
- sarvamai/core/http_client.py +217 -97
- sarvamai/core/http_response.py +1 -1
- sarvamai/core/http_sse/__init__.py +42 -0
- sarvamai/core/http_sse/_api.py +112 -0
- sarvamai/core/http_sse/_decoders.py +61 -0
- sarvamai/core/http_sse/_exceptions.py +7 -0
- sarvamai/core/http_sse/_models.py +17 -0
- sarvamai/core/jsonable_encoder.py +8 -0
- sarvamai/core/pydantic_utilities.py +110 -4
- sarvamai/errors/__init__.py +40 -6
- sarvamai/errors/bad_request_error.py +1 -1
- sarvamai/errors/forbidden_error.py +1 -1
- sarvamai/errors/internal_server_error.py +1 -1
- sarvamai/errors/service_unavailable_error.py +1 -1
- sarvamai/errors/too_many_requests_error.py +1 -1
- sarvamai/errors/unprocessable_entity_error.py +1 -1
- sarvamai/requests/__init__.py +150 -62
- sarvamai/requests/audio_data.py +0 -6
- sarvamai/requests/error_response_data.py +1 -1
- sarvamai/requests/file_signed_url_details.py +1 -1
- sarvamai/requests/speech_to_text_transcription_data.py +2 -2
- sarvamai/speech_to_text/raw_client.py +54 -52
- sarvamai/speech_to_text_job/raw_client.py +120 -120
- sarvamai/speech_to_text_streaming/__init__.py +38 -8
- sarvamai/speech_to_text_streaming/client.py +0 -13
- sarvamai/speech_to_text_streaming/raw_client.py +0 -13
- sarvamai/speech_to_text_streaming/types/__init__.py +36 -6
- sarvamai/speech_to_text_translate_job/raw_client.py +120 -120
- sarvamai/speech_to_text_translate_streaming/__init__.py +36 -7
- sarvamai/speech_to_text_translate_streaming/client.py +0 -13
- sarvamai/speech_to_text_translate_streaming/raw_client.py +0 -13
- sarvamai/speech_to_text_translate_streaming/types/__init__.py +36 -5
- sarvamai/text/client.py +0 -12
- sarvamai/text/raw_client.py +60 -72
- sarvamai/text_to_speech/client.py +18 -0
- sarvamai/text_to_speech/raw_client.py +38 -20
- sarvamai/text_to_speech_streaming/__init__.py +28 -1
- sarvamai/text_to_speech_streaming/types/__init__.py +30 -1
- sarvamai/types/__init__.py +222 -100
- sarvamai/types/audio_data.py +0 -6
- sarvamai/types/chat_completion_request_message.py +6 -2
- sarvamai/types/error_response_data.py +1 -1
- sarvamai/types/file_signed_url_details.py +1 -1
- sarvamai/types/speech_to_text_transcription_data.py +2 -2
- {sarvamai-0.1.22a4.dist-info → sarvamai-0.1.23a1.dist-info}/METADATA +2 -1
- {sarvamai-0.1.22a4.dist-info → sarvamai-0.1.23a1.dist-info}/RECORD +53 -51
- sarvamai/speech_to_text_streaming/types/speech_to_text_streaming_input_audio_codec.py +0 -33
- sarvamai/speech_to_text_translate_streaming/types/speech_to_text_translate_streaming_input_audio_codec.py +0 -33
- sarvamai/types/audio_data_input_audio_codec.py +0 -33
- {sarvamai-0.1.22a4.dist-info → sarvamai-0.1.23a1.dist-info}/WHEEL +0 -0
sarvamai/core/http_client.py
CHANGED
@@ -5,7 +5,6 @@ import email.utils
 import re
 import time
 import typing
-import urllib.parse
 from contextlib import asynccontextmanager, contextmanager
 from random import random
 
@@ -14,13 +13,13 @@ from .file import File, convert_file_dict_to_httpx_tuples
 from .force_multipart import FORCE_MULTIPART
 from .jsonable_encoder import jsonable_encoder
 from .query_encoder import encode_query
-from .remove_none_from_dict import remove_none_from_dict
+from .remove_none_from_dict import remove_none_from_dict as remove_none_from_dict
 from .request_options import RequestOptions
 from httpx._types import RequestFiles
 
-INITIAL_RETRY_DELAY_SECONDS = 0.5
-MAX_RETRY_DELAY_SECONDS = 10
-
+INITIAL_RETRY_DELAY_SECONDS = 1.0
+MAX_RETRY_DELAY_SECONDS = 60.0
+JITTER_FACTOR = 0.2  # 20% random jitter
 
 
 def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]:
@@ -64,6 +63,38 @@ def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]:
     return seconds
 
 
+def _add_positive_jitter(delay: float) -> float:
+    """Add positive jitter (0-20%) to prevent thundering herd."""
+    jitter_multiplier = 1 + random() * JITTER_FACTOR
+    return delay * jitter_multiplier
+
+
+def _add_symmetric_jitter(delay: float) -> float:
+    """Add symmetric jitter (±10%) for exponential backoff."""
+    jitter_multiplier = 1 + (random() - 0.5) * JITTER_FACTOR
+    return delay * jitter_multiplier
+
+
+def _parse_x_ratelimit_reset(response_headers: httpx.Headers) -> typing.Optional[float]:
+    """
+    Parse the X-RateLimit-Reset header (Unix timestamp in seconds).
+    Returns seconds to wait, or None if header is missing/invalid.
+    """
+    reset_time_str = response_headers.get("x-ratelimit-reset")
+    if reset_time_str is None:
+        return None
+
+    try:
+        reset_time = int(reset_time_str)
+        delay = reset_time - time.time()
+        if delay > 0:
+            return delay
+    except (ValueError, TypeError):
+        pass
+
+    return None
+
+
 def _retry_timeout(response: httpx.Response, retries: int) -> float:
     """
     Determine the amount of time to wait before retrying a request.
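Note: the two jitter helpers bound the wait differently: positive jitter can only lengthen a server-mandated delay, while symmetric jitter spreads backoff both ways. A minimal sketch of how the new reset-header parser behaves; the import path is an assumption of this example:

```python
import time

import httpx

from sarvamai.core.http_client import _parse_x_ratelimit_reset  # assumed import path

# A reset timestamp 30 seconds in the future yields a ~30 s wait;
# a missing or malformed header falls through to None.
headers = httpx.Headers({"x-ratelimit-reset": str(int(time.time()) + 30)})
print(_parse_x_ratelimit_reset(headers))                                        # ~30.0
print(_parse_x_ratelimit_reset(httpx.Headers({})))                              # None
print(_parse_x_ratelimit_reset(httpx.Headers({"x-ratelimit-reset": "soon"})))   # None
```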
@@ -71,17 +102,19 @@ def _retry_timeout(response: httpx.Response, retries: int) -> float:
     with a jitter to determine the number of seconds to wait.
     """
 
-    # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says.
+    # 1. Check Retry-After header first
     retry_after = _parse_retry_after(response.headers)
-    if retry_after is not None and retry_after <= MAX_RETRY_DELAY_SECONDS:
-        return retry_after
+    if retry_after is not None and retry_after > 0:
+        return min(retry_after, MAX_RETRY_DELAY_SECONDS)
 
-    # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS.
-    retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
+    # 2. Check X-RateLimit-Reset header (with positive jitter)
+    ratelimit_reset = _parse_x_ratelimit_reset(response.headers)
+    if ratelimit_reset is not None:
+        return _add_positive_jitter(min(ratelimit_reset, MAX_RETRY_DELAY_SECONDS))
 
-    # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries.
-    timeout = retry_delay * (1 - 0.25 * random())
-    return timeout if timeout >= 0 else 0
+    # 3. Fall back to exponential backoff (with symmetric jitter)
+    backoff = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
+    return _add_symmetric_jitter(backoff)
 
 
 def _should_retry(response: httpx.Response) -> bool:
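Note: with INITIAL_RETRY_DELAY_SECONDS = 1.0, the fallback tier now produces delays of roughly 1, 2, 4, ... seconds, capped at 60 and then spread by ±10% jitter. A worked sketch of tier 3 in isolation (pure arithmetic, no SDK imports):

```python
# Exponential backoff before jitter, as computed in _retry_timeout above.
INITIAL_RETRY_DELAY_SECONDS = 1.0
MAX_RETRY_DELAY_SECONDS = 60.0

for retries in range(8):
    backoff = min(INITIAL_RETRY_DELAY_SECONDS * 2.0 ** retries, MAX_RETRY_DELAY_SECONDS)
    print(retries, backoff)
# 0 1.0, 1 2.0, 2 4.0, 3 8.0, 4 16.0, 5 32.0, 6 60.0 (capped), 7 60.0
# _add_symmetric_jitter then multiplies each value by a factor in [0.9, 1.1).
```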
@@ -89,6 +122,45 @@ def _should_retry(response: httpx.Response) -> bool:
     return response.status_code >= 500 or response.status_code in retryable_400s
 
 
+def _build_url(base_url: str, path: typing.Optional[str]) -> str:
+    """
+    Build a full URL by joining a base URL with a path.
+
+    This function correctly handles base URLs that contain path prefixes (e.g., tenant-based URLs)
+    by using string concatenation instead of urllib.parse.urljoin(), which would incorrectly
+    strip path components when the path starts with '/'.
+
+    Example:
+        >>> _build_url("https://cloud.example.com/org/tenant/api", "/users")
+        'https://cloud.example.com/org/tenant/api/users'
+
+    Args:
+        base_url: The base URL, which may contain path prefixes.
+        path: The path to append. Can be None or empty string.
+
+    Returns:
+        The full URL with base_url and path properly joined.
+    """
+    if not path:
+        return base_url
+    return f"{base_url.rstrip('/')}/{path.lstrip('/')}"
+
+
+def _maybe_filter_none_from_multipart_data(
+    data: typing.Optional[typing.Any],
+    request_files: typing.Optional[RequestFiles],
+    force_multipart: typing.Optional[bool],
+) -> typing.Optional[typing.Any]:
+    """
+    Filter None values from data body for multipart/form requests.
+    This prevents httpx from converting None to empty strings in multipart encoding.
+    Only applies when files are present or force_multipart is True.
+    """
+    if data is not None and isinstance(data, typing.Mapping) and (request_files or force_multipart):
+        return remove_none_from_dict(data)
+    return data
+
+
 def remove_omit_from_dict(
     original: typing.Dict[str, typing.Optional[typing.Any]],
     omit: typing.Optional[typing.Any],
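Note: the docstring above explains why urljoin was dropped; the difference is easy to reproduce with the standard library alone:

```python
import urllib.parse

base = "https://cloud.example.com/org/tenant/api"
path = "/users"

# Old behaviour: an absolute path resolves against the host root,
# silently discarding the tenant prefix.
print(urllib.parse.urljoin(f"{base}/", path))
# -> https://cloud.example.com/users

# New behaviour (_build_url): plain concatenation keeps the prefix.
print(f"{base.rstrip('/')}/{path.lstrip('/')}")
# -> https://cloud.example.com/org/tenant/api/users
```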
@@ -143,8 +215,19 @@ def get_request_body(
     # If both data and json are None, we send json data in the event extra properties are specified
     json_body = maybe_filter_request_body(json, request_options, omit)
 
-    # If you have an empty JSON body, you should just send None
-    return (json_body if json_body != {} else None), data_body if data_body != {} else None
+    has_additional_body_parameters = bool(
+        request_options is not None and request_options.get("additional_body_parameters")
+    )
+
+    # Only collapse empty dict to None when the body was not explicitly provided
+    # and there are no additional body parameters. This preserves explicit empty
+    # bodies (e.g., when an endpoint has a request body type but all fields are optional).
+    if json_body == {} and json is None and not has_additional_body_parameters:
+        json_body = None
+    if data_body == {} and data is None and not has_additional_body_parameters:
+        data_body = None
+
+    return json_body, data_body
 
 
 class HttpClient:
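Note: the effect of the new empty-body rule, sketched as assertions that follow from the code above; the import path is an assumption of this example:

```python
from sarvamai.core.http_client import get_request_body  # assumed import path

# An explicitly empty JSON body now survives as {} ...
json_body, data_body = get_request_body(json={}, data=None, request_options=None, omit=None)
assert json_body == {} and data_body is None

# ... while an absent body still collapses to None, as before.
json_body, data_body = get_request_body(json=None, data=None, request_options=None, omit=None)
assert json_body is None and data_body is None
```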
@@ -188,7 +271,7 @@ class HttpClient:
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 2,
+        retries: int = 0,
         omit: typing.Optional[typing.Any] = None,
         force_multipart: typing.Optional[bool] = None,
     ) -> httpx.Response:
@@ -210,9 +293,31 @@
         if (request_files is None or len(request_files) == 0) and force_multipart:
             request_files = FORCE_MULTIPART
 
+        data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart)
+
+        # Compute encoded params separately to avoid passing empty list to httpx
+        # (httpx strips existing query params from URL when params=[] is passed)
+        _encoded_params = encode_query(
+            jsonable_encoder(
+                remove_none_from_dict(
+                    remove_omit_from_dict(
+                        {
+                            **(params if params is not None else {}),
+                            **(
+                                request_options.get("additional_query_parameters", {}) or {}
+                                if request_options is not None
+                                else {}
+                            ),
+                        },
+                        omit,
+                    )
+                )
+            )
+        )
+
         response = self.httpx_client.request(
             method=method,
-            url=urllib.parse.urljoin(f"{base_url}/", path),
+            url=_build_url(base_url, path),
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
@@ -222,23 +327,7 @@
                     }
                 )
             ),
-            params=encode_query(
-                jsonable_encoder(
-                    remove_none_from_dict(
-                        remove_omit_from_dict(
-                            {
-                                **(params if params is not None else {}),
-                                **(
-                                    request_options.get("additional_query_parameters", {}) or {}
-                                    if request_options is not None
-                                    else {}
-                                ),
-                            },
-                            omit,
-                        )
-                    )
-                )
-            ),
+            params=_encoded_params if _encoded_params else None,
             json=json_body,
             data=data_body,
             content=content,
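Note: encode_query appears to return a list of key/value tuples, so an empty result used to reach httpx as []. The guard above matters because httpx treats an explicit params argument as "replace the query" rather than "no params"; a sketch of the difference, relying on the behaviour the diff comment describes (worth verifying against your installed httpx version):

```python
import httpx

url = "https://api.sarvam.ai/items?cursor=abc"  # hypothetical URL with an embedded query

print(httpx.Request("GET", url).url)             # cursor=abc preserved
print(httpx.Request("GET", url, params=[]).url)  # query stripped (the old bug)
# Passing params=None, as the new code does when nothing was encoded,
# leaves the URL untouched.
```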
@@ -246,9 +335,9 @@
             timeout=timeout,
         )
 
-        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
+        max_retries: int = request_options.get("max_retries", 2) if request_options is not None else 2
         if _should_retry(response=response):
-            if max_retries > retries:
+            if retries < max_retries:
                 time.sleep(_retry_timeout(response=response, retries=retries))
                 return self.request(
                     path=path,
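Note: retries are now on by default (budget of 2) and can be tuned per call through request_options; a sketch, assuming RequestOptions is the TypedDict imported at the top of this module:

```python
from sarvamai.core.request_options import RequestOptions  # assumed import path

# max_retries feeds the request_options.get("max_retries", 2) lookup above;
# 0 restores the old no-retry behaviour, larger values extend the budget.
no_retries: RequestOptions = {"max_retries": 0}
patient: RequestOptions = {"max_retries": 5}
```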
@@ -285,7 +374,7 @@
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 2,
+        retries: int = 0,
         omit: typing.Optional[typing.Any] = None,
         force_multipart: typing.Optional[bool] = None,
     ) -> typing.Iterator[httpx.Response]:
@@ -307,9 +396,31 @@
 
         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
 
+        data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart)
+
+        # Compute encoded params separately to avoid passing empty list to httpx
+        # (httpx strips existing query params from URL when params=[] is passed)
+        _encoded_params = encode_query(
+            jsonable_encoder(
+                remove_none_from_dict(
+                    remove_omit_from_dict(
+                        {
+                            **(params if params is not None else {}),
+                            **(
+                                request_options.get("additional_query_parameters", {})
+                                if request_options is not None
+                                else {}
+                            ),
+                        },
+                        omit,
+                    )
+                )
+            )
+        )
+
         with self.httpx_client.stream(
             method=method,
-            url=urllib.parse.urljoin(f"{base_url}/", path),
+            url=_build_url(base_url, path),
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
@@ -319,23 +430,7 @@
                     }
                 )
             ),
-            params=encode_query(
-                jsonable_encoder(
-                    remove_none_from_dict(
-                        remove_omit_from_dict(
-                            {
-                                **(params if params is not None else {}),
-                                **(
-                                    request_options.get("additional_query_parameters", {})
-                                    if request_options is not None
-                                    else {}
-                                ),
-                            },
-                            omit,
-                        )
-                    )
-                )
-            ),
+            params=_encoded_params if _encoded_params else None,
             json=json_body,
             data=data_body,
             content=content,
@@ -353,12 +448,19 @@ class AsyncHttpClient:
         base_timeout: typing.Callable[[], typing.Optional[float]],
         base_headers: typing.Callable[[], typing.Dict[str, str]],
         base_url: typing.Optional[typing.Callable[[], str]] = None,
+        async_base_headers: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Dict[str, str]]]] = None,
     ):
         self.base_url = base_url
         self.base_timeout = base_timeout
         self.base_headers = base_headers
+        self.async_base_headers = async_base_headers
        self.httpx_client = httpx_client
 
+    async def _get_headers(self) -> typing.Dict[str, str]:
+        if self.async_base_headers is not None:
+            return await self.async_base_headers()
+        return self.base_headers()
+
     def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
         base_url = maybe_base_url
         if self.base_url is not None and base_url is None:
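Note: async_base_headers lets header construction await something (for example, an OAuth token refresh) instead of being forced through the synchronous base_headers callable; _get_headers prefers it when present. A hypothetical wiring, where get_fresh_token and the exact keyword layout are assumptions of the sketch:

```python
import httpx

from sarvamai.core.http_client import AsyncHttpClient  # assumed import path


async def fetch_headers() -> dict:
    token = await get_fresh_token()  # hypothetical async token source
    return {"Authorization": f"Bearer {token}"}


client = AsyncHttpClient(
    httpx_client=httpx.AsyncClient(),
    base_timeout=lambda: 60.0,
    base_headers=lambda: {},             # sync fallback, still required
    base_url=lambda: "https://api.sarvam.ai",
    async_base_headers=fetch_headers,    # awaited by _get_headers() on every request
)
```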
@@ -386,7 +488,7 @@
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 2,
+        retries: int = 0,
         omit: typing.Optional[typing.Any] = None,
         force_multipart: typing.Optional[bool] = None,
     ) -> httpx.Response:
@@ -408,36 +510,45 @@
 
         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
 
+        data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart)
+
+        # Get headers (supports async token providers)
+        _headers = await self._get_headers()
+
+        # Compute encoded params separately to avoid passing empty list to httpx
+        # (httpx strips existing query params from URL when params=[] is passed)
+        _encoded_params = encode_query(
+            jsonable_encoder(
+                remove_none_from_dict(
+                    remove_omit_from_dict(
+                        {
+                            **(params if params is not None else {}),
+                            **(
+                                request_options.get("additional_query_parameters", {}) or {}
+                                if request_options is not None
+                                else {}
+                            ),
+                        },
+                        omit,
+                    )
+                )
+            )
+        )
+
         # Add the input to each of these and do None-safety checks
         response = await self.httpx_client.request(
             method=method,
-            url=urllib.parse.urljoin(f"{base_url}/", path),
+            url=_build_url(base_url, path),
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
-                        **self.base_headers(),
+                        **_headers,
                         **(headers if headers is not None else {}),
                         **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
                     }
                 )
             ),
-            params=encode_query(
-                jsonable_encoder(
-                    remove_none_from_dict(
-                        remove_omit_from_dict(
-                            {
-                                **(params if params is not None else {}),
-                                **(
-                                    request_options.get("additional_query_parameters", {}) or {}
-                                    if request_options is not None
-                                    else {}
-                                ),
-                            },
-                            omit,
-                        )
-                    )
-                )
-            ),
+            params=_encoded_params if _encoded_params else None,
             json=json_body,
             data=data_body,
             content=content,
@@ -445,9 +556,9 @@
             timeout=timeout,
         )
 
-        max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
+        max_retries: int = request_options.get("max_retries", 2) if request_options is not None else 2
         if _should_retry(response=response):
-            if max_retries > retries:
+            if retries < max_retries:
                 await asyncio.sleep(_retry_timeout(response=response, retries=retries))
                 return await self.request(
                     path=path,
@@ -483,7 +594,7 @@
         ] = None,
         headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
         request_options: typing.Optional[RequestOptions] = None,
-        retries: int = 2,
+        retries: int = 0,
         omit: typing.Optional[typing.Any] = None,
         force_multipart: typing.Optional[bool] = None,
     ) -> typing.AsyncIterator[httpx.Response]:
@@ -505,35 +616,44 @@
 
         json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
 
+        data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart)
+
+        # Get headers (supports async token providers)
+        _headers = await self._get_headers()
+
+        # Compute encoded params separately to avoid passing empty list to httpx
+        # (httpx strips existing query params from URL when params=[] is passed)
+        _encoded_params = encode_query(
+            jsonable_encoder(
+                remove_none_from_dict(
+                    remove_omit_from_dict(
+                        {
+                            **(params if params is not None else {}),
+                            **(
+                                request_options.get("additional_query_parameters", {})
+                                if request_options is not None
+                                else {}
+                            ),
+                        },
+                        omit=omit,
+                    )
+                )
+            )
+        )
+
         async with self.httpx_client.stream(
             method=method,
-            url=urllib.parse.urljoin(f"{base_url}/", path),
+            url=_build_url(base_url, path),
             headers=jsonable_encoder(
                 remove_none_from_dict(
                     {
-                        **self.base_headers(),
+                        **_headers,
                         **(headers if headers is not None else {}),
                         **(request_options.get("additional_headers", {}) if request_options is not None else {}),
                     }
                 )
             ),
-            params=encode_query(
-                jsonable_encoder(
-                    remove_none_from_dict(
-                        remove_omit_from_dict(
-                            {
-                                **(params if params is not None else {}),
-                                **(
-                                    request_options.get("additional_query_parameters", {})
-                                    if request_options is not None
-                                    else {}
-                                ),
-                            },
-                            omit=omit,
-                        )
-                    )
-                )
-            ),
+            params=_encoded_params if _encoded_params else None,
             json=json_body,
             data=data_body,
             content=content,
sarvamai/core/http_response.py
CHANGED
@@ -4,8 +4,8 @@ from typing import Dict, Generic, TypeVar
 
 import httpx
 
+# Generic to represent the underlying type of the data wrapped by the HTTP response.
 T = TypeVar("T")
-"""Generic to represent the underlying type of the data wrapped by the HTTP response."""
 
 
 class BaseHttpResponse:
sarvamai/core/http_sse/__init__.py
ADDED
@@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+# isort: skip_file
+
+import typing
+from importlib import import_module
+
+if typing.TYPE_CHECKING:
+    from ._api import EventSource, aconnect_sse, connect_sse
+    from ._exceptions import SSEError
+    from ._models import ServerSentEvent
+_dynamic_imports: typing.Dict[str, str] = {
+    "EventSource": "._api",
+    "SSEError": "._exceptions",
+    "ServerSentEvent": "._models",
+    "aconnect_sse": "._api",
+    "connect_sse": "._api",
+}
+
+
+def __getattr__(attr_name: str) -> typing.Any:
+    module_name = _dynamic_imports.get(attr_name)
+    if module_name is None:
+        raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}")
+    try:
+        module = import_module(module_name, __package__)
+        if module_name == f".{attr_name}":
+            return module
+        else:
+            return getattr(module, attr_name)
+    except ImportError as e:
+        raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e
+    except AttributeError as e:
+        raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e
+
+
+def __dir__():
+    lazy_attrs = list(_dynamic_imports.keys())
+    return sorted(lazy_attrs)
+
+
+__all__ = ["EventSource", "SSEError", "ServerSentEvent", "aconnect_sse", "connect_sse"]
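Note: the module uses PEP 562's module-level __getattr__ so that importing sarvamai.core.http_sse stays cheap; submodules load on first attribute access:

```python
import sarvamai.core.http_sse as http_sse

# At this point ._api, ._exceptions and ._models are not yet imported.
# This attribute access falls through to __getattr__ above, which runs
# import_module("._api", "sarvamai.core.http_sse") and returns the class.
EventSource = http_sse.EventSource
```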
sarvamai/core/http_sse/_api.py
ADDED
@@ -0,0 +1,112 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import re
+from contextlib import asynccontextmanager, contextmanager
+from typing import Any, AsyncGenerator, AsyncIterator, Iterator, cast
+
+import httpx
+from ._decoders import SSEDecoder
+from ._exceptions import SSEError
+from ._models import ServerSentEvent
+
+
+class EventSource:
+    def __init__(self, response: httpx.Response) -> None:
+        self._response = response
+
+    def _check_content_type(self) -> None:
+        content_type = self._response.headers.get("content-type", "").partition(";")[0]
+        if "text/event-stream" not in content_type:
+            raise SSEError(
+                f"Expected response header Content-Type to contain 'text/event-stream', got {content_type!r}"
+            )
+
+    def _get_charset(self) -> str:
+        """Extract charset from Content-Type header, fallback to UTF-8."""
+        content_type = self._response.headers.get("content-type", "")
+
+        # Parse charset parameter using regex
+        charset_match = re.search(r"charset=([^;\s]+)", content_type, re.IGNORECASE)
+        if charset_match:
+            charset = charset_match.group(1).strip("\"'")
+            # Validate that it's a known encoding
+            try:
+                # Test if the charset is valid by trying to encode/decode
+                "test".encode(charset).decode(charset)
+                return charset
+            except (LookupError, UnicodeError):
+                # If charset is invalid, fall back to UTF-8
+                pass
+
+        # Default to UTF-8 if no charset specified or invalid charset
+        return "utf-8"
+
+    @property
+    def response(self) -> httpx.Response:
+        return self._response
+
+    def iter_sse(self) -> Iterator[ServerSentEvent]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        charset = self._get_charset()
+
+        buffer = ""
+        for chunk in self._response.iter_bytes():
+            # Decode chunk using detected charset
+            text_chunk = chunk.decode(charset, errors="replace")
+            buffer += text_chunk
+
+            # Process complete lines
+            while "\n" in buffer:
+                line, buffer = buffer.split("\n", 1)
+                line = line.rstrip("\r")
+                sse = decoder.decode(line)
+                # when we reach a "\n\n" => line = ''
+                # => decoder will attempt to return an SSE Event
+                if sse is not None:
+                    yield sse
+
+        # Process any remaining data in buffer
+        if buffer.strip():
+            line = buffer.rstrip("\r")
+            sse = decoder.decode(line)
+            if sse is not None:
+                yield sse
+
+    async def aiter_sse(self) -> AsyncGenerator[ServerSentEvent, None]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        lines = cast(AsyncGenerator[str, None], self._response.aiter_lines())
+        try:
+            async for line in lines:
+                line = line.rstrip("\n")
+                sse = decoder.decode(line)
+                if sse is not None:
+                    yield sse
+        finally:
+            await lines.aclose()
+
+
+@contextmanager
+def connect_sse(client: httpx.Client, method: str, url: str, **kwargs: Any) -> Iterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)
+
+
+@asynccontextmanager
+async def aconnect_sse(
+    client: httpx.AsyncClient,
+    method: str,
+    url: str,
+    **kwargs: Any,
+) -> AsyncIterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    async with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)