google-genai 1.19.0__tar.gz → 1.20.0__tar.gz
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- {google_genai-1.19.0/google_genai.egg-info → google_genai-1.20.0}/PKG-INFO +3 -1
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_api_client.py +346 -101
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_common.py +15 -1
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_replay_api_client.py +1 -6
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/batches.py +1 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/caches.py +1 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/chats.py +5 -10
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/errors.py +32 -6
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/files.py +8 -4
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/models.py +103 -6
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/operations.py +1 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/tunings.py +1 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/types.py +173 -83
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/version.py +1 -1
- {google_genai-1.19.0 → google_genai-1.20.0/google_genai.egg-info}/PKG-INFO +3 -1
- {google_genai-1.19.0 → google_genai-1.20.0}/google_genai.egg-info/requires.txt +3 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/pyproject.toml +4 -1
- {google_genai-1.19.0 → google_genai-1.20.0}/LICENSE +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/MANIFEST.in +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/README.md +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/__init__.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_adapters.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_api_module.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_automatic_function_calling_util.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_base_url.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_extra_utils.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_live_converters.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_mcp_utils.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_test_api_client.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_tokens_converters.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/_transformers.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/client.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/live.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/live_music.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/pagers.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/py.typed +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google/genai/tokens.py +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google_genai.egg-info/SOURCES.txt +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google_genai.egg-info/dependency_links.txt +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/google_genai.egg-info/top_level.txt +0 -0
- {google_genai-1.19.0 → google_genai-1.20.0}/setup.cfg +0 -0
--- google_genai-1.19.0/PKG-INFO
+++ google_genai-1.20.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: google-genai
-Version: 1.19.0
+Version: 1.20.0
 Summary: GenAI Python SDK
 Author-email: Google LLC <googleapis-packages@google.com>
 License: Apache-2.0

@@ -27,6 +27,8 @@ Requires-Dist: pydantic<3.0.0,>=2.0.0
 Requires-Dist: requests<3.0.0,>=2.28.1
 Requires-Dist: websockets<15.1.0,>=13.0.0
 Requires-Dist: typing-extensions<5.0.0,>=4.11.0
+Provides-Extra: aiohttp
+Requires-Dist: aiohttp<4.0.0; extra == "aiohttp"
 Dynamic: license-file
 
 # Google Gen AI SDK
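The `aiohttp` extra is new in 1.20.0 and entirely optional: the SDK only takes the aiohttp code paths when the package is importable. A hedged sketch of opting in from the consumer side (the `pip` command is the standard extras syntax, not something this diff shows):

```python
# Install the optional extra first:  pip install "google-genai[aiohttp]"
# The SDK guards every aiohttp code path behind an import check, so a plain
# availability probe like this mirrors what _api_client.py does internally.
try:
    import aiohttp
    print(f'aiohttp {aiohttp.__version__} available; async paths can use it')
except ImportError:
    print('aiohttp not installed; the SDK keeps using httpx for async I/O')
```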
--- google_genai-1.19.0/google/genai/_api_client.py
+++ google_genai-1.20.0/google/genai/_api_client.py
@@ -34,7 +34,7 @@ import ssl
 import sys
 import threading
 import time
-from typing import Any, AsyncIterator, Optional, Tuple, Union
+from typing import Any, AsyncIterator, Optional, Tuple, TYPE_CHECKING, Union
 from urllib.parse import urlparse
 from urllib.parse import urlunparse
 
@@ -56,6 +56,19 @@ from .types import HttpOptions
 from .types import HttpOptionsDict
 from .types import HttpOptionsOrDict
 
+has_aiohttp = False
+try:
+  import aiohttp
+  has_aiohttp = True
+except ImportError:
+  pass
+
+has_aiohttp = False  # exclude from copybar export
+
+
+if TYPE_CHECKING:
+  from multidict import CIMultiDictProxy
+
 
 logger = logging.getLogger('google_genai._api_client')
 CHUNK_SIZE = 8 * 1024 * 1024  # 8 MB chunk size
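Two things stand out in this hunk. First, as released, the second `has_aiohttp = False` assignment (the line marked `exclude from copybar export`) runs unconditionally, so it appears to leave the flag disabled even when the `import aiohttp` above succeeded. Second, the `TYPE_CHECKING` block keeps `multidict` out of the runtime dependency graph; it is only needed to annotate header types. A minimal sketch of that pattern (the `describe` function is hypothetical):

```python
from typing import TYPE_CHECKING, Union

if TYPE_CHECKING:
    # Evaluated only by type checkers; no runtime dependency on multidict.
    from multidict import CIMultiDictProxy

def describe(headers: 'Union[dict[str, str], CIMultiDictProxy[str]]') -> int:
    # The annotation is a string, so it is never evaluated at runtime and
    # this function works even when multidict is not installed.
    return len(headers)

print(describe({'Content-Type': 'application/json'}))  # -> 1
```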
@@ -216,7 +229,7 @@ class HttpResponse:
 
   def __init__(
       self,
-      headers: Union[dict[str, str], httpx.Headers],
+      headers: Union[dict[str, str], httpx.Headers, 'CIMultiDictProxy[str]'],
       response_stream: Union[Any, str] = None,
       byte_stream: Union[Any, bytes] = None,
   ):
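`HttpResponse` now also accepts aiohttp's `CIMultiDictProxy` header container. Like `httpx.Headers`, it is case-insensitive, which is why later hunks can look up `X-Goog-Upload-Status` and `x-goog-upload-status` interchangeably. A small sketch (assumes `multidict` is installed, which aiohttp pulls in):

```python
from multidict import CIMultiDict, CIMultiDictProxy

headers = CIMultiDictProxy(CIMultiDict({'X-Goog-Upload-Status': 'final'}))
# Lookups are case-insensitive, so either spelling works.
assert headers.get('x-goog-upload-status') == 'final'
assert headers.get('X-Goog-Upload-Status') == 'final'
```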
@@ -282,6 +295,17 @@ class HttpResponse:
       if chunk.startswith('data: '):
         chunk = chunk[len('data: ') :]
       yield json.loads(chunk)
+    elif hasattr(self.response_stream, 'content'):
+      async for chunk in self.response_stream.content.iter_any():
+        # This is aiohttp.ClientResponse.
+        if chunk:
+          # In async streaming mode, the chunk of JSON is prefixed with
+          # "data:" which we must strip before parsing.
+          if not isinstance(chunk, str):
+            chunk = chunk.decode('utf-8')
+          if chunk.startswith('data: '):
+            chunk = chunk[len('data: ') :]
+          yield json.loads(chunk)
     else:
       raise ValueError('Error parsing streaming response.')
 
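Both streaming branches normalize a chunk the same way: decode bytes, strip the server-sent-events `data: ` prefix, then JSON-parse. The shared logic in isolation (a sketch; `parse_stream_chunk` is an illustrative name, not the SDK's):

```python
import json
from typing import Any, Union

def parse_stream_chunk(chunk: Union[str, bytes]) -> Any:
    """Decode a streamed chunk, drop the SSE 'data: ' prefix, parse JSON."""
    if not isinstance(chunk, str):
        chunk = chunk.decode('utf-8')
    if chunk.startswith('data: '):
        chunk = chunk[len('data: '):]
    return json.loads(chunk)

assert parse_stream_chunk(b'data: {"done": true}') == {'done': True}
```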
@@ -480,14 +504,17 @@ class BaseApiClient:
     if self._http_options.headers is not None:
       _append_library_version_headers(self._http_options.headers)
 
-    client_args, async_client_args = self.…
+    client_args, async_client_args = self._ensure_httpx_ssl_ctx(
+        self._http_options
+    )
     self._httpx_client = SyncHttpxClient(**client_args)
     self._async_httpx_client = AsyncHttpxClient(**async_client_args)
 
   @staticmethod
-  def …
-  …
-  …
+  def _ensure_httpx_ssl_ctx(
+      options: HttpOptions,
+  ) -> Tuple[dict[str, Any], dict[str, Any]]:
     """Ensures the SSL context is present in the HTTPX client args.
 
     Creates a default SSL context if one is not provided.
@@ -541,6 +568,58 @@ class BaseApiClient:
         _maybe_set(async_args, ctx),
     )
 
+  @staticmethod
+  def _ensure_aiohttp_ssl_ctx(options: HttpOptions) -> dict[str, Any]:
+    """Ensures the SSL context is present in the async client args.
+
+    Creates a default SSL context if one is not provided.
+
+    Args:
+      options: The http options to check for SSL context.
+
+    Returns:
+      The aiohttp ClientSession request args, with the SSL context included.
+    """
+
+    verify = 'verify'
+    async_args = options.async_client_args
+    ctx = async_args.get(verify) if async_args else None
+
+    if not ctx:
+      # Initialize the SSL context for the httpx client.
+      # Unlike requests, the aiohttp package does not automatically pull in
+      # the environment variables SSL_CERT_FILE or SSL_CERT_DIR. They need to
+      # be enabled explicitly. Instead of 'verify' at the client level in
+      # httpx, aiohttp uses 'ssl' at the request level.
+      ctx = ssl.create_default_context(
+          cafile=os.environ.get('SSL_CERT_FILE', certifi.where()),
+          capath=os.environ.get('SSL_CERT_DIR'),
+      )
+
+    def _maybe_set(
+        args: Optional[dict[str, Any]],
+        ctx: ssl.SSLContext,
+    ) -> dict[str, Any]:
+      """Sets the SSL context in the client args if not set.
+
+      Does not override the SSL context if it is already set.
+
+      Args:
+        args: The client args to check for SSL context.
+        ctx: The SSL context to set.
+
+      Returns:
+        The client args with the SSL context included.
+      """
+      if not args or not args.get(verify):
+        args = (args or {}).copy()
+        args['ssl'] = ctx
+      else:
+        args['ssl'] = args.pop(verify)
+      return args
+
+    return _maybe_set(async_args, ctx)
+
   def _websocket_base_url(self) -> str:
     url_parts = urlparse(self._http_options.base_url)
     return url_parts._replace(scheme='wss').geturl()  # type: ignore[arg-type, return-value]
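As the comments in the hunk note, aiohttp neither reads `SSL_CERT_FILE`/`SSL_CERT_DIR` the way `requests` does nor takes a client-level `verify=` like httpx; it expects an `ssl=` argument per request. A minimal sketch of the default-context construction, using the same environment variables the method consults:

```python
import os
import ssl
import certifi

# certifi.where() is the fallback CA bundle when SSL_CERT_FILE is unset.
ctx = ssl.create_default_context(
    cafile=os.environ.get('SSL_CERT_FILE', certifi.where()),
    capath=os.environ.get('SSL_CERT_DIR'),
)
# aiohttp would then take the context per request, roughly:
#   await session.request('GET', url, ssl=ctx)
```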
@@ -737,33 +816,96 @@ class BaseApiClient:
     data = http_request.data
 
     if stream:
-      [elided in this diff view: 15 lines of the previous httpx-based implementation]
+      if has_aiohttp:
+        session = aiohttp.ClientSession(
+            headers=http_request.headers,
+        )
+        if self._http_options.async_client_args:
+          # When using aiohttp request options with an ssl context, the
+          # latency will be higher than with httpx. Use it only if necessary;
+          # otherwise, the httpx async client is faster.
+          async_client_args = self._ensure_aiohttp_ssl_ctx(
+              self._http_options
+          )
+          response = await session.request(
+              method=http_request.method,
+              url=http_request.url,
+              headers=http_request.headers,
+              data=data,
+              timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+              **async_client_args,
+          )
+        else:
+          # aiohttp performs better than httpx without an ssl context.
+          response = await session.request(
+              method=http_request.method,
+              url=http_request.url,
+              headers=http_request.headers,
+              data=data,
+              timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+          )
+        await errors.APIError.raise_for_async_response(response)
+        return HttpResponse(response.headers, response)
+      else:
+        # aiohttp is not available. Fall back to httpx.
+        httpx_request = self._async_httpx_client.build_request(
+            method=http_request.method,
+            url=http_request.url,
+            content=data,
+            headers=http_request.headers,
+            timeout=http_request.timeout,
+        )
+        client_response = await self._async_httpx_client.send(
+            httpx_request,
+            stream=stream,
+        )
+        await errors.APIError.raise_for_async_response(client_response)
+        return HttpResponse(client_response.headers, client_response)
     else:
-      [elided in this diff view: 11 lines of the previous httpx-based implementation]
+      if has_aiohttp:
+        if self._http_options.async_client_args:
+          # Note that when using aiohttp request options with an ssl context,
+          # the latency will be higher than using the httpx async client with
+          # an ssl context.
+          async_client_args = self._ensure_aiohttp_ssl_ctx(
+              self._http_options
+          )
+          async with aiohttp.ClientSession(
+              headers=http_request.headers
+          ) as session:
+            response = await session.request(
+                method=http_request.method,
+                url=http_request.url,
+                headers=http_request.headers,
+                data=data,
+                timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+                **async_client_args,
+            )
+            await errors.APIError.raise_for_async_response(response)
+            return HttpResponse(response.headers, [await response.text()])
+        else:
+          # aiohttp performs better than httpx if not using an ssl context.
+          async with aiohttp.ClientSession(
+              headers=http_request.headers
+          ) as session:
+            response = await session.request(
+                method=http_request.method,
+                url=http_request.url,
+                headers=http_request.headers,
+                data=data,
+                timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+            )
+            await errors.APIError.raise_for_async_response(response)
+            return HttpResponse(response.headers, [await response.text()])
+      else:
+        # aiohttp is not available. Fall back to httpx.
+        client_response = await self._async_httpx_client.request(
+            method=http_request.method,
+            url=http_request.url,
+            headers=http_request.headers,
+            content=data,
+            timeout=http_request.timeout,
+        )
+        await errors.APIError.raise_for_async_response(client_response)
+        return HttpResponse(client_response.headers, [client_response.text])
 
   def get_read_only_http_options(self) -> dict[str, Any]:
     if isinstance(self._http_options, BaseModel):
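The request path now prefers aiohttp whenever the flag is set and keeps httpx as the fallback. A condensed, hedged sketch of that dispatch (headers, ssl args, and error handling omitted; `async_request` is an illustrative name):

```python
import httpx

try:
    import aiohttp
    has_aiohttp = True
except ImportError:
    has_aiohttp = False

async def async_request(method: str, url: str, data: bytes = b'') -> str:
    # Prefer aiohttp when importable; otherwise fall back to httpx.
    if has_aiohttp:
        async with aiohttp.ClientSession() as session:
            response = await session.request(method=method, url=url, data=data)
            return await response.text()
    async with httpx.AsyncClient() as client:
        response = await client.request(method=method, url=url, content=data)
        return response.text
```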
@@ -1048,68 +1190,155 @@ class BaseApiClient:
     """
     offset = 0
     # Upload the file in chunks
-    [elided in this diff view: 16 lines of the previous implementation]
-    else …
-    [elided in this diff view: 7 more lines]
+    if has_aiohttp:  # pylint: disable=g-import-not-at-top
+      async with aiohttp.ClientSession(
+          headers=self._http_options.headers
+      ) as session:
+        while True:
+          if isinstance(file, io.IOBase):
+            file_chunk = file.read(CHUNK_SIZE)
+          else:
+            file_chunk = await file.read(CHUNK_SIZE)
+          chunk_size = 0
+          if file_chunk:
+            chunk_size = len(file_chunk)
+          upload_command = 'upload'
+          # If last chunk, finalize the upload.
+          if chunk_size + offset >= upload_size:
+            upload_command += ', finalize'
+          http_options = http_options if http_options else self._http_options
+          timeout = (
+              http_options.get('timeout')
+              if isinstance(http_options, dict)
+              else http_options.timeout
+          )
+          if timeout is None:
+            # Per request timeout is not configured. Check the global timeout.
+            timeout = (
+                self._http_options.timeout
+                if isinstance(self._http_options, dict)
+                else self._http_options.timeout
+            )
+          timeout_in_seconds = _get_timeout_in_seconds(timeout)
+          upload_headers = {
+              'X-Goog-Upload-Command': upload_command,
+              'X-Goog-Upload-Offset': str(offset),
+              'Content-Length': str(chunk_size),
+          }
+          _populate_server_timeout_header(upload_headers, timeout_in_seconds)
+
+          retry_count = 0
+          response = None
+          while retry_count < MAX_RETRY_COUNT:
+            response = await session.request(
+                method='POST',
+                url=upload_url,
+                data=file_chunk,
+                headers=upload_headers,
+                timeout=aiohttp.ClientTimeout(connect=timeout_in_seconds),
+            )
+
+            if response.headers.get('X-Goog-Upload-Status'):
+              break
+            delay_seconds = INITIAL_RETRY_DELAY * (
+                DELAY_MULTIPLIER**retry_count
+            )
+            retry_count += 1
+            time.sleep(delay_seconds)
+
+          offset += chunk_size
+          if (
+              response is not None
+              and response.headers.get('X-Goog-Upload-Status') != 'active'
+          ):
+            break  # upload is complete or it has been interrupted.
+
+          if upload_size <= offset:  # Status is not finalized.
+            raise ValueError(
+                f'All content has been uploaded, but the upload status is not'
+                f' finalized.'
+            )
+        if (
+            response is not None
+            and response.headers.get('X-Goog-Upload-Status') != 'final'
+        ):
+          raise ValueError(
+              'Failed to upload file: Upload status is not finalized.'
+          )
+        return HttpResponse(
+            response.headers, response_stream=[await response.text()]
         )
-    [elided in this diff view: 16 lines of the previous implementation]
+    else:
+      # aiohttp is not available. Fall back to httpx.
+      while True:
+        if isinstance(file, io.IOBase):
+          file_chunk = file.read(CHUNK_SIZE)
+        else:
+          file_chunk = await file.read(CHUNK_SIZE)
+        chunk_size = 0
+        if file_chunk:
+          chunk_size = len(file_chunk)
+        upload_command = 'upload'
+        # If last chunk, finalize the upload.
+        if chunk_size + offset >= upload_size:
+          upload_command += ', finalize'
+        http_options = http_options if http_options else self._http_options
+        timeout = (
+            http_options.get('timeout')
+            if isinstance(http_options, dict)
+            else http_options.timeout
         )
-    if …
-    [elided in this diff view: 10 more lines]
+        if timeout is None:
+          # Per request timeout is not configured. Check the global timeout.
+          timeout = (
+              self._http_options.timeout
+              if isinstance(self._http_options, dict)
+              else self._http_options.timeout
+          )
+        timeout_in_seconds = _get_timeout_in_seconds(timeout)
+        upload_headers = {
+            'X-Goog-Upload-Command': upload_command,
+            'X-Goog-Upload-Offset': str(offset),
+            'Content-Length': str(chunk_size),
+        }
+        _populate_server_timeout_header(upload_headers, timeout_in_seconds)
+
+        retry_count = 0
+        client_response = None
+        while retry_count < MAX_RETRY_COUNT:
+          client_response = await self._async_httpx_client.request(
+              method='POST',
+              url=upload_url,
+              content=file_chunk,
+              headers=upload_headers,
+              timeout=timeout_in_seconds,
+          )
+          if client_response is not None and client_response.headers and client_response.headers.get('x-goog-upload-status'):
+            break
+          delay_seconds = INITIAL_RETRY_DELAY * (DELAY_MULTIPLIER**retry_count)
+          retry_count += 1
+          time.sleep(delay_seconds)
+
+        offset += chunk_size
+        if (
+            client_response is not None
+            and client_response.headers.get('x-goog-upload-status') != 'active'
+        ):
+          break  # upload is complete or it has been interrupted.
+
+        if upload_size <= offset:  # Status is not finalized.
+          raise ValueError(
+              'All content has been uploaded, but the upload status is not'
+              ' finalized.'
+          )
+      if (
+          client_response is not None
+          and client_response.headers.get('x-goog-upload-status') != 'final'
+      ):
         raise ValueError(
-            '…
-            f' finalized.'
+            'Failed to upload file: Upload status is not finalized.'
         )
-
-    raise ValueError(
-        'Failed to upload file: Upload status is not finalized.'
-    )
-    return HttpResponse(response.headers, response_stream=[response.text])
+      return HttpResponse(client_response.headers, response_stream=[client_response.text])
 
   async def async_download_file(
       self,
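Each chunk POST is retried until the server returns an upload-status header, with exponentially growing delays. Note the loop sleeps with blocking `time.sleep`, which stalls the event loop inside these async methods. The delay schedule in isolation (the constant names match the diff, but the values below are assumptions; the real ones are defined elsewhere in `_api_client.py`):

```python
INITIAL_RETRY_DELAY = 1.0  # assumed value
DELAY_MULTIPLIER = 2.0     # assumed value
MAX_RETRY_COUNT = 3        # assumed value

def backoff_delays() -> list[float]:
    """Sleeps the upload loop would take between attempts: 1s, 2s, 4s, ..."""
    return [INITIAL_RETRY_DELAY * (DELAY_MULTIPLIER ** n)
            for n in range(MAX_RETRY_COUNT)]

assert backoff_delays() == [1.0, 2.0, 4.0]
```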
@@ -1137,18 +1366,34 @@ class BaseApiClient:
     else:
       data = http_request.data
 
-    [elided in this diff view: 8 lines of the previous httpx-based implementation]
+    if has_aiohttp:
+      async with aiohttp.ClientSession(headers=http_request.headers) as session:
+        response = await session.request(
+            method=http_request.method,
+            url=http_request.url,
+            headers=http_request.headers,
+            data=data,
+            timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+        )
+        await errors.APIError.raise_for_async_response(response)
 
-    [elided in this diff view: 3 more lines]
+        return HttpResponse(
+            response.headers, byte_stream=[await response.read()]
+        ).byte_stream[0]
+    else:
+      # aiohttp is not available. Fall back to httpx.
+      client_response = await self._async_httpx_client.request(
+          method=http_request.method,
+          url=http_request.url,
+          headers=http_request.headers,
+          content=data,
+          timeout=http_request.timeout,
+      )
+      await errors.APIError.raise_for_async_response(client_response)
+
+      return HttpResponse(
+          client_response.headers, byte_stream=[client_response.read()]
+      ).byte_stream[0]
 
   # This method does nothing in the real api client. It is used in the
   # replay_api_client to verify the response from the SDK method matches the
--- google_genai-1.19.0/google/genai/_common.py
+++ google_genai-1.20.0/google/genai/_common.py
@@ -253,7 +253,21 @@ class BaseModel(pydantic.BaseModel):
     # To maintain forward compatibility, we need to remove extra fields from
     # the response.
     # We will provide another mechanism to allow users to access these fields.
-    _remove_extra_fields(cls, response)
+
+    # For Agent Engine we don't want to call _remove_all_fields because the
+    # user may pass a dict that is not a subclass of BaseModel.
+    # If more modules require that we skip this, we may want a different approach.
+    should_skip_removing_fields = (
+        kwargs is not None
+        and 'config' in kwargs
+        and kwargs['config'] is not None
+        and isinstance(kwargs['config'], dict)
+        and 'include_all_fields' in kwargs['config']
+        and kwargs['config']['include_all_fields']
+    )
+
+    if not should_skip_removing_fields:
+      _remove_extra_fields(cls, response)
     validated_response = cls.model_validate(response)
     return validated_response
 
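The guard only skips `_remove_extra_fields` when the caller passes a dict-valued `config` that explicitly sets `include_all_fields`, which the comment attributes to Agent Engine passing raw dicts. The same gate, condensed into a standalone sketch:

```python
from typing import Any, Optional

def should_skip_removing_fields(kwargs: Optional[dict[str, Any]]) -> bool:
    # True only for a dict-valued 'config' that explicitly opts in.
    return bool(
        kwargs
        and isinstance(kwargs.get('config'), dict)
        and kwargs['config'].get('include_all_fields')
    )

assert should_skip_removing_fields({'config': {'include_all_fields': True}})
assert not should_skip_removing_fields({'config': None})
assert not should_skip_removing_fields(None)
```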
--- google_genai-1.19.0/google/genai/_replay_api_client.py
+++ google_genai-1.20.0/google/genai/_replay_api_client.py
@@ -454,12 +454,7 @@ class ReplayApiClient(BaseApiClient):
     if isinstance(response_model, list):
       response_model = response_model[0]
     print('response_model: ', response_model.model_dump(exclude_none=True))
-    if …
-      actual = response_model.model_dump(
-          exclude={'result'}, exclude_none=True, mode='json'
-      )
-    else:
-      actual = response_model.model_dump(exclude_none=True, mode='json')
+    actual = response_model.model_dump(exclude_none=True, mode='json')
     expected = interaction.response.sdk_response_segments[
         self._sdk_response_index
     ]
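The replay client now serializes every response model the same way instead of special-casing one shape. `model_dump(exclude_none=True, mode='json')` is standard pydantic v2; a small sketch with a hypothetical model:

```python
from typing import Optional
import pydantic

class ExampleResponse(pydantic.BaseModel):
    text: str
    tokens: Optional[int] = None

actual = ExampleResponse(text='hi').model_dump(exclude_none=True, mode='json')
# exclude_none drops unset optional fields; mode='json' yields JSON-safe types.
assert actual == {'text': 'hi'}
```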
--- google_genai-1.19.0/google/genai/chats.py
+++ google_genai-1.20.0/google/genai/chats.py
@@ -63,13 +63,8 @@ def _extract_curated_history(
   """Extracts the curated (valid) history from a comprehensive history.
 
   The comprehensive history contains all turns (user input and model responses),
-  including any invalid or rejected model outputs.
-
-
-  A "turn" starts with one user input (a single content) and then follows by
-  corresponding model response (which may consist of multiple contents).
-  Turns are assumed to alternate: user input, model output, user input, model
-  output, etc.
+  including any invalid or rejected model outputs. This function filters that
+  history to return only the valid turns.
 
   Args:
     comprehensive_history: A list representing the complete chat history.

@@ -84,8 +79,6 @@ def _extract_curated_history(
   length = len(comprehensive_history)
   i = 0
   current_input = comprehensive_history[i]
-  if current_input.role != "user":
-    raise ValueError("History must start with a user turn.")
   while i < length:
     if comprehensive_history[i].role not in ["user", "model"]:
       raise ValueError(

@@ -94,6 +87,7 @@ def _extract_curated_history(
 
     if comprehensive_history[i].role == "user":
       current_input = comprehensive_history[i]
+      curated_history.append(current_input)
       i += 1
     else:
       current_output = []

@@ -104,8 +98,9 @@ def _extract_curated_history(
         is_valid = False
       i += 1
       if is_valid:
-        curated_history.append(current_input)
         curated_history.extend(current_output)
+      elif curated_history:
+        curated_history.pop()
   return curated_history
 
 
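The reworked loop appends each user turn to the curated history as soon as it is seen and pops it back off if the model output that follows turns out to be invalid; the old requirement that history start with a user turn is gone. A boiled-down sketch of the new shape, using a hypothetical `Turn` type with a boolean validity flag:

```python
from dataclasses import dataclass

@dataclass
class Turn:
    role: str  # 'user' or 'model'
    valid: bool = True

def extract_curated_history(history: list[Turn]) -> list[Turn]:
    curated: list[Turn] = []
    i = 0
    while i < len(history):
        if history[i].role == 'user':
            curated.append(history[i])  # appended eagerly, may be popped
            i += 1
        else:
            output, is_valid = [], True
            while i < len(history) and history[i].role == 'model':
                output.append(history[i])
                if not history[i].valid:
                    is_valid = False
                i += 1
            if is_valid:
                curated.extend(output)
            elif curated:
                curated.pop()  # drop the user turn that led to invalid output
    return curated

turns = [Turn('user'), Turn('model', valid=False), Turn('user'), Turn('model')]
assert [t.role for t in extract_curated_history(turns)] == ['user', 'model']
```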