google-genai 1.18.0__py3-none-any.whl → 1.20.0__py3-none-any.whl
This diff compares publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- google/genai/_api_client.py +367 -102
- google/genai/_common.py +15 -1
- google/genai/_live_converters.py +7 -1
- google/genai/_replay_api_client.py +1 -6
- google/genai/_tokens_converters.py +7 -1
- google/genai/batches.py +1 -0
- google/genai/caches.py +8 -1
- google/genai/chats.py +5 -10
- google/genai/errors.py +32 -6
- google/genai/files.py +8 -4
- google/genai/models.py +124 -8
- google/genai/operations.py +1 -0
- google/genai/tokens.py +7 -2
- google/genai/tunings.py +1 -0
- google/genai/types.py +209 -88
- google/genai/version.py +1 -1
- {google_genai-1.18.0.dist-info → google_genai-1.20.0.dist-info}/METADATA +3 -1
- google_genai-1.20.0.dist-info/RECORD +35 -0
- google_genai-1.18.0.dist-info/RECORD +0 -35
- {google_genai-1.18.0.dist-info → google_genai-1.20.0.dist-info}/WHEEL +0 -0
- {google_genai-1.18.0.dist-info → google_genai-1.20.0.dist-info}/licenses/LICENSE +0 -0
- {google_genai-1.18.0.dist-info → google_genai-1.20.0.dist-info}/top_level.txt +0 -0
google/genai/_api_client.py
CHANGED
```diff
@@ -34,7 +34,7 @@ import ssl
 import sys
 import threading
 import time
-from typing import Any, AsyncIterator, Optional, Tuple, Union
+from typing import Any, AsyncIterator, Optional, Tuple, TYPE_CHECKING, Union
 from urllib.parse import urlparse
 from urllib.parse import urlunparse
 
```
```diff
@@ -56,6 +56,19 @@ from .types import HttpOptions
 from .types import HttpOptionsDict
 from .types import HttpOptionsOrDict
 
+has_aiohttp = False
+try:
+  import aiohttp
+  has_aiohttp = True
+except ImportError:
+  pass
+
+has_aiohttp = False  # exclude from copybar export
+
+
+if TYPE_CHECKING:
+  from multidict import CIMultiDictProxy
+
 
 logger = logging.getLogger('google_genai._api_client')
 CHUNK_SIZE = 8 * 1024 * 1024  # 8 MB chunk size
```
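1.20.0 treats aiohttp as an optional dependency for the async code paths: the guard above only flips `has_aiohttp` when the import succeeds (note that, as shipped in this hunk, the flag is then reset to `False` unconditionally a couple of lines later). A minimal standalone sketch of the same optional-import pattern, not specific to this SDK:

```python
# Optional-dependency guard mirroring the hunk above.
has_aiohttp = False
try:
  import aiohttp  # only present when the user installed aiohttp

  has_aiohttp = True
except ImportError:
  pass

if has_aiohttp:
  print('aiohttp available:', aiohttp.__version__)
else:
  print('aiohttp not installed; an httpx-based fallback would be used')
```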
```diff
@@ -68,6 +81,26 @@ class EphemeralTokenAPIKeyError(ValueError):
   """Error raised when the API key is invalid."""
 
 
+# This method checks for the API key in the environment variables. Google API
+# key is precedenced over Gemini API key.
+def _get_env_api_key() -> Optional[str]:
+  """Gets the API key from environment variables, prioritizing GOOGLE_API_KEY.
+
+  Returns:
+    The API key string if found, otherwise None. Empty string is considered
+    invalid.
+  """
+  env_google_api_key = os.environ.get('GOOGLE_API_KEY', None)
+  env_gemini_api_key = os.environ.get('GEMINI_API_KEY', None)
+  if env_google_api_key and env_gemini_api_key:
+    logger.warning(
+        'Both GOOGLE_API_KEY and GEMINI_API_KEY are set. Using'
+        ' GOOGLE_API_KEY.'
+    )
+
+  return env_google_api_key or env_gemini_api_key or None
+
+
 def _append_library_version_headers(headers: dict[str, str]) -> None:
   """Appends the telemetry header to the headers dict."""
   library_label = f'google-genai-sdk/{version.__version__}'
```
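A short standalone illustration of the precedence implemented by `_get_env_api_key()` above; it only restates what the diffed helper does (GOOGLE_API_KEY wins, empty strings count as unset):

```python
import logging
import os
from typing import Optional

logger = logging.getLogger('example')


def get_env_api_key() -> Optional[str]:
  """Mirrors _get_env_api_key(): GOOGLE_API_KEY is preferred over GEMINI_API_KEY."""
  env_google_api_key = os.environ.get('GOOGLE_API_KEY', None)
  env_gemini_api_key = os.environ.get('GEMINI_API_KEY', None)
  if env_google_api_key and env_gemini_api_key:
    logger.warning('Both GOOGLE_API_KEY and GEMINI_API_KEY are set. Using GOOGLE_API_KEY.')
  return env_google_api_key or env_gemini_api_key or None


os.environ['GOOGLE_API_KEY'] = 'key-from-google-env'
os.environ['GEMINI_API_KEY'] = 'key-from-gemini-env'
print(get_env_api_key())  # key-from-google-env

os.environ['GOOGLE_API_KEY'] = ''  # empty string is treated as unset
print(get_env_api_key())  # key-from-gemini-env
```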
```diff
@@ -196,7 +229,7 @@ class HttpResponse:
 
   def __init__(
       self,
-      headers: Union[dict[str, str], httpx.Headers],
+      headers: Union[dict[str, str], httpx.Headers, 'CIMultiDictProxy[str]'],
       response_stream: Union[Any, str] = None,
       byte_stream: Union[Any, bytes] = None,
   ):
```
```diff
@@ -262,6 +295,17 @@ class HttpResponse:
             if chunk.startswith('data: '):
               chunk = chunk[len('data: ') :]
             yield json.loads(chunk)
+      elif hasattr(self.response_stream, 'content'):
+        async for chunk in self.response_stream.content.iter_any():
+          # This is aiohttp.ClientResponse.
+          if chunk:
+            # In async streaming mode, the chunk of JSON is prefixed with
+            # "data:" which we must strip before parsing.
+            if not isinstance(chunk, str):
+              chunk = chunk.decode('utf-8')
+            if chunk.startswith('data: '):
+              chunk = chunk[len('data: ') :]
+            yield json.loads(chunk)
       else:
         raise ValueError('Error parsing streaming response.')
 
```
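Both the existing httpx branch and the new aiohttp branch strip an SSE-style `data: ` prefix before JSON-decoding each streamed chunk. A standalone sketch of that parsing step (the byte chunks below are illustrative stand-ins for what the response stream would yield):

```python
import json

# Example chunks as an aiohttp content iterator might yield them (bytes).
raw_chunks = [
    b'data: {"candidates": [{"index": 0}]}',
    b'data: {"candidates": [{"index": 1}]}',
]

for chunk in raw_chunks:
  if not isinstance(chunk, str):
    chunk = chunk.decode('utf-8')
  if chunk.startswith('data: '):
    chunk = chunk[len('data: '):]
  print(json.loads(chunk))
```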
```diff
@@ -371,7 +415,7 @@ class BaseApiClient:
     # Retrieve implicitly set values from the environment.
     env_project = os.environ.get('GOOGLE_CLOUD_PROJECT', None)
     env_location = os.environ.get('GOOGLE_CLOUD_LOCATION', None)
-    env_api_key =
+    env_api_key = _get_env_api_key()
     self.project = project or env_project
     self.location = location or env_location
     self.api_key = api_key or env_api_key
```
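As the last context line shows, an explicitly passed `api_key` still takes precedence over anything found in the environment. A brief usage sketch (the key value is a placeholder):

```python
from google import genai

# Explicit argument wins over GOOGLE_API_KEY / GEMINI_API_KEY.
client = genai.Client(api_key='YOUR_API_KEY')  # placeholder value

# With no argument, the client falls back to the environment variables,
# using the precedence implemented by _get_env_api_key().
# client = genai.Client()
```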
```diff
@@ -460,14 +504,17 @@ class BaseApiClient:
     if self._http_options.headers is not None:
       _append_library_version_headers(self._http_options.headers)
 
-    client_args, async_client_args = self.
+    client_args, async_client_args = self._ensure_httpx_ssl_ctx(
+        self._http_options
+    )
     self._httpx_client = SyncHttpxClient(**client_args)
     self._async_httpx_client = AsyncHttpxClient(**async_client_args)
 
   @staticmethod
-  def
-  …
+  def _ensure_httpx_ssl_ctx(
+      options: HttpOptions,
+  ) -> Tuple[dict[str, Any], dict[str, Any]]:
+    """Ensures the SSL context is present in the HTTPX client args.
 
     Creates a default SSL context if one is not provided.
 
```
```diff
@@ -521,6 +568,58 @@ class BaseApiClient:
         _maybe_set(async_args, ctx),
     )
 
+  @staticmethod
+  def _ensure_aiohttp_ssl_ctx(options: HttpOptions) -> dict[str, Any]:
+    """Ensures the SSL context is present in the async client args.
+
+    Creates a default SSL context if one is not provided.
+
+    Args:
+      options: The http options to check for SSL context.
+
+    Returns:
+      An async aiohttp ClientSession._request args.
+    """
+
+    verify = 'verify'
+    async_args = options.async_client_args
+    ctx = async_args.get(verify) if async_args else None
+
+    if not ctx:
+      # Initialize the SSL context for the httpx client.
+      # Unlike requests, the aiohttp package does not automatically pull in the
+      # environment variables SSL_CERT_FILE or SSL_CERT_DIR. They need to be
+      # enabled explicitly. Instead of 'verify' at client level in httpx,
+      # aiohttp uses 'ssl' at request level.
+      ctx = ssl.create_default_context(
+          cafile=os.environ.get('SSL_CERT_FILE', certifi.where()),
+          capath=os.environ.get('SSL_CERT_DIR'),
+      )
+
+    def _maybe_set(
+        args: Optional[dict[str, Any]],
+        ctx: ssl.SSLContext,
+    ) -> dict[str, Any]:
+      """Sets the SSL context in the client args if not set.
+
+      Does not override the SSL context if it is already set.
+
+      Args:
+        args: The client args to to check for SSL context.
+        ctx: The SSL context to set.
+
+      Returns:
+        The client args with the SSL context included.
+      """
+      if not args or not args.get(verify):
+        args = (args or {}).copy()
+        args['ssl'] = ctx
+      else:
+        args['ssl'] = args.pop(verify)
+      return args
+
+    return _maybe_set(async_args, ctx)
+
   def _websocket_base_url(self) -> str:
     url_parts = urlparse(self._http_options.base_url)
     return url_parts._replace(scheme='wss').geturl()  # type: ignore[arg-type, return-value]
```
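For reference, a hedged sketch of how a caller might route a custom SSL context through `client_args`/`async_client_args` (the `verify` key that `_ensure_httpx_ssl_ctx` and `_ensure_aiohttp_ssl_ctx` look for). The `HttpOptions` wiring below is inferred from this diff rather than taken from documentation, so treat it as an assumption:

```python
import os
import ssl

import certifi
from google import genai
from google.genai import types

# Same kind of default context the helpers above build, honoring
# SSL_CERT_FILE / SSL_CERT_DIR when they are set.
ctx = ssl.create_default_context(
    cafile=os.environ.get('SSL_CERT_FILE', certifi.where()),
    capath=os.environ.get('SSL_CERT_DIR'),
)

# Assumption: client_args / async_client_args are forwarded to the underlying
# HTTP clients, with 'verify' remapped to aiohttp's per-request 'ssl' argument.
client = genai.Client(
    api_key='YOUR_API_KEY',  # placeholder
    http_options=types.HttpOptions(
        client_args={'verify': ctx},
        async_client_args={'verify': ctx},
    ),
)
```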
```diff
@@ -717,33 +816,96 @@ class BaseApiClient:
       data = http_request.data
 
     if stream:
-      …
+      if has_aiohttp:
+        session = aiohttp.ClientSession(
+            headers=http_request.headers,
+        )
+        if self._http_options.async_client_args:
+          # When using aiohttp request options with ssl context, the latency will higher than using httpx.
+          # Use it only if necessary. Otherwise, httpx asyncclient is faster.
+          async_client_args = self._ensure_aiohttp_ssl_ctx(
+              self._http_options
+          )
+          response = await session.request(
+              method=http_request.method,
+              url=http_request.url,
+              headers=http_request.headers,
+              data=data,
+              timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+              **async_client_args,
+          )
+        else:
+          # Aiohttp performs better than httpx w/o ssl context.
+          response = await session.request(
+              method=http_request.method,
+              url=http_request.url,
+              headers=http_request.headers,
+              data=data,
+              timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+          )
+        await errors.APIError.raise_for_async_response(response)
+        return HttpResponse(response.headers, response)
+      else:
+        # aiohttp is not available. Fall back to httpx.
+        httpx_request = self._async_httpx_client.build_request(
+            method=http_request.method,
+            url=http_request.url,
+            content=data,
+            headers=http_request.headers,
+            timeout=http_request.timeout,
+        )
+        client_response = await self._async_httpx_client.send(
+            httpx_request,
+            stream=stream,
+        )
+        await errors.APIError.raise_for_async_response(client_response)
+        return HttpResponse(client_response.headers, client_response)
     else:
-      …
+      if has_aiohttp:
+        if self._http_options.async_client_args:
+          # Note that when using aiohttp request options with ssl context, the
+          # latency will higher than using httpx async client with ssl context.
+          async_client_args = self._ensure_aiohttp_ssl_ctx(
+              self._http_options
+          )
+          async with aiohttp.ClientSession(
+              headers=http_request.headers
+          ) as session:
+            response = await session.request(
+                method=http_request.method,
+                url=http_request.url,
+                headers=http_request.headers,
+                data=data,
+                timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+                **async_client_args,
+            )
+            await errors.APIError.raise_for_async_response(response)
+            return HttpResponse(response.headers, [await response.text()])
+        else:
+          # Aiohttp performs better than httpx if not using ssl context.
+          async with aiohttp.ClientSession(
+              headers=http_request.headers
+          ) as session:
+            response = await session.request(
+                method=http_request.method,
+                url=http_request.url,
+                headers=http_request.headers,
+                data=data,
+                timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+            )
+            await errors.APIError.raise_for_async_response(response)
+            return HttpResponse(response.headers, [await response.text()])
+      else:
+        # aiohttp is not available. Fall back to httpx.
+        client_response = await self._async_httpx_client.request(
+            method=http_request.method,
+            url=http_request.url,
+            headers=http_request.headers,
+            content=data,
+            timeout=http_request.timeout,
+        )
+        await errors.APIError.raise_for_async_response(client_response)
+        return HttpResponse(client_response.headers, [client_response.text])
 
   def get_read_only_http_options(self) -> dict[str, Any]:
     if isinstance(self._http_options, BaseModel):
```
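The branch above changes which transport backs the async surface: aiohttp when it is importable, otherwise the existing httpx client. A small usage sketch of that async surface, which is the code path affected by this hunk (model name is illustrative; an API key is assumed to be in the environment):

```python
import asyncio

from google import genai


async def main() -> None:
  client = genai.Client()  # picks up GOOGLE_API_KEY / GEMINI_API_KEY
  response = await client.aio.models.generate_content(
      model='gemini-2.0-flash-001',  # illustrative model name
      contents='Say hello in one short sentence.',
  )
  print(response.text)


asyncio.run(main())
```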
```diff
@@ -1028,68 +1190,155 @@ class BaseApiClient:
     """
     offset = 0
     # Upload the file in chunks
-    …
+    if has_aiohttp:  # pylint: disable=g-import-not-at-top
+      async with aiohttp.ClientSession(
+          headers=self._http_options.headers
+      ) as session:
+        while True:
+          if isinstance(file, io.IOBase):
+            file_chunk = file.read(CHUNK_SIZE)
+          else:
+            file_chunk = await file.read(CHUNK_SIZE)
+          chunk_size = 0
+          if file_chunk:
+            chunk_size = len(file_chunk)
+          upload_command = 'upload'
+          # If last chunk, finalize the upload.
+          if chunk_size + offset >= upload_size:
+            upload_command += ', finalize'
+          http_options = http_options if http_options else self._http_options
+          timeout = (
+              http_options.get('timeout')
+              if isinstance(http_options, dict)
+              else http_options.timeout
+          )
+          if timeout is None:
+            # Per request timeout is not configured. Check the global timeout.
+            timeout = (
+                self._http_options.timeout
+                if isinstance(self._http_options, dict)
+                else self._http_options.timeout
+            )
+          timeout_in_seconds = _get_timeout_in_seconds(timeout)
+          upload_headers = {
+              'X-Goog-Upload-Command': upload_command,
+              'X-Goog-Upload-Offset': str(offset),
+              'Content-Length': str(chunk_size),
+          }
+          _populate_server_timeout_header(upload_headers, timeout_in_seconds)
+
+          retry_count = 0
+          response = None
+          while retry_count < MAX_RETRY_COUNT:
+            response = await session.request(
+                method='POST',
+                url=upload_url,
+                data=file_chunk,
+                headers=upload_headers,
+                timeout=aiohttp.ClientTimeout(connect=timeout_in_seconds),
+            )
+
+            if response.headers.get('X-Goog-Upload-Status'):
+              break
+            delay_seconds = INITIAL_RETRY_DELAY * (
+                DELAY_MULTIPLIER**retry_count
+            )
+            retry_count += 1
+            time.sleep(delay_seconds)
+
+          offset += chunk_size
+          if (
+              response is not None
+              and response.headers.get('X-Goog-Upload-Status') != 'active'
+          ):
+            break  # upload is complete or it has been interrupted.
+
+          if upload_size <= offset:  # Status is not finalized.
+            raise ValueError(
+                f'All content has been uploaded, but the upload status is not'
+                f' finalized.'
+            )
+        if (
+            response is not None
+            and response.headers.get('X-Goog-Upload-Status') != 'final'
+        ):
+          raise ValueError(
+              'Failed to upload file: Upload status is not finalized.'
+          )
+        return HttpResponse(
+            response.headers, response_stream=[await response.text()]
         )
-    …
+    else:
+      # aiohttp is not available. Fall back to httpx.
+      while True:
+        if isinstance(file, io.IOBase):
+          file_chunk = file.read(CHUNK_SIZE)
+        else:
+          file_chunk = await file.read(CHUNK_SIZE)
+        chunk_size = 0
+        if file_chunk:
+          chunk_size = len(file_chunk)
+        upload_command = 'upload'
+        # If last chunk, finalize the upload.
+        if chunk_size + offset >= upload_size:
+          upload_command += ', finalize'
+        http_options = http_options if http_options else self._http_options
+        timeout = (
+            http_options.get('timeout')
+            if isinstance(http_options, dict)
+            else http_options.timeout
         )
-    if
-    …
+        if timeout is None:
+          # Per request timeout is not configured. Check the global timeout.
+          timeout = (
+              self._http_options.timeout
+              if isinstance(self._http_options, dict)
+              else self._http_options.timeout
+          )
+        timeout_in_seconds = _get_timeout_in_seconds(timeout)
+        upload_headers = {
+            'X-Goog-Upload-Command': upload_command,
+            'X-Goog-Upload-Offset': str(offset),
+            'Content-Length': str(chunk_size),
+        }
+        _populate_server_timeout_header(upload_headers, timeout_in_seconds)
+
+        retry_count = 0
+        client_response = None
+        while retry_count < MAX_RETRY_COUNT:
+          client_response = await self._async_httpx_client.request(
+              method='POST',
+              url=upload_url,
+              content=file_chunk,
+              headers=upload_headers,
+              timeout=timeout_in_seconds,
+          )
+          if client_response is not None and client_response.headers and client_response.headers.get('x-goog-upload-status'):
+            break
+          delay_seconds = INITIAL_RETRY_DELAY * (DELAY_MULTIPLIER**retry_count)
+          retry_count += 1
+          time.sleep(delay_seconds)
+
+        offset += chunk_size
+        if (
+            client_response is not None
+            and client_response.headers.get('x-goog-upload-status') != 'active'
+        ):
+          break  # upload is complete or it has been interrupted.
+
+        if upload_size <= offset:  # Status is not finalized.
+          raise ValueError(
+              'All content has been uploaded, but the upload status is not'
+              ' finalized.'
+          )
+        if (
+            client_response is not None
+            and client_response.headers.get('x-goog-upload-status') != 'final'
+        ):
           raise ValueError(
-              …
-              f' finalized.'
+              'Failed to upload file: Upload status is not finalized.'
           )
-    …
-    raise ValueError(
-        'Failed to upload file: Upload status is not finalized.'
-    )
-    return HttpResponse(response.headers, response_stream=[response.text])
+      return HttpResponse(client_response.headers, response_stream=[client_response.text])
 
   async def async_download_file(
       self,
```
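Both upload branches retry each chunk until an `X-Goog-Upload-Status` header appears, sleeping `INITIAL_RETRY_DELAY * DELAY_MULTIPLIER**retry_count` seconds between attempts. A standalone sketch of that backoff schedule (the constant values below are assumptions for illustration, not the SDK's actual values):

```python
# Assumed values, for illustration only; the real constants live in
# google/genai/_api_client.py.
INITIAL_RETRY_DELAY = 1.0
DELAY_MULTIPLIER = 2.0
MAX_RETRY_COUNT = 3


def backoff_schedule() -> list[float]:
  """Sleep durations the upload loop would use between retries."""
  return [
      INITIAL_RETRY_DELAY * (DELAY_MULTIPLIER**retry_count)
      for retry_count in range(MAX_RETRY_COUNT)
  ]


print(backoff_schedule())  # [1.0, 2.0, 4.0] with the assumed values
```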
```diff
@@ -1117,18 +1366,34 @@ class BaseApiClient:
     else:
       data = http_request.data
 
-    …
+    if has_aiohttp:
+      async with aiohttp.ClientSession(headers=http_request.headers) as session:
+        response = await session.request(
+            method=http_request.method,
+            url=http_request.url,
+            headers=http_request.headers,
+            data=data,
+            timeout=aiohttp.ClientTimeout(connect=http_request.timeout),
+        )
+        await errors.APIError.raise_for_async_response(response)
 
-    …
+        return HttpResponse(
+            response.headers, byte_stream=[await response.read()]
+        ).byte_stream[0]
+    else:
+      # aiohttp is not available. Fall back to httpx.
+      client_response = await self._async_httpx_client.request(
+          method=http_request.method,
+          url=http_request.url,
+          headers=http_request.headers,
+          content=data,
+          timeout=http_request.timeout,
+      )
+      await errors.APIError.raise_for_async_response(client_response)
+
+      return HttpResponse(
+          client_response.headers, byte_stream=[client_response.read()]
+      ).byte_stream[0]
 
   # This method does nothing in the real api client. It is used in the
   # replay_api_client to verify the response from the SDK method matches the
```
google/genai/_common.py
CHANGED
```diff
@@ -253,7 +253,21 @@ class BaseModel(pydantic.BaseModel):
     # To maintain forward compatibility, we need to remove extra fields from
     # the response.
     # We will provide another mechanism to allow users to access these fields.
-    …
+
+    # For Agent Engine we don't want to call _remove_all_fields because the
+    # user may pass a dict that is not a subclass of BaseModel.
+    # If more modules require we skip this, we may want a different approach
+    should_skip_removing_fields = (
+        kwargs is not None and
+        'config' in kwargs and
+        kwargs['config'] is not None and
+        isinstance(kwargs['config'], dict) and
+        'include_all_fields' in kwargs['config']
+        and kwargs['config']['include_all_fields']
+    )
+
+    if not should_skip_removing_fields:
+      _remove_extra_fields(cls, response)
     validated_response = cls.model_validate(response)
     return validated_response
 
```
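A compact illustration of the new escape hatch: extra response fields are only preserved when a plain-dict `config` carries a truthy `include_all_fields`. The sketch below simply restates the condition added above:

```python
def should_skip_removing_fields(kwargs: dict) -> bool:
  """Re-states the condition added to BaseModel above."""
  return bool(
      kwargs is not None
      and 'config' in kwargs
      and kwargs['config'] is not None
      and isinstance(kwargs['config'], dict)
      and 'include_all_fields' in kwargs['config']
      and kwargs['config']['include_all_fields']
  )


print(should_skip_removing_fields({'config': {'include_all_fields': True}}))  # True
print(should_skip_removing_fields({'config': {}}))                            # False
```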
google/genai/_live_converters.py
CHANGED
```diff
@@ -981,7 +981,13 @@ def _Tool_to_vertex(
     )
 
   if getv(from_object, ['url_context']) is not None:
-    …
+    setv(
+        to_object,
+        ['urlContext'],
+        _UrlContext_to_vertex(
+            api_client, getv(from_object, ['url_context']), to_object
+        ),
+    )
 
   if getv(from_object, ['code_execution']) is not None:
     setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))
```
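The converter now maps the `url_context` tool field through `_UrlContext_to_vertex` instead of copying it verbatim. For context, a hedged sketch of how the `url_context` tool is configured from user code in general (this hunk is only the converter plumbing; the model name below is illustrative and an API key is assumed to be in the environment):

```python
from google import genai
from google.genai import types

client = genai.Client()
response = client.models.generate_content(
    model='gemini-2.5-flash',  # illustrative model name
    contents='Summarize https://example.com in one sentence.',
    config=types.GenerateContentConfig(
        tools=[types.Tool(url_context=types.UrlContext())],
    ),
)
print(response.text)
```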
google/genai/_replay_api_client.py
CHANGED
```diff
@@ -454,12 +454,7 @@ class ReplayApiClient(BaseApiClient):
     if isinstance(response_model, list):
       response_model = response_model[0]
     print('response_model: ', response_model.model_dump(exclude_none=True))
-    …
-      actual = response_model.model_dump(
-          exclude={'result'}, exclude_none=True, mode='json'
-      )
-    else:
-      actual = response_model.model_dump(exclude_none=True, mode='json')
+    actual = response_model.model_dump(exclude_none=True, mode='json')
     expected = interaction.response.sdk_response_segments[
         self._sdk_response_index
     ]
```
google/genai/_tokens_converters.py
CHANGED
```diff
@@ -981,7 +981,13 @@ def _Tool_to_vertex(
     )
 
   if getv(from_object, ['url_context']) is not None:
-    …
+    setv(
+        to_object,
+        ['urlContext'],
+        _UrlContext_to_vertex(
+            api_client, getv(from_object, ['url_context']), to_object
+        ),
+    )
 
   if getv(from_object, ['code_execution']) is not None:
     setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))
```
google/genai/batches.py
CHANGED
google/genai/caches.py
CHANGED
```diff
@@ -18,6 +18,7 @@
 import logging
 from typing import Any, Optional, Union
 from urllib.parse import urlencode
+
 from . import _api_module
 from . import _common
 from . import _transformers as t
```
```diff
@@ -1092,7 +1093,13 @@ def _Tool_to_vertex(
     )
 
   if getv(from_object, ['url_context']) is not None:
-    …
+    setv(
+        to_object,
+        ['urlContext'],
+        _UrlContext_to_vertex(
+            api_client, getv(from_object, ['url_context']), to_object
+        ),
+    )
 
   if getv(from_object, ['code_execution']) is not None:
     setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))
```
|