google-genai 1.48.0__tar.gz → 1.50.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {google_genai-1.48.0/google_genai.egg-info → google_genai-1.50.0}/PKG-INFO +1 -1
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_api_client.py +36 -7
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_live_converters.py +23 -17
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_local_tokenizer_loader.py +0 -9
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_operations_converters.py +96 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_replay_api_client.py +24 -39
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_tokens_converters.py +10 -7
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_transformers.py +2 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/batches.py +15 -7
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/caches.py +16 -10
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/client.py +11 -0
- google_genai-1.50.0/google/genai/documents.py +552 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/errors.py +46 -2
- google_genai-1.50.0/google/genai/file_search_stores.py +1312 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/models.py +51 -20
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/operations.py +4 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/pagers.py +7 -1
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/tunings.py +313 -2
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/types.py +1723 -388
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/version.py +1 -1
- {google_genai-1.48.0 → google_genai-1.50.0/google_genai.egg-info}/PKG-INFO +1 -1
- {google_genai-1.48.0 → google_genai-1.50.0}/google_genai.egg-info/SOURCES.txt +2 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/pyproject.toml +5 -1
- {google_genai-1.48.0 → google_genai-1.50.0}/LICENSE +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/MANIFEST.in +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/README.md +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/__init__.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_adapters.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_api_module.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_automatic_function_calling_util.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_base_transformers.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_base_url.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_common.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_extra_utils.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_mcp_utils.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/_test_api_client.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/chats.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/files.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/live.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/live_music.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/local_tokenizer.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/py.typed +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google/genai/tokens.py +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google_genai.egg-info/dependency_links.txt +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google_genai.egg-info/requires.txt +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/google_genai.egg-info/top_level.txt +0 -0
- {google_genai-1.48.0 → google_genai-1.50.0}/setup.cfg +0 -0
--- google_genai-1.48.0/google/genai/_api_client.py
+++ google_genai-1.50.0/google/genai/_api_client.py
@@ -37,14 +37,12 @@ import time
from typing import Any, AsyncIterator, Iterator, Optional, Tuple, TYPE_CHECKING, Union
from urllib.parse import urlparse
from urllib.parse import urlunparse
-import warnings

import anyio
import certifi
import google.auth
import google.auth.credentials
from google.auth.credentials import Credentials
-from google.auth.transport.requests import Request
import httpx
from pydantic import BaseModel
from pydantic import ValidationError
@@ -197,6 +195,7 @@ def load_auth(*, project: Union[str, None]) -> Tuple[Credentials, str]:


def refresh_auth(credentials: Credentials) -> Credentials:
+  from google.auth.transport.requests import Request
  credentials.refresh(Request())  # type: ignore[no-untyped-call]
  return credentials

@@ -1347,9 +1346,21 @@ class BaseApiClient:

    session_response = self._request(http_request, http_options, stream=True)
    for chunk in session_response.segments():
-
-
-
+      chunk_dump = json.dumps(chunk)
+      try:
+        if chunk_dump.startswith('{"error":'):
+          chunk_json = json.loads(chunk_dump)
+          errors.APIError.raise_error(
+              chunk_json.get('error', {}).get('code'),
+              chunk_json,
+              session_response,
+          )
+      except json.decoder.JSONDecodeError:
+        logger.debug(
+            'Failed to decode chunk that contains an error: %s' % chunk_dump
+        )
+        pass
+      yield SdkHttpResponse(headers=session_response.headers, body=chunk_dump)

  async def async_request(
      self,
@@ -1383,7 +1394,21 @@ class BaseApiClient:

    async def async_generator():  # type: ignore[no-untyped-def]
      async for chunk in response:
-
+        chunk_dump = json.dumps(chunk)
+        try:
+          if chunk_dump.startswith('{"error":'):
+            chunk_json = json.loads(chunk_dump)
+            await errors.APIError.raise_error_async(
+                chunk_json.get('error', {}).get('code'),
+                chunk_json,
+                response,
+            )
+        except json.decoder.JSONDecodeError:
+          logger.debug(
+              'Failed to decode chunk that contains an error: %s' % chunk_dump
+          )
+          pass
+        yield SdkHttpResponse(headers=response.headers, body=chunk_dump)

    return async_generator()  # type: ignore[no-untyped-call]

@@ -1493,7 +1518,7 @@ class BaseApiClient:
            f'All content has been uploaded, but the upload status is not'
            f' finalized.'
        )
-
+    errors.APIError.raise_for_response(response)
    if response.headers.get('x-goog-upload-status') != 'final':
      raise ValueError('Failed to upload file: Upload status is not finalized.')
    return HttpResponse(response.headers, response_stream=[response.text])
@@ -1656,6 +1681,8 @@ class BaseApiClient:
            f'All content has been uploaded, but the upload status is not'
            f' finalized.'
        )
+
+    await errors.APIError.raise_for_async_response(response)
    if (
        response is not None
        and response.headers.get('X-Goog-Upload-Status') != 'final'
@@ -1733,6 +1760,8 @@ class BaseApiClient:
            'All content has been uploaded, but the upload status is not'
            ' finalized.'
        )
+
+    await errors.APIError.raise_for_async_response(client_response)
    if (
        client_response is not None
        and client_response.headers.get('x-goog-upload-status') != 'final'
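
With these _api_client.py changes, an `{"error": ...}` payload that arrives mid-stream is raised as an exception instead of being yielded as an ordinary chunk, and failed resumable uploads raise a typed APIError before the generic status check. A minimal sketch of what that looks like from the caller's side (illustrative only; it assumes an API key in the environment and uses the public streaming API, which is not part of this diff):

```python
from google import genai
from google.genai import errors

client = genai.Client()  # assumes GOOGLE_API_KEY is set in the environment

try:
    # In 1.50.0 a streamed chunk whose body starts with '{"error":' is turned
    # into an APIError by the base client instead of being yielded to us.
    for chunk in client.models.generate_content_stream(
        model='gemini-2.5-flash', contents='Write a haiku about diffs.'
    ):
        print(chunk.text or '', end='')
except errors.APIError as e:
    print(f'stream failed: {e.code} {e.message}')
```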
--- google_genai-1.48.0/google/genai/_live_converters.py
+++ google_genai-1.50.0/google/genai/_live_converters.py
@@ -127,6 +127,13 @@ def _GenerationConfig_to_vertex(
        getv(from_object, ['model_selection_config']),
    )

+  if getv(from_object, ['response_json_schema']) is not None:
+    setv(
+        to_object,
+        ['responseJsonSchema'],
+        getv(from_object, ['response_json_schema']),
+    )
+
  if getv(from_object, ['audio_timestamp']) is not None:
    setv(to_object, ['audioTimestamp'], getv(from_object, ['audio_timestamp']))

@@ -165,13 +172,6 @@ def _GenerationConfig_to_vertex(
        to_object, ['presencePenalty'], getv(from_object, ['presence_penalty'])
    )

-  if getv(from_object, ['response_json_schema']) is not None:
-    setv(
-        to_object,
-        ['responseJsonSchema'],
-        getv(from_object, ['response_json_schema']),
-    )
-
  if getv(from_object, ['response_logprobs']) is not None:
    setv(
        to_object,
@@ -1269,16 +1269,12 @@ def _Tool_to_mldev(
        getv(from_object, ['google_search_retrieval']),
    )

-  if getv(from_object, ['google_maps']) is not None:
-    setv(
-        to_object,
-        ['googleMaps'],
-        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
-    )
-
  if getv(from_object, ['computer_use']) is not None:
    setv(to_object, ['computerUse'], getv(from_object, ['computer_use']))

+  if getv(from_object, ['file_search']) is not None:
+    setv(to_object, ['fileSearch'], getv(from_object, ['file_search']))
+
  if getv(from_object, ['code_execution']) is not None:
    setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))

@@ -1287,6 +1283,13 @@ def _Tool_to_mldev(
        'enterprise_web_search parameter is not supported in Gemini API.'
    )

+  if getv(from_object, ['google_maps']) is not None:
+    setv(
+        to_object,
+        ['googleMaps'],
+        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
+    )
+
  if getv(from_object, ['google_search']) is not None:
    setv(
        to_object,
@@ -1325,12 +1328,12 @@ def _Tool_to_vertex(
        getv(from_object, ['google_search_retrieval']),
    )

-  if getv(from_object, ['google_maps']) is not None:
-    setv(to_object, ['googleMaps'], getv(from_object, ['google_maps']))
-
  if getv(from_object, ['computer_use']) is not None:
    setv(to_object, ['computerUse'], getv(from_object, ['computer_use']))

+  if getv(from_object, ['file_search']) is not None:
+    raise ValueError('file_search parameter is not supported in Vertex AI.')
+
  if getv(from_object, ['code_execution']) is not None:
    setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))

@@ -1341,6 +1344,9 @@ def _Tool_to_vertex(
        getv(from_object, ['enterprise_web_search']),
    )

+  if getv(from_object, ['google_maps']) is not None:
+    setv(to_object, ['googleMaps'], getv(from_object, ['google_maps']))
+
  if getv(from_object, ['google_search']) is not None:
    setv(to_object, ['googleSearch'], getv(from_object, ['google_search']))

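The tool hunks above add a `file_search` field that is passed through to the Gemini API as `fileSearch` and explicitly rejected for Vertex AI. A hedged sketch of the user-facing shape (the `file_search` field name comes from the diff; its payload type lives in the large types.py change and is not shown here):

```python
from google.genai import types

# Gemini API (mldev): _Tool_to_mldev passes this through as {'fileSearch': ...}.
# Vertex AI: _Tool_to_vertex raises
#   ValueError('file_search parameter is not supported in Vertex AI.')
tool = types.Tool(file_search={})  # empty payload just to show the wiring
```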
--- google_genai-1.48.0/google/genai/_local_tokenizer_loader.py
+++ google_genai-1.50.0/google/genai/_local_tokenizer_loader.py
@@ -28,9 +28,6 @@ from sentencepiece import sentencepiece_model_pb2

# Source of truth: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models
_GEMINI_MODELS_TO_TOKENIZER_NAMES = {
-    "gemini-1.0-pro": "gemma2",
-    "gemini-1.5-pro": "gemma2",
-    "gemini-1.5-flash": "gemma2",
    "gemini-2.5-pro": "gemma3",
    "gemini-2.5-flash": "gemma3",
    "gemini-2.5-flash-lite": "gemma3",
@@ -38,12 +35,6 @@ _GEMINI_MODELS_TO_TOKENIZER_NAMES = {
    "gemini-2.0-flash-lite": "gemma3",
}
_GEMINI_STABLE_MODELS_TO_TOKENIZER_NAMES = {
-    "gemini-1.0-pro-001": "gemma2",
-    "gemini-1.0-pro-002": "gemma2",
-    "gemini-1.5-pro-001": "gemma2",
-    "gemini-1.5-flash-001": "gemma2",
-    "gemini-1.5-flash-002": "gemma2",
-    "gemini-1.5-pro-002": "gemma2",
    "gemini-2.5-pro-preview-06-05": "gemma3",
    "gemini-2.5-pro-preview-05-06": "gemma3",
    "gemini-2.5-pro-exp-03-25": "gemma3",
--- google_genai-1.48.0/google/genai/_operations_converters.py
+++ google_genai-1.50.0/google/genai/_operations_converters.py
@@ -256,6 +256,102 @@ def _GetOperationParameters_to_vertex(
  return to_object


+def _ImportFileOperation_from_mldev(
+    from_object: Union[dict[str, Any], object],
+    parent_object: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
+  to_object: dict[str, Any] = {}
+  if getv(from_object, ['name']) is not None:
+    setv(to_object, ['name'], getv(from_object, ['name']))
+
+  if getv(from_object, ['metadata']) is not None:
+    setv(to_object, ['metadata'], getv(from_object, ['metadata']))
+
+  if getv(from_object, ['done']) is not None:
+    setv(to_object, ['done'], getv(from_object, ['done']))
+
+  if getv(from_object, ['error']) is not None:
+    setv(to_object, ['error'], getv(from_object, ['error']))
+
+  if getv(from_object, ['response']) is not None:
+    setv(
+        to_object,
+        ['response'],
+        _ImportFileResponse_from_mldev(
+            getv(from_object, ['response']), to_object
+        ),
+    )
+
+  return to_object
+
+
+def _ImportFileResponse_from_mldev(
+    from_object: Union[dict[str, Any], object],
+    parent_object: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
+  to_object: dict[str, Any] = {}
+  if getv(from_object, ['sdkHttpResponse']) is not None:
+    setv(
+        to_object, ['sdk_http_response'], getv(from_object, ['sdkHttpResponse'])
+    )
+
+  if getv(from_object, ['parent']) is not None:
+    setv(to_object, ['parent'], getv(from_object, ['parent']))
+
+  if getv(from_object, ['documentName']) is not None:
+    setv(to_object, ['document_name'], getv(from_object, ['documentName']))
+
+  return to_object
+
+
+def _UploadToFileSearchStoreOperation_from_mldev(
+    from_object: Union[dict[str, Any], object],
+    parent_object: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
+  to_object: dict[str, Any] = {}
+  if getv(from_object, ['name']) is not None:
+    setv(to_object, ['name'], getv(from_object, ['name']))
+
+  if getv(from_object, ['metadata']) is not None:
+    setv(to_object, ['metadata'], getv(from_object, ['metadata']))
+
+  if getv(from_object, ['done']) is not None:
+    setv(to_object, ['done'], getv(from_object, ['done']))
+
+  if getv(from_object, ['error']) is not None:
+    setv(to_object, ['error'], getv(from_object, ['error']))
+
+  if getv(from_object, ['response']) is not None:
+    setv(
+        to_object,
+        ['response'],
+        _UploadToFileSearchStoreResponse_from_mldev(
+            getv(from_object, ['response']), to_object
+        ),
+    )
+
+  return to_object
+
+
+def _UploadToFileSearchStoreResponse_from_mldev(
+    from_object: Union[dict[str, Any], object],
+    parent_object: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
+  to_object: dict[str, Any] = {}
+  if getv(from_object, ['sdkHttpResponse']) is not None:
+    setv(
+        to_object, ['sdk_http_response'], getv(from_object, ['sdkHttpResponse'])
+    )
+
+  if getv(from_object, ['parent']) is not None:
+    setv(to_object, ['parent'], getv(from_object, ['parent']))
+
+  if getv(from_object, ['documentName']) is not None:
+    setv(to_object, ['document_name'], getv(from_object, ['documentName']))
+
+  return to_object
+
+
def _Video_from_mldev(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
--- google_genai-1.48.0/google/genai/_replay_api_client.py
+++ google_genai-1.50.0/google/genai/_replay_api_client.py
@@ -17,17 +17,16 @@

import base64
import copy
-import
+import contextlib
import enum
import inspect
import io
import json
import os
import re
-from typing import Any, Literal, Optional, Union
+from typing import Any, Literal, Optional, Union, Iterator, AsyncIterator

import google.auth
-from requests.exceptions import HTTPError

from . import errors
from ._api_client import BaseApiClient
@@ -210,6 +209,22 @@ def pop_undeterministic_headers(headers: dict[str, str]) -> None:
  headers.pop('Server-Timing', None)  # pytype: disable=attribute-error


+@contextlib.contextmanager
+def _record_on_api_error(client: 'ReplayApiClient', http_request: HttpRequest) -> Iterator[None]:
+  try:
+    yield
+  except errors.APIError as e:
+    client._record_interaction(http_request, e)
+    raise e
+
+@contextlib.asynccontextmanager
+async def _async_record_on_api_error(client: 'ReplayApiClient', http_request: HttpRequest) -> AsyncIterator[None]:
+  try:
+    yield
+  except errors.APIError as e:
+    client._record_interaction(http_request, e)
+    raise e
+
class ReplayRequest(BaseModel):
  """Represents a single request in a replay."""

@@ -512,11 +527,8 @@ class ReplayApiClient(BaseApiClient):
    self._initialize_replay_session_if_not_loaded()
    if self._should_call_api():
      _debug_print('api mode request: %s' % http_request)
-      try:
+      with _record_on_api_error(self, http_request):
        result = super()._request(http_request, http_options, stream)
-      except errors.APIError as e:
-        self._record_interaction(http_request, e)
-        raise e
      if stream:
        result_segments = []
        for segment in result.segments():
@@ -541,13 +553,10 @@ class ReplayApiClient(BaseApiClient):
    self._initialize_replay_session_if_not_loaded()
    if self._should_call_api():
      _debug_print('api mode request: %s' % http_request)
-      try:
+      async with _async_record_on_api_error(self, http_request):
        result = await super()._async_request(
            http_request, http_options, stream
        )
-      except errors.APIError as e:
-        self._record_interaction(http_request, e)
-        raise e
      if stream:
        result_segments = []
        async for segment in result.async_segments():
@@ -587,16 +596,10 @@ class ReplayApiClient(BaseApiClient):
    )
    if self._should_call_api():
      result: Union[str, HttpResponse]
-      try:
+      with _record_on_api_error(self, request):
        result = super().upload_file(
            file_path, upload_url, upload_size, http_options=http_options
        )
-      except HTTPError as e:
-        result = HttpResponse(
-            dict(e.response.headers), [json.dumps({'reason': e.response.reason})]
-        )
-        result.status_code = e.response.status_code
-        raise e
      self._record_interaction(request, result)
      return result
    else:
@@ -626,16 +629,10 @@ class ReplayApiClient(BaseApiClient):
    )
    if self._should_call_api():
      result: HttpResponse
-      try:
+      async with _async_record_on_api_error(self, request):
        result = await super().async_upload_file(
            file_path, upload_url, upload_size, http_options=http_options
        )
-      except HTTPError as e:
-        result = HttpResponse(
-            dict(e.response.headers), [json.dumps({'reason': e.response.reason})]
-        )
-        result.status_code = e.response.status_code
-        raise e
      self._record_interaction(request, result)
      return result
    else:
@@ -649,14 +646,8 @@ class ReplayApiClient(BaseApiClient):
        'get', path=path, request_dict={}, http_options=http_options
    )
    if self._should_call_api():
-      try:
+      with _record_on_api_error(self, request):
        result = super().download_file(path, http_options=http_options)
-      except HTTPError as e:
-        result = HttpResponse(
-            dict(e.response.headers), [json.dumps({'reason': e.response.reason})]
-        )
-        result.status_code = e.response.status_code
-        raise e
      self._record_interaction(request, result)
      return result
    else:
@@ -670,16 +661,10 @@ class ReplayApiClient(BaseApiClient):
        'get', path=path, request_dict={}, http_options=http_options
    )
    if self._should_call_api():
-      try:
+      async with _async_record_on_api_error(self, request):
        result = await super().async_download_file(
            path, http_options=http_options
        )
-      except HTTPError as e:
-        result = HttpResponse(
-            dict(e.response.headers), [json.dumps({'reason': e.response.reason})]
-        )
-        result.status_code = e.response.status_code
-        raise e
      self._record_interaction(request, result)
      return result
    else:
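
The _replay_api_client.py hunks replace six copies of the same try/except bookkeeping with two small context managers. A stripped-down illustration of the pattern (standalone code, not the SDK's; the names below are made up):

```python
import contextlib
from typing import Callable, Iterator


@contextlib.contextmanager
def record_on_error(record: Callable[[Exception], None]) -> Iterator[None]:
  """Records a failure exactly once at a central place, then re-raises it."""
  try:
    yield
  except RuntimeError as e:
    record(e)
    raise


def demo() -> None:
  seen: list[Exception] = []
  try:
    with record_on_error(seen.append):
      raise RuntimeError('boom')
  except RuntimeError:
    pass
  assert len(seen) == 1  # recorded once, and the error still propagated
```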
--- google_genai-1.48.0/google/genai/_tokens_converters.py
+++ google_genai-1.50.0/google/genai/_tokens_converters.py
@@ -454,16 +454,12 @@ def _Tool_to_mldev(
        getv(from_object, ['google_search_retrieval']),
    )

-  if getv(from_object, ['google_maps']) is not None:
-    setv(
-        to_object,
-        ['googleMaps'],
-        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
-    )
-
  if getv(from_object, ['computer_use']) is not None:
    setv(to_object, ['computerUse'], getv(from_object, ['computer_use']))

+  if getv(from_object, ['file_search']) is not None:
+    setv(to_object, ['fileSearch'], getv(from_object, ['file_search']))
+
  if getv(from_object, ['code_execution']) is not None:
    setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))

@@ -472,6 +468,13 @@ def _Tool_to_mldev(
        'enterprise_web_search parameter is not supported in Gemini API.'
    )

+  if getv(from_object, ['google_maps']) is not None:
+    setv(
+        to_object,
+        ['googleMaps'],
+        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
+    )
+
  if getv(from_object, ['google_search']) is not None:
    setv(
        to_object,
--- google_genai-1.48.0/google/genai/_transformers.py
+++ google_genai-1.50.0/google/genai/_transformers.py
@@ -199,6 +199,8 @@ def _resource_name(
def t_model(client: _api_client.BaseApiClient, model: str) -> str:
  if not model:
    raise ValueError('model is required.')
+  if '..' in model or '?' in model or '&' in model:
+    raise ValueError('invalid model parameter.')
  if client.vertexai:
    if (
        model.startswith('projects/')
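
The t_model hunk rejects model strings containing `..`, `?`, or `&` before any URL is built. A short sketch of the effect (the calls are illustrative; only the rejected characters come from the diff):

```python
from google import genai

client = genai.Client()

# Unchanged: plain model IDs and full resource names still pass through.
client.models.generate_content(model='gemini-2.5-flash', contents='Hi')

# New in 1.50.0: these now fail fast with ValueError('invalid model parameter.')
# before any request is sent, e.g.
#   client.models.generate_content(model='gemini-2.5-flash?key=abc', contents='Hi')
#   client.models.generate_content(model='models/../tunedModels/x', contents='Hi')
```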
--- google_genai-1.48.0/google/genai/batches.py
+++ google_genai-1.50.0/google/genai/batches.py
@@ -328,6 +328,11 @@ def _BatchJob_from_vertex(
        ),
    )

+  if getv(from_object, ['completionStats']) is not None:
+    setv(
+        to_object, ['completion_stats'], getv(from_object, ['completionStats'])
+    )
+
  return to_object


@@ -1332,16 +1337,12 @@ def _Tool_to_mldev(
        getv(from_object, ['google_search_retrieval']),
    )

-  if getv(from_object, ['google_maps']) is not None:
-    setv(
-        to_object,
-        ['googleMaps'],
-        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
-    )
-
  if getv(from_object, ['computer_use']) is not None:
    setv(to_object, ['computerUse'], getv(from_object, ['computer_use']))

+  if getv(from_object, ['file_search']) is not None:
+    setv(to_object, ['fileSearch'], getv(from_object, ['file_search']))
+
  if getv(from_object, ['code_execution']) is not None:
    setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))

@@ -1350,6 +1351,13 @@ def _Tool_to_mldev(
        'enterprise_web_search parameter is not supported in Gemini API.'
    )

+  if getv(from_object, ['google_maps']) is not None:
+    setv(
+        to_object,
+        ['googleMaps'],
+        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
+    )
+
  if getv(from_object, ['google_search']) is not None:
    setv(
        to_object,
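
The first batches.py hunk maps Vertex AI's `completionStats` onto the returned job as `completion_stats`, so per-job progress counts are no longer dropped by the converter. A hedged sketch of reading it (the project and job name are placeholders; the attribute name is the key set by the converter above):

```python
from google import genai

# completionStats comes from Vertex AI batch prediction jobs, so this applies
# to the Vertex AI client.
client = genai.Client(vertexai=True, project='my-project', location='us-central1')

job = client.batches.get(
    name='projects/my-project/locations/us-central1/batchPredictionJobs/123'  # placeholder
)
print(job.state, job.completion_stats)
```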
--- google_genai-1.48.0/google/genai/caches.py
+++ google_genai-1.50.0/google/genai/caches.py
@@ -591,16 +591,12 @@ def _Tool_to_mldev(
        getv(from_object, ['google_search_retrieval']),
    )

-  if getv(from_object, ['google_maps']) is not None:
-    setv(
-        to_object,
-        ['googleMaps'],
-        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
-    )
-
  if getv(from_object, ['computer_use']) is not None:
    setv(to_object, ['computerUse'], getv(from_object, ['computer_use']))

+  if getv(from_object, ['file_search']) is not None:
+    setv(to_object, ['fileSearch'], getv(from_object, ['file_search']))
+
  if getv(from_object, ['code_execution']) is not None:
    setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))

@@ -609,6 +605,13 @@ def _Tool_to_mldev(
        'enterprise_web_search parameter is not supported in Gemini API.'
    )

+  if getv(from_object, ['google_maps']) is not None:
+    setv(
+        to_object,
+        ['googleMaps'],
+        _GoogleMaps_to_mldev(getv(from_object, ['google_maps']), to_object),
+    )
+
  if getv(from_object, ['google_search']) is not None:
    setv(
        to_object,
@@ -647,12 +650,12 @@ def _Tool_to_vertex(
        getv(from_object, ['google_search_retrieval']),
    )

-  if getv(from_object, ['google_maps']) is not None:
-    setv(to_object, ['googleMaps'], getv(from_object, ['google_maps']))
-
  if getv(from_object, ['computer_use']) is not None:
    setv(to_object, ['computerUse'], getv(from_object, ['computer_use']))

+  if getv(from_object, ['file_search']) is not None:
+    raise ValueError('file_search parameter is not supported in Vertex AI.')
+
  if getv(from_object, ['code_execution']) is not None:
    setv(to_object, ['codeExecution'], getv(from_object, ['code_execution']))

@@ -663,6 +666,9 @@ def _Tool_to_vertex(
        getv(from_object, ['enterprise_web_search']),
    )

+  if getv(from_object, ['google_maps']) is not None:
+    setv(to_object, ['googleMaps'], getv(from_object, ['google_maps']))
+
  if getv(from_object, ['google_search']) is not None:
    setv(to_object, ['googleSearch'], getv(from_object, ['google_search']))

--- google_genai-1.48.0/google/genai/client.py
+++ google_genai-1.50.0/google/genai/client.py
@@ -27,6 +27,7 @@ from ._replay_api_client import ReplayApiClient
from .batches import AsyncBatches, Batches
from .caches import AsyncCaches, Caches
from .chats import AsyncChats, Chats
+from .file_search_stores import AsyncFileSearchStores, FileSearchStores
from .files import AsyncFiles, Files
from .live import AsyncLive
from .models import AsyncModels, Models
@@ -47,6 +48,7 @@ class AsyncClient:
    self._caches = AsyncCaches(self._api_client)
    self._batches = AsyncBatches(self._api_client)
    self._files = AsyncFiles(self._api_client)
+    self._file_search_stores = AsyncFileSearchStores(self._api_client)
    self._live = AsyncLive(self._api_client)
    self._tokens = AsyncTokens(self._api_client)
    self._operations = AsyncOperations(self._api_client)
@@ -63,6 +65,10 @@ class AsyncClient:
  def caches(self) -> AsyncCaches:
    return self._caches

+  @property
+  def file_search_stores(self) -> AsyncFileSearchStores:
+    return self._file_search_stores
+
  @property
  def batches(self) -> AsyncBatches:
    return self._batches
@@ -276,6 +282,7 @@ class Client:
    self._models = Models(self._api_client)
    self._tunings = Tunings(self._api_client)
    self._caches = Caches(self._api_client)
+    self._file_search_stores = FileSearchStores(self._api_client)
    self._batches = Batches(self._api_client)
    self._files = Files(self._api_client)
    self._tokens = Tokens(self._api_client)
@@ -337,6 +344,10 @@ class Client:
  def caches(self) -> Caches:
    return self._caches

+  @property
+  def file_search_stores(self) -> FileSearchStores:
+    return self._file_search_stores
+
  @property
  def batches(self) -> Batches:
    return self._batches
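
The client.py hunks wire the new file_search_stores module into both client surfaces. A minimal sketch of the new accessors (only the property names come from this diff; the methods on the returned objects are defined in the new file_search_stores.py):

```python
from google import genai

client = genai.Client()  # assumes GOOGLE_API_KEY is set in the environment

stores = client.file_search_stores            # FileSearchStores (sync surface)
async_stores = client.aio.file_search_stores  # AsyncFileSearchStores (async surface)
print(type(stores).__name__, type(async_stores).__name__)
```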