google-genai 1.2.0__py3-none-any.whl → 1.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- google/genai/_api_client.py +133 -36
- google/genai/_api_module.py +5 -0
- google/genai/_common.py +12 -0
- google/genai/_extra_utils.py +7 -2
- google/genai/_replay_api_client.py +32 -1
- google/genai/_transformers.py +12 -2
- google/genai/batches.py +6 -3
- google/genai/caches.py +9 -8
- google/genai/client.py +13 -3
- google/genai/errors.py +18 -3
- google/genai/files.py +8 -5
- google/genai/live.py +64 -41
- google/genai/models.py +653 -71
- google/genai/{_operations.py → operations.py} +260 -20
- google/genai/tunings.py +3 -0
- google/genai/types.py +434 -3
- google/genai/version.py +1 -1
- {google_genai-1.2.0.dist-info → google_genai-1.3.0.dist-info}/METADATA +88 -12
- google_genai-1.3.0.dist-info/RECORD +27 -0
- google_genai-1.2.0.dist-info/RECORD +0 -27
- {google_genai-1.2.0.dist-info → google_genai-1.3.0.dist-info}/LICENSE +0 -0
- {google_genai-1.2.0.dist-info → google_genai-1.3.0.dist-info}/WHEEL +0 -0
- {google_genai-1.2.0.dist-info → google_genai-1.3.0.dist-info}/top_level.txt +0 -0
google/genai/files.py
CHANGED
@@ -16,6 +16,7 @@
# Code generated by the Google Gen AI SDK generator DO NOT EDIT.

import io
+import logging
import mimetypes
import os
import pathlib
@@ -30,6 +31,8 @@ from ._common import get_value_by_path as getv
from ._common import set_value_by_path as setv
from .pagers import AsyncPager, Pager

+logger = logging.getLogger('google_genai.files')
+

def _ListFilesConfig_to_mldev(
    api_client: ApiClient,
@@ -981,7 +984,7 @@ class AsyncFiles(_api_module.BaseModule):

    .. code-block:: python

-      pager = client.files.list(config={'page_size': 10})
+      pager = await client.aio.files.list(config={'page_size': 10})
      for file in pager.page:
        print(file.name)
    """
@@ -1101,7 +1104,7 @@ class AsyncFiles(_api_module.BaseModule):

    .. code-block:: python

-      file = client.files.get(name='files/...')
+      file = await client.aio.files.get(name='files/...')
      print(file.uri)
    """

@@ -1164,7 +1167,7 @@ class AsyncFiles(_api_module.BaseModule):

    .. code-block:: python

-      client.files.delete(name='files/...')
+      await client.aio.files.delete(name='files/...')
    """

    parameter_model = types._DeleteFileParameters(
@@ -1296,13 +1299,13 @@ class AsyncFiles(_api_module.BaseModule):

    if (
        response.http_headers is None
-        or '
+        or 'x-goog-upload-url' not in response.http_headers
    ):
      raise KeyError(
          'Failed to create file. Upload URL did not returned from the create'
          ' file request.'
      )
-    upload_url = response.http_headers['
+    upload_url = response.http_headers['x-goog-upload-url']

    if isinstance(file, io.IOBase):
      return_file = await self._api_client.async_upload_file(
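
The corrected docstrings above move the AsyncFiles examples onto the awaited client.aio surface. A minimal sketch of that usage, assuming the package is installed, a GOOGLE_API_KEY is available in the environment, and 'files/abc123' stands in for a real resource name:

    import asyncio

    from google import genai


    async def main():
        # Assumption for this sketch: Client() picks up GOOGLE_API_KEY from the environment.
        client = genai.Client()

        # List uploaded files through the async surface, as the fixed docstrings show.
        pager = await client.aio.files.list(config={'page_size': 10})
        for f in pager.page:
            print(f.name)

        # get and delete are likewise awaited on client.aio.files
        # ('files/abc123' is a placeholder name):
        # file = await client.aio.files.get(name='files/abc123')
        # await client.aio.files.delete(name='files/abc123')


    asyncio.run(main())
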
google/genai/live.py
CHANGED
@@ -20,7 +20,7 @@ import base64
import contextlib
import json
import logging
-from typing import AsyncIterator, Optional, Sequence, Union
+from typing import Any, AsyncIterator, Dict, Optional, Sequence, Union

import google.auth
from websockets import ConnectionClosed
@@ -55,6 +55,7 @@ except ModuleNotFoundError:
  from websockets.client import ClientConnection
  from websockets.client import connect

+logger = logging.getLogger('google_genai.live')

_FUNCTION_RESPONSE_REQUIRES_ID = (
    'FunctionResponse request must have an `id` field from the'
@@ -72,15 +73,17 @@ class AsyncSession:
  async def send(
      self,
      *,
-      input:
-
-
-
-
-
-
-
-
+      input: Optional[
+          Union[
+              types.ContentListUnion,
+              types.ContentListUnionDict,
+              types.LiveClientContentOrDict,
+              types.LiveClientRealtimeInputOrDict,
+              types.LiveClientToolResponseOrDict,
+              types.FunctionResponseOrDict,
+              Sequence[types.FunctionResponseOrDict],
+          ]
+      ] = None,
      end_of_turn: Optional[bool] = False,
  ):
    """Send input to the model.
@@ -234,7 +237,7 @@
  def _LiveServerContent_from_mldev(
      self,
      from_object: Union[dict, object],
-  ) ->
+  ) -> Dict[str, Any]:
    to_object = {}
    if getv(from_object, ['modelTurn']) is not None:
      setv(
@@ -254,7 +257,7 @@
  def _LiveToolCall_from_mldev(
      self,
      from_object: Union[dict, object],
-  ) ->
+  ) -> Dict[str, Any]:
    to_object = {}
    if getv(from_object, ['functionCalls']) is not None:
      setv(
@@ -267,7 +270,7 @@
  def _LiveToolCall_from_vertex(
      self,
      from_object: Union[dict, object],
-  ) ->
+  ) -> Dict[str, Any]:
    to_object = {}
    if getv(from_object, ['functionCalls']) is not None:
      setv(
@@ -280,7 +283,7 @@
  def _LiveServerMessage_from_mldev(
      self,
      from_object: Union[dict, object],
-  ) ->
+  ) -> Dict[str, Any]:
    to_object = {}
    if getv(from_object, ['serverContent']) is not None:
      setv(
@@ -307,7 +310,7 @@
  def _LiveServerContent_from_vertex(
      self,
      from_object: Union[dict, object],
-  ) ->
+  ) -> Dict[str, Any]:
    to_object = {}
    if getv(from_object, ['modelTurn']) is not None:
      setv(
@@ -327,7 +330,7 @@
  def _LiveServerMessage_from_vertex(
      self,
      from_object: Union[dict, object],
-  ) ->
+  ) -> Dict[str, Any]:
    to_object = {}
    if getv(from_object, ['serverContent']) is not None:
      setv(
@@ -354,18 +357,23 @@

  def _parse_client_message(
      self,
-      input:
-
-
-
-
-
-
-
-
-
+      input: Optional[
+          Union[
+              types.ContentListUnion,
+              types.ContentListUnionDict,
+              types.LiveClientContentOrDict,
+              types.LiveClientRealtimeInputOrDict,
+              types.LiveClientToolResponseOrDict,
+              types.FunctionResponseOrDict,
+              Sequence[types.FunctionResponseOrDict],
+          ]
+      ] = None,
      end_of_turn: Optional[bool] = False,
-  ) ->
+  ) -> Dict[str, Any]:
+
+    if not input:
+      logging.info('No input provided. Assume it is the end of turn.')
+      return {'client_content': {'turn_complete': True}}
    if isinstance(input, str):
      input = [input]
    elif isinstance(input, dict) and 'data' in input:
@@ -374,7 +382,6 @@
      input['data'] = decoded_data
      input = [input]
    elif isinstance(input, types.Blob):
-      input.data = base64.b64encode(input.data).decode('utf-8')
      input = [input]
    elif isinstance(input, dict) and 'name' in input and 'response' in input:
      # ToolResponse.FunctionResponse
@@ -411,7 +418,7 @@
      if any((isinstance(b, dict) and 'data' in b) for b in input):
        pass
      elif any(isinstance(b, types.Blob) for b in input):
-        input = [b.model_dump(exclude_none=True) for b in input]
+        input = [b.model_dump(exclude_none=True, mode='json') for b in input]
      else:
        raise ValueError(
            f'Unsupported input type "{type(input)}" or input content "{input}"'
@@ -419,11 +426,21 @@

      client_message = {'realtime_input': {'media_chunks': input}}

-    elif isinstance(input, dict)
-
-
+    elif isinstance(input, dict):
+      if 'content' in input or 'turns' in input:
+        # TODO(b/365983264) Add validation checks for content_update input_dict.
+        client_message = {'client_content': input}
+      elif 'media_chunks' in input:
+        client_message = {'realtime_input': input}
+      elif 'function_responses' in input:
+        client_message = {'tool_response': input}
+      else:
+        raise ValueError(
+            f'Unsupported input type "{type(input)}" or input content "{input}"')
    elif isinstance(input, types.LiveClientRealtimeInput):
-      client_message = {
+      client_message = {
+          'realtime_input': input.model_dump(exclude_none=True, mode='json')
+      }
      if isinstance(
          client_message['realtime_input']['media_chunks'][0]['data'], bytes
      ):
@@ -436,20 +453,26 @@
      ]

    elif isinstance(input, types.LiveClientContent):
-      client_message = {
+      client_message = {
+          'client_content': input.model_dump(exclude_none=True, mode='json')
+      }
    elif isinstance(input, types.LiveClientToolResponse):
      # ToolResponse.FunctionResponse
      if not (self._api_client.vertexai) and not (
          input.function_responses[0].id
      ):
        raise ValueError(_FUNCTION_RESPONSE_REQUIRES_ID)
-      client_message = {
+      client_message = {
+          'tool_response': input.model_dump(exclude_none=True, mode='json')
+      }
    elif isinstance(input, types.FunctionResponse):
      if not (self._api_client.vertexai) and not (input.id):
        raise ValueError(_FUNCTION_RESPONSE_REQUIRES_ID)
      client_message = {
          'tool_response': {
-              'function_responses': [
+              'function_responses': [
+                  input.model_dump(exclude_none=True, mode='json')
+              ]
          }
      }
    elif isinstance(input, Sequence) and isinstance(
@@ -460,7 +483,7 @@
      client_message = {
          'tool_response': {
              'function_responses': [
-                  c.model_dump(exclude_none=True) for c in input
+                  c.model_dump(exclude_none=True, mode='json') for c in input
              ]
          }
      }
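
The switch from model_dump(exclude_none=True) to model_dump(exclude_none=True, mode='json') in the hunks above produces JSON-serializable payloads before they are written to the websocket, which is presumably also why the explicit base64.b64encode call on Blob.data could be dropped. A standalone pydantic sketch of the mode='json' behaviour, using a hypothetical Chunk model with ser_json_bytes='base64' configured (an illustrative assumption, not the SDK's actual model definition):

    from pydantic import BaseModel, ConfigDict


    class Chunk(BaseModel):
        # Hypothetical model: serialize bytes fields as base64 when dumping in JSON mode.
        model_config = ConfigDict(ser_json_bytes='base64')

        data: bytes
        mime_type: str


    chunk = Chunk(data=b'\x00\x01\x02', mime_type='audio/pcm')

    # Default (python) mode keeps the raw bytes object, which json.dumps() rejects.
    print(chunk.model_dump(exclude_none=True))

    # mode='json' converts every value to a JSON-safe type; the bytes come out as a
    # base64 string, so no manual base64.b64encode step is needed before sending.
    print(chunk.model_dump(exclude_none=True, mode='json'))
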
@@ -682,10 +705,10 @@ class AsyncLive(_api_module.BaseModule):
      auth_req = google.auth.transport.requests.Request()
      creds.refresh(auth_req)
      bearer_token = creds.token
-      headers =
-
+      headers = self._api_client._http_options['headers']
+      headers.update({
          'Authorization': 'Bearer {}'.format(bearer_token),
-      }
+      })
      version = self._api_client._http_options['api_version']
      uri = f'{base_url}/ws/google.cloud.aiplatform.{version}.LlmBidiService/BidiGenerateContent'
      location = self._api_client.location
@@ -702,6 +725,6 @@ class AsyncLive(_api_module.BaseModule):

    async with connect(uri, additional_headers=headers) as ws:
      await ws.send(request)
-
+      logger.info(await ws.recv(decode=False))

      yield AsyncSession(api_client=self._api_client, websocket=ws)