google-genai 1.9.0.tar.gz → 1.11.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {google_genai-1.9.0/google_genai.egg-info → google_genai-1.11.0}/PKG-INFO +1 -1
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_api_client.py +192 -35
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_automatic_function_calling_util.py +1 -1
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_extra_utils.py +70 -10
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_replay_api_client.py +32 -8
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_transformers.py +172 -59
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/files.py +22 -6
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/live.py +136 -580
- google_genai-1.11.0/google/genai/live_converters.py +1298 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/models.py +97 -15
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/operations.py +17 -9
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/tunings.py +0 -3
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/types.py +1064 -78
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/version.py +1 -1
- {google_genai-1.9.0 → google_genai-1.11.0/google_genai.egg-info}/PKG-INFO +1 -1
- {google_genai-1.9.0 → google_genai-1.11.0}/google_genai.egg-info/SOURCES.txt +1 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/pyproject.toml +2 -2
- {google_genai-1.9.0 → google_genai-1.11.0}/LICENSE +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/MANIFEST.in +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/README.md +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/__init__.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_api_module.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_common.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/_test_api_client.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/batches.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/caches.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/chats.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/client.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/errors.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google/genai/pagers.py +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google_genai.egg-info/dependency_links.txt +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google_genai.egg-info/requires.txt +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/google_genai.egg-info/top_level.txt +0 -0
- {google_genai-1.9.0 → google_genai-1.11.0}/setup.cfg +0 -0
google/genai/_api_client.py

@@ -19,28 +19,41 @@
 The BaseApiClient is intended to be a private module and is subject to change.
 """
 
-import anyio
 import asyncio
 import copy
 from dataclasses import dataclass
 import datetime
+import http
 import io
 import json
 import logging
+import math
 import os
+import ssl
 import sys
+import time
 from typing import Any, AsyncIterator, Optional, Tuple, Union
-from urllib.parse import urlparse
+from urllib.parse import urlparse
+from urllib.parse import urlunparse
+
+import anyio
+import certifi
 import google.auth
 import google.auth.credentials
 from google.auth.credentials import Credentials
 from google.auth.transport.requests import Request
 import httpx
-from pydantic import BaseModel
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import ValidationError
+
 from . import _common
 from . import errors
 from . import version
-from .types import HttpOptions
+from .types import HttpOptions
+from .types import HttpOptionsDict
+from .types import HttpOptionsOrDict
+
 
 logger = logging.getLogger('google_genai._api_client')
 CHUNK_SIZE = 8 * 1024 * 1024  # 8 MB chunk size
@@ -95,6 +108,14 @@ def _patch_http_options(
   return copy_option
 
 
+def _populate_server_timeout_header(
+    headers: dict[str, str], timeout_in_seconds: Optional[Union[float, int]]
+) -> None:
+  """Populates the server timeout header in the headers dict."""
+  if timeout_in_seconds and 'X-Server-Timeout' not in headers:
+    headers['X-Server-Timeout'] = str(math.ceil(timeout_in_seconds))
+
+
 def _join_url_path(base_url: str, path: str) -> str:
   parsed_base = urlparse(base_url)
   base_path = (
@@ -128,6 +149,19 @@ def _refresh_auth(credentials: Credentials) -> Credentials:
   return credentials
 
 
+def _get_timeout_in_seconds(
+    timeout: Optional[Union[float, int]],
+) -> Optional[float]:
+  """Converts the timeout to seconds."""
+  if timeout:
+    # HttpOptions.timeout is in milliseconds. But httpx.Client.request()
+    # expects seconds.
+    timeout_in_seconds = timeout / 1000.0
+  else:
+    timeout_in_seconds = None
+  return timeout_in_seconds
+
+
 @dataclass
 class HttpRequest:
   headers: dict[str, str]
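
For orientation, a minimal standalone sketch of what the two new helpers combine to do (the helper names come from the hunks above; the wrapper function and sample value are illustrative only): HttpOptions timeouts are millisecond values, converted to seconds for httpx and rounded up to whole seconds for the X-Server-Timeout header.

    import math

    def timeout_headers(timeout_ms):
        # Mirrors _get_timeout_in_seconds + _populate_server_timeout_header:
        # milliseconds -> seconds for httpx, whole seconds for the server header.
        timeout_in_seconds = timeout_ms / 1000.0 if timeout_ms else None
        headers = {}
        if timeout_in_seconds and 'X-Server-Timeout' not in headers:
            headers['X-Server-Timeout'] = str(math.ceil(timeout_in_seconds))
        return timeout_in_seconds, headers

    print(timeout_headers(12_500))  # (12.5, {'X-Server-Timeout': '13'})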
@@ -390,7 +424,7 @@ class BaseApiClient:
     if not self.api_key:
       raise ValueError(
           'Missing key inputs argument! To use the Google AI API,'
-          'provide (`api_key`) arguments. To use the Google Cloud API,'
+          ' provide (`api_key`) arguments. To use the Google Cloud API,'
           ' provide (`vertexai`, `project` & `location`) arguments.'
       )
     self._http_options.base_url = 'https://generativelanguage.googleapis.com/'
@@ -408,9 +442,67 @@ class BaseApiClient:
     else:
       if self._http_options.headers is not None:
         _append_library_version_headers(self._http_options.headers)
-
-
-    self.
+
+    client_args, async_client_args = self._ensure_ssl_ctx(self._http_options)
+    self._httpx_client = SyncHttpxClient(**client_args)
+    self._async_httpx_client = AsyncHttpxClient(**async_client_args)
+
+  @staticmethod
+  def _ensure_ssl_ctx(options: HttpOptions) -> (
+      Tuple[dict[str, Any], dict[str, Any]]):
+    """Ensures the SSL context is present in the client args.
+
+    Creates a default SSL context if one is not provided.
+
+    Args:
+      options: The http options to check for SSL context.
+
+    Returns:
+      A tuple of sync/async httpx client args.
+    """
+
+    verify = 'verify'
+    args = options.client_args
+    async_args = options.async_client_args
+    ctx = (
+        args.get(verify) if args else None
+        or async_args.get(verify) if async_args else None
+    )
+
+    if not ctx:
+      # Initialize the SSL context for the httpx client.
+      # Unlike requests, the httpx package does not automatically pull in the
+      # environment variables SSL_CERT_FILE or SSL_CERT_DIR. They need to be
+      # enabled explicitly.
+      ctx = ssl.create_default_context(
+          cafile=os.environ.get('SSL_CERT_FILE', certifi.where()),
+          capath=os.environ.get('SSL_CERT_DIR'),
+      )
+
+    def _maybe_set(
+        args: Optional[dict[str, Any]],
+        ctx: ssl.SSLContext,
+    ) -> dict[str, Any]:
+      """Sets the SSL context in the client args if not set.
+
+      Does not override the SSL context if it is already set.
+
+      Args:
+        args: The client args to to check for SSL context.
+        ctx: The SSL context to set.
+
+      Returns:
+        The client args with the SSL context included.
+      """
+      if not args or not args.get(verify):
+        args = (args or {}).copy()
+        args[verify] = ctx
+      return args
+
+    return (
+        _maybe_set(args, ctx),
+        _maybe_set(async_args, ctx),
+    )
 
   def _websocket_base_url(self):
     url_parts = urlparse(self._http_options.base_url)
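
A hedged usage sketch for the new SSL plumbing: the client_args / async_client_args field names and the 'verify' key are taken from the hunk above, while the surrounding Client call and the CA-bundle path are assumptions, not part of this diff.

    import ssl

    from google import genai
    from google.genai import types

    # Hypothetical corporate CA bundle; any ssl.SSLContext (or other value httpx
    # accepts for 'verify') should pass through to the httpx clients unchanged.
    ctx = ssl.create_default_context(cafile='/path/to/corp-ca.pem')

    client = genai.Client(
        api_key='YOUR_API_KEY',  # placeholder
        http_options=types.HttpOptions(
            client_args={'verify': ctx},        # sync httpx client
            async_client_args={'verify': ctx},  # async httpx client
        ),
    )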
@@ -520,18 +612,13 @@ class BaseApiClient:
         versioned_path,
     )
 
-    timeout_in_seconds
-        patched_http_options.timeout
-    )
-    if timeout_in_seconds:
-      # HttpOptions.timeout is in milliseconds. But httpx.Client.request()
-      # expects seconds.
-      timeout_in_seconds = timeout_in_seconds / 1000.0
-    else:
-      timeout_in_seconds = None
+    timeout_in_seconds = _get_timeout_in_seconds(patched_http_options.timeout)
 
     if patched_http_options.headers is None:
       raise ValueError('Request headers must be set.')
+    _populate_server_timeout_header(
+        patched_http_options.headers, timeout_in_seconds
+    )
     return HttpRequest(
         method=http_method,
         url=url,
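
A hedged sketch of the per-request effect of this hunk: a millisecond timeout set on HttpOptions is converted once and attached both as the httpx timeout and as the X-Server-Timeout header. The model id, prompt, and the idea of passing http_options through GenerateContentConfig are assumptions about the public surface, not shown in this diff.

    from google import genai
    from google.genai import types

    client = genai.Client(api_key='YOUR_API_KEY')  # placeholder
    response = client.models.generate_content(
        model='gemini-2.0-flash',  # placeholder model id
        contents='Summarize request timeouts in one sentence.',
        config=types.GenerateContentConfig(
            # 12_500 ms -> httpx timeout of 12.5 s and X-Server-Timeout: 13
            http_options=types.HttpOptions(timeout=12_500),
        ),
    )
    print(response.text)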
@@ -712,7 +799,12 @@ class BaseApiClient:
     return async_generator()
 
   def upload_file(
-      self,
+      self,
+      file_path: Union[str, io.IOBase],
+      upload_url: str,
+      upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file to the given URL.
 
@@ -723,18 +815,28 @@ class BaseApiClient:
       upload_url: The URL to upload the file to.
      upload_size: The size of file content to be uploaded, this will have to
        match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
      The HttpResponse object from the finalize request.
     """
     if isinstance(file_path, io.IOBase):
-      return self._upload_fd(
+      return self._upload_fd(
+          file_path, upload_url, upload_size, http_options=http_options
+      )
     else:
       with open(file_path, 'rb') as file:
-        return self._upload_fd(
+        return self._upload_fd(
+            file, upload_url, upload_size, http_options=http_options
+        )
 
   def _upload_fd(
-      self,
+      self,
+      file: io.IOBase,
+      upload_url: str,
+      upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file to the given URL.
 
@@ -743,6 +845,7 @@ class BaseApiClient:
      upload_url: The URL to upload the file to.
      upload_size: The size of file content to be uploaded, this will have to
        match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
      The HttpResponse object from the finalize request.
@@ -758,15 +861,32 @@ class BaseApiClient:
       # If last chunk, finalize the upload.
       if chunk_size + offset >= upload_size:
         upload_command += ', finalize'
+      http_options = http_options if http_options else self._http_options
+      timeout = (
+          http_options.get('timeout')
+          if isinstance(http_options, dict)
+          else http_options.timeout
+      )
+      if timeout is None:
+        # Per request timeout is not configured. Check the global timeout.
+        timeout = (
+            self._http_options.timeout
+            if isinstance(self._http_options, dict)
+            else self._http_options.timeout
+        )
+      timeout_in_seconds = _get_timeout_in_seconds(timeout)
+      upload_headers = {
+          'X-Goog-Upload-Command': upload_command,
+          'X-Goog-Upload-Offset': str(offset),
+          'Content-Length': str(chunk_size),
+      }
+      _populate_server_timeout_header(upload_headers, timeout_in_seconds)
       response = self._httpx_client.request(
           method='POST',
           url=upload_url,
-          headers=
-              'X-Goog-Upload-Command': upload_command,
-              'X-Goog-Upload-Offset': str(offset),
-              'Content-Length': str(chunk_size),
-          },
+          headers=upload_headers,
           content=file_chunk,
+          timeout=timeout_in_seconds,
       )
       offset += chunk_size
       if response.headers['x-goog-upload-status'] != 'active':
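
A hedged sketch of what the upload changes enable at the public layer, assuming files.upload and types.UploadFileConfig forward http_options down to upload_file (the file name and timeout value are placeholders): each chunk request then carries the httpx timeout and the X-Server-Timeout header.

    from google import genai
    from google.genai import types

    client = genai.Client(api_key='YOUR_API_KEY')  # placeholder
    uploaded = client.files.upload(
        file='large_video.mp4',  # hypothetical local file
        config=types.UploadFileConfig(
            http_options=types.HttpOptions(timeout=120_000),  # 120 s per chunk request
        ),
    )
    print(uploaded.name)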
@@ -783,7 +903,12 @@ class BaseApiClient:
       )
     return HttpResponse(response.headers, response_stream=[response.text])
 
-  def download_file(
+  def download_file(
+      self,
+      path: str,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
+  ):
     """Downloads the file data.
 
     Args:
@@ -822,6 +947,8 @@ class BaseApiClient:
       file_path: Union[str, io.IOBase],
       upload_url: str,
       upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file asynchronously to the given URL.
 
@@ -831,23 +958,30 @@ class BaseApiClient:
      upload_url: The URL to upload the file to.
      upload_size: The size of file content to be uploaded, this will have to
        match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
      The HttpResponse object from the finalize request.
     """
     if isinstance(file_path, io.IOBase):
-      return await self._async_upload_fd(
+      return await self._async_upload_fd(
+          file_path, upload_url, upload_size, http_options=http_options
+      )
     else:
       file = anyio.Path(file_path)
       fd = await file.open('rb')
       async with fd:
-        return await self._async_upload_fd(
+        return await self._async_upload_fd(
+            fd, upload_url, upload_size, http_options=http_options
+        )
 
   async def _async_upload_fd(
       self,
       file: Union[io.IOBase, anyio.AsyncFile],
       upload_url: str,
       upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     """Transfers a file asynchronously to the given URL.
 
@@ -856,6 +990,7 @@ class BaseApiClient:
      upload_url: The URL to upload the file to.
      upload_size: The size of file content to be uploaded, this will have to
        match the size requested in the resumable upload request.
+      http_options: The http options to use for the request.
 
     returns:
      The HttpResponse object from the finalized request.
@@ -874,15 +1009,32 @@ class BaseApiClient:
       # If last chunk, finalize the upload.
       if chunk_size + offset >= upload_size:
         upload_command += ', finalize'
+      http_options = http_options if http_options else self._http_options
+      timeout = (
+          http_options.get('timeout')
+          if isinstance(http_options, dict)
+          else http_options.timeout
+      )
+      if timeout is None:
+        # Per request timeout is not configured. Check the global timeout.
+        timeout = (
+            self._http_options.timeout
+            if isinstance(self._http_options, dict)
+            else self._http_options.timeout
+        )
+      timeout_in_seconds = _get_timeout_in_seconds(timeout)
+      upload_headers = {
+          'X-Goog-Upload-Command': upload_command,
+          'X-Goog-Upload-Offset': str(offset),
+          'Content-Length': str(chunk_size),
+      }
+      _populate_server_timeout_header(upload_headers, timeout_in_seconds)
       response = await self._async_httpx_client.request(
           method='POST',
           url=upload_url,
           content=file_chunk,
-          headers=
-
-              'X-Goog-Upload-Offset': str(offset),
-              'Content-Length': str(chunk_size),
-          },
+          headers=upload_headers,
+          timeout=timeout_in_seconds,
       )
       offset += chunk_size
       if response.headers.get('x-goog-upload-status') != 'active':
@@ -899,7 +1051,12 @@ class BaseApiClient:
       )
     return HttpResponse(response.headers, response_stream=[response.text])
 
-  async def async_download_file(
+  async def async_download_file(
+      self,
+      path: str,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
+  ):
     """Downloads the file data.
 
     Args:
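
Similarly, a hedged sketch for downloads, assuming the public files.download wrapper and types.DownloadFileConfig exist as in recent releases and pass http_options through to download_file / async_download_file; the file resource name and timeout are placeholders.

    from google import genai
    from google.genai import types

    client = genai.Client(api_key='YOUR_API_KEY')  # placeholder
    data = client.files.download(
        file='files/abc123',  # hypothetical file resource name
        config=types.DownloadFileConfig(
            http_options=types.HttpOptions(timeout=60_000),  # 60 s, in milliseconds
        ),
    )
    print(len(data))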
google/genai/_automatic_function_calling_util.py

@@ -28,7 +28,7 @@ from . import types
 if sys.version_info >= (3, 10):
   VersionedUnionType = builtin_types.UnionType
 else:
-  VersionedUnionType = typing._UnionGenericAlias
+  VersionedUnionType = typing._UnionGenericAlias  # type: ignore[attr-defined]
 
 _py_builtin_type_to_schema_type = {
     str: types.Type.STRING,
google/genai/_extra_utils.py

@@ -30,7 +30,7 @@ from . import types
 if sys.version_info >= (3, 10):
   from types import UnionType
 else:
-  UnionType = typing._UnionGenericAlias
+  UnionType = typing._UnionGenericAlias  # type: ignore[attr-defined]
 
 _DEFAULT_MAX_REMOTE_CALLS_AFC = 10
 
@@ -78,6 +78,7 @@ def format_destination(
 
 def get_function_map(
     config: Optional[types.GenerateContentConfigOrDict] = None,
+    is_caller_method_async: bool = False,
 ) -> dict[str, Callable]:
   """Returns a function map from the config."""
   function_map: dict[str, Callable] = {}
@@ -87,7 +88,7 @@ def get_function_map(
   if config_model.tools:
     for tool in config_model.tools:
       if callable(tool):
-        if inspect.iscoroutinefunction(tool):
+        if inspect.iscoroutinefunction(tool) and not is_caller_method_async:
          raise errors.UnsupportedFunctionError(
              f'Function {tool.__name__} is a coroutine function, which is not'
              ' supported for automatic function calling. Please manually'
@@ -199,11 +200,11 @@ def convert_if_exist_pydantic_model(
   return value
 
 
-def
-    args:
-) -> Any:
-  signature = inspect.signature(
-  func_name =
+def convert_argument_from_function(
+    args: dict[str, Any], function: Callable
+) -> dict[str, Any]:
+  signature = inspect.signature(function)
+  func_name = function.__name__
   converted_args = {}
   for param_name, param in signature.parameters.items():
     if param_name in args:
@@ -213,13 +214,34 @@ def invoke_function_from_dict_args(
           param_name,
           func_name,
       )
+  return converted_args
+
+
+def invoke_function_from_dict_args(
+    args: Dict[str, Any], function_to_invoke: Callable
+) -> Any:
+  converted_args = convert_argument_from_function(args, function_to_invoke)
   try:
     return function_to_invoke(**converted_args)
   except Exception as e:
     raise errors.FunctionInvocationError(
-        f'Failed to invoke function {
-        f' {converted_args} from model returned function
-        f' {args} because of error {e}'
+        f'Failed to invoke function {function_to_invoke.__name__} with'
+        f' converted arguments {converted_args} from model returned function'
+        f' call argument {args} because of error {e}'
+    )
+
+
+async def invoke_function_from_dict_args_async(
+    args: Dict[str, Any], function_to_invoke: Callable
+) -> Any:
+  converted_args = convert_argument_from_function(args, function_to_invoke)
+  try:
+    return await function_to_invoke(**converted_args)
+  except Exception as e:
+    raise errors.FunctionInvocationError(
+        f'Failed to invoke function {function_to_invoke.__name__} with'
+        f' converted arguments {converted_args} from model returned function'
+        f' call argument {args} because of error {e}'
     )
 
 
@@ -256,6 +278,44 @@ def get_function_response_parts(
         func_response_parts.append(func_response_part)
   return func_response_parts
 
+async def get_function_response_parts_async(
+    response: types.GenerateContentResponse,
+    function_map: dict[str, Callable],
+) -> list[types.Part]:
+  """Returns the function response parts from the response."""
+  func_response_parts = []
+  if (
+      response.candidates is not None
+      and isinstance(response.candidates[0].content, types.Content)
+      and response.candidates[0].content.parts is not None
+  ):
+    for part in response.candidates[0].content.parts:
+      if not part.function_call:
+        continue
+      func_name = part.function_call.name
+      if func_name is not None and part.function_call.args is not None:
+        func = function_map[func_name]
+        args = convert_number_values_for_dict_function_call_args(
+            part.function_call.args
+        )
+        func_response: dict[str, Any]
+        try:
+          if inspect.iscoroutinefunction(func):
+            func_response = {
+                'result': await invoke_function_from_dict_args_async(args, func)
+            }
+          else:
+            func_response = {
+                'result': invoke_function_from_dict_args(args, func)
+            }
+        except Exception as e:  # pylint: disable=broad-except
+          func_response = {'error': str(e)}
+        func_response_part = types.Part.from_function_response(
+            name=func_name, response=func_response
+        )
+        func_response_parts.append(func_response_part)
+  return func_response_parts
+
 
 def should_disable_afc(
     config: Optional[types.GenerateContentConfigOrDict] = None,
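
A hedged sketch of what these async helpers enable: a coroutine passed as a tool is awaited during automatic function calling when the call goes through the async surface (client.aio). The model id, prompt, and tool are illustrative; only the coroutine-tool support itself comes from the hunks above.

    import asyncio

    from google import genai
    from google.genai import types

    async def get_weather(city: str) -> str:
        # Illustrative async tool; a real tool might await an HTTP call.
        return f'It is sunny in {city}.'

    async def main() -> None:
        client = genai.Client(api_key='YOUR_API_KEY')  # placeholder
        response = await client.aio.models.generate_content(
            model='gemini-2.0-flash',  # placeholder model id
            contents='What is the weather in Zurich?',
            config=types.GenerateContentConfig(tools=[get_weather]),
        )
        print(response.text)

    asyncio.run(main())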
google/genai/_replay_api_client.py

@@ -34,6 +34,8 @@ from ._api_client import HttpOptions
 from ._api_client import HttpRequest
 from ._api_client import HttpResponse
 from ._common import BaseModel
+from .types import HttpOptionsOrDict
+from .types import GenerateVideosOperation
 
 
 def _redact_version_numbers(version_string: str) -> str:
@@ -396,7 +398,12 @@ class ReplayApiClient(BaseApiClient):
     if isinstance(response_model, list):
       response_model = response_model[0]
     print('response_model: ', response_model.model_dump(exclude_none=True))
-
+    if isinstance(response_model, GenerateVideosOperation):
+      actual = response_model.model_dump(
+          exclude={'result'}, exclude_none=True, mode='json'
+      )
+    else:
+      actual = response_model.model_dump(exclude_none=True, mode='json')
     expected = interaction.response.sdk_response_segments[
         self._sdk_response_index
     ]
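
For context on the GenerateVideosOperation special-case above, a hedged sketch of the long-running video operation it refers to; the model id and prompt are placeholders and the polling loop is the commonly documented pattern, not code from this diff.

    import time

    from google import genai

    client = genai.Client(api_key='YOUR_API_KEY')  # placeholder
    operation = client.models.generate_videos(
        model='veo-2.0-generate-001',  # placeholder model id
        prompt='A timelapse of clouds over mountains.',
    )
    while not operation.done:
        time.sleep(10)
        operation = client.operations.get(operation)
    print(operation.result)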
@@ -461,7 +468,14 @@ class ReplayApiClient(BaseApiClient):
     else:
       return self._build_response_from_replay(http_request)
 
-  def upload_file(
+  def upload_file(
+      self,
+      file_path: Union[str, io.IOBase],
+      upload_url: str,
+      upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
+  ) -> HttpResponse:
     if isinstance(file_path, io.IOBase):
       offset = file_path.tell()
       content = file_path.read()
@@ -479,7 +493,9 @@ class ReplayApiClient(BaseApiClient):
     if self._should_call_api():
       result: Union[str, HttpResponse]
       try:
-        result = super().upload_file(
+        result = super().upload_file(
+            file_path, upload_url, upload_size, http_options=http_options
+        )
       except HTTPError as e:
         result = HttpResponse(
             dict(e.response.headers), [json.dumps({'reason': e.response.reason})]
@@ -496,6 +512,8 @@ class ReplayApiClient(BaseApiClient):
       file_path: Union[str, io.IOBase],
       upload_url: str,
       upload_size: int,
+      *,
+      http_options: Optional[HttpOptionsOrDict] = None,
   ) -> HttpResponse:
     if isinstance(file_path, io.IOBase):
       offset = file_path.tell()
@@ -515,7 +533,7 @@ class ReplayApiClient(BaseApiClient):
       result: HttpResponse
       try:
         result = await super().async_upload_file(
-            file_path, upload_url, upload_size
+            file_path, upload_url, upload_size, http_options=http_options
         )
       except HTTPError as e:
         result = HttpResponse(
@@ -528,14 +546,16 @@ class ReplayApiClient(BaseApiClient):
     else:
       return self._build_response_from_replay(request)
 
-  def download_file(
+  def download_file(
+      self, path: str, *, http_options: Optional[HttpOptionsOrDict] = None
+  ):
     self._initialize_replay_session_if_not_loaded()
     request = self._build_request(
         'get', path=path, request_dict={}, http_options=http_options
     )
     if self._should_call_api():
       try:
-        result = super().download_file(path, http_options)
+        result = super().download_file(path, http_options=http_options)
       except HTTPError as e:
         result = HttpResponse(
             dict(e.response.headers), [json.dumps({'reason': e.response.reason})]
@@ -547,14 +567,18 @@ class ReplayApiClient(BaseApiClient):
     else:
       return self._build_response_from_replay(request).byte_stream[0]
 
-  async def async_download_file(
+  async def async_download_file(
+      self, path: str, *, http_options: Optional[HttpOptionsOrDict] = None
+  ):
     self._initialize_replay_session_if_not_loaded()
     request = self._build_request(
         'get', path=path, request_dict={}, http_options=http_options
     )
     if self._should_call_api():
       try:
-        result = await super().async_download_file(
+        result = await super().async_download_file(
+            path, http_options=http_options
+        )
       except HTTPError as e:
         result = HttpResponse(
             dict(e.response.headers), [json.dumps({'reason': e.response.reason})]