google-genai 0.4.0__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- google/genai/_api_client.py +80 -20
- google/genai/_common.py +14 -29
- google/genai/_replay_api_client.py +11 -46
- google/genai/_transformers.py +38 -0
- google/genai/batches.py +64 -62
- google/genai/caches.py +94 -80
- google/genai/chats.py +5 -8
- google/genai/files.py +69 -68
- google/genai/live.py +37 -29
- google/genai/models.py +277 -220
- google/genai/tunings.py +70 -68
- google/genai/types.py +17 -2
- google/genai/version.py +1 -1
- {google_genai-0.4.0.dist-info → google_genai-0.5.0.dist-info}/METADATA +1 -1
- google_genai-0.5.0.dist-info/RECORD +25 -0
- google_genai-0.4.0.dist-info/RECORD +0 -25
- {google_genai-0.4.0.dist-info → google_genai-0.5.0.dist-info}/LICENSE +0 -0
- {google_genai-0.4.0.dist-info → google_genai-0.5.0.dist-info}/WHEEL +0 -0
- {google_genai-0.4.0.dist-info → google_genai-0.5.0.dist-info}/top_level.txt +0 -0
google/genai/files.py
CHANGED
```diff
@@ -13,8 +13,11 @@
 # limitations under the License.
 #

+# Code generated by the Google Gen AI SDK generator DO NOT EDIT.
+
 import mimetypes
 import os
+import pathlib
 from typing import Optional, Union
 from urllib.parse import urlencode
 from . import _common
@@ -615,11 +618,11 @@ class Files(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _ListFilesParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'files'.format_map(request_dict.get('_url'))

@@ -632,23 +635,23 @@ class Files(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = self.
+    response_dict = self._api_client.request(
         'get', path, request_dict, http_options
     )

-    if self.
+    if self._api_client.vertexai:
       response_dict = _ListFilesResponse_from_vertex(
-          self.
+          self._api_client, response_dict
       )
     else:
       response_dict = _ListFilesResponse_from_mldev(
-          self.
+          self._api_client, response_dict
       )

     return_value = types.ListFilesResponse._from_response(
         response_dict, parameter_model
     )
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   def _create(
@@ -662,11 +665,11 @@ class Files(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _CreateFileParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'upload/v1beta/files'.format_map(request_dict.get('_url'))

@@ -679,23 +682,23 @@ class Files(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = self.
+    response_dict = self._api_client.request(
         'post', path, request_dict, http_options
     )

-    if self.
+    if self._api_client.vertexai:
       response_dict = _CreateFileResponse_from_vertex(
-          self.
+          self._api_client, response_dict
       )
     else:
       response_dict = _CreateFileResponse_from_mldev(
-          self.
+          self._api_client, response_dict
       )

     return_value = types.CreateFileResponse._from_response(
         response_dict, parameter_model
     )
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   def get(
@@ -723,11 +726,11 @@ class Files(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _GetFileParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'files/{file}'.format_map(request_dict.get('_url'))

@@ -740,17 +743,17 @@ class Files(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = self.
+    response_dict = self._api_client.request(
         'get', path, request_dict, http_options
     )

-    if self.
-      response_dict = _File_from_vertex(self.
+    if self._api_client.vertexai:
+      response_dict = _File_from_vertex(self._api_client, response_dict)
     else:
-      response_dict = _File_from_mldev(self.
+      response_dict = _File_from_mldev(self._api_client, response_dict)

     return_value = types.File._from_response(response_dict, parameter_model)
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   def delete(
@@ -777,11 +780,11 @@ class Files(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _DeleteFileParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'files/{file}'.format_map(request_dict.get('_url'))

@@ -794,39 +797,38 @@ class Files(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = self.
+    response_dict = self._api_client.request(
         'delete', path, request_dict, http_options
     )

-    if self.
+    if self._api_client.vertexai:
       response_dict = _DeleteFileResponse_from_vertex(
-          self.
+          self._api_client, response_dict
       )
     else:
       response_dict = _DeleteFileResponse_from_mldev(
-          self.
+          self._api_client, response_dict
       )

     return_value = types.DeleteFileResponse._from_response(
         response_dict, parameter_model
     )
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   def upload(
       self,
       *,
-      path: str,
+      path: str | pathlib.Path | os.PathLike,
       config: Optional[types.UploadFileConfigOrDict] = None,
   ) -> types.File:
     """Calls the API to upload a file using a supported file service.

     Args:
-      path: The path
-        uploaded.
+      path: The path or a path-like object points to the file to to be uploaded.
       config: Optional parameters to set `diplay_name`, `mime_type`, and `name`.
     """
-    if self.
+    if self._api_client.vertexai:
       raise ValueError(
           'Vertex AI does not support creating files. You can upload files to'
           ' GCS files instead.'
```
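
The `upload` signature above widens `path` from `str` to any `os.PathLike` (hence the new `pathlib` import). A minimal sketch of the new call pattern; the API key and file name are placeholders, not taken from this diff:

```python
import pathlib

from google import genai

# Placeholder credentials and file name.
client = genai.Client(api_key='YOUR_API_KEY')

# 0.4.0 accepted only a str path; 0.5.0 also accepts pathlib.Path / os.PathLike.
uploaded = client.files.upload(path=pathlib.Path('report.pdf'))
print(uploaded.name, uploaded.mime_type)
```
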
```diff
@@ -884,12 +886,12 @@ class Files(_common.BaseModule):
     )
     upload_url = response['headers']['X-Goog-Upload-URL']

-    return_file = self.
+    return_file = self._api_client.upload_file(
         fs_path, upload_url, file.size_bytes
     )

     return types.File._from_response(
-        _File_from_mldev(self.
+        _File_from_mldev(self._api_client, return_file['file']), None
     )

   def list(
@@ -929,11 +931,11 @@ class AsyncFiles(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _ListFilesParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'files'.format_map(request_dict.get('_url'))

@@ -946,23 +948,23 @@ class AsyncFiles(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = await self.
+    response_dict = await self._api_client.async_request(
         'get', path, request_dict, http_options
     )

-    if self.
+    if self._api_client.vertexai:
       response_dict = _ListFilesResponse_from_vertex(
-          self.
+          self._api_client, response_dict
       )
     else:
       response_dict = _ListFilesResponse_from_mldev(
-          self.
+          self._api_client, response_dict
       )

     return_value = types.ListFilesResponse._from_response(
         response_dict, parameter_model
     )
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   async def _create(
@@ -976,11 +978,11 @@ class AsyncFiles(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _CreateFileParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'upload/v1beta/files'.format_map(request_dict.get('_url'))

@@ -993,23 +995,23 @@ class AsyncFiles(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = await self.
+    response_dict = await self._api_client.async_request(
         'post', path, request_dict, http_options
     )

-    if self.
+    if self._api_client.vertexai:
       response_dict = _CreateFileResponse_from_vertex(
-          self.
+          self._api_client, response_dict
       )
     else:
       response_dict = _CreateFileResponse_from_mldev(
-          self.
+          self._api_client, response_dict
       )

     return_value = types.CreateFileResponse._from_response(
         response_dict, parameter_model
     )
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   async def get(
@@ -1037,11 +1039,11 @@ class AsyncFiles(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _GetFileParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'files/{file}'.format_map(request_dict.get('_url'))

@@ -1054,17 +1056,17 @@ class AsyncFiles(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = await self.
+    response_dict = await self._api_client.async_request(
         'get', path, request_dict, http_options
     )

-    if self.
-      response_dict = _File_from_vertex(self.
+    if self._api_client.vertexai:
+      response_dict = _File_from_vertex(self._api_client, response_dict)
     else:
-      response_dict = _File_from_mldev(self.
+      response_dict = _File_from_mldev(self._api_client, response_dict)

     return_value = types.File._from_response(response_dict, parameter_model)
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   async def delete(
@@ -1091,11 +1093,11 @@ class AsyncFiles(_common.BaseModule):
         config=config,
     )

-    if self.
+    if self._api_client.vertexai:
       raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _DeleteFileParameters_to_mldev(
-          self.
+          self._api_client, parameter_model
       )
       path = 'files/{file}'.format_map(request_dict.get('_url'))

@@ -1108,39 +1110,38 @@ class AsyncFiles(_common.BaseModule):
     request_dict = _common.convert_to_dict(request_dict)
     request_dict = _common.apply_base64_encoding(request_dict)

-    response_dict = await self.
+    response_dict = await self._api_client.async_request(
         'delete', path, request_dict, http_options
     )

-    if self.
+    if self._api_client.vertexai:
       response_dict = _DeleteFileResponse_from_vertex(
-          self.
+          self._api_client, response_dict
       )
     else:
       response_dict = _DeleteFileResponse_from_mldev(
-          self.
+          self._api_client, response_dict
       )

     return_value = types.DeleteFileResponse._from_response(
         response_dict, parameter_model
     )
-    self.
+    self._api_client._verify_response(return_value)
     return return_value

   async def upload(
       self,
       *,
-      path: str,
+      path: str | pathlib.Path | os.PathLike,
       config: Optional[types.UploadFileConfigOrDict] = None,
   ) -> types.File:
     """Calls the API to upload a file asynchronously using a supported file service.

     Args:
-      path:
-        uploaded.
+      path: The path or a path-like object points to the file to be uploaded.
       config: Optional parameters to set `diplay_name`, `mime_type`, and `name`.
     """
-    if self.
+    if self._api_client.vertexai:
       raise ValueError(
           'Vertex AI does not support creating files. You can upload files to'
           ' GCS files instead.'
```
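
The async surface gets the same widening. A sketch of the equivalent call through `client.aio`, again with placeholder credentials and file name:

```python
import asyncio
import pathlib

from google import genai


async def main() -> None:
  # Placeholder credentials; any os.PathLike now works for `path`.
  client = genai.Client(api_key='YOUR_API_KEY')
  uploaded = await client.aio.files.upload(path=pathlib.Path('report.pdf'))
  print(uploaded.name)


asyncio.run(main())
```
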
```diff
@@ -1198,12 +1199,12 @@ class AsyncFiles(_common.BaseModule):
     )
     upload_url = response['headers']['X-Goog-Upload-URL']

-    return_file = await self.
+    return_file = await self._api_client.async_upload_file(
         fs_path, upload_url, file.size_bytes
     )

     return types.File._from_response(
-        _File_from_mldev(self.
+        _File_from_mldev(self._api_client, return_file['file']), None
     )

   async def list(
```

google/genai/live.py
CHANGED
```diff
@@ -241,6 +241,8 @@ class AsyncSession:
       )
     if getv(from_object, ['turnComplete']) is not None:
       setv(to_object, ['turn_complete'], getv(from_object, ['turnComplete']))
+    if getv(from_object, ['interrupted']) is not None:
+      setv(to_object, ['interrupted'], getv(from_object, ['interrupted']))
     return to_object

   def _LiveToolCall_from_mldev(
@@ -312,6 +314,8 @@ class AsyncSession:
       )
     if getv(from_object, ['turnComplete']) is not None:
       setv(to_object, ['turn_complete'], getv(from_object, ['turnComplete']))
+    if getv(from_object, ['interrupted']) is not None:
+      setv(to_object, ['interrupted'], getv(from_object, ['interrupted']))
     return to_object

   def _LiveServerMessage_from_vertex(
```
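
Both the Gemini Developer API and Vertex AI converters now copy the server's `interrupted` flag into the parsed message, so a client can tell when the model was cut off mid-turn. A hedged sketch of checking it in a receive loop; it assumes `session` is an `AsyncSession` from `client.aio.live.connect()` and that the flag surfaces as `LiveServerContent.interrupted`:

```python
async def drain_turn(session) -> None:
  # Sketch only: iterate server messages until the turn ends or is interrupted.
  async for message in session.receive():
    content = message.server_content
    if content and content.interrupted:
      # Generation was interrupted (e.g. by new realtime input); stop reading.
      break
    print(message)
```
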
```diff
@@ -358,7 +362,7 @@ class AsyncSession:
   ) -> dict:
     if isinstance(input, str):
       input = [input]
-    elif
+    elif isinstance(input, dict) and 'data' in input:
       if isinstance(input['data'], bytes):
         decoded_data = base64.b64encode(input['data']).decode('utf-8')
         input['data'] = decoded_data
```
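
The tightened `elif` only base64-encodes dict inputs that actually carry a `data` payload. A sketch of the kind of realtime blob that branch handles; the `send` signature and the mime type are assumptions, not taken from this diff:

```python
async def stream_chunk(session, chunk: bytes) -> None:
  # Sketch only: raw bytes under 'data' are base64-encoded by the SDK before
  # being sent as realtime input (see the branch above).
  await session.send({'data': chunk, 'mime_type': 'audio/pcm'})
```
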
```diff
@@ -425,7 +429,9 @@ class AsyncSession:
       client_message = {'client_content': input.model_dump(exclude_none=True)}
     elif isinstance(input, types.LiveClientToolResponse):
       # ToolResponse.FunctionResponse
-      if not (self._api_client.vertexai) and not (
+      if not (self._api_client.vertexai) and not (
+          input.function_responses[0].id
+      ):
         raise ValueError(_FUNCTION_RESPONSE_REQUIRES_ID)
       client_message = {'tool_response': input.model_dump(exclude_none=True)}
     elif isinstance(input, types.FunctionResponse):
```
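
The reflowed check enforces the same rule as before: outside Vertex AI, a streamed tool response must echo the `id` of the function call it answers, otherwise `send` raises `_FUNCTION_RESPONSE_REQUIRES_ID`. A hedged sketch of building such a response; the payload is a placeholder:

```python
from google.genai import types


async def answer_tool_call(session, call: types.FunctionCall) -> None:
  response = types.FunctionResponse(
      id=call.id,  # required on the Gemini Developer API path
      name=call.name,
      response={'result': 'ok'},  # placeholder payload
  )
  await session.send(types.LiveClientToolResponse(function_responses=[response]))
```
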
```diff
@@ -477,7 +483,7 @@ class AsyncLive(_common.BaseModule):
           to_object,
           ['generationConfig'],
           _GenerateContentConfig_to_mldev(
-              self.
+              self._api_client,
               getv(from_object, ['generation_config']),
               to_object,
           ),
@@ -494,17 +500,18 @@ class AsyncLive(_common.BaseModule):
     if getv(from_object, ['speech_config']) is not None:
       if getv(to_object, ['generationConfig']) is not None:
         to_object['generationConfig']['speechConfig'] = _SpeechConfig_to_mldev(
-            self.
+            self._api_client,
             t.t_speech_config(
-                self.
+                self._api_client, getv(from_object, ['speech_config'])
+            ),
             to_object,
         )
       else:
         to_object['generationConfig'] = {
             'speechConfig': _SpeechConfig_to_mldev(
-                self.
+                self._api_client,
                 t.t_speech_config(
-                    self.
+                    self._api_client, getv(from_object, ['speech_config'])
                 ),
                 to_object,
             )
@@ -515,9 +522,9 @@ class AsyncLive(_common.BaseModule):
           to_object,
           ['systemInstruction'],
           _Content_to_mldev(
-              self.
+              self._api_client,
               t.t_content(
-                  self.
+                  self._api_client, getv(from_object, ['system_instruction'])
               ),
               to_object,
           ),
@@ -527,7 +534,7 @@ class AsyncLive(_common.BaseModule):
           to_object,
           ['tools'],
           [
-              _Tool_to_mldev(self.
+              _Tool_to_mldev(self._api_client, item, to_object)
               for item in getv(from_object, ['tools'])
           ],
       )
@@ -551,7 +558,7 @@ class AsyncLive(_common.BaseModule):
           to_object,
           ['generationConfig'],
           _GenerateContentConfig_to_vertex(
-              self.
+              self._api_client,
               getv(from_object, ['generation_config']),
               to_object,
           ),
@@ -576,17 +583,18 @@ class AsyncLive(_common.BaseModule):
     if getv(from_object, ['speech_config']) is not None:
       if getv(to_object, ['generationConfig']) is not None:
         to_object['generationConfig']['speechConfig'] = _SpeechConfig_to_vertex(
-            self.
+            self._api_client,
             t.t_speech_config(
-                self.
+                self._api_client, getv(from_object, ['speech_config'])
+            ),
             to_object,
         )
       else:
         to_object['generationConfig'] = {
             'speechConfig': _SpeechConfig_to_vertex(
-                self.
+                self._api_client,
                 t.t_speech_config(
-                    self.
+                    self._api_client, getv(from_object, ['speech_config'])
                 ),
                 to_object,
             )
@@ -596,9 +604,9 @@ class AsyncLive(_common.BaseModule):
           to_object,
           ['systemInstruction'],
           _Content_to_vertex(
-              self.
+              self._api_client,
               t.t_content(
-                  self.
+                  self._api_client, getv(from_object, ['system_instruction'])
               ),
               to_object,
           ),
@@ -608,7 +616,7 @@ class AsyncLive(_common.BaseModule):
           to_object,
           ['tools'],
           [
-              _Tool_to_vertex(self.
+              _Tool_to_vertex(self._api_client, item, to_object)
               for item in getv(from_object, ['tools'])
           ],
       )
@@ -637,14 +645,14 @@ class AsyncLive(_common.BaseModule):
         async for message in session.receive():
           print(message)
     """
-    base_url = self.
-    if self.
-      api_key = self.
-      version = self.
+    base_url = self._api_client._websocket_base_url()
+    if self._api_client.api_key:
+      api_key = self._api_client.api_key
+      version = self._api_client._http_options['api_version']
       uri = f'{base_url}/ws/google.ai.generativelanguage.{version}.GenerativeService.BidiGenerateContent?key={api_key}'
-      headers = self.
+      headers = self._api_client._http_options['headers']

-      transformed_model = t.t_model(self.
+      transformed_model = t.t_model(self._api_client, model)
       request = json.dumps(
           self._LiveSetup_to_mldev(model=transformed_model, config=config)
       )
@@ -663,11 +671,11 @@ class AsyncLive(_common.BaseModule):
           'Content-Type': 'application/json',
           'Authorization': 'Bearer {}'.format(bearer_token),
       }
-      version = self.
+      version = self._api_client._http_options['api_version']
       uri = f'{base_url}/ws/google.cloud.aiplatform.{version}.LlmBidiService/BidiGenerateContent'
-      location = self.
-      project = self.
-      transformed_model = t.t_model(self.
+      location = self._api_client.location
+      project = self._api_client.project
+      transformed_model = t.t_model(self._api_client, model)
       if transformed_model.startswith('publishers/'):
         transformed_model = (
             f'projects/{project}/locations/{location}/' + transformed_model
@@ -681,4 +689,4 @@ class AsyncLive(_common.BaseModule):
       await ws.send(request)
       logging.info(await ws.recv(decode=False))

-      yield AsyncSession(api_client=self.
+      yield AsyncSession(api_client=self._api_client, websocket=ws)
```
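
Which of the two websocket URIs above is used is decided entirely by how the `Client` is constructed. A rough end-to-end sketch of connecting and reading one turn; the model name, credentials, config keys, and the exact `send` signature are placeholders and assumptions rather than values taken from this diff:

```python
import asyncio

from google import genai


async def main() -> None:
  # Gemini Developer API path; for the Vertex branch use
  # genai.Client(vertexai=True, project='my-project', location='us-central1').
  client = genai.Client(api_key='YOUR_API_KEY')

  async with client.aio.live.connect(
      model='gemini-2.0-flash-exp',  # placeholder model name
      config={'response_modalities': ['TEXT']},  # assumed config shape
  ) as session:
    await session.send('Hello', end_of_turn=True)
    async for message in session.receive():
      print(message)


asyncio.run(main())
```
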