google-genai 1.0.0rc0__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- google/genai/_api_client.py +24 -21
- google/genai/_automatic_function_calling_util.py +21 -18
- google/genai/_common.py +24 -1
- google/genai/_extra_utils.py +14 -8
- google/genai/_replay_api_client.py +2 -0
- google/genai/_transformers.py +31 -3
- google/genai/chats.py +24 -8
- google/genai/errors.py +4 -0
- google/genai/files.py +18 -12
- google/genai/live.py +5 -0
- google/genai/models.py +311 -10
- google/genai/tunings.py +224 -60
- google/genai/types.py +100 -74
- google/genai/version.py +1 -1
- {google_genai-1.0.0rc0.dist-info → google_genai-1.1.0.dist-info}/METADATA +258 -149
- google_genai-1.1.0.dist-info/RECORD +27 -0
- google_genai-1.0.0rc0.dist-info/RECORD +0 -27
- {google_genai-1.0.0rc0.dist-info → google_genai-1.1.0.dist-info}/LICENSE +0 -0
- {google_genai-1.0.0rc0.dist-info → google_genai-1.1.0.dist-info}/WHEEL +0 -0
- {google_genai-1.0.0rc0.dist-info → google_genai-1.1.0.dist-info}/top_level.txt +0 -0
google/genai/tunings.py
CHANGED
@@ -655,42 +655,50 @@ def _ListTuningJobsResponse_from_vertex(
   return to_object


-def
+def _Operation_from_mldev(
     api_client: ApiClient,
     from_object: Union[dict, object],
     parent_object: dict = None,
 ) -> dict:
   to_object = {}
-  if getv(from_object, ['
-  setv(
-
-
-
-
-
-
-
-
+  if getv(from_object, ['name']) is not None:
+    setv(to_object, ['name'], getv(from_object, ['name']))
+
+  if getv(from_object, ['metadata']) is not None:
+    setv(to_object, ['metadata'], getv(from_object, ['metadata']))
+
+  if getv(from_object, ['done']) is not None:
+    setv(to_object, ['done'], getv(from_object, ['done']))
+
+  if getv(from_object, ['error']) is not None:
+    setv(to_object, ['error'], getv(from_object, ['error']))
+
+  if getv(from_object, ['response']) is not None:
+    setv(to_object, ['response'], getv(from_object, ['response']))

   return to_object


-def
+def _Operation_from_vertex(
     api_client: ApiClient,
     from_object: Union[dict, object],
     parent_object: dict = None,
 ) -> dict:
   to_object = {}
-  if getv(from_object, ['
-  setv(
-
-
-
-
-
-
-
-
+  if getv(from_object, ['name']) is not None:
+    setv(to_object, ['name'], getv(from_object, ['name']))
+
+  if getv(from_object, ['metadata']) is not None:
+    setv(to_object, ['metadata'], getv(from_object, ['metadata']))
+
+  if getv(from_object, ['done']) is not None:
+    setv(to_object, ['done'], getv(from_object, ['done']))
+
+  if getv(from_object, ['error']) is not None:
+    setv(to_object, ['error'], getv(from_object, ['error']))
+
+  if getv(from_object, ['response']) is not None:
+    setv(to_object, ['response'], getv(from_object, ['response']))

   return to_object

@@ -823,7 +831,7 @@ class Tunings(_api_module.BaseModule):
       base_model: str,
       training_dataset: types.TuningDatasetOrDict,
       config: Optional[types.CreateTuningJobConfigOrDict] = None,
-  ) -> types.
+  ) -> types.TuningJob:
     """Creates a supervised fine-tuning job.

     Args:
@@ -841,16 +849,76 @@ class Tunings(_api_module.BaseModule):
         config=config,
     )

-    if self._api_client.vertexai:
+    if not self._api_client.vertexai:
+      raise ValueError('This method is only supported in the Vertex AI client.')
+    else:
       request_dict = _CreateTuningJobParameters_to_vertex(
           self._api_client, parameter_model
       )
       path = 'tuningJobs'.format_map(request_dict.get('_url'))
+
+    query_params = request_dict.get('_query')
+    if query_params:
+      path = f'{path}?{urlencode(query_params)}'
+    # TODO: remove the hack that pops config.
+    request_dict.pop('config', None)
+
+    http_options = None
+    if isinstance(config, dict):
+      http_options = config.get('http_options', None)
+    elif hasattr(config, 'http_options'):
+      http_options = config.http_options
+
+    request_dict = _common.convert_to_dict(request_dict)
+    request_dict = _common.encode_unserializable_types(request_dict)
+
+    response_dict = self._api_client.request(
+        'post', path, request_dict, http_options
+    )
+
+    if self._api_client.vertexai:
+      response_dict = _TuningJob_from_vertex(self._api_client, response_dict)
+    else:
+      response_dict = _TuningJob_from_mldev(self._api_client, response_dict)
+
+    return_value = types.TuningJob._from_response(
+        response=response_dict, kwargs=parameter_model
+    )
+    self._api_client._verify_response(return_value)
+    return return_value
+
+  def _tune_mldev(
+      self,
+      *,
+      base_model: str,
+      training_dataset: types.TuningDatasetOrDict,
+      config: Optional[types.CreateTuningJobConfigOrDict] = None,
+  ) -> types.Operation:
+    """Creates a supervised fine-tuning job.
+
+    Args:
+      base_model: The name of the model to tune.
+      training_dataset: The training dataset to use.
+      config: The configuration to use for the tuning job.
+
+    Returns:
+      A TuningJob operation.
+    """
+
+    parameter_model = types._CreateTuningJobParameters(
+        base_model=base_model,
+        training_dataset=training_dataset,
+        config=config,
+    )
+
+    if self._api_client.vertexai:
+      raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _CreateTuningJobParameters_to_mldev(
           self._api_client, parameter_model
       )
       path = 'tunedModels'.format_map(request_dict.get('_url'))
+
     query_params = request_dict.get('_query')
     if query_params:
       path = f'{path}?{urlencode(query_params)}'
@@ -871,17 +939,13 @@ class Tunings(_api_module.BaseModule):
     )

     if self._api_client.vertexai:
-      response_dict =
-          self._api_client, response_dict
-      )
+      response_dict = _Operation_from_vertex(self._api_client, response_dict)
     else:
-      response_dict =
-          self._api_client, response_dict
-      )
+      response_dict = _Operation_from_mldev(self._api_client, response_dict)

-    return_value = types.
+    return_value = types.Operation._from_response(
         response=response_dict, kwargs=parameter_model
-    )
+    )
     self._api_client._verify_response(return_value)
     return return_value

@@ -909,21 +973,43 @@ class Tunings(_api_module.BaseModule):
     )
     return job

+  @_common.experimental_warning(
+      "The SDK's tuning implementation is experimental, "
+      'and may change in future versions.',
+  )
   def tune(
       self,
       *,
       base_model: str,
       training_dataset: types.TuningDatasetOrDict,
       config: Optional[types.CreateTuningJobConfigOrDict] = None,
-  ) -> types.
-
-
-
-
-
-
-
-
+  ) -> types.TuningJob:
+    if self._api_client.vertexai:
+      tuning_job = self._tune(
+          base_model=base_model,
+          training_dataset=training_dataset,
+          config=config,
+      )
+    else:
+      operation = self._tune_mldev(
+          base_model=base_model,
+          training_dataset=training_dataset,
+          config=config,
+      )
+      operation_dict = operation.to_json_dict()
+      try:
+        tuned_model_name = operation_dict['metadata']['tunedModel']
+      except KeyError:
+        tuned_model_name = operation_dict['name'].partition('/operations/')[0]
+      tuning_job = types.TuningJob(
+          name=tuned_model_name,
+          state=types.JobState.JOB_STATE_QUEUED,
+      )
+    if tuning_job.name and self._api_client.vertexai:
+      _IpythonUtils.display_model_tuning_button(
+          tuning_job_resource=tuning_job.name
+      )
+    return tuning_job


 class AsyncTunings(_api_module.BaseModule):
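With these changes, Tunings.tune() is decorated with an experimental warning and returns a types.TuningJob on both paths: the Vertex AI client goes through _tune(), while the Gemini API client goes through the new _tune_mldev() and synthesizes a queued TuningJob from the returned Operation. A minimal usage sketch of the synchronous path follows; the client setup, model name, and the TuningDataset/CreateTuningJobConfig field names are illustrative assumptions, not taken from this diff.

from google import genai
from google.genai import types

# Assumed Vertex AI client setup; project and location are placeholders.
client = genai.Client(vertexai=True, project='my-project', location='us-central1')

# tune() now returns a types.TuningJob in both API modes.
job = client.tunings.tune(
    base_model='gemini-1.5-flash-002',  # placeholder base model
    training_dataset=types.TuningDataset(gcs_uri='gs://my-bucket/train.jsonl'),
    config=types.CreateTuningJobConfig(tuned_model_display_name='demo-tuned-model'),
)
print(job.name, job.state)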
@@ -1054,7 +1140,7 @@ class AsyncTunings(_api_module.BaseModule):
       base_model: str,
       training_dataset: types.TuningDatasetOrDict,
       config: Optional[types.CreateTuningJobConfigOrDict] = None,
-  ) -> types.
+  ) -> types.TuningJob:
     """Creates a supervised fine-tuning job.

     Args:
@@ -1072,16 +1158,76 @@ class AsyncTunings(_api_module.BaseModule):
         config=config,
     )

-    if self._api_client.vertexai:
+    if not self._api_client.vertexai:
+      raise ValueError('This method is only supported in the Vertex AI client.')
+    else:
       request_dict = _CreateTuningJobParameters_to_vertex(
           self._api_client, parameter_model
       )
       path = 'tuningJobs'.format_map(request_dict.get('_url'))
+
+    query_params = request_dict.get('_query')
+    if query_params:
+      path = f'{path}?{urlencode(query_params)}'
+    # TODO: remove the hack that pops config.
+    request_dict.pop('config', None)
+
+    http_options = None
+    if isinstance(config, dict):
+      http_options = config.get('http_options', None)
+    elif hasattr(config, 'http_options'):
+      http_options = config.http_options
+
+    request_dict = _common.convert_to_dict(request_dict)
+    request_dict = _common.encode_unserializable_types(request_dict)
+
+    response_dict = await self._api_client.async_request(
+        'post', path, request_dict, http_options
+    )
+
+    if self._api_client.vertexai:
+      response_dict = _TuningJob_from_vertex(self._api_client, response_dict)
+    else:
+      response_dict = _TuningJob_from_mldev(self._api_client, response_dict)
+
+    return_value = types.TuningJob._from_response(
+        response=response_dict, kwargs=parameter_model
+    )
+    self._api_client._verify_response(return_value)
+    return return_value
+
+  async def _tune_mldev(
+      self,
+      *,
+      base_model: str,
+      training_dataset: types.TuningDatasetOrDict,
+      config: Optional[types.CreateTuningJobConfigOrDict] = None,
+  ) -> types.Operation:
+    """Creates a supervised fine-tuning job.
+
+    Args:
+      base_model: The name of the model to tune.
+      training_dataset: The training dataset to use.
+      config: The configuration to use for the tuning job.
+
+    Returns:
+      A TuningJob operation.
+    """
+
+    parameter_model = types._CreateTuningJobParameters(
+        base_model=base_model,
+        training_dataset=training_dataset,
+        config=config,
+    )
+
+    if self._api_client.vertexai:
+      raise ValueError('This method is only supported in the default client.')
     else:
       request_dict = _CreateTuningJobParameters_to_mldev(
           self._api_client, parameter_model
       )
       path = 'tunedModels'.format_map(request_dict.get('_url'))
+
     query_params = request_dict.get('_query')
     if query_params:
       path = f'{path}?{urlencode(query_params)}'
@@ -1102,17 +1248,13 @@ class AsyncTunings(_api_module.BaseModule):
     )

     if self._api_client.vertexai:
-      response_dict =
-          self._api_client, response_dict
-      )
+      response_dict = _Operation_from_vertex(self._api_client, response_dict)
     else:
-      response_dict =
-          self._api_client, response_dict
-      )
+      response_dict = _Operation_from_mldev(self._api_client, response_dict)

-    return_value = types.
+    return_value = types.Operation._from_response(
         response=response_dict, kwargs=parameter_model
-    )
+    )
     self._api_client._verify_response(return_value)
     return return_value

@@ -1140,21 +1282,43 @@ class AsyncTunings(_api_module.BaseModule):
     )
     return job

+  @_common.experimental_warning(
+      "The SDK's tuning implementation is experimental, "
+      'and may change in future versions.'
+  )
   async def tune(
       self,
       *,
       base_model: str,
       training_dataset: types.TuningDatasetOrDict,
       config: Optional[types.CreateTuningJobConfigOrDict] = None,
-  ) -> types.
-
-
-
-
-
-
-
-
+  ) -> types.TuningJob:
+    if self._api_client.vertexai:
+      tuning_job = await self._tune(
+          base_model=base_model,
+          training_dataset=training_dataset,
+          config=config,
+      )
+    else:
+      operation = await self._tune_mldev(
+          base_model=base_model,
+          training_dataset=training_dataset,
+          config=config,
+      )
+      operation_dict = operation.to_json_dict()
+      try:
+        tuned_model_name = operation_dict['metadata']['tunedModel']
+      except KeyError:
+        tuned_model_name = operation_dict['name'].partition('/operations/')[0]
+      tuning_job = types.TuningJob(
+          name=tuned_model_name,
+          state=types.JobState.JOB_STATE_QUEUED,
+      )
+    if tuning_job.name and self._api_client.vertexai:
+      _IpythonUtils.display_model_tuning_button(
+          tuning_job_resource=tuning_job.name
+      )
+    return tuning_job


 class _IpythonUtils:
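The AsyncTunings changes mirror the synchronous ones. A sketch of the asynchronous path against the Gemini API, where the mldev branch above derives the TuningJob name from the returned Operation; the client setup, model name, and TuningExample fields are illustrative assumptions.

import asyncio
from google import genai
from google.genai import types

async def main() -> None:
    client = genai.Client(api_key='YOUR_API_KEY')  # assumed Gemini API client
    job = await client.aio.tunings.tune(
        base_model='models/gemini-1.5-flash-001-tuning',  # placeholder base model
        training_dataset=types.TuningDataset(
            examples=[types.TuningExample(text_input='1', output='2')],
        ),
    )
    # On this path the returned job starts out as JOB_STATE_QUEUED and is named
    # after the tuned model parsed out of the Operation.
    print(job.name, job.state)

asyncio.run(main())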
google/genai/types.py
CHANGED
@@ -723,10 +723,6 @@ class HttpOptions(_common.BaseModel):
   timeout: Optional[int] = Field(
       default=None, description="""Timeout for the request in milliseconds."""
   )
-  deprecated_response_payload: Optional[dict[str, Any]] = Field(
-      default=None,
-      description="""This field is deprecated. If set, the response payload will be returned int the supplied dict.""",
-  )


 class HttpOptionsDict(TypedDict, total=False):
@@ -744,9 +740,6 @@ class HttpOptionsDict(TypedDict, total=False):
   timeout: Optional[int]
   """Timeout for the request in milliseconds."""

-  deprecated_response_payload: Optional[dict[str, Any]]
-  """This field is deprecated. If set, the response payload will be returned int the supplied dict."""
-

 HttpOptionsOrDict = Union[HttpOptions, HttpOptionsDict]

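With the deprecated field gone from both the model and the TypedDict, HttpOptions now carries only the remaining request settings. A minimal sketch; the timeout value is illustrative.

from google.genai import types

# 1.1.0: no more deprecated_response_payload; timeout stays in milliseconds,
# per the field description above.
opts = types.HttpOptions(timeout=60_000)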
@@ -978,13 +971,24 @@ class FunctionDeclaration(_common.BaseModel):
   )

   @classmethod
-  def
+  def from_callable_with_api_option(
       cls,
       *,
-      client,
       callable: Callable,
+      api_option: Literal['VERTEX_AI', 'GEMINI_API'] = 'GEMINI_API',
   ) -> 'FunctionDeclaration':
-    """Converts a Callable to a FunctionDeclaration based on the
+    """Converts a Callable to a FunctionDeclaration based on the API option.
+
+    Supported API option is 'VERTEX_AI' or 'GEMINI_API'. If api_option is unset,
+    it will default to 'GEMINI_API'. If unsupported api_option is provided, it
+    will raise ValueError.
+    """
+    supported_api_options = ['VERTEX_AI', 'GEMINI_API']
+    if api_option not in supported_api_options:
+      raise ValueError(
+          f'Unsupported api_option value: {api_option}. Supported api_option'
+          f' value is one of: {supported_api_options}.'
+      )
     from . import _automatic_function_calling_util

     parameters_properties = {}
@@ -995,7 +999,7 @@ class FunctionDeclaration(_common.BaseModel):
           inspect.Parameter.POSITIONAL_ONLY,
       ):
         schema = _automatic_function_calling_util._parse_schema_from_parameter(
-
+            api_option, param, callable.__name__
         )
         parameters_properties[name] = schema
     declaration = FunctionDeclaration(
@@ -1007,13 +1011,13 @@ class FunctionDeclaration(_common.BaseModel):
           type='OBJECT',
           properties=parameters_properties,
       )
-      if
+      if api_option == 'VERTEX_AI':
         declaration.parameters.required = (
             _automatic_function_calling_util._get_required_fields(
                 declaration.parameters
            )
        )
-    if
+    if api_option == 'GEMINI_API':
       return declaration

     return_annotation = inspect.signature(callable).return_annotation
@@ -1022,7 +1026,7 @@ class FunctionDeclaration(_common.BaseModel):

     declaration.response = (
         _automatic_function_calling_util._parse_schema_from_parameter(
-
+            api_option,
             inspect.Parameter(
                 'return_value',
                 inspect.Parameter.POSITIONAL_OR_KEYWORD,
@@ -1033,6 +1037,23 @@ class FunctionDeclaration(_common.BaseModel):
     )
     return declaration

+  @classmethod
+  def from_callable(
+      cls,
+      *,
+      client,
+      callable: Callable,
+  ) -> 'FunctionDeclaration':
+    """Converts a Callable to a FunctionDeclaration based on the client."""
+    if client.vertexai:
+      return cls.from_callable_with_api_option(
+          callable=callable, api_option='VERTEX_AI'
+      )
+    else:
+      return cls.from_callable_with_api_option(
+          callable=callable, api_option='GEMINI_API'
+      )
+

 class FunctionDeclarationDict(TypedDict, total=False):
   """Defines a function that the model can generate JSON inputs for.
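from_callable() is now a thin dispatcher over the new from_callable_with_api_option(), which works without a client. A short sketch; the example function is hypothetical.

from google.genai import types

def get_weather(city: str) -> str:
    """Returns a short weather summary for a city."""
    return f'It is sunny in {city}.'

# New in 1.1.0: convert a callable directly, choosing the target API surface.
decl = types.FunctionDeclaration.from_callable_with_api_option(
    callable=get_weather, api_option='VERTEX_AI'
)
print(decl)

# Any other api_option value raises ValueError, per the check above.
# types.FunctionDeclaration.from_callable_with_api_option(
#     callable=get_weather, api_option='OTHER')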
@@ -1916,6 +1937,20 @@ class GenerateContentConfig(_common.BaseModel):
       """,
   )

+  @pydantic.field_validator('response_schema', mode='before')
+  @classmethod
+  def _convert_literal_to_enum(cls, value):
+    if typing.get_origin(value) is typing.Literal:
+      enum_vals = typing.get_args(value)
+      if not all(isinstance(arg, str) for arg in enum_vals):
+        # This doesn't stop execution, it tells pydantic to raise a ValidationError
+        # when the class is instantiated with an unsupported Literal
+        raise ValueError(f'Literal type {value} must be a list of strings.')
+      # The title 'PlaceholderLiteralEnum' is removed from the generated Schema
+      # before sending the request
+      return Enum('PlaceholderLiteralEnum', {s: s for s in enum_vals})
+    return value
+

 class GenerateContentConfigDict(TypedDict, total=False):
   """Optional model configuration parameters.
@@ -2822,7 +2857,7 @@ class GenerateContentResponse(_common.BaseModel):
     text = ''
     any_text_part_text = False
     for part in self.candidates[0].content.parts:
-      for field_name, field_value in part.
+      for field_name, field_value in part.model_dump(
          exclude={'text', 'thought'}
       ).items():
         if field_value is not None:
@@ -2890,6 +2925,11 @@ class GenerateContentResponse(_common.BaseModel):
       enum_value = result.text.replace('"', '')
       try:
         result.parsed = response_schema(enum_value)
+        if (
+            hasattr(response_schema, '__name__')
+            and response_schema.__name__ == 'PlaceholderLiteralEnum'
+        ):
+          result.parsed = str(response_schema(enum_value).name)
       except ValueError:
         pass
     elif isinstance(response_schema, GenericAlias) or isinstance(
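Taken together, the validator and the parsing change above allow a Literal of strings to be passed as response_schema: the SDK converts it to a placeholder Enum for the request and returns the chosen value in .parsed as a plain string. A sketch; the client setup and model name are placeholders.

import typing
from google import genai
from google.genai import types

client = genai.Client()  # assumed Gemini API client

response = client.models.generate_content(
    model='gemini-2.0-flash',  # placeholder model name
    contents='Is the sky blue during the day? Answer yes or no.',
    config=types.GenerateContentConfig(
        response_mime_type='text/x.enum',
        response_schema=typing.Literal['yes', 'no'],
    ),
)
print(response.parsed)  # plain string, e.g. 'yes'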
@@ -5961,22 +6001,51 @@ _CreateTuningJobParametersOrDict = Union[
 ]


-class
-  """A
+class Operation(_common.BaseModel):
+  """A long-running operation."""

-
+  name: Optional[str] = Field(
+      default=None,
+      description="""The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""",
+  )
+  metadata: Optional[dict[str, Any]] = Field(
+      default=None,
+      description="""Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.""",
+  )
+  done: Optional[bool] = Field(
+      default=None,
+      description="""If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.""",
+  )
+  error: Optional[dict[str, Any]] = Field(
+      default=None,
+      description="""The error result of the operation in case of failure or cancellation.""",
+  )
+  response: Optional[dict[str, Any]] = Field(
+      default=None,
+      description="""The normal response of the operation in case of success.""",
+  )


-class
-  """A
+class OperationDict(TypedDict, total=False):
+  """A long-running operation."""

-
-  """"""
+  name: Optional[str]
+  """The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`."""
+
+  metadata: Optional[dict[str, Any]]
+  """Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any."""

+  done: Optional[bool]
+  """If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available."""

-
-
-
+  error: Optional[dict[str, Any]]
+  """The error result of the operation in case of failure or cancellation."""
+
+  response: Optional[dict[str, Any]]
+  """The normal response of the operation in case of success."""
+
+
+OperationOrDict = Union[Operation, OperationDict]


 class CreateCachedContentConfig(_common.BaseModel):
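Operation is now defined here as a public model (the older copy further down the file is removed in a later hunk) and is what the new _tune_mldev() path in tunings.py returns. A small sketch with made-up values, mirroring how tune() derives the tuned model name from an Operation.

from google.genai import types

op = types.Operation(
    name='tunedModels/demo-model/operations/abc123',  # made-up resource name
    metadata={'tunedModel': 'tunedModels/demo-model'},
    done=False,
)
tuned_model = (op.metadata or {}).get('tunedModel') or op.name.partition('/operations/')[0]
print(tuned_model)  # tunedModels/demo-model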
@@ -6583,13 +6652,17 @@ _CreateFileParametersOrDict = Union[
 class CreateFileResponse(_common.BaseModel):
   """Response for the create file method."""

-
+  http_headers: Optional[dict[str, str]] = Field(
+      default=None,
+      description="""Used to retain the HTTP headers in the request""",
+  )


 class CreateFileResponseDict(TypedDict, total=False):
   """Response for the create file method."""

-
+  http_headers: Optional[dict[str, str]]
+  """Used to retain the HTTP headers in the request"""


 CreateFileResponseOrDict = Union[CreateFileResponse, CreateFileResponseDict]
@@ -7285,53 +7358,6 @@ _GetOperationParametersOrDict = Union[
 ]


-class Operation(_common.BaseModel):
-  """A long-running operation."""
-
-  name: Optional[str] = Field(
-      default=None,
-      description="""The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.""",
-  )
-  metadata: Optional[dict[str, Any]] = Field(
-      default=None,
-      description="""Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.""",
-  )
-  done: Optional[bool] = Field(
-      default=None,
-      description="""If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.""",
-  )
-  error: Optional[dict[str, Any]] = Field(
-      default=None,
-      description="""The error result of the operation in case of failure or cancellation.""",
-  )
-  response: Optional[dict[str, Any]] = Field(
-      default=None,
-      description="""The normal response of the operation in case of success.""",
-  )
-
-
-class OperationDict(TypedDict, total=False):
-  """A long-running operation."""
-
-  name: Optional[str]
-  """The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`."""
-
-  metadata: Optional[dict[str, Any]]
-  """Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any."""
-
-  done: Optional[bool]
-  """If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available."""
-
-  error: Optional[dict[str, Any]]
-  """The error result of the operation in case of failure or cancellation."""
-
-  response: Optional[dict[str, Any]]
-  """The normal response of the operation in case of success."""
-
-
-OperationOrDict = Union[Operation, OperationDict]
-
-
 class FetchPredictOperationConfig(_common.BaseModel):

   http_options: Optional[HttpOptions] = Field(
google/genai/version.py
CHANGED