google-genai 1.60.0__py3-none-any.whl → 1.62.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. google/genai/_interactions/_base_client.py +5 -2
  2. google/genai/_interactions/_compat.py +3 -3
  3. google/genai/_interactions/_utils/_json.py +50 -0
  4. google/genai/_interactions/resources/interactions.py +50 -28
  5. google/genai/_interactions/types/__init__.py +2 -1
  6. google/genai/_interactions/types/content_delta.py +1 -1
  7. google/genai/_interactions/types/function_result_content.py +2 -1
  8. google/genai/_interactions/types/function_result_content_param.py +4 -4
  9. google/genai/_interactions/types/{interaction_event.py → interaction_complete_event.py} +3 -3
  10. google/genai/_interactions/types/interaction_create_params.py +4 -4
  11. google/genai/_interactions/types/interaction_get_params.py +3 -0
  12. google/genai/_interactions/types/interaction_sse_event.py +11 -2
  13. google/genai/_interactions/types/interaction_start_event.py +36 -0
  14. google/genai/batches.py +3 -0
  15. google/genai/errors.py +19 -6
  16. google/genai/files.py +15 -15
  17. google/genai/live.py +22 -2
  18. google/genai/live_music.py +14 -1
  19. google/genai/models.py +486 -197
  20. google/genai/tests/batches/test_create_with_inlined_requests.py +31 -15
  21. google/genai/tests/batches/test_get.py +1 -1
  22. google/genai/tests/client/test_client_close.py +0 -1
  23. google/genai/tests/errors/test_api_error.py +38 -0
  24. google/genai/tests/files/test_register_table.py +1 -1
  25. google/genai/tests/transformers/test_schema.py +10 -1
  26. google/genai/tests/tunings/test_tune.py +87 -0
  27. google/genai/tunings.py +211 -20
  28. google/genai/types.py +178 -14
  29. google/genai/version.py +1 -1
  30. {google_genai-1.60.0.dist-info → google_genai-1.62.0.dist-info}/METADATA +1 -1
  31. {google_genai-1.60.0.dist-info → google_genai-1.62.0.dist-info}/RECORD +34 -32
  32. {google_genai-1.60.0.dist-info → google_genai-1.62.0.dist-info}/WHEEL +1 -1
  33. {google_genai-1.60.0.dist-info → google_genai-1.62.0.dist-info}/licenses/LICENSE +0 -0
  34. {google_genai-1.60.0.dist-info → google_genai-1.62.0.dist-info}/top_level.txt +0 -0
google/genai/tunings.py CHANGED
@@ -188,6 +188,14 @@ def _CreateTuningJobConfig_to_mldev(
188
188
  if getv(from_object, ['adapter_size']) is not None:
189
189
  raise ValueError('adapter_size parameter is not supported in Gemini API.')
190
190
 
191
+ if getv(from_object, ['tuning_mode']) is not None:
192
+ raise ValueError('tuning_mode parameter is not supported in Gemini API.')
193
+
194
+ if getv(from_object, ['custom_base_model']) is not None:
195
+ raise ValueError(
196
+ 'custom_base_model parameter is not supported in Gemini API.'
197
+ )
198
+
191
199
  if getv(from_object, ['batch_size']) is not None:
192
200
  setv(
193
201
  parent_object,
@@ -213,6 +221,24 @@ def _CreateTuningJobConfig_to_mldev(
213
221
  if getv(from_object, ['beta']) is not None:
214
222
  raise ValueError('beta parameter is not supported in Gemini API.')
215
223
 
224
+ if getv(from_object, ['base_teacher_model']) is not None:
225
+ raise ValueError(
226
+ 'base_teacher_model parameter is not supported in Gemini API.'
227
+ )
228
+
229
+ if getv(from_object, ['tuned_teacher_model_source']) is not None:
230
+ raise ValueError(
231
+ 'tuned_teacher_model_source parameter is not supported in Gemini API.'
232
+ )
233
+
234
+ if getv(from_object, ['sft_loss_weight_multiplier']) is not None:
235
+ raise ValueError(
236
+ 'sft_loss_weight_multiplier parameter is not supported in Gemini API.'
237
+ )
238
+
239
+ if getv(from_object, ['output_uri']) is not None:
240
+ raise ValueError('output_uri parameter is not supported in Gemini API.')
241
+
216
242
  return to_object
217
243
 
218
244
 
@@ -246,6 +272,16 @@ def _CreateTuningJobConfig_to_vertex(
246
272
  ),
247
273
  )
248
274
 
275
+ elif discriminator == 'DISTILLATION':
276
+ if getv(from_object, ['validation_dataset']) is not None:
277
+ setv(
278
+ parent_object,
279
+ ['distillationSpec'],
280
+ _TuningValidationDataset_to_vertex(
281
+ getv(from_object, ['validation_dataset']), to_object, root_object
282
+ ),
283
+ )
284
+
249
285
  if getv(from_object, ['tuned_model_display_name']) is not None:
250
286
  setv(
251
287
  parent_object,
@@ -275,6 +311,14 @@ def _CreateTuningJobConfig_to_vertex(
275
311
  getv(from_object, ['epoch_count']),
276
312
  )
277
313
 
314
+ elif discriminator == 'DISTILLATION':
315
+ if getv(from_object, ['epoch_count']) is not None:
316
+ setv(
317
+ parent_object,
318
+ ['distillationSpec', 'hyperParameters', 'epochCount'],
319
+ getv(from_object, ['epoch_count']),
320
+ )
321
+
278
322
  discriminator = getv(root_object, ['config', 'method'])
279
323
  if discriminator is None:
280
324
  discriminator = 'SUPERVISED_FINE_TUNING'
@@ -298,6 +342,14 @@ def _CreateTuningJobConfig_to_vertex(
298
342
  getv(from_object, ['learning_rate_multiplier']),
299
343
  )
300
344
 
345
+ elif discriminator == 'DISTILLATION':
346
+ if getv(from_object, ['learning_rate_multiplier']) is not None:
347
+ setv(
348
+ parent_object,
349
+ ['distillationSpec', 'hyperParameters', 'learningRateMultiplier'],
350
+ getv(from_object, ['learning_rate_multiplier']),
351
+ )
352
+
301
353
  discriminator = getv(root_object, ['config', 'method'])
302
354
  if discriminator is None:
303
355
  discriminator = 'SUPERVISED_FINE_TUNING'
@@ -317,6 +369,14 @@ def _CreateTuningJobConfig_to_vertex(
317
369
  getv(from_object, ['export_last_checkpoint_only']),
318
370
  )
319
371
 
372
+ elif discriminator == 'DISTILLATION':
373
+ if getv(from_object, ['export_last_checkpoint_only']) is not None:
374
+ setv(
375
+ parent_object,
376
+ ['distillationSpec', 'exportLastCheckpointOnly'],
377
+ getv(from_object, ['export_last_checkpoint_only']),
378
+ )
379
+
320
380
  discriminator = getv(root_object, ['config', 'method'])
321
381
  if discriminator is None:
322
382
  discriminator = 'SUPERVISED_FINE_TUNING'
@@ -336,11 +396,53 @@ def _CreateTuningJobConfig_to_vertex(
336
396
  getv(from_object, ['adapter_size']),
337
397
  )
338
398
 
339
- if getv(from_object, ['batch_size']) is not None:
340
- raise ValueError('batch_size parameter is not supported in Vertex AI.')
399
+ elif discriminator == 'DISTILLATION':
400
+ if getv(from_object, ['adapter_size']) is not None:
401
+ setv(
402
+ parent_object,
403
+ ['distillationSpec', 'hyperParameters', 'adapterSize'],
404
+ getv(from_object, ['adapter_size']),
405
+ )
341
406
 
342
- if getv(from_object, ['learning_rate']) is not None:
343
- raise ValueError('learning_rate parameter is not supported in Vertex AI.')
407
+ discriminator = getv(root_object, ['config', 'method'])
408
+ if discriminator is None:
409
+ discriminator = 'SUPERVISED_FINE_TUNING'
410
+ if discriminator == 'SUPERVISED_FINE_TUNING':
411
+ if getv(from_object, ['tuning_mode']) is not None:
412
+ setv(
413
+ parent_object,
414
+ ['supervisedTuningSpec', 'tuningMode'],
415
+ getv(from_object, ['tuning_mode']),
416
+ )
417
+
418
+ if getv(from_object, ['custom_base_model']) is not None:
419
+ setv(
420
+ parent_object,
421
+ ['customBaseModel'],
422
+ getv(from_object, ['custom_base_model']),
423
+ )
424
+
425
+ discriminator = getv(root_object, ['config', 'method'])
426
+ if discriminator is None:
427
+ discriminator = 'SUPERVISED_FINE_TUNING'
428
+ if discriminator == 'SUPERVISED_FINE_TUNING':
429
+ if getv(from_object, ['batch_size']) is not None:
430
+ setv(
431
+ parent_object,
432
+ ['supervisedTuningSpec', 'hyperParameters', 'batchSize'],
433
+ getv(from_object, ['batch_size']),
434
+ )
435
+
436
+ discriminator = getv(root_object, ['config', 'method'])
437
+ if discriminator is None:
438
+ discriminator = 'SUPERVISED_FINE_TUNING'
439
+ if discriminator == 'SUPERVISED_FINE_TUNING':
440
+ if getv(from_object, ['learning_rate']) is not None:
441
+ setv(
442
+ parent_object,
443
+ ['supervisedTuningSpec', 'hyperParameters', 'learningRate'],
444
+ getv(from_object, ['learning_rate']),
445
+ )
344
446
 
345
447
  discriminator = getv(root_object, ['config', 'method'])
346
448
  if discriminator is None:
@@ -365,6 +467,16 @@ def _CreateTuningJobConfig_to_vertex(
365
467
  ),
366
468
  )
367
469
 
470
+ elif discriminator == 'DISTILLATION':
471
+ if getv(from_object, ['evaluation_config']) is not None:
472
+ setv(
473
+ parent_object,
474
+ ['distillationSpec', 'evaluationConfig'],
475
+ _EvaluationConfig_to_vertex(
476
+ getv(from_object, ['evaluation_config']), to_object, root_object
477
+ ),
478
+ )
479
+
368
480
  if getv(from_object, ['labels']) is not None:
369
481
  setv(parent_object, ['labels'], getv(from_object, ['labels']))
370
482
 
@@ -375,6 +487,30 @@ def _CreateTuningJobConfig_to_vertex(
375
487
  getv(from_object, ['beta']),
376
488
  )
377
489
 
490
+ if getv(from_object, ['base_teacher_model']) is not None:
491
+ setv(
492
+ parent_object,
493
+ ['distillationSpec', 'baseTeacherModel'],
494
+ getv(from_object, ['base_teacher_model']),
495
+ )
496
+
497
+ if getv(from_object, ['tuned_teacher_model_source']) is not None:
498
+ setv(
499
+ parent_object,
500
+ ['distillationSpec', 'tunedTeacherModelSource'],
501
+ getv(from_object, ['tuned_teacher_model_source']),
502
+ )
503
+
504
+ if getv(from_object, ['sft_loss_weight_multiplier']) is not None:
505
+ setv(
506
+ parent_object,
507
+ ['distillationSpec', 'hyperParameters', 'sftLossWeightMultiplier'],
508
+ getv(from_object, ['sft_loss_weight_multiplier']),
509
+ )
510
+
511
+ if getv(from_object, ['output_uri']) is not None:
512
+ setv(parent_object, ['outputUri'], getv(from_object, ['output_uri']))
513
+
378
514
  return to_object
379
515
 
380
516
 
@@ -920,6 +1056,14 @@ def _TuningDataset_to_vertex(
920
1056
  getv(from_object, ['gcs_uri']),
921
1057
  )
922
1058
 
1059
+ elif discriminator == 'DISTILLATION':
1060
+ if getv(from_object, ['gcs_uri']) is not None:
1061
+ setv(
1062
+ parent_object,
1063
+ ['distillationSpec', 'promptDatasetUri'],
1064
+ getv(from_object, ['gcs_uri']),
1065
+ )
1066
+
923
1067
  discriminator = getv(root_object, ['config', 'method'])
924
1068
  if discriminator is None:
925
1069
  discriminator = 'SUPERVISED_FINE_TUNING'
@@ -939,6 +1083,14 @@ def _TuningDataset_to_vertex(
939
1083
  getv(from_object, ['vertex_dataset_resource']),
940
1084
  )
941
1085
 
1086
+ elif discriminator == 'DISTILLATION':
1087
+ if getv(from_object, ['vertex_dataset_resource']) is not None:
1088
+ setv(
1089
+ parent_object,
1090
+ ['distillationSpec', 'promptDatasetUri'],
1091
+ getv(from_object, ['vertex_dataset_resource']),
1092
+ )
1093
+
942
1094
  if getv(from_object, ['examples']) is not None:
943
1095
  raise ValueError('examples parameter is not supported in Vertex AI.')
944
1096
 
@@ -1066,6 +1218,13 @@ def _TuningJob_from_vertex(
1066
1218
  getv(from_object, ['preferenceOptimizationSpec']),
1067
1219
  )
1068
1220
 
1221
+ if getv(from_object, ['distillationSpec']) is not None:
1222
+ setv(
1223
+ to_object,
1224
+ ['distillation_spec'],
1225
+ getv(from_object, ['distillationSpec']),
1226
+ )
1227
+
1069
1228
  if getv(from_object, ['tuningDataStats']) is not None:
1070
1229
  setv(
1071
1230
  to_object, ['tuning_data_stats'], getv(from_object, ['tuningDataStats'])
@@ -1231,10 +1390,14 @@ class Tunings(_api_module.BaseModule):
1231
1390
  response_dict = {} if not response.body else json.loads(response.body)
1232
1391
 
1233
1392
  if self._api_client.vertexai:
1234
- response_dict = _TuningJob_from_vertex(response_dict)
1393
+ response_dict = _TuningJob_from_vertex(
1394
+ response_dict, None, parameter_model
1395
+ )
1235
1396
 
1236
1397
  if not self._api_client.vertexai:
1237
- response_dict = _TuningJob_from_mldev(response_dict)
1398
+ response_dict = _TuningJob_from_mldev(
1399
+ response_dict, None, parameter_model
1400
+ )
1238
1401
 
1239
1402
  return_value = types.TuningJob._from_response(
1240
1403
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1293,10 +1456,14 @@ class Tunings(_api_module.BaseModule):
1293
1456
  response_dict = {} if not response.body else json.loads(response.body)
1294
1457
 
1295
1458
  if self._api_client.vertexai:
1296
- response_dict = _ListTuningJobsResponse_from_vertex(response_dict)
1459
+ response_dict = _ListTuningJobsResponse_from_vertex(
1460
+ response_dict, None, parameter_model
1461
+ )
1297
1462
 
1298
1463
  if not self._api_client.vertexai:
1299
- response_dict = _ListTuningJobsResponse_from_mldev(response_dict)
1464
+ response_dict = _ListTuningJobsResponse_from_mldev(
1465
+ response_dict, None, parameter_model
1466
+ )
1300
1467
 
1301
1468
  return_value = types.ListTuningJobsResponse._from_response(
1302
1469
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1367,10 +1534,14 @@ class Tunings(_api_module.BaseModule):
1367
1534
  response_dict = {} if not response.body else json.loads(response.body)
1368
1535
 
1369
1536
  if self._api_client.vertexai:
1370
- response_dict = _CancelTuningJobResponse_from_vertex(response_dict)
1537
+ response_dict = _CancelTuningJobResponse_from_vertex(
1538
+ response_dict, None, parameter_model
1539
+ )
1371
1540
 
1372
1541
  if not self._api_client.vertexai:
1373
- response_dict = _CancelTuningJobResponse_from_mldev(response_dict)
1542
+ response_dict = _CancelTuningJobResponse_from_mldev(
1543
+ response_dict, None, parameter_model
1544
+ )
1374
1545
 
1375
1546
  return_value = types.CancelTuningJobResponse._from_response(
1376
1547
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1443,7 +1614,9 @@ class Tunings(_api_module.BaseModule):
1443
1614
  response_dict = {} if not response.body else json.loads(response.body)
1444
1615
 
1445
1616
  if self._api_client.vertexai:
1446
- response_dict = _TuningJob_from_vertex(response_dict)
1617
+ response_dict = _TuningJob_from_vertex(
1618
+ response_dict, None, parameter_model
1619
+ )
1447
1620
 
1448
1621
  return_value = types.TuningJob._from_response(
1449
1622
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1518,7 +1691,9 @@ class Tunings(_api_module.BaseModule):
1518
1691
  response_dict = {} if not response.body else json.loads(response.body)
1519
1692
 
1520
1693
  if not self._api_client.vertexai:
1521
- response_dict = _TuningOperation_from_mldev(response_dict)
1694
+ response_dict = _TuningOperation_from_mldev(
1695
+ response_dict, None, parameter_model
1696
+ )
1522
1697
 
1523
1698
  return_value = types.TuningOperation._from_response(
1524
1699
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1723,10 +1898,14 @@ class AsyncTunings(_api_module.BaseModule):
1723
1898
  response_dict = {} if not response.body else json.loads(response.body)
1724
1899
 
1725
1900
  if self._api_client.vertexai:
1726
- response_dict = _TuningJob_from_vertex(response_dict)
1901
+ response_dict = _TuningJob_from_vertex(
1902
+ response_dict, None, parameter_model
1903
+ )
1727
1904
 
1728
1905
  if not self._api_client.vertexai:
1729
- response_dict = _TuningJob_from_mldev(response_dict)
1906
+ response_dict = _TuningJob_from_mldev(
1907
+ response_dict, None, parameter_model
1908
+ )
1730
1909
 
1731
1910
  return_value = types.TuningJob._from_response(
1732
1911
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1787,10 +1966,14 @@ class AsyncTunings(_api_module.BaseModule):
1787
1966
  response_dict = {} if not response.body else json.loads(response.body)
1788
1967
 
1789
1968
  if self._api_client.vertexai:
1790
- response_dict = _ListTuningJobsResponse_from_vertex(response_dict)
1969
+ response_dict = _ListTuningJobsResponse_from_vertex(
1970
+ response_dict, None, parameter_model
1971
+ )
1791
1972
 
1792
1973
  if not self._api_client.vertexai:
1793
- response_dict = _ListTuningJobsResponse_from_mldev(response_dict)
1974
+ response_dict = _ListTuningJobsResponse_from_mldev(
1975
+ response_dict, None, parameter_model
1976
+ )
1794
1977
 
1795
1978
  return_value = types.ListTuningJobsResponse._from_response(
1796
1979
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1861,10 +2044,14 @@ class AsyncTunings(_api_module.BaseModule):
1861
2044
  response_dict = {} if not response.body else json.loads(response.body)
1862
2045
 
1863
2046
  if self._api_client.vertexai:
1864
- response_dict = _CancelTuningJobResponse_from_vertex(response_dict)
2047
+ response_dict = _CancelTuningJobResponse_from_vertex(
2048
+ response_dict, None, parameter_model
2049
+ )
1865
2050
 
1866
2051
  if not self._api_client.vertexai:
1867
- response_dict = _CancelTuningJobResponse_from_mldev(response_dict)
2052
+ response_dict = _CancelTuningJobResponse_from_mldev(
2053
+ response_dict, None, parameter_model
2054
+ )
1868
2055
 
1869
2056
  return_value = types.CancelTuningJobResponse._from_response(
1870
2057
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -1937,7 +2124,9 @@ class AsyncTunings(_api_module.BaseModule):
1937
2124
  response_dict = {} if not response.body else json.loads(response.body)
1938
2125
 
1939
2126
  if self._api_client.vertexai:
1940
- response_dict = _TuningJob_from_vertex(response_dict)
2127
+ response_dict = _TuningJob_from_vertex(
2128
+ response_dict, None, parameter_model
2129
+ )
1941
2130
 
1942
2131
  return_value = types.TuningJob._from_response(
1943
2132
  response=response_dict, kwargs=parameter_model.model_dump()
@@ -2012,7 +2201,9 @@ class AsyncTunings(_api_module.BaseModule):
2012
2201
  response_dict = {} if not response.body else json.loads(response.body)
2013
2202
 
2014
2203
  if not self._api_client.vertexai:
2015
- response_dict = _TuningOperation_from_mldev(response_dict)
2204
+ response_dict = _TuningOperation_from_mldev(
2205
+ response_dict, None, parameter_model
2206
+ )
2016
2207
 
2017
2208
  return_value = types.TuningOperation._from_response(
2018
2209
  response=response_dict, kwargs=parameter_model.model_dump()