google-genai 1.4.0__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- google/genai/_api_client.py +207 -111
- google/genai/_automatic_function_calling_util.py +6 -16
- google/genai/_common.py +5 -2
- google/genai/_extra_utils.py +62 -47
- google/genai/_replay_api_client.py +70 -2
- google/genai/_transformers.py +98 -57
- google/genai/batches.py +14 -10
- google/genai/caches.py +30 -36
- google/genai/client.py +3 -2
- google/genai/errors.py +11 -19
- google/genai/files.py +28 -15
- google/genai/live.py +276 -93
- google/genai/models.py +201 -112
- google/genai/operations.py +40 -12
- google/genai/pagers.py +17 -10
- google/genai/tunings.py +40 -30
- google/genai/types.py +146 -58
- google/genai/version.py +1 -1
- {google_genai-1.4.0.dist-info → google_genai-1.6.0.dist-info}/METADATA +194 -24
- google_genai-1.6.0.dist-info/RECORD +27 -0
- {google_genai-1.4.0.dist-info → google_genai-1.6.0.dist-info}/WHEEL +1 -1
- google_genai-1.4.0.dist-info/RECORD +0 -27
- {google_genai-1.4.0.dist-info → google_genai-1.6.0.dist-info}/LICENSE +0 -0
- {google_genai-1.4.0.dist-info → google_genai-1.6.0.dist-info}/top_level.txt +0 -0
google/genai/batches.py
CHANGED
@@ -668,7 +668,7 @@ class Batches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -737,7 +737,7 @@ class Batches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -807,7 +807,7 @@ class Batches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -846,7 +846,7 @@ class Batches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -921,7 +921,7 @@ class Batches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -998,6 +998,8 @@ class Batches(_api_module.BaseModule):
       for batch_job in batch_jobs:
         print(f"Batch job: {batch_job.name}, state {batch_job.state}")
     """
+    if config is None:
+      config = types.ListBatchJobsConfig()
     return Pager(
         'batch_jobs',
         self._list,
@@ -1043,7 +1045,7 @@ class AsyncBatches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1112,7 +1114,7 @@ class AsyncBatches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1182,7 +1184,7 @@ class AsyncBatches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1221,7 +1223,7 @@ class AsyncBatches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1296,7 +1298,7 @@ class AsyncBatches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1373,6 +1375,8 @@ class AsyncBatches(_api_module.BaseModule):
       await batch_jobs_pager.next_page()
       print(f"next page: {batch_jobs_pager.page}")
     """
+    if config is None:
+      config = types.ListBatchJobsConfig()
    return AsyncPager(
        'batch_jobs',
        self._list,
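In 1.6.0, `Batches.list` and `AsyncBatches.list` fall back to a default `types.ListBatchJobsConfig()` when no config is passed. A minimal sketch of how that surfaces to callers, assuming the Vertex AI backend; the project and location values are placeholders:

```python
from google import genai
from google.genai import types

# Placeholder project/location; this sketch assumes the Vertex AI backend.
client = genai.Client(vertexai=True, project='my-project', location='us-central1')

# With the 1.6.0 change, config may be omitted entirely; a default
# ListBatchJobsConfig() is supplied before the Pager is built.
for batch_job in client.batches.list():
    print(f"Batch job: {batch_job.name}, state {batch_job.state}")

# Passing an explicit config still works as before.
pager = client.batches.list(config=types.ListBatchJobsConfig(page_size=10))
```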
google/genai/caches.py
CHANGED
@@ -174,15 +174,9 @@ def _Schema_to_mldev(
   if getv(from_object, ['pattern']) is not None:
     raise ValueError('pattern parameter is not supported in Gemini API.')
 
-  if getv(from_object, ['minimum']) is not None:
-    raise ValueError('minimum parameter is not supported in Gemini API.')
-
   if getv(from_object, ['default']) is not None:
     raise ValueError('default parameter is not supported in Gemini API.')
 
-  if getv(from_object, ['any_of']) is not None:
-    raise ValueError('any_of parameter is not supported in Gemini API.')
-
   if getv(from_object, ['max_length']) is not None:
     raise ValueError('max_length parameter is not supported in Gemini API.')
 
@@ -195,12 +189,12 @@ def _Schema_to_mldev(
   if getv(from_object, ['min_properties']) is not None:
     raise ValueError('min_properties parameter is not supported in Gemini API.')
 
-  if getv(from_object, ['maximum']) is not None:
-    raise ValueError('maximum parameter is not supported in Gemini API.')
-
   if getv(from_object, ['max_properties']) is not None:
     raise ValueError('max_properties parameter is not supported in Gemini API.')
 
+  if getv(from_object, ['any_of']) is not None:
+    setv(to_object, ['anyOf'], getv(from_object, ['any_of']))
+
   if getv(from_object, ['description']) is not None:
     setv(to_object, ['description'], getv(from_object, ['description']))
 
@@ -216,9 +210,15 @@ def _Schema_to_mldev(
   if getv(from_object, ['max_items']) is not None:
     setv(to_object, ['maxItems'], getv(from_object, ['max_items']))
 
+  if getv(from_object, ['maximum']) is not None:
+    setv(to_object, ['maximum'], getv(from_object, ['maximum']))
+
   if getv(from_object, ['min_items']) is not None:
     setv(to_object, ['minItems'], getv(from_object, ['min_items']))
 
+  if getv(from_object, ['minimum']) is not None:
+    setv(to_object, ['minimum'], getv(from_object, ['minimum']))
+
   if getv(from_object, ['nullable']) is not None:
     setv(to_object, ['nullable'], getv(from_object, ['nullable']))
 
@@ -253,15 +253,9 @@ def _Schema_to_vertex(
   if getv(from_object, ['pattern']) is not None:
     setv(to_object, ['pattern'], getv(from_object, ['pattern']))
 
-  if getv(from_object, ['minimum']) is not None:
-    setv(to_object, ['minimum'], getv(from_object, ['minimum']))
-
   if getv(from_object, ['default']) is not None:
     setv(to_object, ['default'], getv(from_object, ['default']))
 
-  if getv(from_object, ['any_of']) is not None:
-    setv(to_object, ['anyOf'], getv(from_object, ['any_of']))
-
   if getv(from_object, ['max_length']) is not None:
     setv(to_object, ['maxLength'], getv(from_object, ['max_length']))
 
@@ -274,12 +268,12 @@ def _Schema_to_vertex(
   if getv(from_object, ['min_properties']) is not None:
     setv(to_object, ['minProperties'], getv(from_object, ['min_properties']))
 
-  if getv(from_object, ['maximum']) is not None:
-    setv(to_object, ['maximum'], getv(from_object, ['maximum']))
-
   if getv(from_object, ['max_properties']) is not None:
     setv(to_object, ['maxProperties'], getv(from_object, ['max_properties']))
 
+  if getv(from_object, ['any_of']) is not None:
+    setv(to_object, ['anyOf'], getv(from_object, ['any_of']))
+
   if getv(from_object, ['description']) is not None:
     setv(to_object, ['description'], getv(from_object, ['description']))
 
@@ -295,9 +289,15 @@ def _Schema_to_vertex(
   if getv(from_object, ['max_items']) is not None:
     setv(to_object, ['maxItems'], getv(from_object, ['max_items']))
 
+  if getv(from_object, ['maximum']) is not None:
+    setv(to_object, ['maximum'], getv(from_object, ['maximum']))
+
   if getv(from_object, ['min_items']) is not None:
     setv(to_object, ['minItems'], getv(from_object, ['min_items']))
 
+  if getv(from_object, ['minimum']) is not None:
+    setv(to_object, ['minimum'], getv(from_object, ['minimum']))
+
   if getv(from_object, ['nullable']) is not None:
     setv(to_object, ['nullable'], getv(from_object, ['nullable']))
 
@@ -1176,10 +1176,7 @@ class Caches(_api_module.BaseModule):
       model: str,
       config: Optional[types.CreateCachedContentConfigOrDict] = None,
   ) -> types.CachedContent:
-    """Creates cached
-
-    content in the data storage, and users need to pay for the cache data
-    storage.
+    """Creates a cached contents resource.
 
     Usage:
 
@@ -1231,7 +1228,7 @@ class Caches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1301,7 +1298,7 @@ class Caches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1373,7 +1370,7 @@ class Caches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1450,7 +1447,7 @@ class Caches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1518,7 +1515,7 @@ class Caches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1562,10 +1559,7 @@ class AsyncCaches(_api_module.BaseModule):
      model: str,
      config: Optional[types.CreateCachedContentConfigOrDict] = None,
   ) -> types.CachedContent:
-    """Creates cached
-
-    content in the data storage, and users need to pay for the cache data
-    storage.
+    """Creates a cached contents resource.
 
     Usage:
 
@@ -1617,7 +1611,7 @@ class AsyncCaches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1688,7 +1682,7 @@ class AsyncCaches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1761,7 +1755,7 @@ class AsyncCaches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1838,7 +1832,7 @@ class AsyncCaches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1906,7 +1900,7 @@ class AsyncCaches(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
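The net effect of the `_Schema_to_mldev` changes is that `minimum`, `maximum`, and `any_of` are no longer rejected for the Gemini API; they are now forwarded as `minimum`, `maximum`, and `anyOf`, matching the Vertex converter. A sketch of a schema that raised `ValueError` under 1.4.0 and should convert cleanly under 1.6.0; the field layout here is illustrative only:

```python
from google.genai import types

# Under 1.4.0 the Gemini (mldev) converter raised ValueError for
# minimum/maximum/any_of; under 1.6.0 they map to minimum, maximum and anyOf.
response_schema = types.Schema(
    type=types.Type.OBJECT,
    properties={
        'rating': types.Schema(
            type=types.Type.NUMBER,
            minimum=1,
            maximum=5,
        ),
        'payload': types.Schema(
            any_of=[
                types.Schema(type=types.Type.STRING),
                types.Schema(type=types.Type.INTEGER),
            ],
        ),
    },
)
```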
google/genai/client.py
CHANGED
@@ -130,8 +130,9 @@ class Client:
       from environment variables. Applies to the Vertex AI API only.
     debug_config: Config settings that control network behavior of the client.
       This is typically used when running test code.
-    http_options: Http options to use for the client.
-
+    http_options: Http options to use for the client. These options will be
+      applied to all requests made by the client. Example usage:
+      `client = genai.Client(http_options=types.HttpOptions(api_version='v1'))`.
 
   Usage for the Gemini Developer API:
 
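The expanded `http_options` docstring documents client-wide HTTP options. A minimal sketch based on the example added to the docstring; the API key is a placeholder:

```python
from google import genai
from google.genai import types

# http_options set at construction time apply to every request the client makes.
client = genai.Client(
    api_key='YOUR_API_KEY',  # placeholder
    http_options=types.HttpOptions(api_version='v1'),
)
```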
google/genai/errors.py
CHANGED
@@ -18,7 +18,6 @@
 from typing import Any, Optional, TYPE_CHECKING, Union
 import httpx
 import json
-import requests
 
 
 if TYPE_CHECKING:
@@ -28,7 +27,7 @@ if TYPE_CHECKING:
 class APIError(Exception):
   """General errors raised by the GenAI API."""
   code: int
-  response: Union[
+  response: Union['ReplayResponse', httpx.Response]
 
   status: Optional[str] = None
   message: Optional[str] = None
@@ -36,28 +35,21 @@ class APIError(Exception):
   def __init__(
       self,
       code: int,
-      response: Union[
+      response: Union['ReplayResponse', httpx.Response],
   ):
     self.response = response
-
-    if isinstance(response,
+    message = None
+    if isinstance(response, httpx.Response):
       try:
-        # do not do any extra muanipulation on the response.
-        # return the raw response json as is.
         response_json = response.json()
-      except
+      except (json.decoder.JSONDecodeError):
+        message = response.text
         response_json = {
-            'message':
-            'status': response.
+            'message': message,
+            'status': response.reason_phrase,
         }
-
-
-        response_json = response.json()
-      except (json.decoder.JSONDecodeError, httpx.ResponseNotRead):
-        try:
-          message = response.text
-        except httpx.ResponseNotRead:
-          message = None
+      except httpx.ResponseNotRead:
+        message = 'Response not read'
         response_json = {
            'message': message,
            'status': response.reason_phrase,
@@ -103,7 +95,7 @@ class APIError(Exception):
 
   @classmethod
   def raise_for_response(
-      cls, response: Union[
+      cls, response: Union['ReplayResponse', httpx.Response]
   ):
     """Raises an error with detailed error message if the response has an error status."""
     if response.status_code == 200:
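With the `requests` import dropped, `APIError` is now typed against `httpx.Response` (and the replay client's `ReplayResponse`). A hedged sketch of how callers typically observe this; `code`, `status`, and `message` come from the class shown above, while the API key and model name are placeholders:

```python
from google import genai
from google.genai import errors

client = genai.Client(api_key='YOUR_API_KEY')  # placeholder key

try:
    client.models.generate_content(
        model='gemini-2.0-flash',  # placeholder model name
        contents='hello',
    )
except errors.APIError as e:
    # e.response is now an httpx.Response (or a ReplayResponse in replay tests).
    print(e.code, e.status, e.message)
```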
google/genai/files.py
CHANGED
@@ -583,7 +583,7 @@ class Files(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -641,7 +641,7 @@ class Files(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -713,7 +713,7 @@ class Files(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -780,7 +780,7 @@ class Files(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -826,7 +826,7 @@ class Files(_api_module.BaseModule):
           'Vertex AI does not support creating files. You can upload files to'
           ' GCS files instead.'
       )
-    config_model =
+    config_model = types.UploadFileConfig()
     if config:
       if isinstance(config, dict):
         config_model = types.UploadFileConfig(**config)
@@ -907,7 +907,7 @@ class Files(_api_module.BaseModule):
 
     return types.File._from_response(
         response=_File_from_mldev(self._api_client, return_file['file']),
-        kwargs=
+        kwargs=config_model.model_dump() if config else {},
     )
 
   def list(
@@ -923,7 +923,7 @@ class Files(_api_module.BaseModule):
   def download(
       self,
       *,
-      file: Union[str, types.File],
+      file: Union[str, types.File, types.Video, types.GeneratedVideo],
      config: Optional[types.DownloadFileConfigOrDict] = None,
   ) -> bytes:
     """Downloads a file's data from storage.
@@ -931,6 +931,10 @@ class Files(_api_module.BaseModule):
     Files created by `upload` can't be downloaded. You can tell which files are
     downloadable by checking the `source` or `download_uri` property.
 
+    Note: This method returns the data as bytes. For `Video` and
+    `GeneratedVideo` objects there is an additional side effect, that it also
+    sets the `video_bytes` property on the `Video` object.
+
     Args:
       file (str): A file name, uri, or file object. Identifying which file to
         download.
@@ -952,6 +956,10 @@ class Files(_api_module.BaseModule):
       data = client.files.download(file=file)
       # data = client.files.download(file=file.name)
      # data = client.files.download(file=file.download_uri)
+
+      video = types.Video(uri=file.uri)
+      video_bytes = client.files.download(file=video)
+      video.video_bytes
     """
     if self._api_client.vertexai:
       raise ValueError(
@@ -971,7 +979,7 @@ class Files(_api_module.BaseModule):
           'downloaded. You can tell which files are downloadable by checking '
          'the `source` or `download_uri` property.'
      )
-    name = t.t_file_name(self, file)
+    name = t.t_file_name(self._api_client, file)
 
     path = f'files/{name}:download'
 
@@ -986,6 +994,11 @@ class Files(_api_module.BaseModule):
         http_options,
     )
 
+    if isinstance(file, types.Video):
+      file.video_bytes = data
+    elif isinstance(file, types.GeneratedVideo) and file.video is not None:
+      file.video.video_bytes = data
+
     return data
 
 
@@ -1037,7 +1050,7 @@ class AsyncFiles(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1095,7 +1108,7 @@ class AsyncFiles(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1167,7 +1180,7 @@ class AsyncFiles(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1234,7 +1247,7 @@ class AsyncFiles(_api_module.BaseModule):
     http_options: Optional[types.HttpOptionsOrDict] = None
     if isinstance(config, dict):
       http_options = config.get('http_options', None)
-    elif hasattr(config, 'http_options'):
+    elif hasattr(config, 'http_options') and config is not None:
       http_options = config.http_options
 
     request_dict = _common.convert_to_dict(request_dict)
@@ -1280,7 +1293,7 @@ class AsyncFiles(_api_module.BaseModule):
           'Vertex AI does not support creating files. You can upload files to'
          ' GCS files instead.'
      )
-    config_model =
+    config_model = types.UploadFileConfig()
     if config:
       if isinstance(config, dict):
         config_model = types.UploadFileConfig(**config)
@@ -1360,7 +1373,7 @@ class AsyncFiles(_api_module.BaseModule):
 
     return types.File._from_response(
         response=_File_from_mldev(self._api_client, return_file['file']),
-        kwargs=
+        kwargs=config_model.model_dump() if config else {},
     )
 
   async def list(
@@ -1420,7 +1433,7 @@ class AsyncFiles(_api_module.BaseModule):
     else:
       config_model = config
 
-    name = t.t_file_name(self, file)
+    name = t.t_file_name(self._api_client, file)
 
     path = f'files/{name}:download'
 
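Taken together, the `files.py` changes let `download` accept `types.Video` and `types.GeneratedVideo` and, as a side effect, populate `video_bytes` on the video object. A sketch following the usage example added to the docstring; the API key and video URI are placeholders:

```python
from google import genai
from google.genai import types

client = genai.Client(api_key='YOUR_API_KEY')  # placeholder key

# Download by Video object: returns the bytes and also sets video.video_bytes.
video = types.Video(uri='files/abc123')  # placeholder file reference
video_bytes = client.files.download(file=video)
assert video.video_bytes == video_bytes

# A GeneratedVideo wrapper works the same way; its inner .video receives the bytes.
generated = types.GeneratedVideo(video=video)
data = client.files.download(file=generated)
```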