google-genai 1.5.0__py3-none-any.whl → 1.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
google/genai/caches.py CHANGED
@@ -174,15 +174,9 @@ def _Schema_to_mldev(
   if getv(from_object, ['pattern']) is not None:
     raise ValueError('pattern parameter is not supported in Gemini API.')
 
-  if getv(from_object, ['minimum']) is not None:
-    raise ValueError('minimum parameter is not supported in Gemini API.')
-
   if getv(from_object, ['default']) is not None:
     raise ValueError('default parameter is not supported in Gemini API.')
 
-  if getv(from_object, ['any_of']) is not None:
-    raise ValueError('any_of parameter is not supported in Gemini API.')
-
   if getv(from_object, ['max_length']) is not None:
     raise ValueError('max_length parameter is not supported in Gemini API.')
 
@@ -195,12 +189,12 @@ def _Schema_to_mldev(
   if getv(from_object, ['min_properties']) is not None:
     raise ValueError('min_properties parameter is not supported in Gemini API.')
 
-  if getv(from_object, ['maximum']) is not None:
-    raise ValueError('maximum parameter is not supported in Gemini API.')
-
   if getv(from_object, ['max_properties']) is not None:
     raise ValueError('max_properties parameter is not supported in Gemini API.')
 
+  if getv(from_object, ['any_of']) is not None:
+    setv(to_object, ['anyOf'], getv(from_object, ['any_of']))
+
   if getv(from_object, ['description']) is not None:
     setv(to_object, ['description'], getv(from_object, ['description']))
 
@@ -216,9 +210,15 @@ def _Schema_to_mldev(
   if getv(from_object, ['max_items']) is not None:
     setv(to_object, ['maxItems'], getv(from_object, ['max_items']))
 
+  if getv(from_object, ['maximum']) is not None:
+    setv(to_object, ['maximum'], getv(from_object, ['maximum']))
+
   if getv(from_object, ['min_items']) is not None:
     setv(to_object, ['minItems'], getv(from_object, ['min_items']))
 
+  if getv(from_object, ['minimum']) is not None:
+    setv(to_object, ['minimum'], getv(from_object, ['minimum']))
+
   if getv(from_object, ['nullable']) is not None:
     setv(to_object, ['nullable'], getv(from_object, ['nullable']))
 
@@ -253,15 +253,9 @@ def _Schema_to_vertex(
   if getv(from_object, ['pattern']) is not None:
     setv(to_object, ['pattern'], getv(from_object, ['pattern']))
 
-  if getv(from_object, ['minimum']) is not None:
-    setv(to_object, ['minimum'], getv(from_object, ['minimum']))
-
   if getv(from_object, ['default']) is not None:
     setv(to_object, ['default'], getv(from_object, ['default']))
 
-  if getv(from_object, ['any_of']) is not None:
-    setv(to_object, ['anyOf'], getv(from_object, ['any_of']))
-
   if getv(from_object, ['max_length']) is not None:
     setv(to_object, ['maxLength'], getv(from_object, ['max_length']))
 
@@ -274,12 +268,12 @@ def _Schema_to_vertex(
   if getv(from_object, ['min_properties']) is not None:
     setv(to_object, ['minProperties'], getv(from_object, ['min_properties']))
 
-  if getv(from_object, ['maximum']) is not None:
-    setv(to_object, ['maximum'], getv(from_object, ['maximum']))
-
   if getv(from_object, ['max_properties']) is not None:
     setv(to_object, ['maxProperties'], getv(from_object, ['max_properties']))
 
+  if getv(from_object, ['any_of']) is not None:
+    setv(to_object, ['anyOf'], getv(from_object, ['any_of']))
+
   if getv(from_object, ['description']) is not None:
     setv(to_object, ['description'], getv(from_object, ['description']))
 
@@ -295,9 +289,15 @@ def _Schema_to_vertex(
   if getv(from_object, ['max_items']) is not None:
     setv(to_object, ['maxItems'], getv(from_object, ['max_items']))
 
+  if getv(from_object, ['maximum']) is not None:
+    setv(to_object, ['maximum'], getv(from_object, ['maximum']))
+
   if getv(from_object, ['min_items']) is not None:
     setv(to_object, ['minItems'], getv(from_object, ['min_items']))
 
+  if getv(from_object, ['minimum']) is not None:
+    setv(to_object, ['minimum'], getv(from_object, ['minimum']))
+
   if getv(from_object, ['nullable']) is not None:
     setv(to_object, ['nullable'], getv(from_object, ['nullable']))
 
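Net effect of the converter changes above: `minimum`, `maximum`, and `any_of` on a `types.Schema` are now forwarded to the Gemini Developer API (as `minimum`, `maximum`, and `anyOf`) instead of raising `ValueError`, matching what `_Schema_to_vertex` already did for Vertex AI. A minimal sketch of a response schema that relies on the newly forwarded fields; the model name and prompt are placeholders, not taken from this diff:

from google import genai
from google.genai import types

client = genai.Client()  # assumes GOOGLE_API_KEY is set in the environment

# Schema using minimum/maximum and any_of, which 1.6.0 forwards to the
# Gemini API instead of rejecting with ValueError.
schema = types.Schema(
    type=types.Type.OBJECT,
    properties={
        'rating': types.Schema(type=types.Type.INTEGER, minimum=1, maximum=5),
        'value': types.Schema(
            any_of=[
                types.Schema(type=types.Type.STRING),
                types.Schema(type=types.Type.NUMBER),
            ]
        ),
    },
)

response = client.models.generate_content(
    model='gemini-2.0-flash',  # placeholder model name
    contents='Rate this product from 1 to 5 and estimate its value.',
    config=types.GenerateContentConfig(
        response_mime_type='application/json',
        response_schema=schema,
    ),
)
print(response.text)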
@@ -1176,10 +1176,7 @@ class Caches(_api_module.BaseModule):
       model: str,
       config: Optional[types.CreateCachedContentConfigOrDict] = None,
   ) -> types.CachedContent:
-    """Creates cached content, this call will initialize the cached
-
-    content in the data storage, and users need to pay for the cache data
-    storage.
+    """Creates a cached contents resource.
 
     Usage:
 
@@ -1562,10 +1559,7 @@ class AsyncCaches(_api_module.BaseModule):
       model: str,
       config: Optional[types.CreateCachedContentConfigOrDict] = None,
   ) -> types.CachedContent:
-    """Creates cached content, this call will initialize the cached
-
-    content in the data storage, and users need to pay for the cache data
-    storage.
+    """Creates a cached contents resource.
 
     Usage:
 
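The docstring rewrite does not change behavior: `create` still provisions a cached contents resource on the server, and (as the removed wording noted) cached content storage is billed. A hedged usage sketch; the model name, TTL, and contents below are illustrative, not taken from this diff:

from google import genai
from google.genai import types

client = genai.Client()

# Create a cached contents resource; the values below are illustrative.
cached = client.caches.create(
    model='gemini-2.0-flash-001',  # placeholder model name
    config=types.CreateCachedContentConfig(
        display_name='example-cache',
        system_instruction='Answer questions using the cached document.',
        contents=['A long document body worth caching...'],
        ttl='300s',
    ),
)
print(cached.name)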
google/genai/client.py CHANGED
@@ -130,8 +130,9 @@ class Client:
       from environment variables. Applies to the Vertex AI API only.
     debug_config: Config settings that control network behavior of the client.
       This is typically used when running test code.
-    http_options: Http options to use for the client. Response_payload can't be
-      set when passing to the client constructor.
+    http_options: Http options to use for the client. These options will be
+      applied to all requests made by the client. Example usage:
+      `client = genai.Client(http_options=types.HttpOptions(api_version='v1'))`.
 
   Usage for the Gemini Developer API:
 
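For reference, a slightly fuller version of the pattern the new docstring describes; the extra header is illustrative, not taken from this diff:

from google import genai
from google.genai import types

# http_options set here apply to every request made by this client.
client = genai.Client(
    http_options=types.HttpOptions(
        api_version='v1',
        headers={'x-example-header': 'demo'},  # illustrative extra header
    )
)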
google/genai/errors.py CHANGED
@@ -18,7 +18,6 @@
 from typing import Any, Optional, TYPE_CHECKING, Union
 import httpx
 import json
-import requests
 
 
 if TYPE_CHECKING:
@@ -28,7 +27,7 @@ if TYPE_CHECKING:
 class APIError(Exception):
   """General errors raised by the GenAI API."""
   code: int
-  response: Union[requests.Response, 'ReplayResponse', httpx.Response]
+  response: Union['ReplayResponse', httpx.Response]
 
   status: Optional[str] = None
   message: Optional[str] = None
@@ -36,28 +35,21 @@ class APIError(Exception):
   def __init__(
       self,
       code: int,
-      response: Union[requests.Response, 'ReplayResponse', httpx.Response],
+      response: Union['ReplayResponse', httpx.Response],
   ):
     self.response = response
-
-    if isinstance(response, requests.Response):
+    message = None
+    if isinstance(response, httpx.Response):
       try:
-        # do not do any extra muanipulation on the response.
-        # return the raw response json as is.
         response_json = response.json()
-      except requests.exceptions.JSONDecodeError:
+      except (json.decoder.JSONDecodeError):
+        message = response.text
         response_json = {
-            'message': response.text,
-            'status': response.reason,
+            'message': message,
+            'status': response.reason_phrase,
         }
-    elif isinstance(response, httpx.Response):
-      try:
-        response_json = response.json()
-      except (json.decoder.JSONDecodeError, httpx.ResponseNotRead):
-        try:
-          message = response.text
-        except httpx.ResponseNotRead:
-          message = None
+      except httpx.ResponseNotRead:
+        message = 'Response not read'
         response_json = {
             'message': message,
             'status': response.reason_phrase,
@@ -103,7 +95,7 @@ class APIError(Exception):
 
   @classmethod
   def raise_for_response(
-      cls, response: Union[requests.Response, 'ReplayResponse', httpx.Response]
+      cls, response: Union['ReplayResponse', httpx.Response]
   ):
     """Raises an error with detailed error message if the response has an error status."""
     if response.status_code == 200:
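With the `requests` import removed, `APIError` is now built only from `httpx.Response` objects (or a replay response in test mode); error handling at the call site is unchanged. A minimal sketch, assuming a configured client and a deliberately invalid model name:

from google import genai
from google.genai import errors

client = genai.Client()

try:
    client.models.generate_content(
        model='not-a-real-model',  # intentionally invalid, to trigger an APIError
        contents='hello',
    )
except errors.APIError as e:
    # code, status and message are populated from the httpx response JSON.
    print(e.code, e.status, e.message)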
google/genai/files.py CHANGED
@@ -826,7 +826,7 @@ class Files(_api_module.BaseModule):
           'Vertex AI does not support creating files. You can upload files to'
           ' GCS files instead.'
       )
-    config_model = None
+    config_model = types.UploadFileConfig()
     if config:
       if isinstance(config, dict):
         config_model = types.UploadFileConfig(**config)
@@ -907,7 +907,7 @@ class Files(_api_module.BaseModule):
 
     return types.File._from_response(
         response=_File_from_mldev(self._api_client, return_file['file']),
-        kwargs=None,
+        kwargs=config_model.model_dump() if config else {},
     )
 
   def list(
@@ -979,7 +979,7 @@ class Files(_api_module.BaseModule):
           'downloaded. You can tell which files are downloadable by checking '
           'the `source` or `download_uri` property.'
       )
-    name = t.t_file_name(self, file)
+    name = t.t_file_name(self._api_client, file)
 
     path = f'files/{name}:download'
 
@@ -996,7 +996,7 @@ class Files(_api_module.BaseModule):
 
     if isinstance(file, types.Video):
       file.video_bytes = data
-    elif isinstance(file, types.GeneratedVideo):
+    elif isinstance(file, types.GeneratedVideo) and file.video is not None:
       file.video.video_bytes = data
 
     return data
@@ -1293,7 +1293,7 @@ class AsyncFiles(_api_module.BaseModule):
           'Vertex AI does not support creating files. You can upload files to'
           ' GCS files instead.'
       )
-    config_model = None
+    config_model = types.UploadFileConfig()
     if config:
       if isinstance(config, dict):
         config_model = types.UploadFileConfig(**config)
@@ -1373,7 +1373,7 @@ class AsyncFiles(_api_module.BaseModule):
 
     return types.File._from_response(
         response=_File_from_mldev(self._api_client, return_file['file']),
-        kwargs=None,
+        kwargs=config_model.model_dump() if config else {},
     )
 
   async def list(
@@ -1433,7 +1433,7 @@ class AsyncFiles(_api_module.BaseModule):
     else:
       config_model = config
 
-    name = t.t_file_name(self, file)
+    name = t.t_file_name(self._api_client, file)
 
     path = f'files/{name}:download'
 
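Taken together, the files.py changes mean `upload` always constructs an `UploadFileConfig` and forwards it (rather than `None`) as `kwargs` to `File._from_response`, `download` passes the underlying API client to `t.t_file_name` instead of the module itself, and downloading a `GeneratedVideo` no longer assumes its `video` attribute is set. A hedged upload sketch; the file path and display name are placeholders, not taken from this diff:

from google import genai
from google.genai import types

client = genai.Client()

# Upload with an explicit config; in 1.6.0 the config (not None) is forwarded
# as kwargs when the returned File is built from the response.
uploaded = client.files.upload(
    file='report.pdf',  # placeholder path
    config=types.UploadFileConfig(display_name='example-report'),
)
print(uploaded.name, uploaded.uri)

`client.files.download` is unchanged at the call site: it still accepts only downloadable files (those with a generated `source` or a `download_uri`), and in 1.6.0 it additionally guards against a `GeneratedVideo` whose `video` attribute is unset.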