google-genai 1.7.0__py3-none-any.whl → 1.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
google/genai/chats.py CHANGED
@@ -19,7 +19,7 @@ from typing import AsyncIterator, Awaitable, Optional, Union, get_args
 from . import _transformers as t
 from . import types
 from .models import AsyncModels, Models
-from .types import Content, GenerateContentConfigOrDict, GenerateContentResponse, Part, PartUnionDict
+from .types import Content, ContentOrDict, GenerateContentConfigOrDict, GenerateContentResponse, Part, PartUnionDict
 
 
 if sys.version_info >= (3, 10):
@@ -116,14 +116,21 @@ class _BaseChat:
       *,
       model: str,
       config: Optional[GenerateContentConfigOrDict] = None,
-      history: list[Content],
+      history: list[ContentOrDict],
   ):
     self._model = model
     self._config = config
-    self._comprehensive_history = history
+    content_models = []
+    for content in history:
+      if not isinstance(content, Content):
+        content_model = Content.model_validate(content)
+      else:
+        content_model = content
+      content_models.append(content_model)
+    self._comprehensive_history = content_models
     """Comprehensive history is the full history of the chat, including turns of the invalid contents from the model and their associated inputs.
     """
-    self._curated_history = _extract_curated_history(history)
+    self._curated_history = _extract_curated_history(content_models)
     """Curated history is the set of valid turns that will be used in the subsequent send requests.
     """
 
@@ -210,7 +217,7 @@ class Chat(_BaseChat):
       modules: Models,
       model: str,
       config: Optional[GenerateContentConfigOrDict] = None,
-      history: list[Content],
+      history: list[ContentOrDict],
   ):
     self._modules = modules
     super().__init__(
@@ -344,7 +351,7 @@ class Chats:
       *,
       model: str,
       config: Optional[GenerateContentConfigOrDict] = None,
-      history: Optional[list[Content]] = None,
+      history: Optional[list[ContentOrDict]] = None,
   ) -> Chat:
     """Creates a new chat session.
 
@@ -373,7 +380,7 @@ class AsyncChat(_BaseChat):
       modules: AsyncModels,
       model: str,
       config: Optional[GenerateContentConfigOrDict] = None,
-      history: list[Content],
+      history: list[ContentOrDict],
   ):
     self._modules = modules
     super().__init__(
@@ -501,7 +508,7 @@ class AsyncChats:
       *,
       model: str,
       config: Optional[GenerateContentConfigOrDict] = None,
-      history: Optional[list[Content]] = None,
+      history: Optional[list[ContentOrDict]] = None,
   ) -> AsyncChat:
     """Creates a new chat session.
 
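The practical effect of the chats.py change is that chat history can now be supplied as plain dicts as well as types.Content objects; dict entries are normalized with Content.model_validate before being stored in the comprehensive and curated histories. A minimal usage sketch (the API key and model name below are placeholders, not taken from this diff):

from google import genai
from google.genai import types

client = genai.Client(api_key='YOUR_API_KEY')  # placeholder key

# History entries may now be dicts or types.Content; both are validated
# into Content models inside _BaseChat.__init__.
chat = client.chats.create(
    model='gemini-2.0-flash',  # illustrative model name
    history=[
        {'role': 'user', 'parts': [{'text': 'Hello'}]},
        types.Content(role='model', parts=[types.Part(text='Hi, how can I help?')]),
    ],
)
response = chat.send_message('What did I just say?')
print(response.text)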
google/genai/client.py CHANGED
@@ -194,6 +194,8 @@ class Client:
     """
 
     self._debug_config = debug_config or DebugConfig()
+    if isinstance(http_options, dict):
+      http_options = HttpOptions(**http_options)
 
     self._api_client = self._get_api_client(
         vertexai=vertexai,
@@ -229,10 +231,10 @@ class Client:
         'auto',
     ]:
       return ReplayApiClient(
-          mode=debug_config.client_mode,
-          replay_id=debug_config.replay_id,
+          mode=debug_config.client_mode,  # type: ignore[arg-type]
+          replay_id=debug_config.replay_id,  # type: ignore[arg-type]
          replays_directory=debug_config.replays_directory,
-          vertexai=vertexai,
+          vertexai=vertexai,  # type: ignore[arg-type]
           api_key=api_key,
           credentials=credentials,
           project=project,
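The client.py change lets http_options be passed to Client as a plain dict, which is coerced with HttpOptions(**http_options) before the API client is built. A minimal sketch, assuming the dict keys mirror HttpOptions fields (the api_version and timeout values here are illustrative):

from google import genai

# A dict is now accepted and converted to HttpOptions in Client.__init__;
# passing a types.HttpOptions instance keeps working as before.
client = genai.Client(
    api_key='YOUR_API_KEY',  # placeholder key
    http_options={'api_version': 'v1beta', 'timeout': 30_000},
)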
google/genai/errors.py CHANGED
@@ -35,28 +35,10 @@ class APIError(Exception):
   def __init__(
       self,
       code: int,
-      response: Union['ReplayResponse', httpx.Response],
+      response_json: Any,
+      response: Optional[Union['ReplayResponse', httpx.Response]] = None,
   ):
     self.response = response
-    message = None
-    if isinstance(response, httpx.Response):
-      try:
-        response_json = response.json()
-      except (json.decoder.JSONDecodeError):
-        message = response.text
-        response_json = {
-            'message': message,
-            'status': response.reason_phrase,
-        }
-      except httpx.ResponseNotRead:
-        message = 'Response not read'
-        response_json = {
-            'message': message,
-            'status': response.reason_phrase,
-        }
-    else:
-      response_json = response.body_segments[0].get('error', {})
-
     self.details = response_json
     self.message = self._get_message(response_json)
     self.status = self._get_status(response_json)
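With the new signature, the already-parsed error payload comes first and the raw response is optional, so an APIError can be built directly from a dict. A rough sketch (the payload values are illustrative, and constructing APIError by hand is normally an internal concern):

from google.genai import errors

# The parsed payload is stored on .details and drives .message / .status;
# no response object is required any more.
err = errors.APIError(
    429,
    {'message': 'Resource exhausted', 'status': 'RESOURCE_EXHAUSTED'},
)
print(err.code, err.status, err.message)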
@@ -101,13 +83,54 @@ class APIError(Exception):
     if response.status_code == 200:
       return
 
+    if isinstance(response, httpx.Response):
+      try:
+        response.read()
+        response_json = response.json()
+      except json.decoder.JSONDecodeError:
+        message = response.text
+        response_json = {
+            'message': message,
+            'status': response.reason_phrase,
+        }
+    else:
+      response_json = response.body_segments[0].get('error', {})
+
+    status_code = response.status_code
+    if 400 <= status_code < 500:
+      raise ClientError(status_code, response_json, response)
+    elif 500 <= status_code < 600:
+      raise ServerError(status_code, response_json, response)
+    else:
+      raise cls(status_code, response_json, response)
+
+  @classmethod
+  async def raise_for_async_response(
+      cls, response: Union['ReplayResponse', httpx.Response]
+  ):
+    """Raises an error with detailed error message if the response has an error status."""
+    if response.status_code == 200:
+      return
+    if isinstance(response, httpx.Response):
+      try:
+        await response.aread()
+        response_json = response.json()
+      except json.decoder.JSONDecodeError:
+        message = response.text
+        response_json = {
+            'message': message,
+            'status': response.reason_phrase,
+        }
+    else:
+      response_json = response.body_segments[0].get('error', {})
+
     status_code = response.status_code
     if 400 <= status_code < 500:
-      raise ClientError(status_code, response)
+      raise ClientError(status_code, response_json, response)
     elif 500 <= status_code < 600:
-      raise ServerError(status_code, response)
+      raise ServerError(status_code, response_json, response)
     else:
-      raise cls(status_code, response)
+      raise cls(status_code, response_json, response)
 
 
 class ClientError(APIError):
@@ -137,4 +160,4 @@ class FunctionInvocationError(ValueError):
 
 
 class ExperimentalWarning(Warning):
-  """Warning for experimental features."""
+  """Warning for experimental features."""