google-genai 0.2.1__tar.gz → 0.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29) hide show
  1. {google_genai-0.2.1/google_genai.egg-info → google_genai-0.2.2}/PKG-INFO +63 -3
  2. {google_genai-0.2.1 → google_genai-0.2.2}/README.md +62 -2
  3. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/__init__.py +1 -1
  4. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/_api_client.py +1 -1
  5. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/errors.py +31 -11
  6. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/types.py +1 -1
  7. {google_genai-0.2.1 → google_genai-0.2.2/google_genai.egg-info}/PKG-INFO +63 -3
  8. {google_genai-0.2.1 → google_genai-0.2.2}/pyproject.toml +1 -1
  9. {google_genai-0.2.1 → google_genai-0.2.2}/LICENSE +0 -0
  10. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/_automatic_function_calling_util.py +0 -0
  11. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/_common.py +0 -0
  12. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/_extra_utils.py +0 -0
  13. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/_replay_api_client.py +0 -0
  14. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/_test_api_client.py +0 -0
  15. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/_transformers.py +0 -0
  16. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/batches.py +0 -0
  17. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/caches.py +0 -0
  18. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/chats.py +0 -0
  19. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/client.py +0 -0
  20. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/files.py +0 -0
  21. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/live.py +0 -0
  22. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/models.py +0 -0
  23. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/pagers.py +0 -0
  24. {google_genai-0.2.1 → google_genai-0.2.2}/google/genai/tunings.py +0 -0
  25. {google_genai-0.2.1 → google_genai-0.2.2}/google_genai.egg-info/SOURCES.txt +0 -0
  26. {google_genai-0.2.1 → google_genai-0.2.2}/google_genai.egg-info/dependency_links.txt +0 -0
  27. {google_genai-0.2.1 → google_genai-0.2.2}/google_genai.egg-info/requires.txt +0 -0
  28. {google_genai-0.2.1 → google_genai-0.2.2}/google_genai.egg-info/top_level.txt +0 -0
  29. {google_genai-0.2.1 → google_genai-0.2.2}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: google-genai
3
- Version: 0.2.1
3
+ Version: 0.2.2
4
4
  Summary: GenAI Python SDK
5
5
  Author-email: Google LLC <googleapis-packages@google.com>
6
6
  License: Apache-2.0
@@ -159,6 +159,14 @@ response = client.models.generate_content(
159
159
  response.text
160
160
  ```
161
161
 
162
+ #### Manually declare and invoke a function for function calling
163
+
164
+ If you don't want to use the automatic function support, you can manually
165
+ declare the function and invoke it.
166
+
167
+ The following example shows how to declare a function and pass it as a tool.
168
+ Then you will receive a function call part in the response.
169
+
162
170
  ``` python
163
171
  function = dict(
164
172
  name="get_current_weather",
@@ -187,15 +195,24 @@ response = client.models.generate_content(
187
195
  response.candidates[0].content.parts[0].function_call
188
196
  ```
189
197
 
198
+ After you receive the function call part from the model, you can invoke the function
199
+ and get the function response. And then you can pass the function response to
200
+ the model.
201
+ The following example shows how to do it for a simple function invocation.
202
+
190
203
  ``` python
191
204
  function_call_part = response.candidates[0].content.parts[0]
192
205
 
193
- function_response = get_current_weather(**function_call_part.function_call.args)
206
+ try:
207
+ function_result = get_current_weather(**function_call_part.function_call.args)
208
+ function_response = {'result': function_result}
209
+ except Exception as e: # instead of raising the exception, you can let the model handle it
210
+ function_response = {'error': str(e)}
194
211
 
195
212
 
196
213
  function_response_part = types.Part.from_function_response(
197
214
  name=function_call_part.function_call.name,
198
- response={'result': function_response}
215
+ response=function_response,
199
216
  )
200
217
 
201
218
  response = client.models.generate_content(
@@ -273,6 +290,8 @@ print(response.text)
273
290
 
274
291
  ### Streaming
275
292
 
293
+ #### Streaming for text content
294
+
276
295
  ``` python
277
296
  for chunk in client.models.generate_content_stream(
278
297
  model='gemini-2.0-flash-exp', contents='Tell me a story in 300 words.'
@@ -280,6 +299,47 @@ for chunk in client.models.generate_content_stream(
280
299
  print(chunk.text)
281
300
  ```
282
301
 
302
+ #### Streaming for image content
303
+
304
+ If your image is stored in Google Cloud Storage, you can use the `from_uri`
305
+ class method to create a Part object.
306
+
307
+ ``` python
308
+ for chunk in client.models.generate_content_stream(
309
+ model='gemini-1.5-flash',
310
+ contents=[
311
+ 'What is this image about?',
312
+ types.Part.from_uri(
313
+ file_uri='gs://generativeai-downloads/images/scones.jpg',
314
+ mime_type='image/jpeg'
315
+ )
316
+ ],
317
+ ):
318
+ print(chunk.text)
319
+ ```
320
+
321
+ If your image is stored in your local file system, you can read it in as bytes
322
+ data and use the `from_bytes` class method to create a Part object.
323
+
324
+ ``` python
325
+ YOUR_IMAGE_PATH = 'your_image_path'
326
+ YOUR_IMAGE_MIME_TYPE = 'your_image_mime_type'
327
+ with open(YOUR_IMAGE_PATH, 'rb') as f:
328
+ image_bytes = f.read()
329
+
330
+ for chunk in client.models.generate_content_stream(
331
+ model='gemini-1.5-flash',
332
+ contents=[
333
+ 'What is this image about?',
334
+ types.Part.from_bytes(
335
+ data=image_bytes,
336
+ mime_type=YOUR_IMAGE_MIME_TYPE
337
+ )
338
+ ],
339
+ ):
340
+ print(chunk.text)
341
+ ```
342
+
283
343
  ### Async
284
344
 
285
345
  `client.aio` exposes all the analogous `async` methods that are
@@ -131,6 +131,14 @@ response = client.models.generate_content(
131
131
  response.text
132
132
  ```
133
133
 
134
+ #### Manually declare and invoke a function for function calling
135
+
136
+ If you don't want to use the automatic function support, you can manually
137
+ declare the function and invoke it.
138
+
139
+ The following example shows how to declare a function and pass it as a tool.
140
+ Then you will receive a function call part in the response.
141
+
134
142
  ``` python
135
143
  function = dict(
136
144
  name="get_current_weather",
@@ -159,15 +167,24 @@ response = client.models.generate_content(
159
167
  response.candidates[0].content.parts[0].function_call
160
168
  ```
161
169
 
170
+ After you receive the function call part from the model, you can invoke the function
171
+ and get the function response. And then you can pass the function response to
172
+ the model.
173
+ The following example shows how to do it for a simple function invocation.
174
+
162
175
  ``` python
163
176
  function_call_part = response.candidates[0].content.parts[0]
164
177
 
165
- function_response = get_current_weather(**function_call_part.function_call.args)
178
+ try:
179
+ function_result = get_current_weather(**function_call_part.function_call.args)
180
+ function_response = {'result': function_result}
181
+ except Exception as e: # instead of raising the exception, you can let the model handle it
182
+ function_response = {'error': str(e)}
166
183
 
167
184
 
168
185
  function_response_part = types.Part.from_function_response(
169
186
  name=function_call_part.function_call.name,
170
- response={'result': function_response}
187
+ response=function_response,
171
188
  )
172
189
 
173
190
  response = client.models.generate_content(
@@ -245,6 +262,8 @@ print(response.text)
245
262
 
246
263
  ### Streaming
247
264
 
265
+ #### Streaming for text content
266
+
248
267
  ``` python
249
268
  for chunk in client.models.generate_content_stream(
250
269
  model='gemini-2.0-flash-exp', contents='Tell me a story in 300 words.'
@@ -252,6 +271,47 @@ for chunk in client.models.generate_content_stream(
252
271
  print(chunk.text)
253
272
  ```
254
273
 
274
+ #### Streaming for image content
275
+
276
+ If your image is stored in Google Cloud Storage, you can use the `from_uri`
277
+ class method to create a Part object.
278
+
279
+ ``` python
280
+ for chunk in client.models.generate_content_stream(
281
+ model='gemini-1.5-flash',
282
+ contents=[
283
+ 'What is this image about?',
284
+ types.Part.from_uri(
285
+ file_uri='gs://generativeai-downloads/images/scones.jpg',
286
+ mime_type='image/jpeg'
287
+ )
288
+ ],
289
+ ):
290
+ print(chunk.text)
291
+ ```
292
+
293
+ If your image is stored in your local file system, you can read it in as bytes
294
+ data and use the `from_bytes` class method to create a Part object.
295
+
296
+ ``` python
297
+ YOUR_IMAGE_PATH = 'your_image_path'
298
+ YOUR_IMAGE_MIME_TYPE = 'your_image_mime_type'
299
+ with open(YOUR_IMAGE_PATH, 'rb') as f:
300
+ image_bytes = f.read()
301
+
302
+ for chunk in client.models.generate_content_stream(
303
+ model='gemini-1.5-flash',
304
+ contents=[
305
+ 'What is this image about?',
306
+ types.Part.from_bytes(
307
+ data=image_bytes,
308
+ mime_type=YOUR_IMAGE_MIME_TYPE
309
+ )
310
+ ],
311
+ ):
312
+ print(chunk.text)
313
+ ```
314
+
255
315
  ### Async
256
316
 
257
317
  `client.aio` exposes all the analogous `async` methods that are
@@ -17,6 +17,6 @@
17
17
 
18
18
  from .client import Client
19
19
 
20
- __version__ = '0.2.1'
20
+ __version__ = '0.2.2'
21
21
 
22
22
  __all__ = ['Client']
@@ -51,7 +51,7 @@ class HttpOptions(TypedDict):
51
51
  def _append_library_version_headers(headers: dict[str, str]) -> None:
52
52
  """Appends the telemetry header to the headers dict."""
53
53
  # TODO: Automate revisions to the SDK library version.
54
- library_label = f'google-genai-sdk/0.2.1'
54
+ library_label = f'google-genai-sdk/0.2.2'
55
55
  language_label = 'gl-python/' + sys.version.split()[0]
56
56
  version_header_value = f'{library_label} {language_label}'
57
57
  if (
@@ -29,29 +29,49 @@ class APIError(Exception):
29
29
  code: int
30
30
  response: requests.Response
31
31
 
32
- message: str = ''
33
- status: str = 'UNKNOWN'
34
- details: Optional[Any] = None
32
+ status: Optional[str] = None
33
+ message: Optional[str] = None
34
+ response: Optional[Any] = None
35
35
 
36
36
  def __init__(
37
37
  self, code: int, response: Union[requests.Response, 'ReplayResponse']
38
38
  ):
39
- self.code = code
40
39
  self.response = response
41
40
 
42
41
  if isinstance(response, requests.Response):
43
42
  try:
44
- raw_error = response.json().get('error', {})
43
+ # do not do any extra manipulation on the response.
44
+ # return the raw response json as is.
45
+ response_json = response.json()
45
46
  except requests.exceptions.JSONDecodeError:
46
- raw_error = {'message': response.text, 'status': response.reason}
47
+ response_json = {
48
+ 'message': response.text,
49
+ 'status': response.reason,
50
+ }
47
51
  else:
48
- raw_error = response.body_segments[0].get('error', {})
52
+ response_json = response.body_segments[0].get('error', {})
53
+
54
+ self.details = response_json
55
+ self.message = self._get_message(response_json)
56
+ self.status = self._get_status(response_json)
57
+ self.code = code if code else self._get_code(response_json)
58
+
59
+ super().__init__(f'{self.code} {self.status}. {self.details}')
60
+
61
+ def _get_status(self, response_json):
62
+ return response_json.get(
63
+ 'status', response_json.get('error', {}).get('status', None)
64
+ )
49
65
 
50
- self.message = raw_error.get('message', '')
51
- self.status = raw_error.get('status', 'UNKNOWN')
52
- self.details = raw_error.get('details', None)
66
+ def _get_message(self, response_json):
67
+ return response_json.get(
68
+ 'message', response_json.get('error', {}).get('message', None)
69
+ )
53
70
 
54
- super().__init__(f'{self.code} {self.status}. {self.message}')
71
+ def _get_code(self, response_json):
72
+ return response_json.get(
73
+ 'code', response_json.get('error', {}).get('code', None)
74
+ )
55
75
 
56
76
  def _to_replay_record(self):
57
77
  """Returns a dictionary representation of the error for replay recording.
@@ -1280,7 +1280,7 @@ class AutomaticFunctionCallingConfig(_common.BaseModel):
1280
1280
  """,
1281
1281
  )
1282
1282
  maximum_remote_calls: Optional[int] = Field(
1283
- default=None,
1283
+ default=10,
1284
1284
  description="""If automatic function calling is enabled,
1285
1285
  maximum number of remote calls for automatic function calling.
1286
1286
  This number should be a positive integer.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: google-genai
3
- Version: 0.2.1
3
+ Version: 0.2.2
4
4
  Summary: GenAI Python SDK
5
5
  Author-email: Google LLC <googleapis-packages@google.com>
6
6
  License: Apache-2.0
@@ -159,6 +159,14 @@ response = client.models.generate_content(
159
159
  response.text
160
160
  ```
161
161
 
162
+ #### Manually declare and invoke a function for function calling
163
+
164
+ If you don't want to use the automatic function support, you can manually
165
+ declare the function and invoke it.
166
+
167
+ The following example shows how to declare a function and pass it as a tool.
168
+ Then you will receive a function call part in the response.
169
+
162
170
  ``` python
163
171
  function = dict(
164
172
  name="get_current_weather",
@@ -187,15 +195,24 @@ response = client.models.generate_content(
187
195
  response.candidates[0].content.parts[0].function_call
188
196
  ```
189
197
 
198
+ After you receive the function call part from the model, you can invoke the function
199
+ and get the function response. And then you can pass the function response to
200
+ the model.
201
+ The following example shows how to do it for a simple function invocation.
202
+
190
203
  ``` python
191
204
  function_call_part = response.candidates[0].content.parts[0]
192
205
 
193
- function_response = get_current_weather(**function_call_part.function_call.args)
206
+ try:
207
+ function_result = get_current_weather(**function_call_part.function_call.args)
208
+ function_response = {'result': function_result}
209
+ except Exception as e: # instead of raising the exception, you can let the model handle it
210
+ function_response = {'error': str(e)}
194
211
 
195
212
 
196
213
  function_response_part = types.Part.from_function_response(
197
214
  name=function_call_part.function_call.name,
198
- response={'result': function_response}
215
+ response=function_response,
199
216
  )
200
217
 
201
218
  response = client.models.generate_content(
@@ -273,6 +290,8 @@ print(response.text)
273
290
 
274
291
  ### Streaming
275
292
 
293
+ #### Streaming for text content
294
+
276
295
  ``` python
277
296
  for chunk in client.models.generate_content_stream(
278
297
  model='gemini-2.0-flash-exp', contents='Tell me a story in 300 words.'
@@ -280,6 +299,47 @@ for chunk in client.models.generate_content_stream(
280
299
  print(chunk.text)
281
300
  ```
282
301
 
302
+ #### Streaming for image content
303
+
304
+ If your image is stored in Google Cloud Storage, you can use the `from_uri`
305
+ class method to create a Part object.
306
+
307
+ ``` python
308
+ for chunk in client.models.generate_content_stream(
309
+ model='gemini-1.5-flash',
310
+ contents=[
311
+ 'What is this image about?',
312
+ types.Part.from_uri(
313
+ file_uri='gs://generativeai-downloads/images/scones.jpg',
314
+ mime_type='image/jpeg'
315
+ )
316
+ ],
317
+ ):
318
+ print(chunk.text)
319
+ ```
320
+
321
+ If your image is stored in your local file system, you can read it in as bytes
322
+ data and use the `from_bytes` class method to create a Part object.
323
+
324
+ ``` python
325
+ YOUR_IMAGE_PATH = 'your_image_path'
326
+ YOUR_IMAGE_MIME_TYPE = 'your_image_mime_type'
327
+ with open(YOUR_IMAGE_PATH, 'rb') as f:
328
+ image_bytes = f.read()
329
+
330
+ for chunk in client.models.generate_content_stream(
331
+ model='gemini-1.5-flash',
332
+ contents=[
333
+ 'What is this image about?',
334
+ types.Part.from_bytes(
335
+ data=image_bytes,
336
+ mime_type=YOUR_IMAGE_MIME_TYPE
337
+ )
338
+ ],
339
+ ):
340
+ print(chunk.text)
341
+ ```
342
+
283
343
  ### Async
284
344
 
285
345
  `client.aio` exposes all the analogous `async` methods that are
@@ -3,7 +3,7 @@ requires = ["setuptools", "wheel"]
3
3
 
4
4
  [project]
5
5
  name = "google-genai"
6
- version = "0.2.1"
6
+ version = "0.2.2"
7
7
  description = "GenAI Python SDK"
8
8
  readme = "README.md"
9
9
  license = {text = "Apache-2.0"}
File without changes
File without changes