google-genai 0.0.1 (google_genai-0.0.1-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- google/genai/__init__.py +20 -0
- google/genai/_api_client.py +467 -0
- google/genai/_automatic_function_calling_util.py +341 -0
- google/genai/_common.py +256 -0
- google/genai/_extra_utils.py +295 -0
- google/genai/_replay_api_client.py +478 -0
- google/genai/_test_api_client.py +149 -0
- google/genai/_transformers.py +438 -0
- google/genai/batches.py +1041 -0
- google/genai/caches.py +1830 -0
- google/genai/chats.py +184 -0
- google/genai/client.py +277 -0
- google/genai/errors.py +110 -0
- google/genai/files.py +1211 -0
- google/genai/live.py +629 -0
- google/genai/models.py +5307 -0
- google/genai/pagers.py +245 -0
- google/genai/tunings.py +1366 -0
- google/genai/types.py +7639 -0
- google_genai-0.0.1.dist-info/LICENSE +202 -0
- google_genai-0.0.1.dist-info/METADATA +763 -0
- google_genai-0.0.1.dist-info/RECORD +24 -0
- google_genai-0.0.1.dist-info/WHEEL +5 -0
- google_genai-0.0.1.dist-info/top_level.txt +1 -0
google/genai/__init__.py
ADDED
@@ -0,0 +1,20 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Google Gen AI SDK"""

from .client import Client

__all__ = ['Client']
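The public surface of this release is a single export, Client. As a rough usage sketch (editorial, not part of the wheel; client.py is not shown in this excerpt, so the constructor arguments are assumed to mirror the ApiClient parameters in _api_client.py below):

# Editorial sketch, not part of the published wheel. Assumes the wheel is
# installed (pip install google-genai==0.0.1) and that Client forwards
# api_key / vertexai / project / location to ApiClient (see _api_client.py).
from google import genai

# Gemini Developer API: pass an API key or set GOOGLE_API_KEY.
client = genai.Client(api_key='YOUR_API_KEY')

# Vertex AI: set a project and location, or export GOOGLE_CLOUD_PROJECT,
# GOOGLE_CLOUD_LOCATION and GOOGLE_GENAI_USE_VERTEXAI=1.
vertex_client = genai.Client(
    vertexai=True, project='my-project', location='us-central1'
)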
google/genai/_api_client.py
ADDED
@@ -0,0 +1,467 @@
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


"""Base client for calling HTTP APIs sending and receiving JSON."""

import asyncio
import copy
from dataclasses import dataclass
import datetime
import json
import os
import sys
from typing import Any, Optional, TypedDict, Union
import urllib

import google.auth
import google.auth.credentials
from google.auth.transport.requests import AuthorizedSession
from pydantic import BaseModel
import requests

from . import errors


class HttpOptions(TypedDict):
  """HTTP options for the api client."""

  base_url: str = None
  """The base URL for the AI platform service endpoint."""
  api_version: str = None
  """Specifies the version of the API to use."""
  headers: dict[str, dict] = None
  """Additional HTTP headers to be sent with the request."""
  response_payload: dict = None
  """If set, the response payload will be returned int the supplied dict."""


def _append_library_version_headers(headers: dict[str, str]) -> None:
  """Appends the telemetry header to the headers dict."""
  # TODO: Automate revisions to the SDK library version.
  library_label = 'google-genai-sdk/0.1.0'
  language_label = 'gl-python/' + sys.version.split()[0]
  version_header_value = f'{library_label} {language_label}'
  if (
      'user-agent' in headers
      and version_header_value not in headers['user-agent']
  ):
    headers['user-agent'] += f' {version_header_value}'
  elif 'user-agent' not in headers:
    headers['user-agent'] = version_header_value
  if (
      'x-goog-api-client' in headers
      and version_header_value not in headers['x-goog-api-client']
  ):
    headers['x-goog-api-client'] += f' {version_header_value}'
  elif 'x-goog-api-client' not in headers:
    headers['x-goog-api-client'] = version_header_value


def _patch_http_options(
    options: HttpOptions, patch_options: HttpOptions
) -> HttpOptions:
  # use shallow copy so we don't override the original objects.
  copy_option = HttpOptions()
  copy_option.update(options)
  for k, v in patch_options.items():
    # if both are dicts, update the copy.
    # This is to handle cases like merging headers.
    if isinstance(v, dict) and isinstance(copy_option.get(k, None), dict):
      copy_option[k] = {}
      copy_option[k].update(options[k])  # shallow copy from original options.
      copy_option[k].update(v)
    elif v is not None:  # Accept empty values.
      copy_option[k] = v
  _append_library_version_headers(copy_option['headers'])
  return copy_option


@dataclass
class HttpRequest:
  headers: dict[str, str]
  url: str
  method: str
  data: Union[dict[str, object], bytes]


class HttpResponse:

  def __init__(self, headers: dict[str, str], response_stream: Union[Any, str]):
    self.status_code = 200
    self.headers = headers
    self.response_stream = response_stream

  @property
  def text(self) -> str:
    if not self.response_stream[0]:  # Empty response
      return ''
    return json.loads(self.response_stream[0])

  def segments(self):
    if isinstance(self.response_stream, list):
      # list of objects retrieved from replay or from non-streaming API.
      for chunk in self.response_stream:
        yield json.loads(chunk) if chunk else {}
    else:
      # Iterator of objects retrieved from the API.
      for chunk in self.response_stream.iter_lines():
        if chunk:
          # In streaming mode, the chunk of JSON is prefixed with "data:" which
          # we must strip before parsing.
          if chunk.startswith(b'data: '):
            chunk = chunk[len(b'data: ') :]
          yield json.loads(str(chunk, 'utf-8'))

  def copy_to_dict(self, response_payload: dict[str, object]):
    for attribute in dir(self):
      response_payload[attribute] = copy.deepcopy(getattr(self, attribute))


class ApiClient:
  """Client for calling HTTP APIs sending and receiving JSON."""

  def __init__(
      self,
      vertexai: Union[bool, None] = None,
      api_key: Union[str, None] = None,
      credentials: google.auth.credentials.Credentials = None,
      project: Union[str, None] = None,
      location: Union[str, None] = None,
      http_options: HttpOptions = None,
  ):
    self.vertexai = vertexai
    if self.vertexai is None:
      if os.environ.get('GOOGLE_GENAI_USE_VERTEXAI', '0').lower() in [
          'true',
          '1',
      ]:
        self.vertexai = True

    # Validate explicitly set intializer values.
    if (project or location) and api_key:
      raise ValueError(
          'Project/location and API key are mutually exclusive in the client initializer.'
      )

    self.api_key: Optional[str] = None
    self.project = project or os.environ.get('GOOGLE_CLOUD_PROJECT', None)
    self.location = location or os.environ.get('GOOGLE_CLOUD_LOCATION', None)
    self._credentials = credentials
    self._http_options = HttpOptions()

    if self.vertexai:
      if not self.project:
        self.project = google.auth.default()[1]
      # Will change this to support EasyGCP in the future.
      if not self.project or not self.location:
        raise ValueError(
            'Project and location must be set when using the Vertex AI API.'
        )
      self._http_options['base_url'] = (
          f'https://{self.location}-aiplatform.googleapis.com/'
      )
      self._http_options['api_version'] = 'v1beta1'
    else:  # ML Dev API
      self.api_key = api_key or os.environ.get('GOOGLE_API_KEY', None)
      if not self.api_key:
        raise ValueError('API key must be set when using the Google AI API.')
      self._http_options['base_url'] = (
          'https://generativelanguage.googleapis.com/'
      )
      self._http_options['api_version'] = 'v1beta'
    # Default options for both clients.
    self._http_options['headers'] = {'Content-Type': 'application/json'}
    if self.api_key:
      self._http_options['headers']['x-goog-api-key'] = self.api_key
    # Update the http options with the user provided http options.
    if http_options:
      self._http_options = _patch_http_options(self._http_options, http_options)
    else:
      _append_library_version_headers(self._http_options['headers'])

  def _websocket_base_url(self):
    url_parts = urllib.parse.urlparse(self._http_options['base_url'])
    return url_parts._replace(scheme='wss').geturl()

  def _build_request(
      self,
      http_method: str,
      path: str,
      request_dict: dict[str, object],
      http_options: HttpOptions = None,
  ) -> HttpRequest:
    # Remove all special dict keys such as _url and _query.
    keys_to_delete = [key for key in request_dict.keys() if key.startswith('_')]
    for key in keys_to_delete:
      del request_dict[key]
    # patch the http options with the user provided settings.
    if http_options:
      patched_http_options = _patch_http_options(
          self._http_options, http_options
      )
    else:
      patched_http_options = self._http_options
    if self.vertexai and not path.startswith('projects/'):
      path = f'projects/{self.project}/locations/{self.location}/' + path
    url = urllib.parse.urljoin(
        patched_http_options['base_url'],
        patched_http_options['api_version'] + '/' + path)
    return HttpRequest(
        method=http_method,
        url=url,
        headers=patched_http_options['headers'],
        data=request_dict,
    )

  def _request(
      self,
      http_request: HttpRequest,
      stream: bool = False,
  ) -> HttpResponse:
    if self.vertexai:
      if not self._credentials:
        self._credentials, _ = google.auth.default()
      authed_session = AuthorizedSession(self._credentials)
      authed_session.stream = stream
      response = authed_session.request(
          http_request.method.upper(),
          http_request.url,
          headers=http_request.headers,
          data=json.dumps(http_request.data, cls=RequestJsonEncoder) if http_request.data else None,
          # TODO: support timeout in RequestOptions so it can be configured
          # per methods.
          timeout=None,
      )
      errors.APIError.raise_for_response(response)
      return HttpResponse(
          response.headers, response if stream else [response.text]
      )
    else:
      return self._request_unauthorized(http_request, stream)

  def _request_unauthorized(
      self,
      http_request: HttpRequest,
      stream: bool = False,
  ) -> HttpResponse:
    data = None
    if http_request.data:
      if not isinstance(http_request.data, bytes):
        data = json.dumps(http_request.data, cls=RequestJsonEncoder)
      else:
        data = http_request.data

    http_session = requests.Session()
    async_request = requests.Request(
        method=http_request.method,
        url=http_request.url,
        headers=http_request.headers,
        data=data,
    ).prepare()
    response = http_session.send(async_request, stream=stream)
    errors.APIError.raise_for_response(response)
    return HttpResponse(
        response.headers, response if stream else [response.text]
    )

  async def _async_request(
      self, http_request: HttpRequest, stream: bool = False
  ):
    if self.vertexai:
      if not self._credentials:
        self._credentials, _ = google.auth.default()
      return await asyncio.to_thread(
          self._request,
          http_request,
          stream=stream,
      )
    else:
      return await asyncio.to_thread(
          self._request,
          http_request,
          stream=stream,
      )

  def get_read_only_http_options(self) -> HttpOptions:
    copied = HttpOptions()
    copied.update(self._http_options)
    return copied

  def request(
      self,
      http_method: str,
      path: str,
      request_dict: dict[str, object],
      http_options: HttpOptions = None,
  ):
    http_request = self._build_request(
        http_method, path, request_dict, http_options
    )
    response = self._request(http_request, stream=False)
    if http_options and 'response_payload' in http_options:
      response.copy_to_dict(http_options['response_payload'])
    return response.text

  def request_streamed(
      self,
      http_method: str,
      path: str,
      request_dict: dict[str, object],
      http_options: HttpOptions = None,
  ):
    http_request = self._build_request(
        http_method, path, request_dict, http_options
    )

    session_response = self._request(http_request, stream=True)
    if http_options and 'response_payload' in http_options:
      session_response.copy_to_dict(http_options['response_payload'])
    for chunk in session_response.segments():
      yield chunk

  async def async_request(
      self,
      http_method: str,
      path: str,
      request_dict: dict[str, object],
      http_options: HttpOptions = None,
  ) -> dict[str, object]:
    http_request = self._build_request(
        http_method, path, request_dict, http_options
    )

    result = await self._async_request(http_request=http_request, stream=False)
    if http_options and 'response_payload' in http_options:
      result.copy_to_dict(http_options['response_payload'])
    return result.text

  async def async_request_streamed(
      self,
      http_method: str,
      path: str,
      request_dict: dict[str, object],
      http_options: HttpOptions = None,
  ):
    http_request = self._build_request(
        http_method, path, request_dict, http_options
    )

    response = await self._async_request(http_request=http_request, stream=True)

    for chunk in response.segments():
      yield chunk
    if http_options and 'response_payload' in http_options:
      response.copy_to_dict(http_options['response_payload'])

  def upload_file(self, file_path: str, upload_url: str, upload_size: int):
    """Transfers a file to the given URL.

    Args:
      file_path: The full path to the file. If the local file path is not found,
        an error will be raised.
      upload_url: The URL to upload the file to.
      upload_size: The size of file content to be uploaded, this will have to
        match the size requested in the resumable upload request.

    returns:
      The response json object from the finalize request.
    """
    offset = 0
    # Upload the file in chunks
    with open(file_path, 'rb') as file:
      while True:
        file_chunk = file.read(1024 * 1024 * 8)  # 8 MB chunk size
        chunk_size = 0
        if file_chunk:
          chunk_size = len(file_chunk)
        upload_command = 'upload'
        # If last chunk, finalize the upload.
        if chunk_size + offset >= upload_size:
          upload_command += ', finalize'

        request = HttpRequest(
            method='POST',
            url=upload_url,
            headers={
                'X-Goog-Upload-Command': upload_command,
                'X-Goog-Upload-Offset': str(offset),
                'Content-Length': str(chunk_size),
            },
            data=file_chunk,
        )
        response = self._request_unauthorized(request, stream=False)
        offset += chunk_size
        if response.headers['X-Goog-Upload-Status'] != 'active':
          break  # upload is complete or it has been interrupted.

        if upload_size <= offset:  # Status is not finalized.
          raise ValueError(
              'All content has been uploaded, but the upload status is not'
              f' finalized. {response.headers}, body: {response.text}'
          )

    if response.headers['X-Goog-Upload-Status'] != 'final':
      raise ValueError(
          'Failed to upload file: Upload status is not finalized. headers:'
          f' {response.headers}, body: {response.text}'
      )
    return response.text

  async def async_upload_file(
      self,
      file_path: str,
      upload_url: str,
      upload_size: int,
  ):
    """Transfers a file asynchronously to the given URL.

    Args:
      file_path: The full path to the file. If the local file path is not found,
        an error will be raised.
      upload_url: The URL to upload the file to.
      upload_size: The size of file content to be uploaded, this will have to
        match the size requested in the resumable upload request.

    returns:
      The response json object from the finalize request.
    """
    return await asyncio.to_thread(
        self.upload_file,
        file_path,
        upload_url,
        upload_size,
    )

  # This method does nothing in the real api client. It is used in the
  # replay_api_client to verify the response from the SDK method matches the
  # recorded response.
  def _verify_response(self, response_model: BaseModel):
    pass


class RequestJsonEncoder(json.JSONEncoder):
  """Encode bytes as strings without modify its content."""

  def default(self, o):
    if isinstance(o, bytes):
      return o.decode()
    elif isinstance(o, datetime.datetime):
      # This Zulu time format is used by the Vertex AI API and the test recorder
      # Using strftime works well, but we want to align with the replay encoder.
      # o.astimezone(datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')
      return o.isoformat().replace('+00:00', 'Z')
    else:
      return super().default(o)
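Taken together, ApiClient resolves its endpoint from either an API key (Gemini Developer API) or a project/location pair (Vertex AI), joins base_url, api_version and the request path into an HttpRequest, and returns parsed JSON, either as a single body or as a generator over SSE "data:" chunks. A minimal editorial sketch of driving it directly (the model path and payload below are illustrative assumptions, not taken from this file):

# Editorial sketch, not part of the published wheel contents.
from google.genai._api_client import ApiClient, HttpOptions

api_client = ApiClient(api_key='YOUR_API_KEY')  # Gemini Developer API mode

# Non-streaming: request() returns the parsed JSON body (HttpResponse.text).
body = api_client.request(
    'post',
    'models/some-model:generateContent',  # illustrative path
    {'contents': [{'parts': [{'text': 'Hello'}]}]},  # illustrative payload
)

# Streaming: request_streamed() yields one parsed JSON object per "data:" chunk.
for chunk in api_client.request_streamed(
    'post',
    'models/some-model:streamGenerateContent',  # illustrative path
    {'contents': [{'parts': [{'text': 'Hello'}]}]},
):
  print(chunk)

# HttpOptions.response_payload captures the raw HttpResponse attributes.
captured = {}
api_client.request('get', 'models/some-model', {},
                   HttpOptions(response_payload=captured))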