google-genai 1.7.0__py3-none-any.whl → 1.53.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- google/genai/__init__.py +4 -2
- google/genai/_adapters.py +55 -0
- google/genai/_api_client.py +1301 -299
- google/genai/_api_module.py +1 -1
- google/genai/_automatic_function_calling_util.py +54 -33
- google/genai/_base_transformers.py +26 -0
- google/genai/_base_url.py +50 -0
- google/genai/_common.py +560 -59
- google/genai/_extra_utils.py +371 -38
- google/genai/_live_converters.py +1467 -0
- google/genai/_local_tokenizer_loader.py +214 -0
- google/genai/_mcp_utils.py +117 -0
- google/genai/_operations_converters.py +394 -0
- google/genai/_replay_api_client.py +204 -92
- google/genai/_test_api_client.py +1 -1
- google/genai/_tokens_converters.py +520 -0
- google/genai/_transformers.py +633 -233
- google/genai/batches.py +1733 -538
- google/genai/caches.py +678 -1012
- google/genai/chats.py +48 -38
- google/genai/client.py +142 -15
- google/genai/documents.py +532 -0
- google/genai/errors.py +141 -35
- google/genai/file_search_stores.py +1296 -0
- google/genai/files.py +312 -744
- google/genai/live.py +617 -367
- google/genai/live_music.py +197 -0
- google/genai/local_tokenizer.py +395 -0
- google/genai/models.py +3598 -3116
- google/genai/operations.py +201 -362
- google/genai/pagers.py +23 -7
- google/genai/py.typed +1 -0
- google/genai/tokens.py +362 -0
- google/genai/tunings.py +1274 -496
- google/genai/types.py +14535 -5454
- google/genai/version.py +2 -2
- {google_genai-1.7.0.dist-info → google_genai-1.53.0.dist-info}/METADATA +736 -234
- google_genai-1.53.0.dist-info/RECORD +41 -0
- {google_genai-1.7.0.dist-info → google_genai-1.53.0.dist-info}/WHEEL +1 -1
- google_genai-1.7.0.dist-info/RECORD +0 -27
- {google_genai-1.7.0.dist-info → google_genai-1.53.0.dist-info/licenses}/LICENSE +0 -0
- {google_genai-1.7.0.dist-info → google_genai-1.53.0.dist-info}/top_level.txt +0 -0
google/genai/_extra_utils.py
CHANGED
@@ -1,4 +1,4 @@
-# Copyright
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,22 +15,42 @@

 """Extra utils depending on types that are shared between sync and async modules."""

+import asyncio
 import inspect
+import io
 import logging
 import sys
 import typing
 from typing import Any, Callable, Dict, Optional, Union, get_args, get_origin
-
+import mimetypes
+import os
 import pydantic

 from . import _common
+from . import _mcp_utils
+from . import _transformers as t
 from . import errors
 from . import types
+from ._adapters import McpToGenAiToolAdapter
+

 if sys.version_info >= (3, 10):
   from types import UnionType
 else:
-  UnionType = typing._UnionGenericAlias
+  UnionType = typing._UnionGenericAlias  # type: ignore[attr-defined]
+
+if typing.TYPE_CHECKING:
+  from mcp import ClientSession as McpClientSession
+  from mcp.types import Tool as McpTool
+else:
+  McpClientSession: typing.Type = Any
+  McpTool: typing.Type = Any
+  try:
+    from mcp import ClientSession as McpClientSession
+    from mcp.types import Tool as McpTool
+  except ImportError:
+    McpClientSession = None
+    McpTool = None

 _DEFAULT_MAX_REMOTE_CALLS_AFC = 10

@@ -46,15 +66,39 @@ def _create_generate_content_config_model(
   return config


+def _get_gcs_uri(
+    src: Union[str, types.BatchJobSourceOrDict]
+) -> Optional[str]:
+  """Extracts the first GCS URI from the source, if available."""
+  if isinstance(src, str) and src.startswith('gs://'):
+    return src
+  elif isinstance(src, dict) and src.get('gcs_uri'):
+    return src['gcs_uri'][0] if src['gcs_uri'] else None
+  elif isinstance(src, types.BatchJobSource) and src.gcs_uri:
+    return src.gcs_uri[0] if src.gcs_uri else None
+  return None
+
+
+def _get_bigquery_uri(
+    src: Union[str, types.BatchJobSourceOrDict]
+) -> Optional[str]:
+  """Extracts the BigQuery URI from the source, if available."""
+  if isinstance(src, str) and src.startswith('bq://'):
+    return src
+  elif isinstance(src, dict) and src.get('bigquery_uri'):
+    return src['bigquery_uri']
+  elif isinstance(src, types.BatchJobSource) and src.bigquery_uri:
+    return src.bigquery_uri
+  return None
+
+
 def format_destination(
-    src: str,
-    config: Optional[types.
+    src: Union[str, types.BatchJobSource],
+    config: Optional[types.CreateBatchJobConfig] = None,
 ) -> types.CreateBatchJobConfig:
-  """Formats the destination uri based on the source uri."""
-  config
-
-      or types.CreateBatchJobConfig()
-  )
+  """Formats the destination uri based on the source uri for Vertex AI."""
+  if config is None:
+    config = types.CreateBatchJobConfig()

   unique_name = None
   if not config.display_name:
@@ -62,44 +106,86 @@ def format_destination(
     config.display_name = f'genai_batch_job_{unique_name}'

   if not config.dest:
-
-
-
-
-
-
-      # uri will be "bq://project.dataset.src_dest_TIMESTAMP_UUID".
+    gcs_source_uri = _get_gcs_uri(src)
+    bigquery_source_uri = _get_bigquery_uri(src)
+
+    if gcs_source_uri and gcs_source_uri.endswith('.jsonl'):
+      config.dest = f'{gcs_source_uri[:-6]}/dest'
+    elif bigquery_source_uri:
       unique_name = unique_name or _common.timestamped_unique_name()
-      config.dest = f'{
-
-      raise ValueError(f'Unsupported source: {src}')
+      config.dest = f'{bigquery_source_uri}_dest_{unique_name}'
+
   return config


+def find_afc_incompatible_tool_indexes(
+    config: Optional[types.GenerateContentConfigOrDict] = None,
+) -> list[int]:
+  """Checks if the config contains any AFC incompatible tools.
+
+  A `types.Tool` object that contains `function_declarations` is considered a
+  non-AFC tool for this execution path.
+
+  Args:
+    config: The GenerateContentConfig to check for incompatible tools.
+
+  Returns:
+    A list of indexes of the incompatible tools in the config.
+  """
+  if not config:
+    return []
+  config_model = _create_generate_content_config_model(config)
+  incompatible_tools_indexes: list[int] = []
+
+  if not config_model or not config_model.tools:
+    return incompatible_tools_indexes
+
+  for index, tool in enumerate(config_model.tools):
+    if isinstance(tool, types.Tool) and tool.function_declarations:
+      incompatible_tools_indexes.append(index)
+
+  return incompatible_tools_indexes
+
+
 def get_function_map(
     config: Optional[types.GenerateContentConfigOrDict] = None,
-
+    mcp_to_genai_tool_adapters: Optional[
+        dict[str, McpToGenAiToolAdapter]
+    ] = None,
+    is_caller_method_async: bool = False,
+) -> dict[str, Union[Callable[..., Any], McpToGenAiToolAdapter]]:
   """Returns a function map from the config."""
-  function_map: dict[str, Callable] = {}
+  function_map: dict[str, Union[Callable[..., Any], McpToGenAiToolAdapter]] = {}
   if not config:
     return function_map
   config_model = _create_generate_content_config_model(config)
   if config_model.tools:
     for tool in config_model.tools:
       if callable(tool):
-        if inspect.iscoroutinefunction(tool):
+        if inspect.iscoroutinefunction(tool) and not is_caller_method_async:
           raise errors.UnsupportedFunctionError(
               f'Function {tool.__name__} is a coroutine function, which is not'
               ' supported for automatic function calling. Please manually'
               f' invoke {tool.__name__} to get the function response.'
           )
         function_map[tool.__name__] = tool
+  if mcp_to_genai_tool_adapters:
+    if not is_caller_method_async:
+      raise errors.UnsupportedFunctionError(
+          'MCP tools are not supported in synchronous methods.'
+      )
+    for tool_name, _ in mcp_to_genai_tool_adapters.items():
+      if function_map.get(tool_name):
+        raise ValueError(
+            f'Tool {tool_name} is already defined for the request.'
+        )
+    function_map.update(mcp_to_genai_tool_adapters)
   return function_map


 def convert_number_values_for_dict_function_call_args(
-    args:
-) ->
+    args: _common.StringDict,
+) -> _common.StringDict:
   """Converts float values in dict with no decimal to integers."""
   return {
       key: convert_number_values_for_function_call_args(value)
@@ -199,11 +285,11 @@ def convert_if_exist_pydantic_model(
   return value


-def
-    args:
-) ->
-  signature = inspect.signature(
-  func_name =
+def convert_argument_from_function(
+    args: _common.StringDict, function: Callable[..., Any]
+) -> _common.StringDict:
+  signature = inspect.signature(function)
+  func_name = function.__name__
   converted_args = {}
   for param_name, param in signature.parameters.items():
     if param_name in args:
@@ -213,19 +299,40 @@ def invoke_function_from_dict_args(
           param_name,
           func_name,
       )
+  return converted_args
+
+
+def invoke_function_from_dict_args(
+    args: _common.StringDict, function_to_invoke: Callable[..., Any]
+) -> Any:
+  converted_args = convert_argument_from_function(args, function_to_invoke)
   try:
     return function_to_invoke(**converted_args)
   except Exception as e:
     raise errors.FunctionInvocationError(
-        f'Failed to invoke function {
-        f' {converted_args} from model returned function
-        f' {args} because of error {e}'
+        f'Failed to invoke function {function_to_invoke.__name__} with'
+        f' converted arguments {converted_args} from model returned function'
+        f' call argument {args} because of error {e}'
+    )
+
+
+async def invoke_function_from_dict_args_async(
+    args: _common.StringDict, function_to_invoke: Callable[..., Any]
+) -> Any:
+  converted_args = convert_argument_from_function(args, function_to_invoke)
+  try:
+    return await function_to_invoke(**converted_args)
+  except Exception as e:
+    raise errors.FunctionInvocationError(
+        f'Failed to invoke function {function_to_invoke.__name__} with'
+        f' converted arguments {converted_args} from model returned function'
+        f' call argument {args} because of error {e}'
     )


 def get_function_response_parts(
     response: types.GenerateContentResponse,
-    function_map: dict[str, Callable],
+    function_map: dict[str, Union[Callable[..., Any], McpToGenAiToolAdapter]],
 ) -> list[types.Part]:
   """Returns the function response parts from the response."""
   func_response_parts = []
@@ -243,11 +350,61 @@ def get_function_response_parts(
       args = convert_number_values_for_dict_function_call_args(
           part.function_call.args
       )
-      func_response:
+      func_response: _common.StringDict
       try:
-
-
-
+        if not isinstance(func, McpToGenAiToolAdapter):
+          func_response = {
+              'result': invoke_function_from_dict_args(args, func)
+          }
+      except Exception as e:  # pylint: disable=broad-except
+        func_response = {'error': str(e)}
+      func_response_part = types.Part.from_function_response(
+          name=func_name, response=func_response
+      )
+      func_response_parts.append(func_response_part)
+  return func_response_parts
+
+
+async def get_function_response_parts_async(
+    response: types.GenerateContentResponse,
+    function_map: dict[str, Union[Callable[..., Any], McpToGenAiToolAdapter]],
+) -> list[types.Part]:
+  """Returns the function response parts from the response."""
+  func_response_parts = []
+  if (
+      response.candidates is not None
+      and isinstance(response.candidates[0].content, types.Content)
+      and response.candidates[0].content.parts is not None
+  ):
+    for part in response.candidates[0].content.parts:
+      if not part.function_call:
+        continue
+      func_name = part.function_call.name
+      if func_name is not None and part.function_call.args is not None:
+        func = function_map[func_name]
+        args = convert_number_values_for_dict_function_call_args(
+            part.function_call.args
+        )
+        func_response: _common.StringDict
+        try:
+          if isinstance(func, McpToGenAiToolAdapter):
+            mcp_tool_response = await func.call_tool(
+                types.FunctionCall(name=func_name, args=args)
+            )
+            if mcp_tool_response.isError:
+              func_response = {'error': mcp_tool_response}
+            else:
+              func_response = {'result': mcp_tool_response}
+          elif inspect.iscoroutinefunction(func):
+            func_response = {
+                'result': await invoke_function_from_dict_args_async(args, func)
+            }
+          else:
+            func_response = {
+                'result': await asyncio.to_thread(
+                    invoke_function_from_dict_args, args, func
+                )
+            }
        except Exception as e:  # pylint: disable=broad-except
          func_response = {'error': str(e)}
        func_response_part = types.Part.from_function_response(
@@ -332,6 +489,31 @@ def get_max_remote_calls_afc(
   return int(config_model.automatic_function_calling.maximum_remote_calls)


+def raise_error_for_afc_incompatible_config(config: Optional[types.GenerateContentConfig]
+) -> None:
+  """Raises an error if the config is not compatible with AFC."""
+  if (
+      not config
+      or not config.tool_config
+      or not config.tool_config.function_calling_config
+  ):
+    return
+  afc_config = config.automatic_function_calling
+  disable_afc_config = afc_config.disable if afc_config else False
+  stream_function_call = (
+      config.tool_config.function_calling_config.stream_function_call_arguments
+  )
+
+  if stream_function_call and not disable_afc_config:
+    raise ValueError(
+        'Running in streaming mode with stream_function_call_arguments'
+        ' enabled, this feature is not compatible with automatic function'
+        ' calling (AFC). Please set config.automatic_function_calling.disable'
+        ' to True to disable AFC or leave config.tool_config.'
+        ' function_calling_config.stream_function_call_arguments to be empty'
+        ' or set to False to disable streaming function call arguments.'
+    )
+
 def should_append_afc_history(
     config: Optional[types.GenerateContentConfigOrDict] = None,
 ) -> bool:
@@ -341,3 +523,154 @@ def should_append_afc_history(
   if not config_model.automatic_function_calling:
     return True
   return not config_model.automatic_function_calling.ignore_call_history
+
+
+def parse_config_for_mcp_usage(
+    config: Optional[types.GenerateContentConfigOrDict] = None,
+) -> Optional[types.GenerateContentConfig]:
+  """Returns a parsed config with an appended MCP header if MCP tools or sessions are used."""
+  if not config:
+    return None
+  config_model = _create_generate_content_config_model(config)
+  # Create a copy of the config model with the tools field cleared since some
+  # tools may not be pickleable.
+  config_model_copy = config_model.model_copy(update={'tools': None})
+  config_model_copy.tools = config_model.tools
+  if config_model.tools and _mcp_utils.has_mcp_tool_usage(config_model.tools):
+    if config_model_copy.http_options is None:
+      config_model_copy.http_options = types.HttpOptions(headers={})
+    if config_model_copy.http_options.headers is None:
+      config_model_copy.http_options.headers = {}
+    _mcp_utils.set_mcp_usage_header(config_model_copy.http_options.headers)
+
+  return config_model_copy
+
+
+async def parse_config_for_mcp_sessions(
+    config: Optional[types.GenerateContentConfigOrDict] = None,
+) -> tuple[
+    Optional[types.GenerateContentConfig],
+    dict[str, McpToGenAiToolAdapter],
+]:
+  """Returns a parsed config with MCP sessions converted to GenAI tools.
+
+  Also returns a map of MCP tools to GenAI tool adapters to be used for AFC.
+  """
+  mcp_to_genai_tool_adapters: dict[str, McpToGenAiToolAdapter] = {}
+  parsed_config = parse_config_for_mcp_usage(config)
+  if not parsed_config:
+    return None, mcp_to_genai_tool_adapters
+  # Create a copy of the config model with the tools field cleared as they will
+  # be replaced with the MCP tools converted to GenAI tools.
+  parsed_config_copy = parsed_config.model_copy(update={'tools': None})
+  if parsed_config.tools:
+    parsed_config_copy.tools = []
+    for tool in parsed_config.tools:
+      if McpClientSession is not None and isinstance(tool, McpClientSession):
+        mcp_to_genai_tool_adapter = McpToGenAiToolAdapter(
+            tool, await tool.list_tools()
+        )
+        # Extend the config with the MCP session tools converted to GenAI tools.
+        parsed_config_copy.tools.extend(mcp_to_genai_tool_adapter.tools)
+        for genai_tool in mcp_to_genai_tool_adapter.tools:
+          if genai_tool.function_declarations:
+            for function_declaration in genai_tool.function_declarations:
+              if function_declaration.name:
+                if mcp_to_genai_tool_adapters.get(function_declaration.name):
+                  raise ValueError(
+                      f'Tool {function_declaration.name} is already defined for'
+                      ' the request.'
+                  )
+                mcp_to_genai_tool_adapters[function_declaration.name] = (
+                    mcp_to_genai_tool_adapter
+                )
+      else:
+        parsed_config_copy.tools.append(tool)
+
+  return parsed_config_copy, mcp_to_genai_tool_adapters
+
+
+def append_chunk_contents(
+    contents: Union[types.ContentListUnion, types.ContentListUnionDict],
+    chunk: types.GenerateContentResponse,
+) -> Union[types.ContentListUnion, types.ContentListUnionDict]:
+  """Appends the contents of the chunk to the contents list and returns it."""
+  if chunk is not None and chunk.candidates is not None:
+    chunk_content = chunk.candidates[0].content
+    contents = t.t_contents(contents)  # type: ignore[assignment]
+    if isinstance(contents, list) and chunk_content is not None:
+      contents.append(chunk_content)  # type: ignore[arg-type]
+  return contents
+
+
+def prepare_resumable_upload(
+    file: Union[str, os.PathLike[str], io.IOBase],
+    user_http_options: Optional[types.HttpOptionsOrDict] = None,
+    user_mime_type: Optional[str] = None,
+) -> tuple[
+    types.HttpOptions,
+    int,
+    str,
+]:
+  """Prepares the HTTP options, file bytes size and mime type for a resumable upload.
+
+  This function inspects a file (from a path or an in-memory object) to
+  determine its size and MIME type. It then constructs the necessary HTTP
+  headers and options required to initiate a resumable upload session.
+  """
+  size_bytes = None
+  mime_type = user_mime_type
+  if isinstance(file, io.IOBase):
+    if mime_type is None:
+      raise ValueError(
+          'Unknown mime type: Could not determine the mimetype for your'
+          ' file\n please set the `mime_type` argument'
+      )
+    if hasattr(file, 'mode'):
+      if 'b' not in file.mode:
+        raise ValueError('The file must be opened in binary mode.')
+    offset = file.tell()
+    file.seek(0, os.SEEK_END)
+    size_bytes = file.tell() - offset
+    file.seek(offset, os.SEEK_SET)
+  else:
+    fs_path = os.fspath(file)
+    if not fs_path or not os.path.isfile(fs_path):
+      raise FileNotFoundError(f'{file} is not a valid file path.')
+    size_bytes = os.path.getsize(fs_path)
+    if mime_type is None:
+      mime_type, _ = mimetypes.guess_type(fs_path)
+    if mime_type is None:
+      raise ValueError(
+          'Unknown mime type: Could not determine the mimetype for your'
+          ' file\n please set the `mime_type` argument'
+      )
+  http_options: types.HttpOptions
+  if user_http_options:
+    if isinstance(user_http_options, dict):
+      user_http_options = types.HttpOptions(**user_http_options)
+    http_options = user_http_options
+    http_options.api_version = ''
+    http_options.headers = {
+        'Content-Type': 'application/json',
+        'X-Goog-Upload-Protocol': 'resumable',
+        'X-Goog-Upload-Command': 'start',
+        'X-Goog-Upload-Header-Content-Length': f'{size_bytes}',
+        'X-Goog-Upload-Header-Content-Type': f'{mime_type}',
+    }
+  else:
+    http_options = types.HttpOptions(
+        api_version='',
+        headers={
+            'Content-Type': 'application/json',
+            'X-Goog-Upload-Protocol': 'resumable',
+            'X-Goog-Upload-Command': 'start',
+            'X-Goog-Upload-Header-Content-Length': f'{size_bytes}',
+            'X-Goog-Upload-Header-Content-Type': f'{mime_type}',
+        },
+    )
+  if isinstance(file, (str, os.PathLike)):
+    if http_options.headers is None:
+      http_options.headers = {}
+    http_options.headers['X-Goog-Upload-File-Name'] = os.path.basename(file)
+  return http_options, size_bytes, mime_type