google-adk 1.1.0__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. google/adk/agents/base_agent.py +0 -2
  2. google/adk/agents/invocation_context.py +3 -3
  3. google/adk/agents/parallel_agent.py +17 -7
  4. google/adk/agents/sequential_agent.py +8 -8
  5. google/adk/auth/auth_preprocessor.py +18 -17
  6. google/adk/cli/agent_graph.py +165 -23
  7. google/adk/cli/browser/assets/ADK-512-color.svg +9 -0
  8. google/adk/cli/browser/index.html +2 -2
  9. google/adk/cli/browser/{main-PKDNKWJE.js → main-CS5OLUMF.js} +59 -59
  10. google/adk/cli/browser/polyfills-FFHMD2TL.js +17 -0
  11. google/adk/cli/cli.py +9 -9
  12. google/adk/cli/cli_deploy.py +157 -0
  13. google/adk/cli/cli_tools_click.py +228 -99
  14. google/adk/cli/fast_api.py +119 -34
  15. google/adk/cli/utils/agent_loader.py +60 -44
  16. google/adk/cli/utils/envs.py +1 -1
  17. google/adk/code_executors/unsafe_local_code_executor.py +11 -0
  18. google/adk/errors/__init__.py +13 -0
  19. google/adk/errors/not_found_error.py +28 -0
  20. google/adk/evaluation/agent_evaluator.py +1 -1
  21. google/adk/evaluation/eval_sets_manager.py +36 -6
  22. google/adk/evaluation/evaluation_generator.py +5 -4
  23. google/adk/evaluation/local_eval_sets_manager.py +101 -6
  24. google/adk/flows/llm_flows/agent_transfer.py +2 -2
  25. google/adk/flows/llm_flows/base_llm_flow.py +19 -0
  26. google/adk/flows/llm_flows/contents.py +4 -4
  27. google/adk/flows/llm_flows/functions.py +140 -127
  28. google/adk/memory/vertex_ai_rag_memory_service.py +2 -2
  29. google/adk/models/anthropic_llm.py +7 -10
  30. google/adk/models/google_llm.py +46 -18
  31. google/adk/models/lite_llm.py +63 -26
  32. google/adk/py.typed +0 -0
  33. google/adk/sessions/_session_util.py +10 -16
  34. google/adk/sessions/database_session_service.py +81 -66
  35. google/adk/sessions/vertex_ai_session_service.py +32 -6
  36. google/adk/telemetry.py +91 -24
  37. google/adk/tools/_automatic_function_calling_util.py +31 -25
  38. google/adk/tools/{function_parameter_parse_util.py → _function_parameter_parse_util.py} +9 -3
  39. google/adk/tools/_gemini_schema_util.py +158 -0
  40. google/adk/tools/apihub_tool/apihub_toolset.py +3 -2
  41. google/adk/tools/application_integration_tool/clients/connections_client.py +7 -0
  42. google/adk/tools/application_integration_tool/integration_connector_tool.py +5 -7
  43. google/adk/tools/base_tool.py +4 -8
  44. google/adk/tools/bigquery/__init__.py +11 -1
  45. google/adk/tools/bigquery/bigquery_credentials.py +9 -4
  46. google/adk/tools/bigquery/bigquery_toolset.py +86 -0
  47. google/adk/tools/bigquery/client.py +33 -0
  48. google/adk/tools/bigquery/metadata_tool.py +249 -0
  49. google/adk/tools/bigquery/query_tool.py +76 -0
  50. google/adk/tools/function_tool.py +4 -4
  51. google/adk/tools/langchain_tool.py +20 -13
  52. google/adk/tools/load_memory_tool.py +1 -0
  53. google/adk/tools/mcp_tool/conversion_utils.py +4 -2
  54. google/adk/tools/mcp_tool/mcp_session_manager.py +63 -5
  55. google/adk/tools/mcp_tool/mcp_tool.py +3 -2
  56. google/adk/tools/mcp_tool/mcp_toolset.py +15 -8
  57. google/adk/tools/openapi_tool/common/common.py +4 -43
  58. google/adk/tools/openapi_tool/openapi_spec_parser/__init__.py +0 -2
  59. google/adk/tools/openapi_tool/openapi_spec_parser/openapi_spec_parser.py +4 -2
  60. google/adk/tools/openapi_tool/openapi_spec_parser/operation_parser.py +4 -2
  61. google/adk/tools/openapi_tool/openapi_spec_parser/rest_api_tool.py +7 -127
  62. google/adk/tools/openapi_tool/openapi_spec_parser/tool_auth_handler.py +2 -7
  63. google/adk/tools/transfer_to_agent_tool.py +8 -1
  64. google/adk/tools/vertex_ai_search_tool.py +8 -1
  65. google/adk/utils/variant_utils.py +51 -0
  66. google/adk/version.py +1 -1
  67. {google_adk-1.1.0.dist-info → google_adk-1.2.0.dist-info}/METADATA +7 -7
  68. {google_adk-1.1.0.dist-info → google_adk-1.2.0.dist-info}/RECORD +71 -61
  69. google/adk/cli/browser/polyfills-B6TNHZQ6.js +0 -17
  70. {google_adk-1.1.0.dist-info → google_adk-1.2.0.dist-info}/WHEEL +0 -0
  71. {google_adk-1.1.0.dist-info → google_adk-1.2.0.dist-info}/entry_points.txt +0 -0
  72. {google_adk-1.1.0.dist-info → google_adk-1.2.0.dist-info}/licenses/LICENSE +0 -0

google/adk/tools/bigquery/bigquery_toolset.py
@@ -0,0 +1,86 @@
+ # Copyright 2025 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from __future__ import annotations
+
+ from typing import List
+ from typing import Optional
+ from typing import Union
+
+ from google.adk.agents.readonly_context import ReadonlyContext
+ from typing_extensions import override
+
+ from . import metadata_tool
+ from . import query_tool
+ from ...tools.base_tool import BaseTool
+ from ...tools.base_toolset import BaseToolset
+ from ...tools.base_toolset import ToolPredicate
+ from .bigquery_credentials import BigQueryCredentialsConfig
+ from .bigquery_tool import BigQueryTool
+
+
+ class BigQueryToolset(BaseToolset):
+   """BigQuery Toolset contains tools for interacting with BigQuery data and metadata."""
+
+   def __init__(
+       self,
+       *,
+       tool_filter: Optional[Union[ToolPredicate, List[str]]] = None,
+       credentials_config: Optional[BigQueryCredentialsConfig] = None,
+   ):
+     self._credentials_config = credentials_config
+     self.tool_filter = tool_filter
+
+   def _is_tool_selected(
+       self, tool: BaseTool, readonly_context: ReadonlyContext
+   ) -> bool:
+     if self.tool_filter is None:
+       return True
+
+     if isinstance(self.tool_filter, ToolPredicate):
+       return self.tool_filter(tool, readonly_context)
+
+     if isinstance(self.tool_filter, list):
+       return tool.name in self.tool_filter
+
+     return False
+
+   @override
+   async def get_tools(
+       self, readonly_context: Optional[ReadonlyContext] = None
+   ) -> List[BaseTool]:
+     """Get tools from the toolset."""
+     all_tools = [
+         BigQueryTool(
+             func=func,
+             credentials=self._credentials_config,
+         )
+         for func in [
+             metadata_tool.get_dataset_info,
+             metadata_tool.get_table_info,
+             metadata_tool.list_dataset_ids,
+             metadata_tool.list_table_ids,
+             query_tool.execute_sql,
+         ]
+     ]
+
+     return [
+         tool
+         for tool in all_tools
+         if self._is_tool_selected(tool, readonly_context)
+     ]
+
+   @override
+   async def close(self):
+     pass
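
A hedged usage sketch for the new toolset (not part of the package): it relies only on the constructor and get_tools signature shown in this hunk; the tool names passed to tool_filter and the omitted credentials_config are assumptions.

import asyncio

from google.adk.tools.bigquery.bigquery_toolset import BigQueryToolset

# tool_filter takes either a ToolPredicate or a list of tool names; the names
# here assume BigQueryTool derives its name from the wrapped function.
toolset = BigQueryToolset(tool_filter=["list_dataset_ids", "execute_sql"])


async def main():
  # get_tools() wraps the metadata/query functions in BigQueryTool instances
  # and applies the filter; credentials_config is left unset in this sketch.
  tools = await toolset.get_tools()
  print([tool.name for tool in tools])


asyncio.run(main())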

google/adk/tools/bigquery/client.py
@@ -0,0 +1,33 @@
+ # Copyright 2025 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from __future__ import annotations
+
+ import google.api_core.client_info
+ from google.cloud import bigquery
+ from google.oauth2.credentials import Credentials
+
+ USER_AGENT = "adk-bigquery-tool"
+
+
+ def get_bigquery_client(*, credentials: Credentials) -> bigquery.Client:
+   """Get a BigQuery client."""
+
+   client_info = google.api_core.client_info.ClientInfo(user_agent=USER_AGENT)
+
+   bigquery_client = bigquery.Client(
+       credentials=credentials, client_info=client_info
+   )
+
+   return bigquery_client
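
A minimal sketch of the client helper above; obtaining a valid google.oauth2 Credentials object is outside this diff, so the token below is a placeholder.

from google.oauth2.credentials import Credentials

from google.adk.tools.bigquery.client import get_bigquery_client

creds = Credentials(token="placeholder-access-token")  # assumption: real token comes from your own OAuth flow
bq_client = get_bigquery_client(credentials=creds)  # bigquery.Client tagged with the ADK user agent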

google/adk/tools/bigquery/metadata_tool.py
@@ -0,0 +1,249 @@
+ # Copyright 2025 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from google.cloud import bigquery
+ from google.oauth2.credentials import Credentials
+
+ from ...tools.bigquery import client
+
+
+ def list_dataset_ids(project_id: str, credentials: Credentials) -> list[str]:
+   """List BigQuery dataset ids in a Google Cloud project.
+
+   Args:
+       project_id (str): The Google Cloud project id.
+       credentials (Credentials): The credentials to use for the request.
+
+   Returns:
+       list[str]: List of the BigQuery dataset ids present in the project.
+
+   Examples:
+       >>> list_dataset_ids("bigquery-public-data")
+       ['america_health_rankings',
+        'american_community_survey',
+        'aml_ai_input_dataset',
+        'austin_311',
+        'austin_bikeshare',
+        'austin_crime',
+        'austin_incidents',
+        'austin_waste',
+        'baseball',
+        'bbc_news']
+   """
+   try:
+     bq_client = client.get_bigquery_client(credentials=credentials)
+
+     datasets = []
+     for dataset in bq_client.list_datasets(project_id):
+       datasets.append(dataset.dataset_id)
+     return datasets
+   except Exception as ex:
+     return {
+         "status": "ERROR",
+         "error_details": str(ex),
+     }
+
+
+ def get_dataset_info(
+     project_id: str, dataset_id: str, credentials: Credentials
+ ) -> dict:
+   """Get metadata information about a BigQuery dataset.
+
+   Args:
+       project_id (str): The Google Cloud project id containing the dataset.
+       dataset_id (str): The BigQuery dataset id.
+       credentials (Credentials): The credentials to use for the request.
+
+   Returns:
+       dict: Dictionary representing the properties of the dataset.
+
+   Examples:
+       >>> get_dataset_info("bigquery-public-data", "penguins")
+       {
+         "kind": "bigquery#dataset",
+         "etag": "PNC5907iQbzeVcAru/2L3A==",
+         "id": "bigquery-public-data:ml_datasets",
+         "selfLink":
+         "https://bigquery.googleapis.com/bigquery/v2/projects/bigquery-public-data/datasets/ml_datasets",
+         "datasetReference": {
+             "datasetId": "ml_datasets",
+             "projectId": "bigquery-public-data"
+         },
+         "access": [
+             {
+                 "role": "OWNER",
+                 "groupByEmail": "cloud-datasets-eng@google.com"
+             },
+             {
+                 "role": "READER",
+                 "iamMember": "allUsers"
+             },
+             {
+                 "role": "READER",
+                 "groupByEmail": "bqml-eng@google.com"
+             }
+         ],
+         "creationTime": "1553208775542",
+         "lastModifiedTime": "1686338918114",
+         "location": "US",
+         "type": "DEFAULT",
+         "maxTimeTravelHours": "168"
+       }
+   """
+   try:
+     bq_client = client.get_bigquery_client(credentials=credentials)
+     dataset = bq_client.get_dataset(
+         bigquery.DatasetReference(project_id, dataset_id)
+     )
+     return dataset.to_api_repr()
+   except Exception as ex:
+     return {
+         "status": "ERROR",
+         "error_details": str(ex),
+     }
+
+
+ def list_table_ids(
+     project_id: str, dataset_id: str, credentials: Credentials
+ ) -> list[str]:
+   """List table ids in a BigQuery dataset.
+
+   Args:
+       project_id (str): The Google Cloud project id containing the dataset.
+       dataset_id (str): The BigQuery dataset id.
+       credentials (Credentials): The credentials to use for the request.
+
+   Returns:
+       list[str]: List of the tables ids present in the dataset.
+
+   Examples:
+       >>> list_table_ids("bigquery-public-data", "ml_datasets")
+       ['census_adult_income',
+        'credit_card_default',
+        'holidays_and_events_for_forecasting',
+        'iris',
+        'penguins',
+        'ulb_fraud_detection']
+   """
+   try:
+     bq_client = client.get_bigquery_client(credentials=credentials)
+
+     tables = []
+     for table in bq_client.list_tables(
+         bigquery.DatasetReference(project_id, dataset_id)
+     ):
+       tables.append(table.table_id)
+     return tables
+   except Exception as ex:
+     return {
+         "status": "ERROR",
+         "error_details": str(ex),
+     }
+
+
+ def get_table_info(
+     project_id: str, dataset_id: str, table_id: str, credentials: Credentials
+ ) -> dict:
+   """Get metadata information about a BigQuery table.
+
+   Args:
+       project_id (str): The Google Cloud project id containing the dataset.
+       dataset_id (str): The BigQuery dataset id containing the table.
+       table_id (str): The BigQuery table id.
+       credentials (Credentials): The credentials to use for the request.
+
+   Returns:
+       dict: Dictionary representing the properties of the table.
+
+   Examples:
+       >>> get_table_info("bigquery-public-data", "ml_datasets", "penguins")
+       {
+         "kind": "bigquery#table",
+         "etag": "X0ZkRohSGoYvWemRYEgOHA==",
+         "id": "bigquery-public-data:ml_datasets.penguins",
+         "selfLink":
+         "https://bigquery.googleapis.com/bigquery/v2/projects/bigquery-public-data/datasets/ml_datasets/tables/penguins",
+         "tableReference": {
+             "projectId": "bigquery-public-data",
+             "datasetId": "ml_datasets",
+             "tableId": "penguins"
+         },
+         "schema": {
+             "fields": [
+                 {
+                     "name": "species",
+                     "type": "STRING",
+                     "mode": "REQUIRED"
+                 },
+                 {
+                     "name": "island",
+                     "type": "STRING",
+                     "mode": "NULLABLE"
+                 },
+                 {
+                     "name": "culmen_length_mm",
+                     "type": "FLOAT",
+                     "mode": "NULLABLE"
+                 },
+                 {
+                     "name": "culmen_depth_mm",
+                     "type": "FLOAT",
+                     "mode": "NULLABLE"
+                 },
+                 {
+                     "name": "flipper_length_mm",
+                     "type": "FLOAT",
+                     "mode": "NULLABLE"
+                 },
+                 {
+                     "name": "body_mass_g",
+                     "type": "FLOAT",
+                     "mode": "NULLABLE"
+                 },
+                 {
+                     "name": "sex",
+                     "type": "STRING",
+                     "mode": "NULLABLE"
+                 }
+             ]
+         },
+         "numBytes": "28947",
+         "numLongTermBytes": "28947",
+         "numRows": "344",
+         "creationTime": "1619804743188",
+         "lastModifiedTime": "1634584675234",
+         "type": "TABLE",
+         "location": "US",
+         "numTimeTravelPhysicalBytes": "0",
+         "numTotalLogicalBytes": "28947",
+         "numActiveLogicalBytes": "0",
+         "numLongTermLogicalBytes": "28947",
+         "numTotalPhysicalBytes": "5350",
+         "numActivePhysicalBytes": "0",
+         "numLongTermPhysicalBytes": "5350",
+         "numCurrentPhysicalBytes": "5350"
+       }
+   """
+   try:
+     bq_client = client.get_bigquery_client(credentials=credentials)
+     return bq_client.get_table(
+         bigquery.TableReference(
+             bigquery.DatasetReference(project_id, dataset_id), table_id
+         )
+     ).to_api_repr()
+   except Exception as ex:
+     return {
+         "status": "ERROR",
+         "error_details": str(ex),
+     }
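
A hedged sketch of calling the metadata helpers directly; the project, dataset, table, and credentials values are placeholders, not defaults shipped with the package.

from google.oauth2.credentials import Credentials

from google.adk.tools.bigquery import metadata_tool

creds = Credentials(token="placeholder-access-token")  # placeholder credentials
datasets = metadata_tool.list_dataset_ids("bigquery-public-data", creds)
table = metadata_tool.get_table_info(
    "bigquery-public-data", "ml_datasets", "penguins", creds
)
# On failure these helpers return {"status": "ERROR", "error_details": ...}
# instead of raising, as the except blocks above show.
print(datasets if isinstance(datasets, dict) else datasets[:5])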

google/adk/tools/bigquery/query_tool.py
@@ -0,0 +1,76 @@
+ # Copyright 2025 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from google.oauth2.credentials import Credentials
+
+ from ...tools.bigquery import client
+
+ MAX_DOWNLOADED_QUERY_RESULT_ROWS = 50
+
+
+ def execute_sql(project_id: str, query: str, credentials: Credentials) -> dict:
+   """Run a BigQuery SQL query in the project and return the result.
+
+   Args:
+       project_id (str): The GCP project id in which the query should be
+           executed.
+       query (str): The BigQuery SQL query to be executed.
+       credentials (Credentials): The credentials to use for the request.
+
+   Returns:
+       dict: Dictionary representing the result of the query.
+           If the result contains the key "result_is_likely_truncated" with
+           value True, it means that there may be additional rows matching the
+           query not returned in the result.
+
+   Examples:
+       >>> execute_sql("bigframes-dev",
+       ... "SELECT island, COUNT(*) AS population "
+       ... "FROM bigquery-public-data.ml_datasets.penguins GROUP BY island")
+       {
+         "rows": [
+             {
+                 "island": "Dream",
+                 "population": 124
+             },
+             {
+                 "island": "Biscoe",
+                 "population": 168
+             },
+             {
+                 "island": "Torgersen",
+                 "population": 52
+             }
+         ]
+       }
+   """
+
+   try:
+     bq_client = client.get_bigquery_client(credentials=credentials)
+     row_iterator = bq_client.query_and_wait(
+         query, project=project_id, max_results=MAX_DOWNLOADED_QUERY_RESULT_ROWS
+     )
+     rows = [{key: val for key, val in row.items()} for row in row_iterator]
+     result = {"rows": rows}
+     if (
+         MAX_DOWNLOADED_QUERY_RESULT_ROWS is not None
+         and len(rows) == MAX_DOWNLOADED_QUERY_RESULT_ROWS
+     ):
+       result["result_is_likely_truncated"] = True
+     return result
+   except Exception as ex:
+     return {
+         "status": "ERROR",
+         "error_details": str(ex),
+     }
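
And a hedged sketch of the query helper; the project id and credentials are placeholders. Results are capped at MAX_DOWNLOADED_QUERY_RESULT_ROWS (50) and flagged with result_is_likely_truncated when the cap is hit.

from google.oauth2.credentials import Credentials

from google.adk.tools.bigquery import query_tool

creds = Credentials(token="placeholder-access-token")  # placeholder credentials
result = query_tool.execute_sql(
    project_id="my-project",  # hypothetical project id
    query="SELECT 1 AS x",
    credentials=creds,
)
print(result)  # {"rows": [{"x": 1}]} on success, {"status": "ERROR", ...} on failure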

google/adk/tools/function_tool.py
@@ -46,14 +46,14 @@ class FunctionTool(BaseTool):

      # Get documentation (prioritize direct __doc__ if available)
      if hasattr(func, '__doc__') and func.__doc__:
-       doc = func.__doc__
+       doc = inspect.cleandoc(func.__doc__)
      elif (
          hasattr(func, '__call__')
          and hasattr(func.__call__, '__doc__')
          and func.__call__.__doc__
      ):
        # For callable objects, try to get docstring from __call__ method
-       doc = func.__call__.__doc__
+       doc = inspect.cleandoc(func.__call__.__doc__)

      super().__init__(name=name, description=doc)
      self.func = func
@@ -107,9 +107,9 @@ You could retry calling this tool, but it is IMPORTANT for you to provide all th
          or hasattr(self.func, '__call__')
          and inspect.iscoroutinefunction(self.func.__call__)
      ):
-       return await self.func(**args_to_call) or {}
+       return await self.func(**args_to_call)
      else:
-       return self.func(**args_to_call) or {}
+       return self.func(**args_to_call)

    # TODO(hangfei): fix call live for function stream.
    async def _call_live(
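
Two behavioral notes fall out of this hunk: tool descriptions are now normalized with inspect.cleandoc, and a falsy return value (such as None) from the wrapped function is no longer coerced to {}. A hedged illustration, not ADK source:

import inspect

from google.adk.tools.function_tool import FunctionTool


def log_event(message: str) -> None:
  """Record a message.

  Returns nothing on purpose.
  """
  print(message)


tool = FunctionTool(log_event)
# The stored description is the cleandoc'd docstring.
assert tool.description == inspect.cleandoc(log_event.__doc__)
# When this tool runs, its None result is now passed through unchanged
# (pre-1.2.0 it was replaced with {}), so callers should expect None payloads.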

google/adk/tools/langchain_tool.py
@@ -12,13 +12,15 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

- from typing import Any
+ from __future__ import annotations
+
  from typing import Optional
  from typing import Union

  from google.genai import types
  from langchain.agents import Tool
  from langchain_core.tools import BaseTool
+ from langchain_core.tools.structured import StructuredTool
  from typing_extensions import override

  from . import _automatic_function_calling_util
@@ -63,9 +65,13 @@ class LangchainTool(FunctionTool):
      raise ValueError("Langchain tool must have a 'run' or '_run' method")

    # Determine which function to use
-   func = tool._run if hasattr(tool, '_run') else tool.run
+   if isinstance(tool, StructuredTool):
+     func = tool.func
+   else:
+     func = tool._run if hasattr(tool, '_run') else tool.run
    super().__init__(func)
-
+   # run_manager is a special parameter for langchain tool
+   self._ignore_params.append('run_manager')
    self._langchain_tool = tool

    # Set name: priority is 1) explicitly provided name, 2) tool's name, 3) default
@@ -112,20 +118,21 @@ class LangchainTool(FunctionTool):
      ):
        tool_wrapper.args_schema = self._langchain_tool.args_schema

-       return _automatic_function_calling_util.build_function_declaration_for_langchain(
-           False,
-           self.name,
-           self.description,
-           tool_wrapper.func,
-           getattr(tool_wrapper, 'args', None),
-       )
+       return _automatic_function_calling_util.build_function_declaration_for_langchain(
+           False,
+           self.name,
+           self.description,
+           tool_wrapper.func,
+           tool_wrapper.args,
+       )

      # Need to provide a way to override the function names and descriptions
      # as the original function names are mostly ".run" and the descriptions
      # may not meet users' needs
-     return _automatic_function_calling_util.build_function_declaration(
-         func=self._langchain_tool.run,
-     )
+     function_decl = super()._get_declaration()
+     function_decl.name = self.name
+     function_decl.description = self.description
+     return function_decl

    except Exception as e:
      raise ValueError(
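
The change above special-cases LangChain's StructuredTool, wrapping its underlying func and ignoring the LangChain-only run_manager parameter. A hedged sketch of that path; the example tool itself is illustrative:

from google.adk.tools.langchain_tool import LangchainTool
from langchain_core.tools import StructuredTool


def add(a: int, b: int) -> int:
  """Add two integers."""
  return a + b


lc_tool = StructuredTool.from_function(add)
# With this release, LangchainTool uses lc_tool.func (the raw `add` function)
# rather than the .run/._run wrapper, and skips run_manager when building the
# function declaration.
adk_tool = LangchainTool(lc_tool)
print(adk_tool.name, adk_tool.description)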

google/adk/tools/load_memory_tool.py
@@ -69,6 +69,7 @@ class LoadMemoryTool(FunctionTool):
                  type=types.Type.STRING,
              )
          },
+         required=['query'],
      ),
  )


google/adk/tools/mcp_tool/conversion_utils.py
@@ -12,6 +12,8 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ from __future__ import annotations
+
  from typing import Any
  from typing import Dict

@@ -41,10 +43,10 @@ def adk_to_mcp_tool_type(tool: BaseTool) -> mcp_types.Tool:
    print(mcp_tool)
  """
  tool_declaration = tool._get_declaration()
- if not tool_declaration:
+ if not tool_declaration or not tool_declaration.parameters:
    input_schema = {}
  else:
-   input_schema = gemini_to_json_schema(tool._get_declaration().parameters)
+   input_schema = gemini_to_json_schema(tool_declaration.parameters)
  return mcp_types.Tool(
      name=tool.name,
      description=tool.description,
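
With the widened guard above, a declaration whose parameters field is empty now maps to an empty input schema rather than passing None through to gemini_to_json_schema. A small hedged example (assumes the optional mcp dependency is installed):

from google.adk.tools.function_tool import FunctionTool
from google.adk.tools.mcp_tool.conversion_utils import adk_to_mcp_tool_type


def ping() -> str:
  """Return a fixed liveness string."""
  return "pong"


# ping takes no arguments, so its declaration may carry no parameters; the
# updated check converts that case to an empty {} input schema.
mcp_tool = adk_to_mcp_tool_type(FunctionTool(ping))
print(mcp_tool.name, mcp_tool.inputSchema)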