tooluniverse 1.0.11.1__py3-none-any.whl → 1.0.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tooluniverse might be problematic; review the file changes listed below for details.

Files changed (65)
  1. tooluniverse/alphafold_tool.py +47 -7
  2. tooluniverse/base_tool.py +9 -1
  3. tooluniverse/build_optimizer.py +115 -22
  4. tooluniverse/data/alphafold_tools.json +7 -12
  5. tooluniverse/data/encode_tools.json +139 -0
  6. tooluniverse/data/gbif_tools.json +152 -0
  7. tooluniverse/data/gdc_tools.json +116 -0
  8. tooluniverse/data/gtex_tools.json +116 -0
  9. tooluniverse/data/icgc_tools.json +0 -0
  10. tooluniverse/data/mgnify_tools.json +121 -0
  11. tooluniverse/data/obis_tools.json +122 -0
  12. tooluniverse/data/optimizer_tools.json +275 -0
  13. tooluniverse/data/rnacentral_tools.json +99 -0
  14. tooluniverse/data/smolagent_tools.json +206 -0
  15. tooluniverse/data/uniprot_tools.json +13 -5
  16. tooluniverse/data/wikipathways_tools.json +106 -0
  17. tooluniverse/default_config.py +12 -0
  18. tooluniverse/encode_tool.py +245 -0
  19. tooluniverse/execute_function.py +185 -17
  20. tooluniverse/gbif_tool.py +166 -0
  21. tooluniverse/gdc_tool.py +175 -0
  22. tooluniverse/generate_tools.py +121 -9
  23. tooluniverse/gtex_tool.py +168 -0
  24. tooluniverse/mgnify_tool.py +181 -0
  25. tooluniverse/obis_tool.py +185 -0
  26. tooluniverse/pypi_package_inspector_tool.py +3 -2
  27. tooluniverse/python_executor_tool.py +43 -13
  28. tooluniverse/rnacentral_tool.py +124 -0
  29. tooluniverse/smcp.py +17 -25
  30. tooluniverse/smcp_server.py +1 -1
  31. tooluniverse/smolagent_tool.py +555 -0
  32. tooluniverse/tools/ArgumentDescriptionOptimizer.py +55 -0
  33. tooluniverse/tools/ENCODE_list_files.py +59 -0
  34. tooluniverse/tools/ENCODE_search_experiments.py +67 -0
  35. tooluniverse/tools/GBIF_search_occurrences.py +67 -0
  36. tooluniverse/tools/GBIF_search_species.py +55 -0
  37. tooluniverse/tools/GDC_list_files.py +55 -0
  38. tooluniverse/tools/GDC_search_cases.py +55 -0
  39. tooluniverse/tools/GTEx_get_expression_summary.py +49 -0
  40. tooluniverse/tools/GTEx_query_eqtl.py +59 -0
  41. tooluniverse/tools/MGnify_list_analyses.py +52 -0
  42. tooluniverse/tools/MGnify_search_studies.py +55 -0
  43. tooluniverse/tools/OBIS_search_occurrences.py +59 -0
  44. tooluniverse/tools/OBIS_search_taxa.py +52 -0
  45. tooluniverse/tools/RNAcentral_get_by_accession.py +46 -0
  46. tooluniverse/tools/RNAcentral_search.py +52 -0
  47. tooluniverse/tools/TestCaseGenerator.py +46 -0
  48. tooluniverse/tools/ToolDescriptionOptimizer.py +67 -0
  49. tooluniverse/tools/ToolDiscover.py +4 -0
  50. tooluniverse/tools/UniProt_search.py +14 -6
  51. tooluniverse/tools/WikiPathways_get_pathway.py +52 -0
  52. tooluniverse/tools/WikiPathways_search.py +52 -0
  53. tooluniverse/tools/__init__.py +43 -1
  54. tooluniverse/tools/advanced_literature_search_agent.py +46 -0
  55. tooluniverse/tools/alphafold_get_annotations.py +4 -10
  56. tooluniverse/tools/download_binary_file.py +3 -6
  57. tooluniverse/tools/open_deep_research_agent.py +46 -0
  58. tooluniverse/uniprot_tool.py +51 -4
  59. tooluniverse/wikipathways_tool.py +122 -0
  60. {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/METADATA +3 -1
  61. {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/RECORD +65 -24
  62. {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/WHEEL +0 -0
  63. {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/entry_points.txt +0 -0
  64. {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/licenses/LICENSE +0 -0
  65. {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,106 @@
1
+ [
2
+ {
3
+ "name": "WikiPathways_search",
4
+ "type": "WikiPathwaysSearchTool",
5
+ "description": "Text search across community-curated pathways (disease, metabolic, signaling). Use to discover relevant pathways for a topic/gene set and obtain WPIDs for retrieval/visualization.",
6
+ "parameter": {
7
+ "type": "object",
8
+ "properties": {
9
+ "query": {
10
+ "type": "string",
11
+ "description": "Free-text query (keywords, gene symbols, processes), e.g., 'p53', 'glycolysis'."
12
+ },
13
+ "organism": {
14
+ "type": "string",
15
+ "description": "Organism filter (scientific name), e.g., 'Homo sapiens'."
16
+ }
17
+ },
18
+ "required": ["query"]
19
+ },
20
+ "fields": {
21
+ "endpoint": "https://webservice.wikipathways.org/findPathwaysByText",
22
+ "format": "json"
23
+ },
24
+ "return_schema": {
25
+ "type": "object",
26
+ "description": "WikiPathways search response",
27
+ "properties": {
28
+ "status": {"type": "string"},
29
+ "data": {
30
+ "type": "object",
31
+ "properties": {
32
+ "result": {
33
+ "type": "array",
34
+ "items": {
35
+ "type": "object",
36
+ "properties": {
37
+ "id": {"type": "string", "description": "WPID"},
38
+ "name": {"type": "string"},
39
+ "species": {"type": "string"}
40
+ }
41
+ }
42
+ }
43
+ }
44
+ },
45
+ "url": {"type": "string"}
46
+ }
47
+ },
48
+ "test_examples": [
49
+ {"query": "p53"},
50
+ {"query": "metabolism", "organism": "Homo sapiens"}
51
+ ],
52
+ "label": ["WikiPathways", "Pathway", "Search"],
53
+ "metadata": {
54
+ "tags": ["pathway", "enrichment", "visualization"],
55
+ "estimated_execution_time": "< 2 seconds"
56
+ }
57
+ },
58
+ {
59
+ "name": "WikiPathways_get_pathway",
60
+ "type": "WikiPathwaysGetTool",
61
+ "description": "Fetch pathway content by WPID (JSON/GPML). Use to programmatically access pathway nodes/edges/metadata for enrichment reporting or network visualization.",
62
+ "parameter": {
63
+ "type": "object",
64
+ "properties": {
65
+ "wpid": {
66
+ "type": "string",
67
+ "description": "WikiPathways identifier (e.g., 'WP254')."
68
+ },
69
+ "format": {
70
+ "type": "string",
71
+ "enum": ["json", "gpml"],
72
+ "default": "json",
73
+ "description": "Response format: 'json' for structured, 'gpml' for GPML XML."
74
+ }
75
+ },
76
+ "required": ["wpid"]
77
+ },
78
+ "fields": {
79
+ "endpoint": "https://webservice.wikipathways.org/getPathway",
80
+ "format": "json"
81
+ },
82
+ "return_schema": {
83
+ "type": "object",
84
+ "description": "WikiPathways getPathway response",
85
+ "properties": {
86
+ "status": {"type": "string"},
87
+ "data": {
88
+ "type": "object",
89
+ "properties": {
90
+ "pathway": {"type": "object"},
91
+ "metadata": {"type": "object"}
92
+ }
93
+ },
94
+ "url": {"type": "string"}
95
+ }
96
+ },
97
+ "test_examples": [
98
+ {"wpid": "WP254", "format": "json"}
99
+ ],
100
+ "label": ["WikiPathways", "Pathway", "Content"],
101
+ "metadata": {
102
+ "tags": ["pathway", "content", "gpml"],
103
+ "estimated_execution_time": "< 2 seconds"
104
+ }
105
+ }
106
+ ]
@@ -51,6 +51,8 @@ default_tool_files = {
51
51
  "fatcat": os.path.join(current_dir, "data", "fatcat_tools.json"),
52
52
  "wikidata_sparql": os.path.join(current_dir, "data", "wikidata_sparql_tools.json"),
53
53
  "agents": os.path.join(current_dir, "data", "agentic_tools.json"),
54
+ # Smolagents tool wrapper configs
55
+ "smolagents": os.path.join(current_dir, "data", "smolagent_tools.json"),
54
56
  "tool_discovery_agents": os.path.join(
55
57
  current_dir, "data", "tool_discovery_agents.json"
56
58
  ),
@@ -181,8 +183,18 @@ default_tool_files = {
181
183
  "geo": os.path.join(current_dir, "data", "geo_tools.json"),
182
184
  "dbsnp": os.path.join(current_dir, "data", "dbsnp_tools.json"),
183
185
  "gnomad": os.path.join(current_dir, "data", "gnomad_tools.json"),
186
+ # Newly added database tools
187
+ "gbif": os.path.join(current_dir, "data", "gbif_tools.json"),
188
+ "obis": os.path.join(current_dir, "data", "obis_tools.json"),
189
+ "wikipathways": os.path.join(current_dir, "data", "wikipathways_tools.json"),
190
+ "rnacentral": os.path.join(current_dir, "data", "rnacentral_tools.json"),
191
+ "encode": os.path.join(current_dir, "data", "encode_tools.json"),
192
+ "gtex": os.path.join(current_dir, "data", "gtex_tools.json"),
193
+ "mgnify": os.path.join(current_dir, "data", "mgnify_tools.json"),
194
+ "gdc": os.path.join(current_dir, "data", "gdc_tools.json"),
184
195
  # Ontology tools
185
196
  "ols": os.path.join(current_dir, "data", "ols_tools.json"),
197
+ "optimizer": os.path.join(current_dir, "data", "optimizer_tools.json"),
186
198
  }
187
199
 
188
200
 
@@ -0,0 +1,245 @@
1
+ import json
2
+ from typing import Any, Dict
3
+ from urllib.error import HTTPError
4
+ from urllib.parse import urlencode
5
+ from urllib.request import Request, urlopen
6
+
7
+ from tooluniverse.tool_registry import register_tool
8
+ from tooluniverse.exceptions import (
9
+ ToolError,
10
+ ToolAuthError,
11
+ ToolRateLimitError,
12
+ ToolUnavailableError,
13
+ ToolValidationError,
14
+ ToolConfigError,
15
+ ToolDependencyError,
16
+ ToolServerError,
17
+ )
18
+
19
+
20
+ def _http_get(
21
+ url: str,
22
+ headers: Dict[str, str] | None = None,
23
+ timeout: int = 30,
24
+ ) -> Dict[str, Any]:
25
+ req = Request(url, headers=headers or {})
26
+ try:
27
+ with urlopen(req, timeout=timeout) as resp:
28
+ data = resp.read()
29
+ try:
30
+ return json.loads(data.decode("utf-8", errors="ignore"))
31
+ except Exception:
32
+ return {"raw": data.decode("utf-8", errors="ignore")}
33
+ except HTTPError as e:
34
+ # ENCODE API may return 404 even with valid JSON data
35
+ # Read the response body from the error
36
+ try:
37
+ data = e.read()
38
+ parsed = json.loads(data.decode("utf-8", errors="ignore"))
39
+ # If we got valid JSON, return it even though status was 404
40
+ return parsed
41
+ except Exception:
42
+ # If we can't parse, re-raise the original error
43
+ raise
44
+
45
+
46
@register_tool(
    "ENCODESearchTool",
    config={
        "name": "ENCODE_search_experiments",
        "type": "ENCODESearchTool",
        "description": "Search ENCODE experiments",
        "parameter": {
            "type": "object",
            "properties": {
                "assay_title": {"type": "string"},
                "target": {"type": "string"},
                "organism": {"type": "string"},
                "status": {"type": "string", "default": "released"},
                "limit": {"type": "integer", "default": 10},
            },
        },
        "settings": {"base_url": "https://www.encodeproject.org", "timeout": 30},
    },
)
class ENCODESearchTool:
    """Search the ENCODE portal's ``/search/`` endpoint for Experiment records.

    The endpoint/base URL and timeout are read from ``tool_config`` at run
    time (``fields.endpoint`` or ``settings.base_url``), so the registered
    defaults above can be overridden per instance.
    """

    def __init__(self, tool_config=None):
        # Optional config dict; may contain "fields" and "settings" sections.
        self.tool_config = tool_config or {}

    def handle_error(self, exception: Exception) -> ToolError:
        """Classify exceptions into structured ToolError subclasses.

        Classification is keyword-based on the exception text; the first
        matching category wins, falling back to ToolServerError.
        """
        error_str = str(exception).lower()
        if any(
            kw in error_str
            for kw in ["auth", "unauthorized", "401", "403", "api key", "token"]
        ):
            return ToolAuthError(f"Authentication failed: {exception}")
        elif any(
            kw in error_str for kw in ["rate limit", "429", "quota", "limit exceeded"]
        ):
            return ToolRateLimitError(f"Rate limit exceeded: {exception}")
        elif any(
            kw in error_str
            for kw in [
                "unavailable",
                "timeout",
                "connection",
                "network",
                "not found",
                "404",
            ]
        ):
            return ToolUnavailableError(f"Tool unavailable: {exception}")
        elif any(
            kw in error_str for kw in ["validation", "invalid", "schema", "parameter"]
        ):
            return ToolValidationError(f"Validation error: {exception}")
        elif any(kw in error_str for kw in ["config", "configuration", "setup"]):
            return ToolConfigError(f"Configuration error: {exception}")
        elif any(
            kw in error_str for kw in ["import", "module", "dependency", "package"]
        ):
            return ToolDependencyError(f"Dependency error: {exception}")
        else:
            return ToolServerError(f"Unexpected error: {exception}")

    def run(self, arguments: Dict[str, Any]):
        """Search ENCODE experiments.

        Args:
            arguments: May contain "assay_title", "target", "organism",
                "status" and "limit"; keys whose value is None are omitted
                from the query.

        Returns:
            Dict with "success", "source", "endpoint", the effective "query",
            and the parsed "data" on success; on failure an "error" string
            replaces "data".
        """
        # Endpoint resolution: prefer fields.endpoint, then settings.base_url.
        fields = self.tool_config.get("fields", {})
        settings = self.tool_config.get("settings", {})
        endpoint = fields.get(
            "endpoint",
            settings.get("base_url", "https://www.encodeproject.org/search/"),
        )
        # Derive the base URL. NOTE: the previous code sliced endpoint[:-7],
        # but "/search/" is 8 characters long, which left a trailing "/" and
        # produced double-slash URLs ("...org//search/?...").
        if endpoint.endswith("/search/"):
            base = endpoint[: -len("/search/")]
        else:
            base = endpoint.rstrip("/")
        timeout = int(settings.get("timeout", 30))

        query: Dict[str, Any] = {"type": "Experiment", "format": "json"}
        for key in ("assay_title", "target", "organism", "status", "limit"):
            if arguments.get(key) is not None:
                query[key] = arguments[key]

        # ENCODE API expects a flat query string; doseq expands list values.
        url = f"{base}/search/?{urlencode(query, doseq=True)}"
        try:
            data = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            return {
                "source": "ENCODE",
                "endpoint": "search",
                "query": query,
                "data": data,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "ENCODE",
                "endpoint": "search",
                "success": False,
            }
147
+
148
+
149
@register_tool(
    "ENCODEFilesTool",
    config={
        "name": "ENCODE_list_files",
        "type": "ENCODEFilesTool",
        "description": "List ENCODE files",
        "parameter": {
            "type": "object",
            "properties": {
                "file_type": {"type": "string"},
                "assay_title": {"type": "string"},
                "limit": {"type": "integer", "default": 10},
            },
        },
        "settings": {"base_url": "https://www.encodeproject.org", "timeout": 30},
    },
)
class ENCODEFilesTool:
    """List File records from the ENCODE portal's ``/search/`` endpoint.

    The endpoint/base URL and timeout are read from ``tool_config`` at run
    time (``fields.endpoint`` or ``settings.base_url``).
    """

    def __init__(self, tool_config=None):
        # Optional config dict; may contain "fields" and "settings" sections.
        self.tool_config = tool_config or {}

    def handle_error(self, exception: Exception) -> ToolError:
        """Classify exceptions into structured ToolError subclasses.

        Classification is keyword-based on the exception text; the first
        matching category wins, falling back to ToolServerError.
        """
        error_str = str(exception).lower()
        if any(
            kw in error_str
            for kw in ["auth", "unauthorized", "401", "403", "api key", "token"]
        ):
            return ToolAuthError(f"Authentication failed: {exception}")
        elif any(
            kw in error_str for kw in ["rate limit", "429", "quota", "limit exceeded"]
        ):
            return ToolRateLimitError(f"Rate limit exceeded: {exception}")
        elif any(
            kw in error_str
            for kw in [
                "unavailable",
                "timeout",
                "connection",
                "network",
                "not found",
                "404",
            ]
        ):
            return ToolUnavailableError(f"Tool unavailable: {exception}")
        elif any(
            kw in error_str for kw in ["validation", "invalid", "schema", "parameter"]
        ):
            return ToolValidationError(f"Validation error: {exception}")
        elif any(kw in error_str for kw in ["config", "configuration", "setup"]):
            return ToolConfigError(f"Configuration error: {exception}")
        elif any(
            kw in error_str for kw in ["import", "module", "dependency", "package"]
        ):
            return ToolDependencyError(f"Dependency error: {exception}")
        else:
            return ToolServerError(f"Unexpected error: {exception}")

    def run(self, arguments: Dict[str, Any]):
        """List ENCODE files.

        Args:
            arguments: May contain "file_type", "assay_title" and "limit";
                falsy values (None, "", 0) are omitted from the query.

        Returns:
            Dict with "success", "source", "endpoint", the effective "query",
            and the parsed "data" on success; on failure an "error" string
            replaces "data".
        """
        # Endpoint resolution: prefer fields.endpoint, then settings.base_url.
        fields = self.tool_config.get("fields", {})
        settings = self.tool_config.get("settings", {})
        endpoint = fields.get(
            "endpoint",
            settings.get("base_url", "https://www.encodeproject.org/search/"),
        )
        # Derive the base URL. NOTE: the previous code sliced endpoint[:-7],
        # but "/search/" is 8 characters long, which left a trailing "/" and
        # produced double-slash URLs ("...org//search/?...").
        if endpoint.endswith("/search/"):
            base = endpoint[: -len("/search/")]
        else:
            base = endpoint.rstrip("/")
        timeout = int(settings.get("timeout", 30))

        query: Dict[str, Any] = {"type": "File", "format": "json"}
        for key in ("file_type", "assay_title", "limit"):
            if arguments.get(key):
                query[key] = arguments[key]

        url = f"{base}/search/?{urlencode(query)}"
        try:
            data = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            return {
                "source": "ENCODE",
                "endpoint": "search",
                "query": query,
                "data": data,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "ENCODE",
                "endpoint": "search",
                "success": False,
            }
@@ -354,6 +354,12 @@ class ToolUniverse:
354
354
  "TOOLUNIVERSE_STRICT_VALIDATION", "false"
355
355
  ).lower() in ("true", "1", "yes")
356
356
 
357
+ # Initialize lenient type coercion feature
358
+ # Default: True for better user experience
359
+ self.lenient_type_coercion = os.getenv(
360
+ "TOOLUNIVERSE_COERCE_TYPES", "true"
361
+ ).lower() in ("true", "1", "yes")
362
+
357
363
  # Initialize dynamic tools namespace
358
364
  self.tools = ToolNamespace(self)
359
365
 
@@ -1398,8 +1404,12 @@ class ToolUniverse:
1398
1404
  # Validate tools have required fields
1399
1405
  valid_tools = []
1400
1406
  for tool in tools_in_file:
1407
+ # Validate that tool is a dict, has "name" field, and name is a string
1401
1408
  if isinstance(tool, dict) and "name" in tool:
1402
- valid_tools.append(tool)
1409
+ name_value = tool["name"]
1410
+ # Ensure name is a string (not a dict/object) - this filters out schema files
1411
+ if isinstance(name_value, str):
1412
+ valid_tools.append(tool)
1403
1413
 
1404
1414
  return valid_tools
1405
1415
 
@@ -1422,7 +1432,13 @@ class ToolUniverse:
1422
1432
  for _category, file_path in self.tool_files.items():
1423
1433
  tools_in_category = self._read_tools_from_file(file_path)
1424
1434
  all_tools.extend(tools_in_category)
1425
- all_tool_names.update([tool["name"] for tool in tools_in_category])
1435
+ # Only add string names to the set (filter out any non-string names as extra safety)
1436
+ tool_names = [
1437
+ tool["name"]
1438
+ for tool in tools_in_category
1439
+ if isinstance(tool.get("name"), str)
1440
+ ]
1441
+ all_tool_names.update(tool_names)
1426
1442
 
1427
1443
  # Also include remote tools
1428
1444
  try:
@@ -1435,7 +1451,13 @@ class ToolUniverse:
1435
1451
  remote_tools = self._read_tools_from_file(fpath)
1436
1452
  if remote_tools:
1437
1453
  all_tools.extend(remote_tools)
1438
- all_tool_names.update([tool["name"] for tool in remote_tools])
1454
+ # Only add string names to the set (filter out any non-string names as extra safety)
1455
+ tool_names = [
1456
+ tool["name"]
1457
+ for tool in remote_tools
1458
+ if isinstance(tool.get("name"), str)
1459
+ ]
1460
+ all_tool_names.update(tool_names)
1439
1461
  except Exception as e:
1440
1462
  warning(f"Warning: Failed to scan remote tools directory: {e}")
1441
1463
 
@@ -1459,11 +1481,17 @@ class ToolUniverse:
1459
1481
  warning(f"Warning: Data directory not found: {data_dir}")
1460
1482
  return all_tools, all_tool_names
1461
1483
 
1462
- # Recursively find all JSON files
1484
+ # Recursively find all JSON files, excluding schema files
1463
1485
  json_files = []
1464
1486
  for root, _dirs, files in os.walk(data_dir):
1487
+ # Skip schemas directory (contains JSON schema definition files, not tool configs)
1488
+ if "schemas" in root:
1489
+ continue
1465
1490
  for file in files:
1466
1491
  if file.lower().endswith(".json"):
1492
+ # Skip files with "schema" in the name
1493
+ if "schema" in file.lower():
1494
+ continue
1467
1495
  json_files.append(os.path.join(root, file))
1468
1496
 
1469
1497
  self.logger.debug(f"Found {len(json_files)} JSON files to scan")
@@ -1473,7 +1501,13 @@ class ToolUniverse:
1473
1501
  tools_in_file = self._read_tools_from_file(json_file)
1474
1502
  if tools_in_file:
1475
1503
  all_tools.extend(tools_in_file)
1476
- all_tool_names.update([tool["name"] for tool in tools_in_file])
1504
+ # Only add string names to the set (filter out any non-string names as extra safety)
1505
+ tool_names = [
1506
+ tool["name"]
1507
+ for tool in tools_in_file
1508
+ if isinstance(tool.get("name"), str)
1509
+ ]
1510
+ all_tool_names.update(tool_names)
1477
1511
  self.logger.debug(f"Loaded {len(tools_in_file)} tools from {json_file}")
1478
1512
 
1479
1513
  self.logger.info(
@@ -1862,7 +1896,10 @@ class ToolUniverse:
1862
1896
  continue
1863
1897
 
1864
1898
  tool_instance = self._ensure_tool_instance(job)
1865
- if not tool_instance or not tool_instance.supports_caching():
1899
+ if (
1900
+ not tool_instance
1901
+ or not getattr(tool_instance, "supports_caching", lambda: True)()
1902
+ ):
1866
1903
  continue
1867
1904
 
1868
1905
  cache_key = tool_instance.get_cache_key(job.arguments or {})
@@ -2081,7 +2118,10 @@ class ToolUniverse:
2081
2118
 
2082
2119
  if cache_enabled:
2083
2120
  tool_instance = self._get_tool_instance(function_name, cache=True)
2084
- if tool_instance and tool_instance.supports_caching():
2121
+ if (
2122
+ tool_instance
2123
+ and getattr(tool_instance, "supports_caching", lambda: True)()
2124
+ ):
2085
2125
  cache_namespace = tool_instance.get_cache_namespace()
2086
2126
  cache_version = tool_instance.get_cache_version()
2087
2127
  cache_key = self._make_cache_key(function_name, arguments)
@@ -2113,21 +2153,28 @@ class ToolUniverse:
2113
2153
  )
2114
2154
  return cached_value
2115
2155
 
2156
+ # Coerce types if lenient coercion is enabled
2157
+ if self.lenient_type_coercion:
2158
+ arguments = self._coerce_arguments_to_schema(function_name, arguments)
2159
+ # Update the original dict so coerced arguments are used
2160
+ function_call_json["arguments"] = arguments
2161
+
2116
2162
  # Validate parameters if requested
2117
2163
  if validate:
2118
2164
  validation_error = self._validate_parameters(function_name, arguments)
2119
2165
  if validation_error:
2120
2166
  return self._create_dual_format_error(validation_error)
2121
-
2122
- # Check function call format (existing validation)
2123
- check_status, check_message = self.check_function_call(function_call_json)
2124
- if check_status is False:
2125
- error_msg = "Invalid function call: " + check_message
2126
- return self._create_dual_format_error(
2127
- ToolValidationError(
2128
- error_msg, details={"check_message": check_message}
2167
+ else:
2168
+ # When validate=False, perform lightweight checks:
2169
+ # 1. Verify tool exists in all_tool_dict
2170
+ # 2. No parameter validation (for performance)
2171
+ if function_name not in self.all_tool_dict:
2172
+ return self._create_dual_format_error(
2173
+ ToolValidationError(
2174
+ f"Tool '{function_name}' not found",
2175
+ details={"tool_name": function_name},
2176
+ )
2129
2177
  )
2130
- )
2131
2178
 
2132
2179
  # Execute the tool
2133
2180
  tool_arguments = arguments
@@ -2198,7 +2245,11 @@ class ToolUniverse:
2198
2245
  )
2199
2246
 
2200
2247
  # Cache result if enabled
2201
- if cache_enabled and tool_instance and tool_instance.supports_caching():
2248
+ if (
2249
+ cache_enabled
2250
+ and tool_instance
2251
+ and getattr(tool_instance, "supports_caching", lambda: True)()
2252
+ ):
2202
2253
  if cache_key is None:
2203
2254
  cache_key = self._make_cache_key(function_name, arguments)
2204
2255
  if cache_namespace is None:
@@ -2421,6 +2472,123 @@ class ToolUniverse:
2421
2472
  )
2422
2473
  return hashlib.md5(serialized.encode()).hexdigest()
2423
2474
 
2475
+ def _coerce_value_to_type(self, value: Any, schema: dict) -> Any:
2476
+ """
2477
+ Coerce a value to match the schema's expected type.
2478
+
2479
+ This function attempts to convert string values to integers, floats,
2480
+ or booleans when the schema expects those types. This makes the
2481
+ system more lenient with user input from LLMs that provide numeric
2482
+ values as strings.
2483
+
2484
+ Args:
2485
+ value: The value to coerce
2486
+ schema: The JSON schema definition for this value
2487
+
2488
+ Returns:
2489
+ The coerced value (or original if coercion fails or not applicable)
2490
+ """
2491
+ # Only coerce string values
2492
+ if not isinstance(value, str):
2493
+ return value
2494
+
2495
+ # Handle anyOf/oneOf schemas by recursively trying each option
2496
+ if "anyOf" in schema:
2497
+ for option in schema["anyOf"]:
2498
+ coerced = self._coerce_value_to_type(value, option)
2499
+ if coerced is not value: # Coercion succeeded
2500
+ return coerced
2501
+ return value
2502
+
2503
+ if "oneOf" in schema:
2504
+ for option in schema["oneOf"]:
2505
+ coerced = self._coerce_value_to_type(value, option)
2506
+ if coerced is not value: # Coercion succeeded
2507
+ return coerced
2508
+ return value
2509
+
2510
+ # Handle array types
2511
+ if schema.get("type") == "array" and "items" in schema:
2512
+ if isinstance(value, list):
2513
+ # Recursively coerce array items
2514
+ items_schema = schema["items"]
2515
+ return [
2516
+ self._coerce_value_to_type(item, items_schema) for item in value
2517
+ ]
2518
+ return value
2519
+
2520
+ # Get the expected type
2521
+ expected_type = schema.get("type")
2522
+
2523
+ # Don't coerce if schema expects string type
2524
+ if expected_type == "string":
2525
+ return value
2526
+
2527
+ # Try to coerce based on expected type
2528
+ if expected_type == "integer":
2529
+ try:
2530
+ # Only parse as int if it represents an integer (not a float)
2531
+ if "." not in value:
2532
+ return int(value)
2533
+ except (ValueError, TypeError):
2534
+ # If coercion fails, return the original value as per function design
2535
+ pass
2536
+ elif expected_type == "number":
2537
+ try:
2538
+ return float(value)
2539
+ except (ValueError, TypeError):
2540
+ pass
2541
+ elif expected_type == "boolean":
2542
+ # Handle common boolean string representations
2543
+ lower_value = value.lower().strip()
2544
+ if lower_value in ("true", "1", "yes", "on"):
2545
+ return True
2546
+ elif lower_value in ("false", "0", "no", "off"):
2547
+ return False
2548
+
2549
+ return value
2550
+
2551
+ def _coerce_arguments_to_schema(self, function_name: str, arguments: dict) -> dict:
2552
+ """
2553
+ Coerce all arguments for a tool to match their schema expectations.
2554
+
2555
+ Args:
2556
+ function_name: Name of the tool
2557
+ arguments: Dictionary of arguments to coerce
2558
+
2559
+ Returns:
2560
+ New dictionary with coerced arguments
2561
+ """
2562
+ if function_name not in self.all_tool_dict:
2563
+ return arguments
2564
+
2565
+ tool_config = self.all_tool_dict[function_name]
2566
+ parameter_schema = tool_config.get("parameter", {})
2567
+ properties = parameter_schema.get("properties", {})
2568
+
2569
+ if not properties:
2570
+ return arguments
2571
+
2572
+ coerced_args = {}
2573
+ for param_name, param_value in arguments.items():
2574
+ if param_name in properties:
2575
+ param_schema = properties[param_name]
2576
+ coerced_value = self._coerce_value_to_type(param_value, param_schema)
2577
+
2578
+ # Log when coercion occurs
2579
+ if coerced_value != param_value:
2580
+ self.logger.debug(
2581
+ f"Coerced parameter '{param_name}' from "
2582
+ f"{param_value!r} ({type(param_value).__name__}) "
2583
+ f"to {coerced_value!r} ({type(coerced_value).__name__})"
2584
+ )
2585
+
2586
+ coerced_args[param_name] = coerced_value
2587
+ else:
2588
+ coerced_args[param_name] = param_value
2589
+
2590
+ return coerced_args
2591
+
2424
2592
  def _validate_parameters(
2425
2593
  self, function_name: str, arguments: dict
2426
2594
  ) -> Optional[ToolError]: