tooluniverse 1.0.11.1__py3-none-any.whl → 1.0.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tooluniverse might be problematic; see the registry's advisory page for more details.
- tooluniverse/alphafold_tool.py +47 -7
- tooluniverse/base_tool.py +9 -1
- tooluniverse/build_optimizer.py +115 -22
- tooluniverse/data/alphafold_tools.json +7 -12
- tooluniverse/data/encode_tools.json +139 -0
- tooluniverse/data/gbif_tools.json +152 -0
- tooluniverse/data/gdc_tools.json +116 -0
- tooluniverse/data/gtex_tools.json +116 -0
- tooluniverse/data/icgc_tools.json +0 -0
- tooluniverse/data/mgnify_tools.json +121 -0
- tooluniverse/data/obis_tools.json +122 -0
- tooluniverse/data/optimizer_tools.json +275 -0
- tooluniverse/data/rnacentral_tools.json +99 -0
- tooluniverse/data/smolagent_tools.json +206 -0
- tooluniverse/data/uniprot_tools.json +13 -5
- tooluniverse/data/wikipathways_tools.json +106 -0
- tooluniverse/default_config.py +12 -0
- tooluniverse/encode_tool.py +245 -0
- tooluniverse/execute_function.py +185 -17
- tooluniverse/gbif_tool.py +166 -0
- tooluniverse/gdc_tool.py +175 -0
- tooluniverse/generate_tools.py +121 -9
- tooluniverse/gtex_tool.py +168 -0
- tooluniverse/mgnify_tool.py +181 -0
- tooluniverse/obis_tool.py +185 -0
- tooluniverse/pypi_package_inspector_tool.py +3 -2
- tooluniverse/python_executor_tool.py +43 -13
- tooluniverse/rnacentral_tool.py +124 -0
- tooluniverse/smcp.py +17 -25
- tooluniverse/smcp_server.py +1 -1
- tooluniverse/smolagent_tool.py +555 -0
- tooluniverse/tools/ArgumentDescriptionOptimizer.py +55 -0
- tooluniverse/tools/ENCODE_list_files.py +59 -0
- tooluniverse/tools/ENCODE_search_experiments.py +67 -0
- tooluniverse/tools/GBIF_search_occurrences.py +67 -0
- tooluniverse/tools/GBIF_search_species.py +55 -0
- tooluniverse/tools/GDC_list_files.py +55 -0
- tooluniverse/tools/GDC_search_cases.py +55 -0
- tooluniverse/tools/GTEx_get_expression_summary.py +49 -0
- tooluniverse/tools/GTEx_query_eqtl.py +59 -0
- tooluniverse/tools/MGnify_list_analyses.py +52 -0
- tooluniverse/tools/MGnify_search_studies.py +55 -0
- tooluniverse/tools/OBIS_search_occurrences.py +59 -0
- tooluniverse/tools/OBIS_search_taxa.py +52 -0
- tooluniverse/tools/RNAcentral_get_by_accession.py +46 -0
- tooluniverse/tools/RNAcentral_search.py +52 -0
- tooluniverse/tools/TestCaseGenerator.py +46 -0
- tooluniverse/tools/ToolDescriptionOptimizer.py +67 -0
- tooluniverse/tools/ToolDiscover.py +4 -0
- tooluniverse/tools/UniProt_search.py +14 -6
- tooluniverse/tools/WikiPathways_get_pathway.py +52 -0
- tooluniverse/tools/WikiPathways_search.py +52 -0
- tooluniverse/tools/__init__.py +43 -1
- tooluniverse/tools/advanced_literature_search_agent.py +46 -0
- tooluniverse/tools/alphafold_get_annotations.py +4 -10
- tooluniverse/tools/download_binary_file.py +3 -6
- tooluniverse/tools/open_deep_research_agent.py +46 -0
- tooluniverse/uniprot_tool.py +51 -4
- tooluniverse/wikipathways_tool.py +122 -0
- {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/METADATA +3 -1
- {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/RECORD +65 -24
- {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/WHEEL +0 -0
- {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/entry_points.txt +0 -0
- {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/licenses/LICENSE +0 -0
- {tooluniverse-1.0.11.1.dist-info → tooluniverse-1.0.12.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Any, Dict
|
|
3
|
+
from urllib.parse import urlencode
|
|
4
|
+
from urllib.request import Request, urlopen
|
|
5
|
+
|
|
6
|
+
from tooluniverse.tool_registry import register_tool
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def _http_get(
|
|
10
|
+
url: str,
|
|
11
|
+
headers: Dict[str, str] | None = None,
|
|
12
|
+
timeout: int = 30,
|
|
13
|
+
) -> Dict[str, Any]:
|
|
14
|
+
req = Request(url, headers=headers or {})
|
|
15
|
+
with urlopen(req, timeout=timeout) as resp:
|
|
16
|
+
data = resp.read()
|
|
17
|
+
try:
|
|
18
|
+
return json.loads(data.decode("utf-8", errors="ignore"))
|
|
19
|
+
except Exception:
|
|
20
|
+
return {"raw": data.decode("utf-8", errors="ignore")}
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@register_tool(
    "MGnifyStudiesTool",
    config={
        "name": "MGnify_search_studies",
        "type": "MGnifyStudiesTool",
        "description": "Search MGnify studies via /studies with optional biome/search filters",
        "parameter": {
            "type": "object",
            "properties": {
                "biome": {
                    "type": "string",
                    "description": "Biome identifier, e.g., 'root:Host-associated'",
                },
                "search": {
                    "type": "string",
                    "description": "Keyword to search in study title/description",
                },
                "size": {
                    "type": "integer",
                    "default": 10,
                    "minimum": 1,
                    "maximum": 100,
                },
            },
        },
        "settings": {
            "base_url": "https://www.ebi.ac.uk/metagenomics/api/latest",
            "timeout": 30,
        },
    },
)
class MGnifyStudiesTool:
    """Search MGnify studies through the public /studies endpoint."""

    def __init__(self, tool_config=None):
        # Keep the raw config; run() reads per-deployment settings from it.
        self.tool_config = tool_config or {}

    def run(self, arguments: Dict[str, Any]):
        """Query /studies with optional biome/search filters.

        Returns a schema-shaped dict; never raises — failures are reported
        via ``"success": False`` with the error message attached.
        """
        settings = self.tool_config.get("settings", {})
        base = settings.get(
            "base_url", "https://www.ebi.ac.uk/metagenomics/api/latest"
        )
        timeout = int(settings.get("timeout", 30))

        params: Dict[str, Any] = {}
        for key in ("biome", "search"):
            value = arguments.get(key)
            if value:
                params[key] = value
        size = arguments.get("size")
        params["size"] = 10 if size is None else int(size)

        url = f"{base}/studies?{urlencode(params)}"
        try:
            api_response = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            # The API replies {"data": [...], "links": ..., "meta": ...};
            # the tool schema wants the array nested as {"data": {"data": [...]}}.
            if isinstance(api_response, dict) and "data" in api_response:
                wrapped = {"data": api_response.get("data", [])}
            elif isinstance(api_response, list):
                wrapped = {"data": api_response}
            else:
                wrapped = {"data": []}

            return {
                "source": "MGnify",
                "endpoint": "studies",
                "query": params,
                "data": wrapped,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "MGnify",
                "endpoint": "studies",
                "success": False,
            }
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
@register_tool(
    "MGnifyAnalysesTool",
    config={
        "name": "MGnify_list_analyses",
        "type": "MGnifyAnalysesTool",
        "description": "List MGnify analyses via /analyses for a given study_accession",
        "parameter": {
            "type": "object",
            "properties": {
                "study_accession": {
                    "type": "string",
                    "description": "MGnify study accession, e.g., 'MGYS00000001'",
                },
                "size": {
                    "type": "integer",
                    "default": 10,
                    "minimum": 1,
                    "maximum": 100,
                },
            },
            "required": ["study_accession"],
        },
        "settings": {
            "base_url": "https://www.ebi.ac.uk/metagenomics/api/latest",
            "timeout": 30,
        },
    },
)
class MGnifyAnalysesTool:
    """List analyses belonging to a MGnify study via the /analyses endpoint."""

    def __init__(self, tool_config=None):
        # Store the config; run() reads endpoint settings from it lazily.
        self.tool_config = tool_config or {}

    def run(self, arguments: Dict[str, Any]):
        """Fetch analyses for ``study_accession``.

        Returns a schema-shaped dict; network/parse failures are reported
        via ``"success": False`` rather than raised.
        """
        settings = self.tool_config.get("settings", {})
        base = settings.get(
            "base_url", "https://www.ebi.ac.uk/metagenomics/api/latest"
        )
        timeout = int(settings.get("timeout", 30))

        params: Dict[str, Any] = {
            "study_accession": arguments.get("study_accession"),
        }
        size = arguments.get("size")
        params["size"] = 10 if size is None else int(size)

        url = f"{base}/analyses?{urlencode(params)}"
        try:
            api_response = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            # API replies {"data": [...], "links": ..., "meta": ...} while the
            # tool schema wants the list nested one level: {"data": {"data": [...]}}.
            if isinstance(api_response, dict) and "data" in api_response:
                wrapped = {"data": api_response.get("data", [])}
            elif isinstance(api_response, list):
                wrapped = {"data": api_response}
            else:
                wrapped = {"data": []}

            return {
                "source": "MGnify",
                "endpoint": "analyses",
                "query": params,
                "data": wrapped,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "MGnify",
                "endpoint": "analyses",
                "success": False,
            }
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Any, Dict
|
|
3
|
+
from urllib.parse import urlencode
|
|
4
|
+
from urllib.request import Request, urlopen
|
|
5
|
+
|
|
6
|
+
from tooluniverse.tool_registry import register_tool
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def _http_get(
|
|
10
|
+
url: str,
|
|
11
|
+
headers: Dict[str, str] | None = None,
|
|
12
|
+
timeout: int = 30,
|
|
13
|
+
) -> Dict[str, Any]:
|
|
14
|
+
req = Request(url, headers=headers or {})
|
|
15
|
+
with urlopen(req, timeout=timeout) as resp:
|
|
16
|
+
data = resp.read()
|
|
17
|
+
try:
|
|
18
|
+
return json.loads(data.decode("utf-8", errors="ignore"))
|
|
19
|
+
except Exception:
|
|
20
|
+
return {"raw": data.decode("utf-8", errors="ignore")}
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@register_tool(
    "OBISTaxaTool",
    config={
        "name": "OBIS_search_taxa",
        "type": "OBISTaxaTool",
        "description": "Resolve marine taxa by scientific name via OBIS /v3/taxon",
        "parameter": {
            "type": "object",
            "properties": {
                "scientificname": {
                    "type": "string",
                    "description": "Scientific name to search, e.g., 'Gadus'",
                },
                "size": {
                    "type": "integer",
                    "default": 10,
                    "minimum": 1,
                    "maximum": 100,
                },
            },
            "required": ["scientificname"],
        },
        "settings": {"base_url": "https://api.obis.org/v3", "timeout": 30},
    },
)
class OBISTaxaTool:
    """Resolve marine taxa by scientific name.

    OBIS v3 has no dedicated /taxon endpoint, so this tool searches
    /occurrence and de-duplicates the taxonomic fields of the returned
    occurrence records.
    """

    def __init__(self, tool_config=None):
        # Keep the config; run() reads endpoint settings from it.
        self.tool_config = tool_config or {}

    def run(self, arguments: Dict[str, Any]):
        """Search occurrences for ``scientificname`` and return up to
        ``size`` unique taxa extracted from the results.

        Never raises: failures are reported via ``"success": False``.
        """
        base = self.tool_config.get("settings", {}).get(
            "base_url", "https://api.obis.org/v3"
        )
        timeout = int(self.tool_config.get("settings", {}).get("timeout", 30))

        scientificname = arguments.get("scientificname")
        size = int(arguments.get("size", 10))

        # Note: OBIS v3 API does not have /taxon endpoint.
        # Use occurrence search with scientificname filter instead;
        # the occurrences are then reduced to their unique taxa.
        query = {
            "scientificname": scientificname,
            "size": size,
        }
        url = f"{base}/occurrence?{urlencode(query)}"
        try:
            data = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            if isinstance(data, dict) and "results" in data:
                results = data.get("results", [])
                # De-duplicate by scientificName, keeping first occurrence's
                # taxonomy, and stop once `size` taxa were collected.
                taxa_list = []
                seen_names = set()
                for occ in results:
                    sci_name = occ.get("scientificName")
                    if sci_name and sci_name not in seen_names:
                        seen_names.add(sci_name)
                        taxa_list.append(
                            {
                                "scientificName": sci_name,
                                "aphiaID": occ.get("aphiaID"),
                                "rank": occ.get("taxonRank"),
                                "kingdom": occ.get("kingdom"),
                                "phylum": occ.get("phylum"),
                                # Bug fix: OBIS occurrence JSON uses the Darwin
                                # Core key "class" (a dict key, not a Python
                                # identifier); "class_" always yielded None.
                                "class": occ.get("class"),
                                "order": occ.get("order"),
                                "family": occ.get("family"),
                                "genus": occ.get("genus"),
                            }
                        )
                        if len(taxa_list) >= size:
                            break
                # Return in expected schema format
                wrapped_data = {
                    "results": taxa_list,
                    "total": len(taxa_list),
                }
            else:
                wrapped_data = {"results": [], "total": 0}

            return {
                "source": "OBIS",
                "endpoint": "occurrence",  # Note: taxon endpoint not available, using occurrence
                "query": query,
                "data": wrapped_data,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "OBIS",
                "endpoint": "occurrence",
                "success": False,
            }
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
@register_tool(
    "OBISOccurrenceTool",
    config={
        "name": "OBIS_search_occurrences",
        "type": "OBISOccurrenceTool",
        "description": "Search OBIS occurrences via /v3/occurrence",
        "parameter": {
            "type": "object",
            "properties": {
                "scientificname": {
                    "type": "string",
                    "description": "Scientific name filter (optional)",
                },
                "areaid": {
                    "type": "string",
                    "description": "Area identifier filter (optional)",
                },
                "size": {
                    "type": "integer",
                    "default": 10,
                    "minimum": 1,
                    "maximum": 100,
                },
            },
        },
        "settings": {"base_url": "https://api.obis.org/v3", "timeout": 30},
    },
)
class OBISOccurrenceTool:
    """Search occurrence records through the OBIS /v3/occurrence endpoint."""

    def __init__(self, tool_config=None):
        # Config is kept as-is; run() pulls base_url/timeout from "settings".
        self.tool_config = tool_config or {}

    def run(self, arguments: Dict[str, Any]):
        """Search /occurrence with the provided optional filters.

        Returns the raw API payload under ``"data"``; errors are reported
        via ``"success": False`` rather than raised.
        """
        settings = self.tool_config.get("settings", {})
        base = settings.get("base_url", "https://api.obis.org/v3")
        timeout = int(settings.get("timeout", 30))

        # Forward only the recognized, non-None filters.
        params: Dict[str, Any] = {
            name: arguments[name]
            for name in ("scientificname", "areaid", "size")
            if arguments.get(name) is not None
        }
        params.setdefault("size", 10)

        url = f"{base}/occurrence?{urlencode(params)}"
        try:
            payload = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            return {
                "source": "OBIS",
                "endpoint": "occurrence",
                "query": params,
                "data": payload,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "OBIS",
                "endpoint": "occurrence",
                "success": False,
            }
|
|
@@ -5,6 +5,7 @@ import time
|
|
|
5
5
|
from datetime import datetime, timedelta
|
|
6
6
|
from typing import Dict, Any
|
|
7
7
|
from .tool_registry import register_tool
|
|
8
|
+
from .base_tool import BaseTool
|
|
8
9
|
|
|
9
10
|
|
|
10
11
|
@register_tool(
|
|
@@ -39,7 +40,7 @@ from .tool_registry import register_tool
|
|
|
39
40
|
},
|
|
40
41
|
},
|
|
41
42
|
)
|
|
42
|
-
class PyPIPackageInspector:
|
|
43
|
+
class PyPIPackageInspector(BaseTool):
|
|
43
44
|
"""
|
|
44
45
|
Extracts comprehensive package information from PyPI and GitHub.
|
|
45
46
|
Provides detailed metrics on popularity, maintenance, security,
|
|
@@ -47,7 +48,7 @@ class PyPIPackageInspector:
|
|
|
47
48
|
"""
|
|
48
49
|
|
|
49
50
|
def __init__(self, tool_config: Dict[str, Any] = None):
|
|
50
|
-
self
|
|
51
|
+
BaseTool.__init__(self, tool_config or {})
|
|
51
52
|
self.pypi_api_url = "https://pypi.org/pypi/{package}/json"
|
|
52
53
|
self.pypistats_api_url = "https://pypistats.org/api/packages/{package}/recent"
|
|
53
54
|
self.github_api_url = "https://api.github.com/repos/{owner}/{repo}"
|
|
@@ -206,20 +206,15 @@ class BasePythonExecutor:
|
|
|
206
206
|
raise TimeoutError("Code execution timed out")
|
|
207
207
|
|
|
208
208
|
def _execute_with_timeout(self, func, timeout_seconds: int, *args, **kwargs):
|
|
209
|
-
"""Execute function with timeout using signal
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
result = func(*args, **kwargs)
|
|
215
|
-
return result
|
|
216
|
-
finally:
|
|
217
|
-
signal.alarm(0)
|
|
218
|
-
signal.signal(signal.SIGALRM, old_handler)
|
|
219
|
-
else: # Windows or other systems
|
|
220
|
-
# Fallback to threading timeout (simpler but less reliable)
|
|
221
|
-
import threading
|
|
209
|
+
"""Execute function with timeout using signal or threading."""
|
|
210
|
+
import threading
|
|
211
|
+
|
|
212
|
+
# Check if we're in the main thread
|
|
213
|
+
is_main_thread = threading.current_thread() is threading.main_thread()
|
|
222
214
|
|
|
215
|
+
# Use threading timeout if not in main thread or on Windows
|
|
216
|
+
if not is_main_thread or not hasattr(signal, "SIGALRM"):
|
|
217
|
+
# Use threading timeout (works in all threads)
|
|
223
218
|
result_container = [None]
|
|
224
219
|
exception_container = [None]
|
|
225
220
|
|
|
@@ -242,6 +237,41 @@ class BasePythonExecutor:
|
|
|
242
237
|
|
|
243
238
|
return result_container[0]
|
|
244
239
|
|
|
240
|
+
# Use signal timeout only in main thread on Unix systems
|
|
241
|
+
else:
|
|
242
|
+
try:
|
|
243
|
+
old_handler = signal.signal(signal.SIGALRM, self._handle_timeout)
|
|
244
|
+
signal.alarm(timeout_seconds)
|
|
245
|
+
try:
|
|
246
|
+
result = func(*args, **kwargs)
|
|
247
|
+
return result
|
|
248
|
+
finally:
|
|
249
|
+
signal.alarm(0)
|
|
250
|
+
signal.signal(signal.SIGALRM, old_handler)
|
|
251
|
+
except (ValueError, AttributeError):
|
|
252
|
+
# Fallback to threading if signal fails for any reason
|
|
253
|
+
result_container = [None]
|
|
254
|
+
exception_container = [None]
|
|
255
|
+
|
|
256
|
+
def target():
|
|
257
|
+
try:
|
|
258
|
+
result_container[0] = func(*args, **kwargs)
|
|
259
|
+
except Exception as e:
|
|
260
|
+
exception_container[0] = e
|
|
261
|
+
|
|
262
|
+
thread = threading.Thread(target=target)
|
|
263
|
+
thread.daemon = True
|
|
264
|
+
thread.start()
|
|
265
|
+
thread.join(timeout_seconds)
|
|
266
|
+
|
|
267
|
+
if thread.is_alive():
|
|
268
|
+
raise TimeoutError("Code execution timed out")
|
|
269
|
+
|
|
270
|
+
if exception_container[0]:
|
|
271
|
+
raise exception_container[0]
|
|
272
|
+
|
|
273
|
+
return result_container[0]
|
|
274
|
+
|
|
245
275
|
def _format_error_response(
|
|
246
276
|
self,
|
|
247
277
|
error: Exception,
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Any, Dict
|
|
3
|
+
from urllib.parse import urlencode
|
|
4
|
+
from urllib.request import Request, urlopen
|
|
5
|
+
|
|
6
|
+
from tooluniverse.tool_registry import register_tool
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def _http_get(
|
|
10
|
+
url: str, headers: Dict[str, str] | None = None, timeout: int = 30
|
|
11
|
+
) -> Dict[str, Any]:
|
|
12
|
+
req = Request(url, headers=headers or {})
|
|
13
|
+
with urlopen(req, timeout=timeout) as resp:
|
|
14
|
+
data = resp.read()
|
|
15
|
+
try:
|
|
16
|
+
return json.loads(data.decode("utf-8", errors="ignore"))
|
|
17
|
+
except Exception:
|
|
18
|
+
return {"raw": data.decode("utf-8", errors="ignore")}
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@register_tool(
    "RNAcentralSearchTool",
    config={
        "name": "RNAcentral_search",
        "type": "RNAcentralSearchTool",
        "description": "Search RNA records via RNAcentral API",
        "parameter": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "Keyword or accession"},
                "page_size": {
                    "type": "integer",
                    "default": 10,
                    "minimum": 1,
                    "maximum": 100,
                },
            },
            "required": ["query"],
        },
        "settings": {"base_url": "https://rnacentral.org/api/v1", "timeout": 30},
    },
)
class RNAcentralSearchTool:
    """Keyword/accession search against the RNAcentral /rna endpoint."""

    def __init__(self, tool_config=None):
        # Config is read lazily in run(); default to an empty mapping.
        self.tool_config = tool_config or {}

    def run(self, arguments: Dict[str, Any]):
        """Search RNAcentral for ``query`` (paged by ``page_size``).

        Failures are reported through ``"success": False`` instead of raising.
        """
        settings = self.tool_config.get("settings", {})
        base = settings.get("base_url", "https://rnacentral.org/api/v1")
        timeout = int(settings.get("timeout", 30))

        params = {
            "query": arguments.get("query"),
            "page_size": int(arguments.get("page_size", 10)),
        }
        url = f"{base}/rna/?{urlencode(params)}"
        try:
            payload = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            return {
                "source": "RNAcentral",
                "endpoint": "rna",
                "query": params,
                "data": payload,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "RNAcentral",
                "endpoint": "rna",
                "success": False,
            }
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@register_tool(
    "RNAcentralGetTool",
    config={
        "name": "RNAcentral_get_by_accession",
        "type": "RNAcentralGetTool",
        "description": "Get RNAcentral entry by accession",
        "parameter": {
            "type": "object",
            "properties": {
                "accession": {"type": "string", "description": "RNAcentral accession"}
            },
            "required": ["accession"],
        },
        "settings": {"base_url": "https://rnacentral.org/api/v1", "timeout": 30},
    },
)
class RNAcentralGetTool:
    """Fetch a single RNAcentral entry by its accession."""

    def __init__(self, tool_config=None):
        # Keep the raw config; run() extracts settings on demand.
        self.tool_config = tool_config or {}

    def run(self, arguments: Dict[str, Any]):
        """GET /rna/<accession> and return the payload.

        Errors surface as ``"success": False`` with the message attached.
        """
        settings = self.tool_config.get("settings", {})
        base = settings.get("base_url", "https://rnacentral.org/api/v1")
        timeout = int(settings.get("timeout", 30))

        accession = arguments.get("accession")
        url = f"{base}/rna/{accession}"
        try:
            payload = _http_get(
                url, headers={"Accept": "application/json"}, timeout=timeout
            )
            return {
                "source": "RNAcentral",
                "endpoint": "rna/{accession}",
                "accession": accession,
                "data": payload,
                "success": True,
            }
        except Exception as e:
            return {
                "error": str(e),
                "source": "RNAcentral",
                "endpoint": "rna/{accession}",
                "accession": accession,
                "success": False,
            }
|
tooluniverse/smcp.py
CHANGED
|
@@ -2230,13 +2230,16 @@ class SMCP(FastMCP):
|
|
|
2230
2230
|
python_type = str
|
|
2231
2231
|
# For string type, don't add json_schema_extra - let Pydantic handle it
|
|
2232
2232
|
elif param_type == "integer":
|
|
2233
|
-
|
|
2233
|
+
# Allow both string and int for lenient coercion
|
|
2234
|
+
python_type = Union[int, str]
|
|
2234
2235
|
# For integer type, don't add json_schema_extra - let Pydantic handle it
|
|
2235
2236
|
elif param_type == "number":
|
|
2236
|
-
|
|
2237
|
+
# Allow both string and float for lenient coercion
|
|
2238
|
+
python_type = Union[float, str]
|
|
2237
2239
|
# For number type, don't add json_schema_extra - let Pydantic handle it
|
|
2238
2240
|
elif param_type == "boolean":
|
|
2239
|
-
|
|
2241
|
+
# Allow both string and bool for lenient coercion
|
|
2242
|
+
python_type = Union[bool, str]
|
|
2240
2243
|
# For boolean type, don't add json_schema_extra - let Pydantic handle it
|
|
2241
2244
|
elif param_type == "array":
|
|
2242
2245
|
python_type = list
|
|
@@ -2334,32 +2337,21 @@ class SMCP(FastMCP):
|
|
|
2334
2337
|
)
|
|
2335
2338
|
)
|
|
2336
2339
|
|
|
2337
|
-
# Add optional
|
|
2338
|
-
|
|
2339
|
-
|
|
2340
|
-
)
|
|
2341
|
-
stream_annotation = Annotated[Union[bool, type(None)], stream_field]
|
|
2342
|
-
param_annotations["_tooluniverse_stream"] = stream_annotation
|
|
2343
|
-
func_params.append(
|
|
2344
|
-
inspect.Parameter(
|
|
2345
|
-
"_tooluniverse_stream",
|
|
2346
|
-
inspect.Parameter.POSITIONAL_OR_KEYWORD,
|
|
2347
|
-
default=None,
|
|
2348
|
-
annotation=stream_annotation,
|
|
2349
|
-
)
|
|
2350
|
-
)
|
|
2351
|
-
|
|
2352
|
-
# Note: ctx parameter removed as it causes Pydantic schema issues
|
|
2353
|
-
# FastMCP context injection is handled internally by FastMCP
|
|
2340
|
+
# Add _tooluniverse_stream as an optional parameter for streaming support
|
|
2341
|
+
# This parameter is NOT exposed in the MCP schema (it's in kwargs but not in param_annotations)
|
|
2342
|
+
# Users can pass it to enable streaming, but it won't appear in the tool schema
|
|
2354
2343
|
|
|
2355
2344
|
async def dynamic_tool_function(**kwargs) -> str:
|
|
2356
2345
|
"""Execute ToolUniverse tool with provided arguments."""
|
|
2357
2346
|
try:
|
|
2358
2347
|
# Remove ctx if present (legacy support)
|
|
2359
2348
|
ctx = kwargs.pop("ctx", None) if "ctx" in kwargs else None
|
|
2360
|
-
|
|
2349
|
+
# Extract streaming flag (users can optionally pass this)
|
|
2350
|
+
stream_flag = bool(kwargs.pop("_tooluniverse_stream", False))
|
|
2361
2351
|
|
|
2362
|
-
# Filter out None values for optional parameters
|
|
2352
|
+
# Filter out None values for optional parameters
|
|
2353
|
+
# Note: _tooluniverse_stream was extracted and popped above
|
|
2354
|
+
# so it won't be in args_dict, which is what we want
|
|
2363
2355
|
args_dict = {k: v for k, v in kwargs.items() if v is not None}
|
|
2364
2356
|
|
|
2365
2357
|
# Validate required parameters (check against args_dict, not filtered_args)
|
|
@@ -2409,9 +2401,9 @@ class SMCP(FastMCP):
|
|
|
2409
2401
|
# Assign the function to stream_callback
|
|
2410
2402
|
stream_callback = _stream_callback
|
|
2411
2403
|
|
|
2412
|
-
#
|
|
2413
|
-
|
|
2414
|
-
|
|
2404
|
+
# Note: _tooluniverse_stream was extracted from kwargs above
|
|
2405
|
+
# and is not passed to the tool. The stream_callback is sufficient
|
|
2406
|
+
# to enable streaming for downstream tools.
|
|
2415
2407
|
|
|
2416
2408
|
run_callable = functools.partial(
|
|
2417
2409
|
self.tooluniverse.run_one_function,
|
tooluniverse/smcp_server.py
CHANGED
|
@@ -455,7 +455,7 @@ Examples:
|
|
|
455
455
|
|
|
456
456
|
try:
|
|
457
457
|
print(f"🚀 Starting {args.name}...", file=sys.stderr)
|
|
458
|
-
print("📡 Transport: stdio
|
|
458
|
+
print("📡 Transport: stdio", file=sys.stderr)
|
|
459
459
|
print(f"🔍 Search enabled: {not args.no_search}", file=sys.stderr)
|
|
460
460
|
|
|
461
461
|
if args.categories is not None:
|