tooluniverse 1.0.7__py3-none-any.whl → 1.0.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of tooluniverse might be problematic.

Files changed (76)
  1. tooluniverse/__init__.py +29 -14
  2. tooluniverse/admetai_tool.py +8 -4
  3. tooluniverse/base_tool.py +36 -0
  4. tooluniverse/biogrid_tool.py +118 -0
  5. tooluniverse/build_optimizer.py +87 -0
  6. tooluniverse/cache/__init__.py +3 -0
  7. tooluniverse/cache/memory_cache.py +99 -0
  8. tooluniverse/cache/result_cache_manager.py +235 -0
  9. tooluniverse/cache/sqlite_backend.py +257 -0
  10. tooluniverse/clinvar_tool.py +90 -0
  11. tooluniverse/custom_tool.py +28 -0
  12. tooluniverse/data/arxiv_tools.json +1 -4
  13. tooluniverse/data/core_tools.json +1 -4
  14. tooluniverse/data/dataset_tools.json +7 -7
  15. tooluniverse/data/doaj_tools.json +1 -3
  16. tooluniverse/data/drug_discovery_agents.json +292 -0
  17. tooluniverse/data/europe_pmc_tools.json +1 -2
  18. tooluniverse/data/genomics_tools.json +174 -0
  19. tooluniverse/data/geo_tools.json +86 -0
  20. tooluniverse/data/markitdown_tools.json +51 -0
  21. tooluniverse/data/openalex_tools.json +1 -5
  22. tooluniverse/data/pmc_tools.json +1 -4
  23. tooluniverse/data/ppi_tools.json +139 -0
  24. tooluniverse/data/pubmed_tools.json +1 -3
  25. tooluniverse/data/semantic_scholar_tools.json +1 -2
  26. tooluniverse/data/unified_guideline_tools.json +206 -4
  27. tooluniverse/data/xml_tools.json +15 -15
  28. tooluniverse/data/zenodo_tools.json +1 -2
  29. tooluniverse/dbsnp_tool.py +71 -0
  30. tooluniverse/default_config.py +6 -0
  31. tooluniverse/ensembl_tool.py +61 -0
  32. tooluniverse/execute_function.py +196 -75
  33. tooluniverse/generate_tools.py +303 -20
  34. tooluniverse/genomics_gene_search_tool.py +56 -0
  35. tooluniverse/geo_tool.py +116 -0
  36. tooluniverse/gnomad_tool.py +63 -0
  37. tooluniverse/markitdown_tool.py +159 -0
  38. tooluniverse/mcp_client_tool.py +10 -5
  39. tooluniverse/smcp.py +10 -9
  40. tooluniverse/string_tool.py +112 -0
  41. tooluniverse/tools/ADMETAnalyzerAgent.py +59 -0
  42. tooluniverse/tools/ArXiv_search_papers.py +3 -3
  43. tooluniverse/tools/CMA_Guidelines_Search.py +52 -0
  44. tooluniverse/tools/CORE_search_papers.py +3 -3
  45. tooluniverse/tools/ClinVar_search_variants.py +52 -0
  46. tooluniverse/tools/ClinicalTrialDesignAgent.py +63 -0
  47. tooluniverse/tools/CompoundDiscoveryAgent.py +59 -0
  48. tooluniverse/tools/DOAJ_search_articles.py +2 -2
  49. tooluniverse/tools/DiseaseAnalyzerAgent.py +52 -0
  50. tooluniverse/tools/DrugInteractionAnalyzerAgent.py +52 -0
  51. tooluniverse/tools/DrugOptimizationAgent.py +63 -0
  52. tooluniverse/tools/Ensembl_lookup_gene_by_symbol.py +52 -0
  53. tooluniverse/tools/EuropePMC_search_articles.py +1 -1
  54. tooluniverse/tools/GIN_Guidelines_Search.py +52 -0
  55. tooluniverse/tools/GWAS_search_associations_by_gene.py +52 -0
  56. tooluniverse/tools/LiteratureSynthesisAgent.py +59 -0
  57. tooluniverse/tools/PMC_search_papers.py +3 -3
  58. tooluniverse/tools/PubMed_search_articles.py +2 -2
  59. tooluniverse/tools/SemanticScholar_search_papers.py +1 -1
  60. tooluniverse/tools/UCSC_get_genes_by_region.py +67 -0
  61. tooluniverse/tools/Zenodo_search_records.py +1 -1
  62. tooluniverse/tools/__init__.py +33 -1
  63. tooluniverse/tools/convert_to_markdown.py +59 -0
  64. tooluniverse/tools/dbSNP_get_variant_by_rsid.py +46 -0
  65. tooluniverse/tools/gnomAD_query_variant.py +52 -0
  66. tooluniverse/tools/openalex_literature_search.py +4 -4
  67. tooluniverse/ucsc_tool.py +60 -0
  68. tooluniverse/unified_guideline_tools.py +1175 -57
  69. tooluniverse/utils.py +51 -4
  70. tooluniverse/zenodo_tool.py +2 -1
  71. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/METADATA +9 -3
  72. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/RECORD +76 -40
  73. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/WHEEL +0 -0
  74. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/entry_points.txt +0 -0
  75. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/licenses/LICENSE +0 -0
  76. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/top_level.txt +0 -0
tooluniverse/cache/result_cache_manager.py
@@ -0,0 +1,235 @@
+"""
+Result cache manager that coordinates in-memory and persistent storage.
+"""
+
+from __future__ import annotations
+
+import logging
+import os
+import time
+from dataclasses import dataclass
+from typing import Any, Dict, Iterator, Optional
+
+from .memory_cache import LRUCache, SingleFlight
+from .sqlite_backend import CacheEntry, PersistentCache
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class CacheRecord:
+    value: Any
+    expires_at: Optional[float]
+    namespace: str
+    version: str
+
+
+class ResultCacheManager:
+    """Facade around memory + persistent cache layers."""
+
+    def __init__(
+        self,
+        *,
+        memory_size: int = 256,
+        persistent_path: Optional[str] = None,
+        enabled: bool = True,
+        persistence_enabled: bool = True,
+        singleflight: bool = True,
+        default_ttl: Optional[int] = None,
+    ):
+        self.enabled = enabled
+        self.default_ttl = default_ttl
+
+        self.memory = LRUCache(max_size=memory_size)
+        persistence_path = persistent_path
+        if persistence_path is None:
+            cache_dir = os.environ.get("TOOLUNIVERSE_CACHE_DIR")
+            if cache_dir:
+                persistence_path = os.path.join(cache_dir, "tooluniverse_cache.sqlite")
+        self.persistent = None
+        if persistence_enabled and persistence_path:
+            try:
+                self.persistent = PersistentCache(persistence_path, enable=True)
+            except Exception as exc:
+                logger.warning("Failed to initialize persistent cache: %s", exc)
+                self.persistent = None
+
+        self.singleflight = SingleFlight() if singleflight else None
+
+    # ------------------------------------------------------------------
+    # Helper methods
+    # ------------------------------------------------------------------
+    @staticmethod
+    def compose_key(namespace: str, version: str, cache_key: str) -> str:
+        return f"{namespace}::{version}::{cache_key}"
+
+    def _now(self) -> float:
+        return time.time()
+
+    def _ttl_or_default(self, ttl: Optional[int]) -> Optional[int]:
+        return ttl if ttl is not None else self.default_ttl
+
+    # ------------------------------------------------------------------
+    # Public API
+    # ------------------------------------------------------------------
+    def get(self, *, namespace: str, version: str, cache_key: str) -> Optional[Any]:
+        if not self.enabled:
+            return None
+
+        composed = self.compose_key(namespace, version, cache_key)
+        record = self.memory.get(composed)
+        if record:
+            if record.expires_at and record.expires_at <= self._now():
+                self.memory.delete(composed)
+            else:
+                return record.value
+
+        entry = self._get_from_persistent(composed)
+        if entry:
+            expires_at = entry.created_at + entry.ttl if entry.ttl else None
+            self.memory.set(
+                composed,
+                CacheRecord(
+                    value=entry.value,
+                    expires_at=expires_at,
+                    namespace=namespace,
+                    version=version,
+                ),
+            )
+            return entry.value
+        return None
+
+    def set(
+        self,
+        *,
+        namespace: str,
+        version: str,
+        cache_key: str,
+        value: Any,
+        ttl: Optional[int] = None,
+    ):
+        if not self.enabled:
+            return
+
+        effective_ttl = self._ttl_or_default(ttl)
+        expires_at = self._now() + effective_ttl if effective_ttl else None
+        composed = self.compose_key(namespace, version, cache_key)
+
+        self.memory.set(
+            composed,
+            CacheRecord(
+                value=value,
+                expires_at=expires_at,
+                namespace=namespace,
+                version=version,
+            ),
+        )
+
+        if self.persistent:
+            try:
+                self.persistent.set(
+                    composed,
+                    value,
+                    namespace=namespace,
+                    version=version,
+                    ttl=effective_ttl,
+                )
+            except Exception as exc:
+                logger.warning("Persistent cache write failed: %s", exc)
+                self.persistent = None
+
+    def delete(self, *, namespace: str, version: str, cache_key: str):
+        composed = self.compose_key(namespace, version, cache_key)
+        self.memory.delete(composed)
+        if self.persistent:
+            try:
+                self.persistent.delete(composed)
+            except Exception as exc:
+                logger.warning("Persistent cache delete failed: %s", exc)
+
+    def clear(self, namespace: Optional[str] = None):
+        if namespace:
+            # Clear matching namespace in memory
+            keys_to_remove = [
+                key
+                for key, record in self.memory.items()
+                if hasattr(record, "namespace") and record.namespace == namespace
+            ]
+            for key in keys_to_remove:
+                self.memory.delete(key)
+        else:
+            self.memory.clear()
+
+        if self.persistent:
+            try:
+                self.persistent.clear(namespace=namespace)
+            except Exception as exc:
+                logger.warning("Persistent cache clear failed: %s", exc)
+
+    def stats(self) -> Dict[str, Any]:
+        return {
+            "enabled": self.enabled,
+            "memory": self.memory.stats(),
+            "persistent": (
+                self.persistent.stats() if self.persistent else {"enabled": False}
+            ),
+        }
+
+    def dump(self, namespace: Optional[str] = None) -> Iterator[Dict[str, Any]]:
+        if not self.persistent:
+            return iter([])
+        return (
+            {
+                "cache_key": entry.key,
+                "namespace": entry.namespace,
+                "version": entry.version,
+                "ttl": entry.ttl,
+                "created_at": entry.created_at,
+                "last_accessed": entry.last_accessed,
+                "hit_count": entry.hit_count,
+                "value": entry.value,
+            }
+            for entry in self._iter_persistent(namespace=namespace)
+        )
+
+    def _get_from_persistent(self, composed_key: str) -> Optional[CacheEntry]:
+        if not self.persistent:
+            return None
+        try:
+            return self.persistent.get(composed_key)
+        except Exception as exc:
+            logger.warning("Persistent cache read failed: %s", exc)
+            self.persistent = None
+            return None
+
+    def _iter_persistent(self, namespace: Optional[str]):
+        if not self.persistent:
+            return iter([])
+        try:
+            return self.persistent.iter_entries(namespace=namespace)
+        except Exception as exc:
+            logger.warning("Persistent cache iterator failed: %s", exc)
+            return iter([])
+
+    # ------------------------------------------------------------------
+    # Context manager for singleflight
+    # ------------------------------------------------------------------
+    def singleflight_guard(self, composed_key: str):
+        if self.singleflight:
+            return self.singleflight.acquire(composed_key)
+        return _DummyContext()
+
+    def close(self):
+        if self.persistent:
+            try:
+                self.persistent.close()
+            except Exception as exc:
+                logger.warning("Persistent cache close failed: %s", exc)
+
+
+class _DummyContext:
+    def __enter__(self):
+        return None
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        return False
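
For context, a minimal usage sketch of the new ResultCacheManager. The constructor arguments and method names come from the hunk above; the exact semantics of SingleFlight.acquire live in memory_cache.py, which is not shown in this diff, so the guard usage is an assumption about intended use:

from tooluniverse.cache.result_cache_manager import ResultCacheManager

manager = ResultCacheManager(memory_size=128, default_ttl=3600)
composed = ResultCacheManager.compose_key("pubmed", "v1", "query=BRCA1")

# Assumed flow: one caller computes on a miss while concurrent callers
# for the same composed key wait on the singleflight guard.
with manager.singleflight_guard(composed):
    value = manager.get(namespace="pubmed", version="v1", cache_key="query=BRCA1")
    if value is None:
        value = {"hits": 42}  # stand-in for a real tool invocation
        manager.set(
            namespace="pubmed",
            version="v1",
            cache_key="query=BRCA1",
            value=value,
        )

manager.close()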
tooluniverse/cache/sqlite_backend.py
@@ -0,0 +1,257 @@
+"""
+SQLite-backed persistent cache for ToolUniverse.
+
+The cache stores serialized tool results with TTL and version metadata.
+Designed to be a drop-in persistent layer behind the in-memory cache.
+"""
+
+from __future__ import annotations
+
+import os
+import pickle
+import sqlite3
+import threading
+import time
+from dataclasses import dataclass
+from typing import Any, Dict, Iterator, Optional
+
+
+@dataclass
+class CacheEntry:
+    key: str
+    value: Any
+    namespace: str
+    version: str
+    ttl: Optional[int]
+    created_at: float
+    last_accessed: float
+    hit_count: int
+
+
+class PersistentCache:
+    """SQLite-backed cache layer with TTL support."""
+
+    def __init__(self, path: str, *, enable: bool = True):
+        self.enabled = enable
+        self.path = path
+        self._lock = threading.RLock()
+        self._conn: Optional[sqlite3.Connection] = None
+
+        if self.enabled:
+            self._init_storage()
+
+    def _init_storage(self):
+        directory = os.path.dirname(self.path)
+        if directory:
+            os.makedirs(directory, exist_ok=True)
+        self._conn = sqlite3.connect(
+            self.path,
+            timeout=30,
+            check_same_thread=False,
+            isolation_level=None,  # autocommit
+        )
+        self._conn.execute("PRAGMA journal_mode=WAL;")
+        self._conn.execute("PRAGMA synchronous=NORMAL;")
+        self._conn.execute("PRAGMA foreign_keys=ON;")
+        self._ensure_schema()
+        self.cleanup_expired()
+
+    def _ensure_schema(self):
+        assert self._conn is not None
+        self._conn.execute(
+            """
+            CREATE TABLE IF NOT EXISTS cache_entries (
+                cache_key TEXT PRIMARY KEY,
+                namespace TEXT NOT NULL,
+                version TEXT,
+                value BLOB NOT NULL,
+                ttl INTEGER,
+                created_at REAL NOT NULL,
+                last_accessed REAL NOT NULL,
+                expires_at REAL,
+                hit_count INTEGER NOT NULL DEFAULT 0
+            )
+            """
+        )
+        self._conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_cache_namespace ON cache_entries(namespace)"
+        )
+        self._conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_cache_expires ON cache_entries(expires_at)"
+        )
+
+    def _serialize(self, value: Any) -> bytes:
+        return pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
+
+    def _deserialize(self, payload: bytes) -> Any:
+        return pickle.loads(payload)
+
+    def close(self):
+        if self._conn:
+            self._conn.close()
+            self._conn = None
+
+    def cleanup_expired(self):
+        if not self.enabled or not self._conn:
+            return
+        with self._lock:
+            now = time.time()
+            self._conn.execute(
+                "DELETE FROM cache_entries WHERE expires_at IS NOT NULL AND expires_at <= ?",
+                (now,),
+            )
+
+    def get(self, cache_key: str) -> Optional[CacheEntry]:
+        if not self.enabled or not self._conn:
+            return None
+        with self._lock:
+            cur = self._conn.execute(
+                """
+                SELECT cache_key, namespace, version, value, ttl, created_at,
+                       last_accessed, expires_at, hit_count
+                FROM cache_entries WHERE cache_key = ?
+                """,
+                (cache_key,),
+            )
+            row = cur.fetchone()
+            if not row:
+                return None
+
+            expires_at = row[7]
+            if expires_at is not None and expires_at <= time.time():
+                self._conn.execute(
+                    "DELETE FROM cache_entries WHERE cache_key = ?", (cache_key,)
+                )
+                return None
+
+            entry = CacheEntry(
+                key=row[0],
+                namespace=row[1],
+                version=row[2] or "",
+                value=self._deserialize(row[3]),
+                ttl=row[4],
+                created_at=row[5],
+                last_accessed=row[6],
+                hit_count=row[8],
+            )
+
+            self._conn.execute(
+                """
+                UPDATE cache_entries
+                SET last_accessed = ?, hit_count = hit_count + 1
+                WHERE cache_key = ?
+                """,
+                (time.time(), cache_key),
+            )
+            return entry
+
+    def set(
+        self,
+        cache_key: str,
+        value: Any,
+        *,
+        namespace: str,
+        version: str,
+        ttl: Optional[int],
+    ):
+        if not self.enabled or not self._conn:
+            return
+        with self._lock:
+            now = time.time()
+            expires_at = now + ttl if ttl else None
+            payload = self._serialize(value)
+            self._conn.execute(
+                """
+                INSERT INTO cache_entries(cache_key, namespace, version, value, ttl,
+                                          created_at, last_accessed, expires_at, hit_count)
+                VALUES(?, ?, ?, ?, ?, ?, ?, ?, 0)
+                ON CONFLICT(cache_key) DO UPDATE SET
+                    namespace=excluded.namespace,
+                    version=excluded.version,
+                    value=excluded.value,
+                    ttl=excluded.ttl,
+                    created_at=excluded.created_at,
+                    last_accessed=excluded.last_accessed,
+                    expires_at=excluded.expires_at,
+                    hit_count=excluded.hit_count
+                """,
+                (
+                    cache_key,
+                    namespace,
+                    version,
+                    payload,
+                    ttl,
+                    now,
+                    now,
+                    expires_at,
+                ),
+            )
+
+    def delete(self, cache_key: str):
+        if not self.enabled or not self._conn:
+            return
+        with self._lock:
+            self._conn.execute(
+                "DELETE FROM cache_entries WHERE cache_key = ?", (cache_key,)
+            )
+
+    def clear(self, namespace: Optional[str] = None):
+        if not self.enabled or not self._conn:
+            return
+        with self._lock:
+            if namespace:
+                self._conn.execute(
+                    "DELETE FROM cache_entries WHERE namespace = ?", (namespace,)
+                )
+            else:
+                self._conn.execute("DELETE FROM cache_entries")
+
+    def iter_entries(self, namespace: Optional[str] = None) -> Iterator[CacheEntry]:
+        if not self.enabled or not self._conn:
+            return iter([])
+        with self._lock:
+            if namespace:
+                cur = self._conn.execute(
+                    """
+                    SELECT cache_key, namespace, version, value, ttl,
+                           created_at, last_accessed, hit_count
+                    FROM cache_entries WHERE namespace = ?
+                    """,
+                    (namespace,),
+                )
+            else:
+                cur = self._conn.execute(
+                    """
+                    SELECT cache_key, namespace, version, value, ttl,
+                           created_at, last_accessed, hit_count
+                    FROM cache_entries
+                    """
+                )
+            rows = cur.fetchall()
+
+        for row in rows:
+            yield CacheEntry(
+                key=row[0],
+                namespace=row[1],
+                version=row[2] or "",
+                value=self._deserialize(row[3]),
+                ttl=row[4],
+                created_at=row[5],
+                last_accessed=row[6],
+                hit_count=row[7],
+            )
+
+    def stats(self) -> Dict[str, Any]:
+        if not self.enabled or not self._conn:
+            return {"enabled": False}
+        with self._lock:
+            cur = self._conn.execute(
+                "SELECT COUNT(*), SUM(LENGTH(value)) FROM cache_entries"
+            )
+            count, total_bytes = cur.fetchone()
+        return {
+            "enabled": True,
+            "entries": count or 0,
+            "approx_bytes": total_bytes or 0,
+            "path": self.path,
+        }
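
A quick sketch of exercising the persistent layer on its own. The path and values here are illustrative; the API calls are exactly those defined in the hunk above:

import os
import tempfile

from tooluniverse.cache.sqlite_backend import PersistentCache

cache = PersistentCache(os.path.join(tempfile.gettempdir(), "tu_demo.sqlite"))

# Values are pickled into a BLOB column; ttl drives the expires_at check in get().
cache.set("demo::v1::k1", {"answer": 42}, namespace="demo", version="v1", ttl=60)

entry = cache.get("demo::v1::k1")
# hit_count is read from the row before the UPDATE increments it,
# so the first read reports 0.
print(entry.value, entry.hit_count)

print(cache.stats())  # {'enabled': True, 'entries': 1, ...}
cache.close()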
tooluniverse/clinvar_tool.py
@@ -0,0 +1,90 @@
+import requests
+from .base_tool import BaseTool
+from .tool_registry import register_tool
+
+
+@register_tool("ClinVarTool")
+class ClinVarTool(BaseTool):
+    """
+    Local tool wrapper for ClinVar via NCBI E-utilities.
+    Uses esearch + esummary to fetch variant records.
+    """
+
+    def __init__(self, tool_config):
+        super().__init__(tool_config)
+        self.base = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils"
+        self.session = requests.Session()
+
+    def run(self, arguments):
+        query = arguments.get("query")
+        retmax = arguments.get("retmax", 5)
+        if not query:
+            return {"error": "Missing required parameter: query"}
+
+        # 1) esearch to get UIDs
+        search_url = f"{self.base}/esearch.fcgi"
+        search_params = {
+            "db": "clinvar",
+            "term": query,
+            "retmode": "json",
+            "retmax": retmax,
+        }
+        search_resp = self.session.get(search_url, params=search_params, timeout=20)
+        search_resp.raise_for_status()
+        search_data = search_resp.json()
+        uids = search_data.get("esearchresult", {}).get("idlist", [])
+        if not uids:
+            return []
+
+        # 2) esummary to get details
+        summary_url = f"{self.base}/esummary.fcgi"
+        summary_params = {
+            "db": "clinvar",
+            "id": ",".join(uids),
+            "retmode": "json",
+        }
+        summary_resp = self.session.get(summary_url, params=summary_params, timeout=30)
+        summary_resp.raise_for_status()
+        summary_data = summary_resp.json()
+
+        results = []
+        for uid in uids:
+            record = summary_data.get("result", {}).get(uid, {})
+            if not record:
+                continue
+
+            # Extract key fields
+            variation_set = record.get("variation_set", [])
+            gene = ""
+            chr_name = ""
+            start = None
+            stop = None
+            spdi = ""
+            if variation_set:
+                var = variation_set[0]
+                gene = record.get("genes", [{}])[0].get("symbol", "")
+                var_loc = var.get("variation_loc", [{}])[0]
+                chr_name = var_loc.get("chr", "")
+                start = var_loc.get("start")
+                stop = var_loc.get("stop")
+                spdi = var.get("canonical_spdi", "")
+
+            clinical_sig = record.get("germline_classification", {}).get(
+                "description", ""
+            )
+
+            results.append(
+                {
+                    "uid": uid,
+                    "accession": record.get("accession", ""),
+                    "title": record.get("title", ""),
+                    "gene": gene,
+                    "chr": chr_name,
+                    "start": start,
+                    "stop": stop,
+                    "spdi": spdi,
+                    "clinical_significance": clinical_sig,
+                }
+            )
+
+        return results
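
A hedged example of driving the new tool directly. BaseTool's expected tool_config fields are not visible in this diff, so the empty dict below is an assumption for illustration; the query string is ordinary E-utilities search syntax:

from tooluniverse.clinvar_tool import ClinVarTool

tool = ClinVarTool(tool_config={})  # assumed minimal config for illustration
variants = tool.run({"query": "BRCA1[gene] AND pathogenic", "retmax": 3})
for v in variants:
    print(v["accession"], v["gene"], v["clinical_significance"])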
tooluniverse/custom_tool.py
@@ -26,6 +26,11 @@ class CustomTool(BaseTool):
         self.execute_function = None
         if self.code_file and os.path.exists(self.code_file):
             self._load_external_code()
+        elif (
+            "implementation" in tool_config
+            and "source_code" in tool_config["implementation"]
+        ):
+            self._load_embedded_code(tool_config["implementation"])
 
     def _load_external_code(self):
         """Load the execute_tool function from external Python file"""
@@ -46,6 +51,29 @@ class CustomTool(BaseTool):
         except Exception as e:
             print(f"Error loading external code from {self.code_file}: {e}")
 
+    def _load_embedded_code(self, implementation: Dict):
+        """Load the execute_tool function from embedded source code"""
+        try:
+            source_code = implementation.get("source_code", "")
+            main_function = implementation.get("main_function", "execute_tool")
+
+            # Create a temporary module to execute the code
+            import types
+
+            module = types.ModuleType("embedded_tool_module")
+
+            # Execute the source code in the module namespace
+            exec(source_code, module.__dict__)
+
+            # Get the main function
+            if hasattr(module, main_function):
+                self.execute_function = getattr(module, main_function)
+            else:
+                print(f"Warning: No {main_function} function found in embedded code")
+
+        except Exception as e:
+            print(f"Error loading embedded code: {e}")
+
     def run(self, arguments: Any = None) -> Dict[str, Any]:
         """
         Execute the custom tool
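
The new elif branch lets a tool ship its implementation inline instead of via code_file. A sketch of such a config; only the "implementation" block is grounded in the hunks above, and the "name" key is a hypothetical guess at the surrounding schema:

config = {
    "name": "hello_tool",  # hypothetical surrounding field, not shown in this diff
    "implementation": {
        "source_code": (
            "def execute_tool(arguments):\n"
            "    return {'greeting': 'hello ' + arguments.get('name', 'world')}\n"
        ),
        "main_function": "execute_tool",
    },
}

Note that _load_embedded_code exec()s the embedded source, so a config from an untrusted source amounts to arbitrary code execution, which is presumably relevant to the warning flag on this release.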
@@ -27,10 +27,7 @@
       }
     },
     "required": [
-      "query",
-      "limit",
-      "sort_by",
-      "sort_order"
+      "query"
     ]
   },
   "return_schema": {
@@ -32,10 +32,7 @@
     },
     "required": [
       "query",
-      "limit",
-      "year_from",
-      "year_to",
-      "language"
+      "limit"
    ]
  },
  "return_schema": {