tooluniverse 1.0.7__py3-none-any.whl → 1.0.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tooluniverse might be problematic. Click here for more details.

Files changed (96) hide show
  1. tooluniverse/__init__.py +37 -14
  2. tooluniverse/admetai_tool.py +16 -5
  3. tooluniverse/base_tool.py +36 -0
  4. tooluniverse/biogrid_tool.py +118 -0
  5. tooluniverse/build_optimizer.py +87 -0
  6. tooluniverse/cache/__init__.py +3 -0
  7. tooluniverse/cache/memory_cache.py +99 -0
  8. tooluniverse/cache/result_cache_manager.py +235 -0
  9. tooluniverse/cache/sqlite_backend.py +257 -0
  10. tooluniverse/clinvar_tool.py +90 -0
  11. tooluniverse/compose_scripts/output_summarizer.py +87 -33
  12. tooluniverse/compose_tool.py +2 -2
  13. tooluniverse/custom_tool.py +28 -0
  14. tooluniverse/data/adverse_event_tools.json +97 -98
  15. tooluniverse/data/agentic_tools.json +81 -162
  16. tooluniverse/data/arxiv_tools.json +1 -4
  17. tooluniverse/data/compose_tools.json +0 -54
  18. tooluniverse/data/core_tools.json +1 -4
  19. tooluniverse/data/dataset_tools.json +7 -7
  20. tooluniverse/data/doaj_tools.json +1 -3
  21. tooluniverse/data/drug_discovery_agents.json +282 -0
  22. tooluniverse/data/europe_pmc_tools.json +1 -2
  23. tooluniverse/data/genomics_tools.json +174 -0
  24. tooluniverse/data/geo_tools.json +86 -0
  25. tooluniverse/data/literature_search_tools.json +15 -35
  26. tooluniverse/data/markitdown_tools.json +51 -0
  27. tooluniverse/data/monarch_tools.json +1 -2
  28. tooluniverse/data/openalex_tools.json +1 -5
  29. tooluniverse/data/opentarget_tools.json +8 -16
  30. tooluniverse/data/output_summarization_tools.json +23 -20
  31. tooluniverse/data/packages/bioinformatics_core_tools.json +2 -2
  32. tooluniverse/data/packages/cheminformatics_tools.json +1 -1
  33. tooluniverse/data/packages/genomics_tools.json +1 -1
  34. tooluniverse/data/packages/single_cell_tools.json +1 -1
  35. tooluniverse/data/packages/structural_biology_tools.json +1 -1
  36. tooluniverse/data/pmc_tools.json +1 -4
  37. tooluniverse/data/ppi_tools.json +139 -0
  38. tooluniverse/data/pubmed_tools.json +1 -3
  39. tooluniverse/data/semantic_scholar_tools.json +1 -2
  40. tooluniverse/data/tool_composition_tools.json +2 -4
  41. tooluniverse/data/unified_guideline_tools.json +206 -4
  42. tooluniverse/data/xml_tools.json +15 -15
  43. tooluniverse/data/zenodo_tools.json +1 -2
  44. tooluniverse/dbsnp_tool.py +71 -0
  45. tooluniverse/default_config.py +6 -0
  46. tooluniverse/ensembl_tool.py +61 -0
  47. tooluniverse/execute_function.py +235 -76
  48. tooluniverse/generate_tools.py +303 -20
  49. tooluniverse/genomics_gene_search_tool.py +56 -0
  50. tooluniverse/geo_tool.py +116 -0
  51. tooluniverse/gnomad_tool.py +63 -0
  52. tooluniverse/logging_config.py +64 -2
  53. tooluniverse/markitdown_tool.py +159 -0
  54. tooluniverse/mcp_client_tool.py +10 -5
  55. tooluniverse/molecule_2d_tool.py +9 -3
  56. tooluniverse/molecule_3d_tool.py +9 -3
  57. tooluniverse/output_hook.py +217 -150
  58. tooluniverse/smcp.py +18 -10
  59. tooluniverse/smcp_server.py +89 -199
  60. tooluniverse/string_tool.py +112 -0
  61. tooluniverse/tools/{MultiAgentLiteratureSearch.py → ADMETAnalyzerAgent.py} +18 -18
  62. tooluniverse/tools/ArXiv_search_papers.py +3 -3
  63. tooluniverse/tools/CMA_Guidelines_Search.py +52 -0
  64. tooluniverse/tools/CORE_search_papers.py +3 -3
  65. tooluniverse/tools/ClinVar_search_variants.py +52 -0
  66. tooluniverse/tools/ClinicalTrialDesignAgent.py +63 -0
  67. tooluniverse/tools/CompoundDiscoveryAgent.py +59 -0
  68. tooluniverse/tools/DOAJ_search_articles.py +2 -2
  69. tooluniverse/tools/DiseaseAnalyzerAgent.py +52 -0
  70. tooluniverse/tools/DrugInteractionAnalyzerAgent.py +52 -0
  71. tooluniverse/tools/DrugOptimizationAgent.py +63 -0
  72. tooluniverse/tools/Ensembl_lookup_gene_by_symbol.py +52 -0
  73. tooluniverse/tools/EuropePMC_search_articles.py +1 -1
  74. tooluniverse/tools/GIN_Guidelines_Search.py +52 -0
  75. tooluniverse/tools/GWAS_search_associations_by_gene.py +52 -0
  76. tooluniverse/tools/LiteratureSynthesisAgent.py +59 -0
  77. tooluniverse/tools/PMC_search_papers.py +3 -3
  78. tooluniverse/tools/PubMed_search_articles.py +2 -2
  79. tooluniverse/tools/SemanticScholar_search_papers.py +1 -1
  80. tooluniverse/tools/UCSC_get_genes_by_region.py +67 -0
  81. tooluniverse/tools/Zenodo_search_records.py +1 -1
  82. tooluniverse/tools/__init__.py +33 -3
  83. tooluniverse/tools/convert_to_markdown.py +59 -0
  84. tooluniverse/tools/dbSNP_get_variant_by_rsid.py +46 -0
  85. tooluniverse/tools/gnomAD_query_variant.py +52 -0
  86. tooluniverse/tools/openalex_literature_search.py +4 -4
  87. tooluniverse/ucsc_tool.py +60 -0
  88. tooluniverse/unified_guideline_tools.py +1175 -57
  89. tooluniverse/utils.py +51 -4
  90. tooluniverse/zenodo_tool.py +2 -1
  91. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.9.dist-info}/METADATA +10 -3
  92. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.9.dist-info}/RECORD +96 -61
  93. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.9.dist-info}/entry_points.txt +0 -3
  94. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.9.dist-info}/WHEEL +0 -0
  95. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.9.dist-info}/licenses/LICENSE +0 -0
  96. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.9.dist-info}/top_level.txt +0 -0
@@ -1,8 +1,11 @@
1
1
  #!/usr/bin/env python3
2
2
  """Minimal tools generator - one tool, one file."""
3
3
 
4
+ import os
5
+ import shutil
6
+ import subprocess
4
7
  from pathlib import Path
5
- from typing import Dict, Any
8
+ from typing import Dict, Any, Optional, List
6
9
 
7
10
 
8
11
  def json_type_to_python(json_type: str) -> str:
@@ -17,7 +20,11 @@ def json_type_to_python(json_type: str) -> str:
17
20
  }.get(json_type, "Any")
18
21
 
19
22
 
20
- def generate_tool_file(tool_name: str, tool_config: Dict[str, Any], output_dir: Path):
23
+ def generate_tool_file(
24
+ tool_name: str,
25
+ tool_config: Dict[str, Any],
26
+ output_dir: Path,
27
+ ) -> Path:
21
28
  """Generate one file for one tool."""
22
29
  schema = tool_config.get("parameter", {}) or {}
23
30
  description = tool_config.get("description", f"Execute {tool_name}")
@@ -48,7 +55,9 @@ def generate_tool_file(tool_name: str, tool_config: Dict[str, Any], output_dir:
48
55
  # Use None as default and handle in function body
49
56
  optional_params.append(f"{name}: Optional[{py_type}] = None")
50
57
  mutable_defaults_code.append(
51
- f" if {name} is None:\n {name} = {repr(default)}"
58
+ (" if {n} is None:\n" " {n} = {d}").format(
59
+ n=name, d=repr(default)
60
+ )
52
61
  )
53
62
  else:
54
63
  optional_params.append(
@@ -75,9 +84,10 @@ def generate_tool_file(tool_name: str, tool_config: Dict[str, Any], output_dir:
75
84
 
76
85
  # Infer return type
77
86
  return_schema = tool_config.get("return_schema", {})
78
- return_type = (
79
- json_type_to_python(return_schema.get("type", "")) if return_schema else "Any"
80
- )
87
+ if return_schema:
88
+ return_type = json_type_to_python(return_schema.get("type", ""))
89
+ else:
90
+ return_type = "Any"
81
91
 
82
92
  content = f'''"""
83
93
  {tool_name}
@@ -131,10 +141,12 @@ def {tool_name}(
131
141
  __all__ = ["{tool_name}"]
132
142
  '''
133
143
 
134
- (output_dir / f"{tool_name}.py").write_text(content)
144
+ output_path = output_dir / f"{tool_name}.py"
145
+ output_path.write_text(content)
146
+ return output_path
135
147
 
136
148
 
137
- def generate_init(tool_names: list, output_dir: Path):
149
+ def generate_init(tool_names: list, output_dir: Path) -> Path:
138
150
  """Generate __init__.py with all imports."""
139
151
  imports = [f"from .{name} import {name}" for name in sorted(tool_names)]
140
152
 
@@ -167,12 +179,238 @@ __all__ = [
167
179
  ]
168
180
  '''
169
181
 
170
- (output_dir / "__init__.py").write_text(content)
182
+ init_path = output_dir / "__init__.py"
183
+ init_path.write_text(content)
184
+ return init_path
185
+
186
+
187
def _create_shared_client(shared_client_path: Path) -> None:
    """Write the ``_shared_client.py`` helper module to *shared_client_path*.

    The emitted module holds a process-wide singleton ``ToolUniverse`` client
    (double-checked locking) plus a ``reset_shared_client`` helper for tests.

    NOTE(review): despite the original one-line summary, this function
    unconditionally overwrites the target file; the existence check is done
    by the caller (``main`` only calls this when the file is missing).
    """
    content = '''"""
Shared ToolUniverse client for all tools.

This module provides a singleton ToolUniverse client to avoid reloading
tools multiple times when using different tool functions.

Thread Safety:
    The shared client is thread-safe and uses double-checked locking to
    ensure only one ToolUniverse instance is created even in multi-threaded
    environments.

Configuration:
    You can provide custom configuration parameters that will be used during
    the initial creation of the ToolUniverse instance. These parameters are
    ignored if the client has already been initialized.

Custom Instance:
    You can provide your own ToolUniverse instance to be used instead of
    the shared singleton. This is useful when you need specific configurations
    or want to maintain separate instances.

Examples:
    Basic usage (default behavior):
        from tooluniverse.tools import get_shared_client
        client = get_shared_client()

    With custom configuration (only effective on first call):
        client = get_shared_client(hooks_enabled=True, log_level="INFO")

    Using your own instance:
        my_tu = ToolUniverse(hooks_enabled=True)
        client = get_shared_client(custom_instance=my_tu)

    Reset for testing:
        from tooluniverse.tools import reset_shared_client
        reset_shared_client()
"""

import threading
from typing import Optional
from tooluniverse import ToolUniverse

_client: Optional[ToolUniverse] = None
_client_lock = threading.Lock()


def get_shared_client(
    custom_instance: Optional[ToolUniverse] = None, **config_kwargs
) -> ToolUniverse:
    """
    Get the shared ToolUniverse client instance.

    This function implements a thread-safe singleton pattern with support for
    custom configurations and external instances.

    Args:
        custom_instance: Optional ToolUniverse instance to use instead of
                         the shared singleton. If provided, this instance
                         will be returned directly without any singleton logic.

        **config_kwargs: Optional configuration parameters to pass to
                         ToolUniverse constructor. These are only used during
                         the initial creation of the shared instance. If the
                         shared instance already exists, these parameters are
                         ignored.

    Returns:
        ToolUniverse: The client instance to use for tool execution

    Thread Safety:
        This function is thread-safe. Multiple threads can call this function
        concurrently without risk of creating multiple ToolUniverse instances.

    Configuration:
        Configuration parameters are only applied during the initial creation
        of the shared instance. Subsequent calls with different parameters
        will not affect the already-created instance.

    Examples:
        # Basic usage
        client = get_shared_client()

        # With custom configuration (only effective on first call)
        client = get_shared_client(hooks_enabled=True, log_level="DEBUG")

        # Using your own instance
        my_tu = ToolUniverse(hooks_enabled=True)
        client = get_shared_client(custom_instance=my_tu)
    """
    # If user provides their own instance, use it directly
    if custom_instance is not None:
        return custom_instance

    global _client

    # Double-checked locking pattern for thread safety
    if _client is None:
        with _client_lock:
            # Check again inside the lock to avoid race conditions
            if _client is None:
                # Create new instance with provided configuration
                if config_kwargs:
                    _client = ToolUniverse(**config_kwargs)
                else:
                    _client = ToolUniverse()
                _client.load_tools()

    return _client


def reset_shared_client():
    """
    Reset the shared client (useful for testing or when you need to reload).

    This function clears the shared client instance, allowing a new instance
    to be created on the next call to get_shared_client(). This is primarily
    useful for testing scenarios where you need to ensure a clean state.

    Thread Safety:
        This function is thread-safe and uses the same lock as
        get_shared_client() to ensure proper synchronization.

    Warning:
        Calling this function while other threads are using the shared client
        may cause unexpected behavior. It's recommended to only call this
        function when you're certain no other threads are accessing the client.

    Examples:
        # Reset for testing
        reset_shared_client()

        # Now get_shared_client() will create a new instance
        client = get_shared_client(hooks_enabled=True)
    """
    global _client

    with _client_lock:
        _client = None
'''
    shared_client_path.write_text(content)
329
+
330
+
331
+ def _chunked(sequence: List[str], chunk_size: int) -> List[List[str]]:
332
+ """Yield chunks of the sequence with up to chunk_size elements."""
333
+ if chunk_size <= 0:
334
+ return [sequence]
335
+ return [sequence[i : i + chunk_size] for i in range(0, len(sequence), chunk_size)]
336
+
337
+
338
def _format_files(paths: List[str]) -> None:
    """Format the given files in place, best-effort.

    Prefers ``pre-commit`` (so the repository's own hook configuration and
    file filters apply); when it is not installed, falls back to invoking
    ``ruff``, ``autoflake`` and ``black`` directly, in that order. All
    formatter invocations use ``check=False`` and swallow exceptions:
    formatting must never break tool generation.

    Honors TOOLUNIVERSE_SKIP_FORMAT=1 to skip formatting entirely.

    Args:
        paths: Filesystem paths of files to format. No-op when empty.
    """
    if not paths:
        return
    if os.getenv("TOOLUNIVERSE_SKIP_FORMAT") == "1":
        return

    pre_commit = shutil.which("pre-commit")
    if pre_commit:
        # Run pre-commit on specific files to match repo config filters.
        # Batches of 80 keep the command line under OS argv length limits.
        for batch in _chunked(paths, 80):
            try:
                subprocess.run(
                    [pre_commit, "run", "--files", *batch],
                    check=False,
                )
            except Exception:
                # Best-effort: ignore this batch and keep going.
                # NOTE(review): there is no actual "fallback below" on this
                # path — the function returns right after the loop; the
                # direct-CLI fallback only runs when pre-commit is absent.
                pass
        return

    # Fallback to direct formatter CLIs in the same spirit/order as hooks
    ruff = shutil.which("ruff")
    if ruff:
        try:
            # NOTE(review): recent ruff releases expect ``ruff check --fix``;
            # bare ``ruff --fix`` may be rejected — confirm supported versions.
            subprocess.run(
                [
                    ruff,
                    "--fix",
                    "--line-length=88",
                    "--ignore=E203",
                    *paths,
                ],
                check=False,
            )
        except Exception:
            pass

    autoflake = shutil.which("autoflake")
    if autoflake:
        try:
            subprocess.run(
                [
                    autoflake,
                    "--remove-all-unused-imports",
                    "--remove-unused-variables",
                    "--in-place",
                    *paths,
                ],
                check=False,
            )
        except Exception:
            pass

    black = shutil.which("black")
    if black:
        try:
            subprocess.run(
                [black, "--line-length=88", *paths],
                check=False,
            )
        except Exception:
            pass
+ pass
404
+
405
+
406
+ def main(format_enabled: Optional[bool] = None) -> None:
407
+ """Generate tools and format the generated files if enabled.
408
+
409
+ If format_enabled is None, decide based on TOOLUNIVERSE_SKIP_FORMAT env var
410
+ (skip when set to "1").
411
+ """
175
412
  from tooluniverse import ToolUniverse
413
+ from .build_optimizer import cleanup_orphaned_files, get_changed_tools
176
414
 
177
415
  print("🔧 Generating tools...")
178
416
 
@@ -182,17 +420,62 @@ def main():
182
420
  output = Path("src/tooluniverse/tools")
183
421
  output.mkdir(parents=True, exist_ok=True)
184
422
 
185
- # Generate all tools
186
- for i, (tool_name, tool_config) in enumerate(tu.all_tool_dict.items(), 1):
187
- generate_tool_file(tool_name, tool_config, output)
188
- if i % 50 == 0:
189
- print(f" Generated {i} tools...")
423
+ # Cleanup orphaned files
424
+ current_tool_names = set(tu.all_tool_dict.keys())
425
+ cleaned_count = cleanup_orphaned_files(output, current_tool_names)
426
+ if cleaned_count > 0:
427
+ print(f"🧹 Removed {cleaned_count} orphaned tool files")
428
+
429
+ # Check for changes
430
+ metadata_file = output / ".tool_metadata.json"
431
+ new_tools, changed_tools, unchanged_tools = get_changed_tools(
432
+ tu.all_tool_dict, metadata_file
433
+ )
190
434
 
191
- # Generate __init__.py
192
- generate_init(list(tu.all_tool_dict.keys()), output)
435
+ generated_paths: List[str] = []
193
436
 
194
- print(f"✅ Generated {len(tu.all_tool_dict)} tools in {output}")
437
+ # Generate only changed tools if there are changes
438
+ if new_tools or changed_tools:
439
+ print(f"🔄 Generating {len(new_tools + changed_tools)} changed tools...")
440
+ for i, (tool_name, tool_config) in enumerate(tu.all_tool_dict.items(), 1):
441
+ if tool_name in new_tools or tool_name in changed_tools:
442
+ path = generate_tool_file(tool_name, tool_config, output)
443
+ generated_paths.append(str(path))
444
+ if i % 50 == 0:
445
+ print(f" Processed {i} tools...")
446
+ else:
447
+ print("✨ No changes detected, skipping tool generation")
448
+
449
+ # Always regenerate __init__.py to include all tools
450
+ init_path = generate_init(list(tu.all_tool_dict.keys()), output)
451
+ generated_paths.append(str(init_path))
452
+
453
+ # Always ensure _shared_client.py exists
454
+ shared_client_path = output / "_shared_client.py"
455
+ if not shared_client_path.exists():
456
+ _create_shared_client(shared_client_path)
457
+ generated_paths.append(str(shared_client_path))
458
+
459
+ # Determine formatting behavior
460
+ if format_enabled is None:
461
+ # Enabled unless explicitly opted-out via env
462
+ format_enabled = os.getenv("TOOLUNIVERSE_SKIP_FORMAT") != "1"
463
+
464
+ if format_enabled:
465
+ _format_files(generated_paths)
466
+
467
+ print(f"✅ Generated {len(generated_paths)} files in {output}")
195
468
 
196
469
 
197
470
if __name__ == "__main__":
    # Lightweight CLI to allow opting out of formatting when run directly
    import argparse

    cli = argparse.ArgumentParser(description="Generate ToolUniverse tools")
    cli.add_argument(
        "--no-format",
        action="store_true",
        help="Do not run formatters on generated files",
    )
    options = cli.parse_args()
    main(format_enabled=not options.no_format)
@@ -0,0 +1,56 @@
1
+ import requests
2
+ from .base_tool import BaseTool
3
+ from .tool_registry import register_tool
4
+
5
+
6
@register_tool("GWASGeneSearch")
class GWASGeneSearch(BaseTool):
    """
    Local tool wrapper for GWAS Catalog REST API.
    Searches associations by gene name.
    """

    def __init__(self, tool_config):
        super().__init__(tool_config)
        self.base_url = "https://www.ebi.ac.uk/gwas/rest/api"
        self.session = requests.Session()
        self.session.headers.update(
            {"Accept": "application/json", "Content-Type": "application/json"}
        )

    def run(self, arguments):
        """Look up GWAS Catalog associations mapped to a gene symbol.

        Returns a summary dict on success, or a dict with an ``error`` key
        when the parameter is missing or the request fails.
        """
        gene = arguments.get("gene_name")
        if not gene:
            return {"error": "Missing required parameter: gene_name"}

        # First page of up to 20 associations from the v2 endpoint.
        endpoint = f"{self.base_url}/v2/associations"
        search = {"mapped_gene": gene, "size": 20, "page": 0}

        try:
            reply = self.session.get(endpoint, params=search, timeout=30)
            reply.raise_for_status()
            payload = reply.json()

            # Associations are nested under the "_embedded" key when present.
            found = []
            if "_embedded" in payload and "associations" in payload["_embedded"]:
                found = payload["_embedded"]["associations"]

            return {
                "gene_name": gene,
                "association_count": len(found),
                "associations": found[:5] if found else [],  # Return first 5
                "total_found": (
                    payload.get("page", {}).get("totalElements", 0)
                    if "page" in payload
                    else 0
                ),
            }
        except requests.exceptions.RequestException as e:
            return {"error": f"Request failed: {str(e)}"}
        except Exception as e:
            return {"error": f"Unexpected error: {str(e)}"}
@@ -0,0 +1,116 @@
1
+ """
2
+ GEO Database REST API Tool
3
+
4
+ This tool provides access to gene expression data from the GEO database.
5
+ GEO is a public repository that archives and freely distributes microarray,
6
+ next-generation sequencing, and other forms of high-throughput functional
7
+ genomics data.
8
+ """
9
+
10
+ import requests
11
+ from typing import Dict, Any, List
12
+ from .base_tool import BaseTool
13
+ from .tool_registry import register_tool
14
+
15
+ GEO_BASE_URL = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils"
16
+
17
+
18
@register_tool("GEORESTTool")
class GEORESTTool(BaseTool):
    """
    GEO Database REST API tool.
    Generic wrapper for GEO API endpoints defined in expression_tools.json.

    Translates tool arguments into an NCBI E-utilities esearch request
    against the ``gds`` (GEO DataSets) database.
    """

    def __init__(self, tool_config):
        super().__init__(tool_config)
        fields = tool_config.get("fields", {})
        parameter = tool_config.get("parameter", {})

        # E-utilities endpoint path appended to GEO_BASE_URL.
        self.endpoint_template: str = fields.get("endpoint", "/esearch.fcgi")
        # Argument names that must be present when run() is called.
        self.required: List[str] = parameter.get("required", [])
        # "JSON" -> parse response body as JSON; anything else -> raw text.
        self.output_format: str = fields.get("return_format", "JSON")

    def _build_url(self, arguments: Dict[str, Any]) -> str | Dict[str, Any]:
        """Build URL for GEO API request.

        NOTE(review): this implementation always returns a str; the Dict
        half of the annotation is only exercised by the error check in
        run(), which is currently dead code here.
        """
        url_path = self.endpoint_template
        return GEO_BASE_URL + url_path

    def _build_params(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Build parameters for GEO API request.

        Combines optional arguments (query, organism, study_type, platform,
        date_range) into a single Entrez ``term`` joined with AND.
        """
        # Defaults: GEO DataSets db, JSON response, at most 50 records.
        params = {"db": "gds", "retmode": "json", "retmax": 50}

        # Build search query
        query_parts = []
        if "query" in arguments:
            query_parts.append(arguments["query"])

        if "organism" in arguments:
            organism = arguments["organism"]
            # Common organisms get canonical capitalization; others quoted.
            if organism.lower() == "homo sapiens":
                query_parts.append("Homo sapiens[organism]")
            elif organism.lower() == "mus musculus":
                query_parts.append("Mus musculus[organism]")
            else:
                query_parts.append(f'"{organism}"[organism]')

        if "study_type" in arguments:
            study_type = arguments["study_type"]
            query_parts.append(f'"{study_type}"[study_type]')

        if "platform" in arguments:
            platform = arguments["platform"]
            query_parts.append(f'"{platform}"[platform]')

        if "date_range" in arguments:
            # Expected form "YYYY:YYYY"; values without ":" are ignored.
            # NOTE(review): more than one ":" makes split() raise
            # ValueError, which is not caught here — confirm inputs are
            # validated upstream.
            date_range = arguments["date_range"]
            if ":" in date_range:
                start_year, end_year = date_range.split(":")
                query_parts.append(f'"{start_year}"[PDAT] : "{end_year}"[PDAT]')

        if query_parts:
            params["term"] = " AND ".join(query_parts)

        if "limit" in arguments:
            # Cap requested records at 500.
            params["retmax"] = min(arguments["limit"], 500)

        if "sort" in arguments:
            sort = arguments["sort"]
            # NOTE(review): "date" maps to "relevance" here, which looks
            # unintentional (esearch supports date-based sorting) —
            # confirm intended behavior before changing.
            if sort == "date":
                params["sort"] = "relevance"
            elif sort == "title":
                params["sort"] = "title"
            else:
                params["sort"] = "relevance"

        return params

    def _make_request(self, url: str, params: Dict[str, Any]) -> Dict[str, Any]:
        """Perform a GET request and handle common errors.

        Returns parsed JSON (or ``{"data": <text>}`` for non-JSON formats),
        or a dict with an ``error`` key on any failure.
        """
        try:
            response = requests.get(url, params=params, timeout=30)
            response.raise_for_status()

            if self.output_format == "JSON":
                return response.json()
            else:
                return {"data": response.text}

        except requests.exceptions.RequestException as e:
            return {"error": f"Request failed: {str(e)}"}
        except Exception as e:
            return {"error": f"Unexpected error: {str(e)}"}

    def run(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Execute the tool with given arguments.

        Validates required parameters, builds the URL and query, and
        returns either the API response or an ``error`` dict.
        """
        # Validate required parameters
        for param in self.required:
            if param not in arguments:
                return {"error": f"Missing required parameter: {param}"}

        url = self._build_url(arguments)
        if isinstance(url, dict) and "error" in url:
            return url

        params = self._build_params(arguments)
        return self._make_request(url, params)
@@ -0,0 +1,63 @@
1
+ import requests
2
+ import json
3
+ from .base_tool import BaseTool
4
+ from .tool_registry import register_tool
5
+
6
+
7
@register_tool("GnomadTool")
class GnomadTool(BaseTool):
    """
    Local tool wrapper for gnomAD GraphQL API.
    Queries variant information including allele frequencies.
    """

    def __init__(self, tool_config):
        super().__init__(tool_config)
        self.base = "https://gnomad.broadinstitute.org/api"
        self.session = requests.Session()
        self.session.headers.update({"Content-Type": "application/json"})

    def run(self, arguments):
        """Query gnomAD for a variant's genome allele counts and frequency.

        Args:
            arguments: Mapping with required ``variant_id`` and optional
                ``dataset`` (defaults to ``"gnomad_r4"``).

        Returns:
            Dict with ``variantId`` and ``genome`` (ac/an/af) on success,
            or a dict with an ``error`` key on failure.
        """
        variant_id = arguments.get("variant_id")
        dataset = arguments.get("dataset", "gnomad_r4")

        if not variant_id:
            return {"error": "Missing required parameter: variant_id"}

        # GraphQL query for variant with genome frequencies
        query = """
        query($variant: String!, $dataset: DatasetId!) {
            variant(variantId: $variant, dataset: $dataset) {
                variantId
                genome {
                    ac
                    an
                    af
                }
            }
        }
        """

        payload = {
            "query": query,
            "variables": {
                "variant": variant_id,
                "dataset": dataset,
            },
        }

        # Fix: network/HTTP errors previously propagated as uncaught
        # exceptions; return an error dict instead, consistent with the
        # other local tool wrappers (e.g. GWASGeneSearch, GEORESTTool).
        try:
            resp = self.session.post(self.base, data=json.dumps(payload), timeout=30)
            resp.raise_for_status()
            data = resp.json()
        except requests.exceptions.RequestException as e:
            return {"error": f"Request failed: {str(e)}"}
        except Exception as e:
            return {"error": f"Unexpected error: {str(e)}"}

        if "errors" in data:
            return {"error": f"GraphQL errors: {data['errors']}"}

        variant = data.get("data", {}).get("variant")
        if not variant:
            return {"error": "Variant not found"}

        return {
            "variantId": variant.get("variantId"),
            "genome": variant.get("genome", {}),
        }
@@ -104,8 +104,11 @@ class ToolUniverseLogger:
104
104
 
105
105
  self._logger.setLevel(level)
106
106
 
107
- # Create console handler
108
- handler = logging.StreamHandler(sys.stdout)
107
+ # Create console handler - use stderr for stdio mode
108
+ output_stream = (
109
+ sys.stderr if os.getenv("TOOLUNIVERSE_STDIO_MODE") == "1" else sys.stdout
110
+ )
111
+ handler = logging.StreamHandler(output_stream)
109
112
  handler.setLevel(level)
110
113
 
111
114
  # Create formatter
@@ -118,6 +121,29 @@ class ToolUniverseLogger:
118
121
  # Add handler to logger
119
122
  self._logger.addHandler(handler)
120
123
 
124
+ def reconfigure_for_stdio(self):
125
+ """Reconfigure logger to output to stderr for stdio mode"""
126
+ # Remove existing handlers
127
+ for handler in self._logger.handlers[:]:
128
+ self._logger.removeHandler(handler)
129
+
130
+ # Get current level
131
+ level = self._logger.level
132
+
133
+ # Create new handler with stderr
134
+ handler = logging.StreamHandler(sys.stderr)
135
+ handler.setLevel(level)
136
+
137
+ # Create formatter
138
+ formatter = ToolUniverseFormatter(
139
+ fmt="%(message)s",
140
+ datefmt="%H:%M:%S",
141
+ )
142
+ handler.setFormatter(formatter)
143
+
144
+ # Add handler to logger
145
+ self._logger.addHandler(handler)
146
+
121
147
  # Prevent propagation to root logger
122
148
  self._logger.propagate = False
123
149
 
@@ -152,6 +178,42 @@ class ToolUniverseLogger:
152
178
  _logger_manager = ToolUniverseLogger()
153
179
 
154
180
 
181
def reconfigure_for_stdio() -> None:
    """
    Reconfigure logging to output to stderr for stdio mode.

    This function should be called at the very beginning of stdio mode
    to ensure all logs go to stderr instead of stdout. Besides rerouting
    the ToolUniverse logger, it best-effort redirects rich's default
    console and Python warnings to stderr.
    """
    _logger_manager.reconfigure_for_stdio()
    # Ensure third-party rich/traceback pretty outputs do not go to stdout
    try:
        import rich
        from rich.console import Console

        # Redirect rich default console to stderr in stdio mode
        rich_console = Console(
            file=sys.stderr,
            force_terminal=False,
            markup=False,
            highlight=False,
            emoji=False,
            soft_wrap=False,
        )
        rich.get_console = lambda: rich_console
    except Exception:
        pass
    # Force Python warnings and tracebacks to stderr
    try:
        import warnings

        # Fix: the warnings machinery calls an overridden showwarning with
        # six positional arguments (message, category, filename, lineno,
        # file, line), but warnings.formatwarning() accepts at most five —
        # the previous lambda forwarded all of them and raised TypeError on
        # every emitted warning. Match the documented signature and drop
        # the ``file`` argument when formatting.
        def _showwarning(message, category, filename, lineno, file=None, line=None):
            print(
                warnings.formatwarning(message, category, filename, lineno, line),
                file=sys.stderr,
            )

        warnings.showwarning = _showwarning
    except Exception:
        pass
+ pass
215
+
216
+
155
217
  def setup_logging(level: Optional[str] = None) -> None:
156
218
  """
157
219
  Setup global logging configuration