tooluniverse 1.0.7__py3-none-any.whl → 1.0.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tooluniverse might be problematic. Click here for more details.

Files changed (76)
  1. tooluniverse/__init__.py +29 -14
  2. tooluniverse/admetai_tool.py +8 -4
  3. tooluniverse/base_tool.py +36 -0
  4. tooluniverse/biogrid_tool.py +118 -0
  5. tooluniverse/build_optimizer.py +87 -0
  6. tooluniverse/cache/__init__.py +3 -0
  7. tooluniverse/cache/memory_cache.py +99 -0
  8. tooluniverse/cache/result_cache_manager.py +235 -0
  9. tooluniverse/cache/sqlite_backend.py +257 -0
  10. tooluniverse/clinvar_tool.py +90 -0
  11. tooluniverse/custom_tool.py +28 -0
  12. tooluniverse/data/arxiv_tools.json +1 -4
  13. tooluniverse/data/core_tools.json +1 -4
  14. tooluniverse/data/dataset_tools.json +7 -7
  15. tooluniverse/data/doaj_tools.json +1 -3
  16. tooluniverse/data/drug_discovery_agents.json +292 -0
  17. tooluniverse/data/europe_pmc_tools.json +1 -2
  18. tooluniverse/data/genomics_tools.json +174 -0
  19. tooluniverse/data/geo_tools.json +86 -0
  20. tooluniverse/data/markitdown_tools.json +51 -0
  21. tooluniverse/data/openalex_tools.json +1 -5
  22. tooluniverse/data/pmc_tools.json +1 -4
  23. tooluniverse/data/ppi_tools.json +139 -0
  24. tooluniverse/data/pubmed_tools.json +1 -3
  25. tooluniverse/data/semantic_scholar_tools.json +1 -2
  26. tooluniverse/data/unified_guideline_tools.json +206 -4
  27. tooluniverse/data/xml_tools.json +15 -15
  28. tooluniverse/data/zenodo_tools.json +1 -2
  29. tooluniverse/dbsnp_tool.py +71 -0
  30. tooluniverse/default_config.py +6 -0
  31. tooluniverse/ensembl_tool.py +61 -0
  32. tooluniverse/execute_function.py +196 -75
  33. tooluniverse/generate_tools.py +303 -20
  34. tooluniverse/genomics_gene_search_tool.py +56 -0
  35. tooluniverse/geo_tool.py +116 -0
  36. tooluniverse/gnomad_tool.py +63 -0
  37. tooluniverse/markitdown_tool.py +159 -0
  38. tooluniverse/mcp_client_tool.py +10 -5
  39. tooluniverse/smcp.py +10 -9
  40. tooluniverse/string_tool.py +112 -0
  41. tooluniverse/tools/ADMETAnalyzerAgent.py +59 -0
  42. tooluniverse/tools/ArXiv_search_papers.py +3 -3
  43. tooluniverse/tools/CMA_Guidelines_Search.py +52 -0
  44. tooluniverse/tools/CORE_search_papers.py +3 -3
  45. tooluniverse/tools/ClinVar_search_variants.py +52 -0
  46. tooluniverse/tools/ClinicalTrialDesignAgent.py +63 -0
  47. tooluniverse/tools/CompoundDiscoveryAgent.py +59 -0
  48. tooluniverse/tools/DOAJ_search_articles.py +2 -2
  49. tooluniverse/tools/DiseaseAnalyzerAgent.py +52 -0
  50. tooluniverse/tools/DrugInteractionAnalyzerAgent.py +52 -0
  51. tooluniverse/tools/DrugOptimizationAgent.py +63 -0
  52. tooluniverse/tools/Ensembl_lookup_gene_by_symbol.py +52 -0
  53. tooluniverse/tools/EuropePMC_search_articles.py +1 -1
  54. tooluniverse/tools/GIN_Guidelines_Search.py +52 -0
  55. tooluniverse/tools/GWAS_search_associations_by_gene.py +52 -0
  56. tooluniverse/tools/LiteratureSynthesisAgent.py +59 -0
  57. tooluniverse/tools/PMC_search_papers.py +3 -3
  58. tooluniverse/tools/PubMed_search_articles.py +2 -2
  59. tooluniverse/tools/SemanticScholar_search_papers.py +1 -1
  60. tooluniverse/tools/UCSC_get_genes_by_region.py +67 -0
  61. tooluniverse/tools/Zenodo_search_records.py +1 -1
  62. tooluniverse/tools/__init__.py +33 -1
  63. tooluniverse/tools/convert_to_markdown.py +59 -0
  64. tooluniverse/tools/dbSNP_get_variant_by_rsid.py +46 -0
  65. tooluniverse/tools/gnomAD_query_variant.py +52 -0
  66. tooluniverse/tools/openalex_literature_search.py +4 -4
  67. tooluniverse/ucsc_tool.py +60 -0
  68. tooluniverse/unified_guideline_tools.py +1175 -57
  69. tooluniverse/utils.py +51 -4
  70. tooluniverse/zenodo_tool.py +2 -1
  71. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/METADATA +9 -3
  72. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/RECORD +76 -40
  73. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/WHEEL +0 -0
  74. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/entry_points.txt +0 -0
  75. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/licenses/LICENSE +0 -0
  76. {tooluniverse-1.0.7.dist-info → tooluniverse-1.0.8.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,71 @@
1
+ import requests
2
+ from .base_tool import BaseTool
3
+ from .tool_registry import register_tool
4
+
5
+
6
@register_tool("DbSnpTool")
class DbSnpTool(BaseTool):
    """
    Local tool wrapper for dbSNP via NCBI Variation Services.
    Fetches variant by rsID using the refsnp endpoint.
    """

    def __init__(self, tool_config):
        super().__init__(tool_config)
        # NCBI Variation Services base URL (no API key required).
        self.base = "https://api.ncbi.nlm.nih.gov/variation/v0"
        # One session so repeated lookups reuse the connection pool.
        self.session = requests.Session()

    def run(self, arguments):
        """Fetch a dbSNP variant by rsID.

        Args:
            arguments (dict): Must contain "rsid" — an rsID given with or
                without the "rs"/"RS" prefix (e.g. "rs7412", "7412", 7412).

        Returns:
            dict: {"refsnp_id", "chrom", "pos", "alleles", "hgvs"} on
            success, or {"error": ...} when "rsid" is missing.

        Raises:
            requests.HTTPError: If the NCBI service returns an error status.
        """
        rsid = arguments.get("rsid")
        if not rsid:
            return {"error": "Missing required parameter: rsid"}

        # Normalize: accept ints and "rs"/"RS"-prefixed strings.
        # (The original str.startswith("rs") crashed on ints and missed "RS".)
        rsid = str(rsid).strip()
        if rsid[:2].lower() == "rs":
            rsid = rsid[2:]

        url = f"{self.base}/refsnp/{rsid}"
        resp = self.session.get(url, timeout=20)
        resp.raise_for_status()
        data = resp.json()

        # Extract key fields from the primary snapshot.
        primary = data.get("primary_snapshot_data", {})
        placements = primary.get("placements_with_allele", [])

        chrom = ""
        pos = None
        alleles = []
        hgvs = []

        if placements:
            placement = placements[0]
            chrom = self._chrom_from_seq_id(placement.get("seq_id", ""))

            for allele in placement.get("alleles", []):
                spdi = allele.get("allele", {}).get("spdi", {})
                if spdi:
                    # Bug fix: "pos" was previously declared but never set,
                    # so the tool always returned pos=None. Take the first
                    # available SPDI position.
                    # NOTE(review): SPDI coordinates are 0-based — confirm
                    # downstream consumers expect that convention.
                    if pos is None and spdi.get("position") is not None:
                        pos = spdi["position"]
                    ref = spdi.get("deleted_sequence", "")
                    alt = spdi.get("inserted_sequence", "")
                    if ref and alt:
                        alleles.append(f"{ref}>{alt}")
                    elif ref:
                        alleles.append(ref)

                hgvs_val = allele.get("hgvs", "")
                if hgvs_val:
                    hgvs.append(hgvs_val)

        return {
            "refsnp_id": f"rs{rsid}",
            "chrom": chrom,
            "pos": pos,
            "alleles": alleles,
            "hgvs": hgvs,
        }

    @staticmethod
    def _chrom_from_seq_id(seq_id):
        """Convert a RefSeq chromosome accession (e.g. "NC_000001.11") to a
        UCSC-style name ("chr1").

        More robust than the previous chained str.replace, which was
        hard-coded to assembly version ".11" and rendered X/Y as
        "chr23"/"chr24". Returns "" for non-NC or unrecognized accessions.
        """
        acc = seq_id.split(".", 1)[0]  # drop the accession version suffix
        if not acc.startswith("NC_"):
            return ""
        num = acc[3:].lstrip("0")
        if not num:
            return ""
        # Human chromosome accessions 23/24 correspond to X/Y.
        num = {"23": "X", "24": "Y"}.get(num, num)
        return f"chr{num}"
@@ -51,6 +51,9 @@ default_tool_files = {
51
51
  "fatcat": os.path.join(current_dir, "data", "fatcat_tools.json"),
52
52
  "wikidata_sparql": os.path.join(current_dir, "data", "wikidata_sparql_tools.json"),
53
53
  "agents": os.path.join(current_dir, "data", "agentic_tools.json"),
54
+ "drug_discovery_agents": os.path.join(
55
+ current_dir, "data", "drug_discovery_agents.json"
56
+ ),
54
57
  "dataset": os.path.join(current_dir, "data", "dataset_tools.json"),
55
58
  # 'mcp_clients': os.path.join(current_dir, 'data', 'mcp_client_tools_example.json'),
56
59
  "mcp_auto_loader_txagent": os.path.join(
@@ -142,6 +145,9 @@ default_tool_files = {
142
145
  current_dir, "data", "output_summarization_tools.json"
143
146
  ),
144
147
  "odphp": os.path.join(current_dir, "data", "odphp_tools.json"),
148
+ "markitdown": os.path.join(current_dir, "data", "markitdown_tools.json"),
149
+ # Genomics tools
150
+ "genomics": os.path.join(current_dir, "data", "genomics_tools.json"),
145
151
  # Guideline and health policy tools
146
152
  "guidelines": os.path.join(current_dir, "data", "unified_guideline_tools.json"),
147
153
  }
@@ -0,0 +1,61 @@
1
+ import requests
2
+ from .base_tool import BaseTool
3
+ from .tool_registry import register_tool
4
+
5
+
6
@register_tool("EnsemblTool")
class EnsemblTool(BaseTool):
    """
    Local tool wrapper for Ensembl REST API lookups.

    Resolves a gene symbol to an Ensembl gene ID via the xrefs/symbol
    endpoint, then fetches gene metadata (including transcripts) via
    lookup/id.
    """

    def __init__(self, tool_config):
        super().__init__(tool_config)
        self.base = "https://rest.ensembl.org"
        self.session = requests.Session()
        self.session.headers.update(
            {"Accept": "application/json", "Content-Type": "application/json"}
        )

    def run(self, arguments):
        """Look up gene metadata for a symbol.

        Args:
            arguments (dict): "symbol" (required) and optional "species"
                (defaults to "homo_sapiens").

        Returns:
            dict: Gene metadata summary, or {"error": ...} when the symbol
            is missing or cannot be resolved to an Ensembl gene.

        Raises:
            requests.HTTPError: If either Ensembl endpoint returns an
            error status.
        """
        species = arguments.get("species", "homo_sapiens")
        symbol = arguments.get("symbol")
        if not symbol:
            return {"error": "Missing required parameter: symbol"}

        # Step 1: resolve the symbol to an Ensembl gene ID via xrefs.
        xref_response = self.session.get(
            f"{self.base}/xrefs/symbol/{species}/{symbol}", timeout=20
        )
        xref_response.raise_for_status()
        xref_records = xref_response.json() or []

        # Prefer an xref explicitly typed as a gene; otherwise fall back
        # to the first record's ID.
        gene_id = next(
            (
                record["id"]
                for record in xref_records
                if record.get("type") == "gene" and record.get("id")
            ),
            None,
        )
        if not gene_id and xref_records:
            gene_id = xref_records[0].get("id")
        if not gene_id:
            return {"error": f"No Ensembl gene found for {symbol}"}

        # Step 2: fetch metadata by Ensembl ID; expand=1 includes transcripts.
        lookup_response = self.session.get(
            f"{self.base}/lookup/id/{gene_id}?expand=1", timeout=30
        )
        lookup_response.raise_for_status()
        gene = lookup_response.json() or {}

        return {
            "id": gene.get("id"),
            "symbol": symbol,
            "display_name": gene.get("display_name"),
            "species": gene.get("species"),
            "seq_region_name": gene.get("seq_region_name"),
            "start": gene.get("start"),
            "end": gene.get("end"),
            "strand": gene.get("strand"),
            "biotype": gene.get("biotype"),
            "transcript_count": len(gene.get("Transcript") or []),
        }
@@ -33,6 +33,8 @@ import os
33
33
  import time
34
34
  import hashlib
35
35
  import warnings
36
+ from pathlib import Path
37
+ from contextlib import nullcontext
36
38
  from typing import Any, Dict, List, Optional
37
39
  from .utils import read_json_list, evaluate_function_call, extract_function_call_json
38
40
  from .exceptions import (
@@ -58,6 +60,7 @@ from .logging_config import (
58
60
  error,
59
61
  set_log_level,
60
62
  )
63
+ from .cache.result_cache_manager import ResultCacheManager
61
64
  from .output_hook import HookManager
62
65
  from .default_config import default_tool_files, get_default_hook_config
63
66
 
@@ -260,9 +263,39 @@ class ToolUniverse:
260
263
  self.hook_manager = None
261
264
  self.logger.debug("Output hooks disabled")
262
265
 
263
- # Initialize new attributes for enhanced functionality
264
- self._cache = {} # Simple cache for tool results
265
- self._cache_size = int(os.getenv("TOOLUNIVERSE_CACHE_SIZE", "100"))
266
+ # Initialize caching configuration
267
+ cache_enabled = os.getenv("TOOLUNIVERSE_CACHE_ENABLED", "true").lower() in (
268
+ "true",
269
+ "1",
270
+ "yes",
271
+ )
272
+ persistence_enabled = os.getenv(
273
+ "TOOLUNIVERSE_CACHE_PERSIST", "true"
274
+ ).lower() in ("true", "1", "yes")
275
+ memory_size = int(os.getenv("TOOLUNIVERSE_CACHE_MEMORY_SIZE", "256"))
276
+ default_ttl_env = os.getenv("TOOLUNIVERSE_CACHE_DEFAULT_TTL")
277
+ default_ttl = int(default_ttl_env) if default_ttl_env else None
278
+ singleflight_enabled = os.getenv(
279
+ "TOOLUNIVERSE_CACHE_SINGLEFLIGHT", "true"
280
+ ).lower() in ("true", "1", "yes")
281
+
282
+ cache_path = os.getenv("TOOLUNIVERSE_CACHE_PATH")
283
+ if not cache_path and persistence_enabled:
284
+ base_dir = os.getenv("TOOLUNIVERSE_CACHE_DIR")
285
+ if not base_dir:
286
+ base_dir = os.path.join(str(Path.home()), ".tooluniverse")
287
+ os.makedirs(base_dir, exist_ok=True)
288
+ cache_path = os.path.join(base_dir, "cache.sqlite")
289
+
290
+ self.cache_manager = ResultCacheManager(
291
+ memory_size=memory_size,
292
+ persistent_path=cache_path if persistence_enabled else None,
293
+ enabled=cache_enabled,
294
+ persistence_enabled=persistence_enabled,
295
+ singleflight=singleflight_enabled,
296
+ default_ttl=default_ttl,
297
+ )
298
+
266
299
  self._strict_validation = os.getenv(
267
300
  "TOOLUNIVERSE_STRICT_VALIDATION", "false"
268
301
  ).lower() in ("true", "1", "yes")
@@ -1041,9 +1074,14 @@ class ToolUniverse:
1041
1074
  - When scan_all=True, all JSON files in data/ and subdirectories are scanned
1042
1075
  """
1043
1076
  if mode not in ["config", "type", "list_name", "list_spec"]:
1044
- raise ValueError(
1045
- "Mode must be one of: 'config', 'type', 'list_name', 'list_spec'"
1046
- )
1077
+ # Handle invalid modes gracefully
1078
+ if mode is None:
1079
+ mode = "config" # Default to config mode
1080
+ else:
1081
+ # For invalid string modes, return error info instead of raising
1082
+ return {
1083
+ "error": f"Invalid mode '{mode}'. Must be one of: 'config', 'type', 'list_name', 'list_spec'"
1084
+ }
1047
1085
 
1048
1086
  # For list_name and list_spec modes, we can return early with just the data
1049
1087
  if mode in ["list_name", "list_spec"]:
@@ -1693,84 +1731,139 @@ class ToolUniverse:
1693
1731
  Returns:
1694
1732
  str or dict: Result from the tool execution, or error message if validation fails.
1695
1733
  """
1696
- function_name = function_call_json["name"]
1697
- arguments = function_call_json["arguments"]
1698
-
1699
- # Check cache first if enabled
1700
- if use_cache:
1701
- cache_key = self._make_cache_key(function_name, arguments)
1702
- if cache_key in self._cache:
1703
- self.logger.debug(f"Cache hit for {function_name}")
1704
- return self._cache[cache_key]
1705
-
1706
- # Validate parameters if requested
1707
- if validate:
1708
- validation_error = self._validate_parameters(function_name, arguments)
1709
- if validation_error:
1710
- return self._create_dual_format_error(validation_error)
1711
-
1712
- # Check function call format (existing validation)
1713
- check_status, check_message = self.check_function_call(function_call_json)
1714
- if check_status is False:
1715
- error_msg = "Invalid function call: " + check_message
1716
- return self._create_dual_format_error(
1717
- ToolValidationError(error_msg, details={"check_message": check_message})
1718
- )
1734
+ function_name = function_call_json.get("name", "")
1735
+ arguments = function_call_json.get("arguments", {})
1736
+
1737
+ # Handle malformed queries gracefully
1738
+ if not function_name:
1739
+ return {"error": "Missing or empty function name"}
1740
+
1741
+ if not isinstance(arguments, dict):
1742
+ return {
1743
+ "error": f"Arguments must be a dictionary, got {type(arguments).__name__}"
1744
+ }
1719
1745
 
1720
- # Execute the tool
1721
1746
  tool_instance = None
1722
- tool_arguments = arguments
1723
- try:
1724
- # Get or create tool instance (optimized to avoid duplication)
1725
- tool_instance = self._get_tool_instance(function_name, cache=True)
1747
+ cache_namespace = None
1748
+ cache_version = None
1749
+ cache_key = None
1750
+ composed_cache_key = None
1751
+ cache_guard = nullcontext()
1752
+
1753
+ cache_enabled = (
1754
+ use_cache and self.cache_manager is not None and self.cache_manager.enabled
1755
+ )
1726
1756
 
1727
- if tool_instance:
1728
- result, tool_arguments = self._execute_tool_with_stream(
1729
- tool_instance, arguments, stream_callback, use_cache, validate
1757
+ if cache_enabled:
1758
+ tool_instance = self._get_tool_instance(function_name, cache=True)
1759
+ if tool_instance and tool_instance.supports_caching():
1760
+ cache_namespace = tool_instance.get_cache_namespace()
1761
+ cache_version = tool_instance.get_cache_version()
1762
+ cache_key = self._make_cache_key(function_name, arguments)
1763
+ composed_cache_key = self.cache_manager.compose_key(
1764
+ cache_namespace, cache_version, cache_key
1765
+ )
1766
+ cached_value = self.cache_manager.get(
1767
+ namespace=cache_namespace,
1768
+ version=cache_version,
1769
+ cache_key=cache_key,
1730
1770
  )
1771
+ if cached_value is not None:
1772
+ self.logger.debug(f"Cache hit for {function_name}")
1773
+ return cached_value
1774
+ cache_guard = self.cache_manager.singleflight_guard(composed_cache_key)
1731
1775
  else:
1732
- error_msg = f"Tool '{function_name}' not found"
1776
+ cache_enabled = False
1777
+
1778
+ with cache_guard:
1779
+ if cache_enabled:
1780
+ cached_value = self.cache_manager.get(
1781
+ namespace=cache_namespace,
1782
+ version=cache_version,
1783
+ cache_key=cache_key,
1784
+ )
1785
+ if cached_value is not None:
1786
+ self.logger.debug(
1787
+ f"Cache hit for {function_name} (after singleflight wait)"
1788
+ )
1789
+ return cached_value
1790
+
1791
+ # Validate parameters if requested
1792
+ if validate:
1793
+ validation_error = self._validate_parameters(function_name, arguments)
1794
+ if validation_error:
1795
+ return self._create_dual_format_error(validation_error)
1796
+
1797
+ # Check function call format (existing validation)
1798
+ check_status, check_message = self.check_function_call(function_call_json)
1799
+ if check_status is False:
1800
+ error_msg = "Invalid function call: " + check_message
1733
1801
  return self._create_dual_format_error(
1734
- ToolUnavailableError(
1735
- error_msg,
1736
- next_steps=[
1737
- "Check tool name spelling",
1738
- "Run tu.tools.refresh()",
1739
- ],
1802
+ ToolValidationError(
1803
+ error_msg, details={"check_message": check_message}
1740
1804
  )
1741
1805
  )
1742
- except Exception as e:
1743
- # Classify and return structured error
1744
- classified_error = self._classify_exception(e, function_name, arguments)
1745
- return self._create_dual_format_error(classified_error)
1746
-
1747
- # Apply output hooks if enabled
1748
- if self.hook_manager:
1749
- context = {
1750
- "tool_name": function_name,
1751
- "tool_type": (
1752
- tool_instance.__class__.__name__
1753
- if tool_instance is not None
1754
- else "unknown"
1755
- ),
1756
- "execution_time": time.time(),
1757
- "arguments": tool_arguments,
1758
- }
1759
- result = self.hook_manager.apply_hooks(
1760
- result, function_name, tool_arguments, context
1761
- )
1762
1806
 
1763
- # Cache result if enabled
1764
- if use_cache:
1765
- cache_key = self._make_cache_key(function_name, arguments)
1766
- self._cache[cache_key] = result
1767
- # Simple cache size management
1768
- if len(self._cache) > self._cache_size:
1769
- # Remove oldest entries (simple FIFO)
1770
- oldest_key = next(iter(self._cache))
1771
- del self._cache[oldest_key]
1807
+ # Execute the tool
1808
+ tool_arguments = arguments
1809
+ try:
1810
+ if tool_instance is None:
1811
+ tool_instance = self._get_tool_instance(function_name, cache=True)
1772
1812
 
1773
- return result
1813
+ if tool_instance:
1814
+ result, tool_arguments = self._execute_tool_with_stream(
1815
+ tool_instance, arguments, stream_callback, use_cache, validate
1816
+ )
1817
+ else:
1818
+ error_msg = f"Tool '{function_name}' not found"
1819
+ return self._create_dual_format_error(
1820
+ ToolUnavailableError(
1821
+ error_msg,
1822
+ next_steps=[
1823
+ "Check tool name spelling",
1824
+ "Run tu.tools.refresh()",
1825
+ ],
1826
+ )
1827
+ )
1828
+ except Exception as e:
1829
+ # Classify and return structured error
1830
+ classified_error = self._classify_exception(e, function_name, arguments)
1831
+ return self._create_dual_format_error(classified_error)
1832
+
1833
+ # Apply output hooks if enabled
1834
+ if self.hook_manager:
1835
+ context = {
1836
+ "tool_name": function_name,
1837
+ "tool_type": (
1838
+ tool_instance.__class__.__name__
1839
+ if tool_instance is not None
1840
+ else "unknown"
1841
+ ),
1842
+ "execution_time": time.time(),
1843
+ "arguments": tool_arguments,
1844
+ }
1845
+ result = self.hook_manager.apply_hooks(
1846
+ result, function_name, tool_arguments, context
1847
+ )
1848
+
1849
+ # Cache result if enabled
1850
+ if cache_enabled and tool_instance and tool_instance.supports_caching():
1851
+ if cache_key is None:
1852
+ cache_key = self._make_cache_key(function_name, arguments)
1853
+ if cache_namespace is None:
1854
+ cache_namespace = tool_instance.get_cache_namespace()
1855
+ if cache_version is None:
1856
+ cache_version = tool_instance.get_cache_version()
1857
+ ttl = tool_instance.get_cache_ttl(result)
1858
+ self.cache_manager.set(
1859
+ namespace=cache_namespace,
1860
+ version=cache_version,
1861
+ cache_key=cache_key,
1862
+ value=result,
1863
+ ttl=ttl,
1864
+ )
1865
+
1866
+ return result
1774
1867
 
1775
1868
  def _execute_tool_with_stream(
1776
1869
  self, tool_instance, arguments, stream_callback, use_cache=False, validate=True
@@ -2039,9 +2132,33 @@ class ToolUniverse:
2039
2132
 
2040
2133
def clear_cache(self):
    """Clear the result cache (no-op when no cache manager is configured)."""
    manager = self.cache_manager
    if manager:
        manager.clear()
    self.logger.info("Result cache cleared")
2044
2138
 
2139
def get_cache_stats(self) -> Dict[str, Any]:
    """Return cache statistics, or {"enabled": False} when no cache
    manager is configured."""
    manager = self.cache_manager
    return manager.stats() if manager else {"enabled": False}
2144
+
2145
def dump_cache(self, namespace: Optional[str] = None):
    """Iterate over cached entries (persistent layer only).

    Yields nothing when no cache manager is configured.
    """
    manager = self.cache_manager
    if manager:
        return manager.dump(namespace=namespace)
    return iter([])
2150
+
2151
def close(self):
    """Release resources held by the cache manager, if any."""
    manager = self.cache_manager
    if manager:
        manager.close()
2155
+
2156
+ def __del__(self):
2157
+ try:
2158
+ self.close()
2159
+ except Exception:
2160
+ pass
2161
+
2045
2162
  def get_tool_health(self, tool_name: str = None) -> dict:
2046
2163
  """Get health status for tool(s)."""
2047
2164
  tool_errors = get_tool_errors()
@@ -2248,6 +2365,10 @@ class ToolUniverse:
2248
2365
  self.logger.warning("No tools loaded. Call load_tools() first.")
2249
2366
  return []
2250
2367
 
2368
+ # Handle None or empty pattern
2369
+ if pattern is None or pattern == "":
2370
+ return self.all_tools
2371
+
2251
2372
  import re
2252
2373
 
2253
2374
  flags = 0 if case_sensitive else re.IGNORECASE