tooluniverse 1.0.9__py3-none-any.whl → 1.0.10__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Files changed (57)
  1. tooluniverse/admetai_tool.py +1 -1
  2. tooluniverse/agentic_tool.py +65 -17
  3. tooluniverse/base_tool.py +19 -8
  4. tooluniverse/boltz_tool.py +1 -1
  5. tooluniverse/cache/result_cache_manager.py +167 -12
  6. tooluniverse/compose_scripts/drug_safety_analyzer.py +1 -1
  7. tooluniverse/compose_scripts/multi_agent_literature_search.py +1 -1
  8. tooluniverse/compose_scripts/output_summarizer.py +4 -4
  9. tooluniverse/compose_scripts/tool_graph_composer.py +1 -1
  10. tooluniverse/compose_scripts/tool_metadata_generator.py +1 -1
  11. tooluniverse/compose_tool.py +9 -9
  12. tooluniverse/core_tool.py +2 -2
  13. tooluniverse/ctg_tool.py +4 -4
  14. tooluniverse/custom_tool.py +1 -1
  15. tooluniverse/dataset_tool.py +2 -2
  16. tooluniverse/default_config.py +1 -1
  17. tooluniverse/enrichr_tool.py +14 -14
  18. tooluniverse/execute_function.py +520 -15
  19. tooluniverse/extended_hooks.py +4 -4
  20. tooluniverse/gene_ontology_tool.py +1 -1
  21. tooluniverse/generate_tools.py +3 -3
  22. tooluniverse/humanbase_tool.py +10 -10
  23. tooluniverse/logging_config.py +2 -2
  24. tooluniverse/mcp_client_tool.py +57 -129
  25. tooluniverse/mcp_integration.py +52 -49
  26. tooluniverse/mcp_tool_registry.py +147 -528
  27. tooluniverse/openalex_tool.py +8 -8
  28. tooluniverse/openfda_tool.py +2 -2
  29. tooluniverse/output_hook.py +15 -15
  30. tooluniverse/package_tool.py +1 -1
  31. tooluniverse/pmc_tool.py +2 -2
  32. tooluniverse/remote/boltz/boltz_mcp_server.py +1 -1
  33. tooluniverse/remote/depmap_24q2/depmap_24q2_mcp_tool.py +2 -2
  34. tooluniverse/remote/immune_compass/compass_tool.py +3 -3
  35. tooluniverse/remote/pinnacle/pinnacle_tool.py +2 -2
  36. tooluniverse/remote/transcriptformer/transcriptformer_tool.py +3 -3
  37. tooluniverse/remote/uspto_downloader/uspto_downloader_mcp_server.py +3 -3
  38. tooluniverse/remote_tool.py +4 -4
  39. tooluniverse/scripts/filter_tool_files.py +2 -2
  40. tooluniverse/smcp.py +93 -12
  41. tooluniverse/smcp_server.py +100 -20
  42. tooluniverse/space/__init__.py +46 -0
  43. tooluniverse/space/loader.py +133 -0
  44. tooluniverse/space/validator.py +353 -0
  45. tooluniverse/tool_finder_embedding.py +2 -2
  46. tooluniverse/tool_finder_keyword.py +9 -9
  47. tooluniverse/tool_finder_llm.py +6 -6
  48. tooluniverse/tools/_shared_client.py +3 -3
  49. tooluniverse/url_tool.py +1 -1
  50. tooluniverse/uspto_tool.py +1 -1
  51. tooluniverse/utils.py +10 -10
  52. {tooluniverse-1.0.9.dist-info → tooluniverse-1.0.10.dist-info}/METADATA +7 -3
  53. {tooluniverse-1.0.9.dist-info → tooluniverse-1.0.10.dist-info}/RECORD +57 -54
  54. {tooluniverse-1.0.9.dist-info → tooluniverse-1.0.10.dist-info}/WHEEL +0 -0
  55. {tooluniverse-1.0.9.dist-info → tooluniverse-1.0.10.dist-info}/entry_points.txt +0 -0
  56. {tooluniverse-1.0.9.dist-info → tooluniverse-1.0.10.dist-info}/licenses/LICENSE +0 -0
  57. {tooluniverse-1.0.9.dist-info → tooluniverse-1.0.10.dist-info}/top_level.txt +0 -0
@@ -33,8 +33,11 @@ import os
 import time
 import hashlib
 import warnings
+import threading
 from pathlib import Path
 from contextlib import nullcontext
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from dataclasses import dataclass, field
 from typing import Any, Dict, List, Optional
 from .utils import read_json_list, evaluate_function_call, extract_function_call_json
 from .exceptions import (
@@ -96,6 +99,26 @@ for _tool_name, _tool_class in sorted(tool_type_mappings.items()):
     debug(f" - {_tool_name}: {_tool_class.__name__}")


+@dataclass
+class _BatchCacheInfo:
+    namespace: str
+    version: str
+    cache_key: str
+
+
+@dataclass
+class _BatchJob:
+    signature: str
+    call: Dict[str, Any]
+    function_name: str
+    arguments: Dict[str, Any]
+    indices: List[int] = field(default_factory=list)
+    tool_instance: Any = None
+    cache_info: Optional[_BatchCacheInfo] = None
+    cache_key_composed: Optional[str] = None
+    skip_execution: bool = False
+
+
 class ToolCallable:
     """
     A callable wrapper for a tool that validates kwargs and calls run_one_function.
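
These two dataclasses drive the batching path below: each distinct (name, arguments) pair becomes one `_BatchJob`, and `indices` records every position in the input list that requested it, so a single execution can fan out to several result slots. A minimal standalone sketch of that grouping idea (the tool name and the simplified `BatchJob` are hypothetical):

```python
import json
from dataclasses import dataclass, field
from typing import Dict, List


@dataclass
class BatchJob:  # simplified stand-in for _BatchJob
    signature: str
    indices: List[int] = field(default_factory=list)


calls = [
    {"name": "get_gene", "arguments": {"symbol": "TP53"}},
    {"name": "get_gene", "arguments": {"symbol": "BRCA1"}},
    {"name": "get_gene", "arguments": {"symbol": "TP53"}},  # duplicate of call 0
]

jobs: Dict[str, BatchJob] = {}
for idx, call in enumerate(calls):
    # Canonical JSON (sorted keys) makes identical calls share one signature.
    sig = json.dumps(call, sort_keys=True)
    jobs.setdefault(sig, BatchJob(signature=sig)).indices.append(idx)

print([job.indices for job in jobs.values()])  # [[0, 2], [1]]
```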
@@ -334,36 +357,102 @@ class ToolUniverse:
         # Initialize dynamic tools namespace
         self.tools = ToolNamespace(self)

-    def register_custom_tool(self, tool_class, tool_name=None, tool_config=None):
+    def register_custom_tool(
+        self,
+        tool_class,
+        tool_name=None,
+        tool_config=None,
+        instantiate=False,
+        tool_instance=None,
+    ):
         """
-        Register a custom tool class at runtime.
+        Register a custom tool class or instance at runtime.

         Args:
-            tool_class: The tool class to register
+            tool_class: The tool class to register (required if tool_instance is None)
             tool_name (str, optional): Name to register under. Uses class name if None.
             tool_config (dict, optional): Tool configuration dictionary to add to all_tools
+            instantiate (bool, optional): If True, immediately instantiate and cache the tool.
+                Defaults to False for backward compatibility.
+            tool_instance (optional): Pre-instantiated tool object. If provided, tool_class
+                is inferred from the instance.

         Returns:
             str: The name the tool was registered under
+
+        Examples:
+            # Register tool class only (lazy instantiation)
+            tu.register_custom_tool(MyTool, tool_config={...})
+
+            # Register and immediately instantiate
+            tu.register_custom_tool(MyTool, tool_config={...}, instantiate=True)
+
+            # Register pre-instantiated tool
+            instance = MyTool({...})
+            tu.register_custom_tool(tool_class=MyTool, tool_instance=instance, tool_config={...})
         """
+        # If tool_instance is provided, infer tool_class from it
+        if tool_instance is not None:
+            tool_class = tool_instance.__class__
+        elif tool_class is None:
+            raise ValueError("Either tool_class or tool_instance must be provided")
+
         name = tool_name or tool_class.__name__

-        # Register the tool class
+        # Register the tool class to global registry
         register_external_tool(name, tool_class)

         # Update the global tool_type_mappings
         global tool_type_mappings
         tool_type_mappings = get_tool_registry()

-        # If tool_config is provided, add it to all_tools
+        # Process tool_config if provided
         if tool_config:
             # Ensure the config has the correct type
             if "type" not in tool_config:
                 tool_config["type"] = name

             self.all_tools.append(tool_config)
-            if "name" in tool_config:
-                self.all_tool_dict[tool_config["name"]] = tool_config
+            tool_name_in_config = tool_config.get("name", name)
+            self.all_tool_dict[tool_name_in_config] = tool_config
+
+            # Handle tool instantiation
+            if tool_instance is not None:
+                # Use provided instance
+                self.callable_functions[tool_name_in_config] = tool_instance
+                self.logger.debug(
+                    f"Registered pre-instantiated tool '{tool_name_in_config}'"
+                )
+            elif instantiate:
+                # Instantiate now
+                try:
+                    # Use the same logic as _get_or_initialize_tool (line 2318)
+                    # Try to instantiate with tool_config parameter
+                    try:
+                        instance = tool_class(
+                            tool_config=tool_config
+                        )  # ✅ use keyword argument
+                    except TypeError:
+                        # If tool doesn't accept tool_config, try without parameters
+                        instance = tool_class()
+
+                    self.callable_functions[tool_name_in_config] = instance
+                    self.logger.debug(
+                        f"Instantiated and cached tool '{tool_name_in_config}'"
+                    )
+                except Exception as e:
+                    self.logger.error(
+                        f"Failed to instantiate tool '{tool_name_in_config}': {e}"
+                    )
+                    raise
+            # else: lazy instantiation (existing behavior)
+
+            # Add to category for proper organization
+            category = tool_config.get("category", "custom")
+            if category not in self.tool_category_dicts:
+                self.tool_category_dicts[category] = []
+            if tool_name_in_config not in self.tool_category_dicts[category]:
+                self.tool_category_dicts[category].append(tool_name_in_config)

         self.logger.info(f"Custom tool '{name}' registered successfully!")
         return name
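
Taken together, the new parameters give three registration styles: lazy (the default, unchanged), eager, and instance-based. A sketch under assumptions: `EchoTool` is hypothetical, and the config dicts are simplified to the keys this method actually reads (`name`, `type`, `category`):

```python
from tooluniverse import ToolUniverse


class EchoTool:
    """Hypothetical tool that returns its arguments unchanged."""

    def __init__(self, tool_config=None):
        self.tool_config = tool_config

    def run(self, arguments):
        return arguments


tu = ToolUniverse()

# Default: register the class only; instantiation stays lazy (pre-1.0.10 behavior).
tu.register_custom_tool(EchoTool, tool_config={"name": "echo", "type": "EchoTool"})

# New: instantiate immediately and cache the instance in callable_functions.
tu.register_custom_tool(
    EchoTool,
    tool_config={"name": "echo_eager", "type": "EchoTool"},
    instantiate=True,
)

# New: hand over a pre-built instance; tool_class is inferred from it.
instance = EchoTool({"name": "echo_prebuilt"})
tu.register_custom_tool(
    tool_class=EchoTool,
    tool_instance=instance,
    tool_config={"name": "echo_prebuilt", "type": "EchoTool"},
)
```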
@@ -899,7 +988,9 @@ class ToolUniverse:
         for _tool_type, config in discovered_configs.items():
             # Add to all_tools if not already present
             if "name" in config and config["name"] not in [
-                tool.get("name") for tool in self.all_tools
+                tool.get("name")
+                for tool in self.all_tools
+                if isinstance(tool, dict)
             ]:
                 self.all_tools.append(config)
                 self.logger.debug(f"Added auto-discovered config: {config['name']}")
@@ -1674,6 +1765,198 @@ class ToolUniverse:
         """
         return copy.deepcopy(self.all_tools)

+    def _execute_function_call_list(
+        self,
+        function_calls: List[Dict[str, Any]],
+        stream_callback=None,
+        use_cache: bool = False,
+        max_workers: Optional[int] = None,
+    ) -> List[Any]:
+        """Execute a list of function calls, optionally in parallel.
+
+        Args:
+            function_calls: Ordered list of function call dictionaries.
+            stream_callback: Optional streaming callback.
+            use_cache: Whether to enable cache lookups for each call.
+            max_workers: Maximum parallel workers; values <=1 fall back to sequential execution.
+
+        Returns:
+            List of results aligned with ``function_calls`` order.
+        """
+
+        if not function_calls:
+            return []
+
+        if stream_callback is not None and max_workers and max_workers > 1:
+            # Streaming multiple calls concurrently is ambiguous; fall back to sequential execution.
+            self.logger.warning(
+                "stream_callback is not supported with parallel batch execution; falling back to sequential mode"
+            )
+            max_workers = 1
+
+        jobs = self._build_batch_jobs(function_calls)
+        results: List[Any] = [None] * len(function_calls)
+
+        jobs_to_run = self._prime_batch_cache(jobs, use_cache, results)
+        if not jobs_to_run:
+            return results
+
+        self._execute_batch_jobs(
+            jobs_to_run,
+            results,
+            stream_callback=stream_callback,
+            use_cache=use_cache,
+            max_workers=max_workers,
+        )
+
+        return results
+
+    def _build_batch_jobs(
+        self, function_calls: List[Dict[str, Any]]
+    ) -> List[_BatchJob]:
+        signature_to_job: Dict[str, _BatchJob] = {}
+        jobs: List[_BatchJob] = []
+
+        for idx, call in enumerate(function_calls):
+            function_name = call.get("name", "")
+            arguments = call.get("arguments", {})
+            if not isinstance(arguments, dict):
+                arguments = {}
+
+            signature = json.dumps(
+                {"name": function_name, "arguments": arguments}, sort_keys=True
+            )
+
+            job = signature_to_job.get(signature)
+            if job is None:
+                job = _BatchJob(
+                    signature=signature,
+                    call=call,
+                    function_name=function_name,
+                    arguments=arguments,
+                )
+                signature_to_job[signature] = job
+                jobs.append(job)
+
+            job.indices.append(idx)
+
+        return jobs
+
+    def _prime_batch_cache(
+        self,
+        jobs: List[_BatchJob],
+        use_cache: bool,
+        results: List[Any],
+    ) -> List[_BatchJob]:
+        if not (
+            use_cache and self.cache_manager is not None and self.cache_manager.enabled
+        ):
+            return jobs
+
+        cache_requests: List[Dict[str, str]] = []
+        for job in jobs:
+            if not job.function_name:
+                continue
+
+            tool_instance = self._ensure_tool_instance(job)
+            if not tool_instance or not tool_instance.supports_caching():
+                continue
+
+            cache_key = tool_instance.get_cache_key(job.arguments or {})
+            cache_info = _BatchCacheInfo(
+                namespace=tool_instance.get_cache_namespace(),
+                version=tool_instance.get_cache_version(),
+                cache_key=cache_key,
+            )
+            job.cache_info = cache_info
+            job.cache_key_composed = self.cache_manager.compose_key(
+                cache_info.namespace, cache_info.version, cache_info.cache_key
+            )
+            cache_requests.append(
+                {
+                    "namespace": cache_info.namespace,
+                    "version": cache_info.version,
+                    "cache_key": cache_info.cache_key,
+                }
+            )
+
+        if cache_requests:
+            cache_hits = self.cache_manager.bulk_get(cache_requests)
+            if cache_hits:
+                for job in jobs:
+                    if job.cache_key_composed and job.cache_key_composed in cache_hits:
+                        cached_value = cache_hits[job.cache_key_composed]
+                        for idx in job.indices:
+                            results[idx] = cached_value
+                        job.skip_execution = True
+
+        return [job for job in jobs if not job.skip_execution]
+
+    def _execute_batch_jobs(
+        self,
+        jobs_to_run: List[_BatchJob],
+        results: List[Any],
+        *,
+        stream_callback,
+        use_cache: bool,
+        max_workers: Optional[int],
+    ) -> None:
+        if not jobs_to_run:
+            return
+
+        tool_semaphores: Dict[str, Optional[threading.Semaphore]] = {}
+
+        def run_job(job: _BatchJob):
+            semaphore = self._get_tool_semaphore(job, tool_semaphores)
+            if semaphore:
+                semaphore.acquire()
+            try:
+                result = self.run_one_function(
+                    job.call,
+                    stream_callback=stream_callback,
+                    use_cache=use_cache,
+                )
+            finally:
+                if semaphore:
+                    semaphore.release()
+
+            for idx in job.indices:
+                results[idx] = result
+
+        if max_workers and max_workers > 1:
+            with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                futures = [executor.submit(run_job, job) for job in jobs_to_run]
+                for future in as_completed(futures):
+                    future.result()
+        else:
+            for job in jobs_to_run:
+                run_job(job)
+
+    def _ensure_tool_instance(self, job: _BatchJob):
+        if job.tool_instance is None and job.function_name:
+            job.tool_instance = self._get_tool_instance(job.function_name, cache=True)
+        return job.tool_instance
+
+    def _get_tool_semaphore(
+        self,
+        job: _BatchJob,
+        tool_semaphores: Dict[str, Optional[threading.Semaphore]],
+    ) -> Optional[threading.Semaphore]:
+        if job.function_name not in tool_semaphores:
+            tool_instance = self._ensure_tool_instance(job)
+            limit = (
+                tool_instance.get_batch_concurrency_limit()
+                if tool_instance is not None
+                else 0
+            )
+            self.logger.debug("Batch concurrency for %s: %s", job.function_name, limit)
+            if limit and limit > 0:
+                tool_semaphores[job.function_name] = threading.Semaphore(limit)
+            else:
+                tool_semaphores[job.function_name] = None
+
+        return tool_semaphores[job.function_name]
+
     def run(
         self,
         fcall_str,
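
The semaphore helper above throttles per tool: `_get_tool_semaphore` lazily creates at most one `threading.Semaphore` per tool name, sized by `get_batch_concurrency_limit()`, with 0 (or a missing instance) meaning unthrottled. The pattern in isolation, with made-up tool names and limits:

```python
import threading
from concurrent.futures import ThreadPoolExecutor

# Hypothetical per-tool limits; in the diff these come from
# tool_instance.get_batch_concurrency_limit(), where 0 means no cap.
limits = {"slow_api": 2, "local_tool": 0}
semaphores = {
    name: threading.Semaphore(limit) if limit else None
    for name, limit in limits.items()
}


def call_tool(name: str, payload: int) -> str:
    sem = semaphores[name]
    if sem:
        sem.acquire()  # at most `limit` concurrent calls for this tool
    try:
        return f"{name}({payload})"
    finally:
        if sem:
            sem.release()


# Eight pool workers, but "slow_api" never runs more than 2 calls at once.
with ThreadPoolExecutor(max_workers=8) as pool:
    print(list(pool.map(lambda i: call_tool("slow_api", i), range(6))))
```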
@@ -1681,6 +1964,8 @@
         verbose=True,
         format="llama",
         stream_callback=None,
+        use_cache: bool = False,
+        max_workers: Optional[int] = None,
     ):
         """
         Execute function calls from input string or data.
@@ -1711,14 +1996,18 @@
         message = ""  # Initialize message for cases where return_message=False
         if function_call_json is not None:
             if isinstance(function_call_json, list):
-                # return the function call+result message with call id.
+                # Execute the batch (optionally in parallel) and attach call IDs to maintain downstream compatibility.
+                batch_results = self._execute_function_call_list(
+                    function_call_json,
+                    stream_callback=stream_callback,
+                    use_cache=use_cache,
+                    max_workers=max_workers,
+                )
+
                 call_results = []
-                for i in range(len(function_call_json)):
-                    call_result = self.run_one_function(
-                        function_call_json[i], stream_callback=stream_callback
-                    )
+                for idx, call_result in enumerate(batch_results):
                     call_id = self.call_id_gen()
-                    function_call_json[i]["call_id"] = call_id
+                    function_call_json[idx]["call_id"] = call_id
                     call_results.append(
                         {
                             "role": "tool",
@@ -1737,7 +2026,9 @@
                 return revised_messages
             else:
                 return self.run_one_function(
-                    function_call_json, stream_callback=stream_callback
+                    function_call_json,
+                    stream_callback=stream_callback,
+                    use_cache=use_cache,
                 )
         else:
             error("Not a function call")
@@ -2658,3 +2949,217 @@
         original_count = len(self.all_tools)
         self.load_tools(include_tools=tool_names)
         return len(self.all_tools) - original_count
+
+    def load_space(self, uri: str, **kwargs) -> Dict[str, Any]:
+        """
+        Load Space configuration and apply it to the ToolUniverse instance.
+
+        This is a high-level method that loads a Space configuration from various
+        sources (HuggingFace, local files, HTTP URLs) and applies the tool settings
+        to the current instance.
+
+        Args:
+            uri: Space URI (e.g., "hf:user/repo", "./config.yaml", "https://example.com/config.yaml")
+            **kwargs: Additional parameters to override Space configuration
+                (e.g., exclude_tools=["tool1"], include_tools=["tool2"])
+
+        Returns:
+            dict: The loaded Space configuration
+
+        Examples:
+            # Load from HuggingFace
+            config = tu.load_space("hf:community/proteomics-toolkit")
+
+            # Load local file with overrides
+            config = tu.load_space("./my-config.yaml", exclude_tools=["slow_tool"])
+
+            # Load from HTTP URL
+            config = tu.load_space("https://example.com/config.yaml")
+        """
+        # Lazy import to avoid circular import issues
+        from .space import SpaceLoader
+
+        # Load Space configuration
+        loader = SpaceLoader()
+        config = loader.load(uri)
+
+        # Extract tool configuration
+        tools_config = config.get("tools", {})
+
+        # Merge with override parameters
+        tool_type = kwargs.get("tool_type") or tools_config.get("categories")
+        exclude_tools = kwargs.get("exclude_tools") or tools_config.get(
+            "exclude_tools", []
+        )
+        exclude_categories = kwargs.get("exclude_categories") or tools_config.get(
+            "exclude_categories", []
+        )
+        include_tools = kwargs.get("include_tools") or tools_config.get(
+            "include_tools", []
+        )
+        include_tool_types = kwargs.get("include_tool_types") or tools_config.get(
+            "include_tool_types", []
+        )
+        exclude_tool_types = kwargs.get("exclude_tool_types") or tools_config.get(
+            "exclude_tool_types", []
+        )
+
+        # Load tools with merged configuration
+        self.load_tools(
+            tool_type=tool_type,
+            exclude_tools=exclude_tools,
+            exclude_categories=exclude_categories,
+            include_tools=include_tools,
+            include_tool_types=include_tool_types,
+            exclude_tool_types=exclude_tool_types,
+        )
+
+        # Store the configuration for reference
+        self._current_space_config = config
+
+        # Apply additional configurations (LLM, hooks, etc.)
+        try:
+            # Apply LLM configuration if present
+            llm_config = config.get("llm_config")
+            if llm_config:
+                self._apply_llm_config(llm_config)
+
+            # Apply hooks configuration if present
+            hooks_config = config.get("hooks")
+            if hooks_config:
+                self._apply_hooks_config(hooks_config)
+
+            # Store metadata
+            self._store_space_metadata(config)
+
+        except Exception as e:
+            # Use print since logging might not be available
+            print(f"⚠️ Failed to apply Space configurations: {e}")
+
+        return config
+
+    def _apply_llm_config(self, llm_config: Dict[str, Any]):
+        """
+        Apply LLM configuration from Space.
+
+        Args:
+            llm_config: LLM configuration dictionary
+        """
+        try:
+            import os
+
+            # Store LLM configuration
+            self._space_llm_config = llm_config
+
+            # Set environment variables for LLM configuration
+            # Set configuration mode
+            mode = llm_config.get("mode", "default")
+            os.environ["TOOLUNIVERSE_LLM_CONFIG_MODE"] = mode
+
+            # Set default provider
+            if "default_provider" in llm_config:
+                os.environ["TOOLUNIVERSE_LLM_DEFAULT_PROVIDER"] = llm_config[
+                    "default_provider"
+                ]
+
+            # Set model mappings
+            models = llm_config.get("models", {})
+            for task, model in models.items():
+                env_var = f"TOOLUNIVERSE_LLM_MODEL_{task.upper()}"
+                os.environ[env_var] = model
+
+            # Set temperature
+            temperature = llm_config.get("temperature")
+            if temperature is not None:
+                os.environ["TOOLUNIVERSE_LLM_TEMPERATURE"] = str(temperature)
+
+            # Note: max_tokens is handled by LLM client automatically, not needed here
+
+            print(
+                f"🤖 LLM configuration applied: {llm_config.get('default_provider', 'unknown')}"
+            )
+
+        except Exception as e:
+            print(f"⚠️ Failed to apply LLM configuration: {e}")
+
+    def _apply_hooks_config(self, hooks_config: List[Dict[str, Any]]):
+        """
+        Apply hooks configuration from Space.
+
+        Args:
+            hooks_config: Hooks configuration list
+        """
+        try:
+            # Convert Space hooks format to ToolUniverse hook_config format
+            hook_config = {
+                "hooks": hooks_config,
+                "global_settings": {
+                    "default_timeout": 30,
+                    "max_hook_depth": 3,
+                    "enable_hook_caching": True,
+                    "hook_execution_order": "priority_desc",
+                },
+            }
+
+            # Enable hooks if not already enabled
+            if not self.hooks_enabled:
+                self.toggle_hooks(True)
+
+            # Update hook manager configuration
+            if self.hook_manager:
+                self.hook_manager.config = hook_config
+                self.hook_manager._load_hooks()
+                print(f"🔗 Hooks configuration applied: {len(hooks_config)} hooks")
+            else:
+                print("⚠️ Hook manager not available")
+
+        except Exception as e:
+            print(f"⚠️ Failed to apply hooks configuration: {e}")
+
+    def _store_space_metadata(self, config: Dict[str, Any]):
+        """
+        Store Space metadata for reference.
+
+        Args:
+            config: Space configuration dictionary
+        """
+        try:
+            # Store metadata
+            self._space_metadata = {
+                "name": config.get("name"),
+                "version": config.get("version"),
+                "description": config.get("description"),
+                "tags": config.get("tags", []),
+                "required_env": config.get("required_env", []),
+            }
+
+            # Check for missing environment variables
+            if config.get("required_env"):
+                import os
+
+                missing_env = [
+                    env for env in config["required_env"] if not os.getenv(env)
+                ]
+                if missing_env:
+                    print(f"⚠️ Missing environment variables: {', '.join(missing_env)}")
+
+        except Exception as e:
+            print(f"⚠️ Failed to store Space metadata: {e}")
+
+    def get_space_llm_config(self) -> Optional[Dict[str, Any]]:
+        """
+        Get the current Space LLM configuration.
+
+        Returns:
+            LLM configuration dictionary or None if not set
+        """
+        return getattr(self, "_space_llm_config", None)
+
+    def get_space_metadata(self) -> Optional[Dict[str, Any]]:
+        """
+        Get the current Space metadata.
+
+        Returns:
+            Space metadata dictionary or None if not set
+        """
+        return getattr(self, "_space_metadata", None)
@@ -68,7 +68,7 @@ class FilteringHook(OutputHook):
             arguments (Dict[str, Any]): Arguments passed to the tool
             context (Dict[str, Any]): Additional context information

-        Returns:
+        Returns
             Any: The filtered output, or original output if filtering fails
         """
         try:
@@ -153,7 +153,7 @@ class FormattingHook(OutputHook):
             arguments (Dict[str, Any]): Arguments passed to the tool
             context (Dict[str, Any]): Additional context information

-        Returns:
+        Returns
             Any: The formatted output, or original output if formatting fails
         """
         try:
@@ -263,7 +263,7 @@ class ValidationHook(OutputHook):
             arguments (Dict[str, Any]): Arguments passed to the tool
             context (Dict[str, Any]): Additional context information

-        Returns:
+        Returns
             Any: The validated output, or original output if validation fails
         """
         try:
@@ -380,7 +380,7 @@ class LoggingHook(OutputHook):
             arguments (Dict[str, Any]): Arguments passed to the tool
             context (Dict[str, Any]): Additional context information

-        Returns:
+        Returns
             Any: The original output (logging doesn't modify the output)
         """
         try:
@@ -99,7 +99,7 @@ class GeneOntologyTool(BaseTool):
         Args:
             arguments (Dict[str, Any]): Parameters for the API call.

-        Returns:
+        Returns
             Any: The JSON data from the API or an error dictionary.
         """
         # Normalize arguments
@@ -252,7 +252,7 @@ def get_shared_client(
            shared instance already exists, these parameters are
            ignored.

-    Returns:
+    Returns
        ToolUniverse: The client instance to use for tool execution

    Thread Safety:
@@ -264,7 +264,7 @@ def get_shared_client(
    of the shared instance. Subsequent calls with different parameters
    will not affect the already-created instance.

-    Examples:
+    Examples
        # Basic usage
        client = get_shared_client()

@@ -313,7 +313,7 @@ def reset_shared_client():
    may cause unexpected behavior. It's recommended to only call this
    function when you're certain no other threads are accessing the client.

-    Examples:
+    Examples
        # Reset for testing
        reset_shared_client()