tooluniverse 1.0.9.1__py3-none-any.whl → 1.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tooluniverse might be problematic.
- tooluniverse/__init__.py +57 -1
- tooluniverse/admetai_tool.py +1 -1
- tooluniverse/agentic_tool.py +65 -17
- tooluniverse/base_tool.py +19 -8
- tooluniverse/blast_tool.py +132 -0
- tooluniverse/boltz_tool.py +3 -3
- tooluniverse/cache/result_cache_manager.py +167 -12
- tooluniverse/cbioportal_tool.py +42 -0
- tooluniverse/clinvar_tool.py +268 -74
- tooluniverse/compose_scripts/drug_safety_analyzer.py +1 -1
- tooluniverse/compose_scripts/multi_agent_literature_search.py +1 -1
- tooluniverse/compose_scripts/output_summarizer.py +4 -4
- tooluniverse/compose_scripts/tool_discover.py +1941 -443
- tooluniverse/compose_scripts/tool_graph_composer.py +1 -1
- tooluniverse/compose_scripts/tool_metadata_generator.py +1 -1
- tooluniverse/compose_tool.py +9 -9
- tooluniverse/core_tool.py +2 -2
- tooluniverse/ctg_tool.py +4 -4
- tooluniverse/custom_tool.py +1 -1
- tooluniverse/data/agentic_tools.json +0 -370
- tooluniverse/data/alphafold_tools.json +6 -6
- tooluniverse/data/blast_tools.json +112 -0
- tooluniverse/data/cbioportal_tools.json +87 -0
- tooluniverse/data/clinvar_tools.json +235 -0
- tooluniverse/data/compose_tools.json +0 -89
- tooluniverse/data/dbsnp_tools.json +275 -0
- tooluniverse/data/emdb_tools.json +61 -0
- tooluniverse/data/ensembl_tools.json +259 -0
- tooluniverse/data/file_download_tools.json +275 -0
- tooluniverse/data/geo_tools.json +200 -48
- tooluniverse/data/gnomad_tools.json +109 -0
- tooluniverse/data/gtopdb_tools.json +68 -0
- tooluniverse/data/gwas_tools.json +32 -0
- tooluniverse/data/interpro_tools.json +199 -0
- tooluniverse/data/jaspar_tools.json +70 -0
- tooluniverse/data/kegg_tools.json +356 -0
- tooluniverse/data/mpd_tools.json +87 -0
- tooluniverse/data/ols_tools.json +314 -0
- tooluniverse/data/package_discovery_tools.json +64 -0
- tooluniverse/data/packages/categorized_tools.txt +0 -1
- tooluniverse/data/packages/machine_learning_tools.json +0 -47
- tooluniverse/data/paleobiology_tools.json +91 -0
- tooluniverse/data/pride_tools.json +62 -0
- tooluniverse/data/pypi_package_inspector_tools.json +158 -0
- tooluniverse/data/python_executor_tools.json +341 -0
- tooluniverse/data/regulomedb_tools.json +50 -0
- tooluniverse/data/remap_tools.json +89 -0
- tooluniverse/data/screen_tools.json +89 -0
- tooluniverse/data/tool_discovery_agents.json +428 -0
- tooluniverse/data/tool_discovery_agents.json.backup +1343 -0
- tooluniverse/data/uniprot_tools.json +77 -0
- tooluniverse/data/web_search_tools.json +250 -0
- tooluniverse/data/worms_tools.json +55 -0
- tooluniverse/dataset_tool.py +2 -2
- tooluniverse/dbsnp_tool.py +196 -58
- tooluniverse/default_config.py +36 -3
- tooluniverse/emdb_tool.py +30 -0
- tooluniverse/enrichr_tool.py +14 -14
- tooluniverse/ensembl_tool.py +140 -47
- tooluniverse/execute_function.py +594 -29
- tooluniverse/extended_hooks.py +4 -4
- tooluniverse/file_download_tool.py +269 -0
- tooluniverse/gene_ontology_tool.py +1 -1
- tooluniverse/generate_tools.py +3 -3
- tooluniverse/geo_tool.py +81 -28
- tooluniverse/gnomad_tool.py +100 -52
- tooluniverse/gtopdb_tool.py +41 -0
- tooluniverse/humanbase_tool.py +10 -10
- tooluniverse/interpro_tool.py +72 -0
- tooluniverse/jaspar_tool.py +30 -0
- tooluniverse/kegg_tool.py +230 -0
- tooluniverse/logging_config.py +2 -2
- tooluniverse/mcp_client_tool.py +57 -129
- tooluniverse/mcp_integration.py +52 -49
- tooluniverse/mcp_tool_registry.py +147 -528
- tooluniverse/mpd_tool.py +42 -0
- tooluniverse/ncbi_eutils_tool.py +96 -0
- tooluniverse/ols_tool.py +435 -0
- tooluniverse/openalex_tool.py +8 -8
- tooluniverse/openfda_tool.py +2 -2
- tooluniverse/output_hook.py +15 -15
- tooluniverse/package_discovery_tool.py +217 -0
- tooluniverse/package_tool.py +1 -1
- tooluniverse/paleobiology_tool.py +30 -0
- tooluniverse/pmc_tool.py +2 -2
- tooluniverse/pride_tool.py +30 -0
- tooluniverse/pypi_package_inspector_tool.py +593 -0
- tooluniverse/python_executor_tool.py +711 -0
- tooluniverse/regulomedb_tool.py +30 -0
- tooluniverse/remap_tool.py +44 -0
- tooluniverse/remote/boltz/boltz_mcp_server.py +1 -1
- tooluniverse/remote/depmap_24q2/depmap_24q2_mcp_tool.py +3 -3
- tooluniverse/remote/immune_compass/compass_tool.py +3 -3
- tooluniverse/remote/pinnacle/pinnacle_tool.py +2 -2
- tooluniverse/remote/transcriptformer/transcriptformer_tool.py +3 -3
- tooluniverse/remote/uspto_downloader/uspto_downloader_mcp_server.py +3 -3
- tooluniverse/remote_tool.py +4 -4
- tooluniverse/screen_tool.py +44 -0
- tooluniverse/scripts/filter_tool_files.py +2 -2
- tooluniverse/smcp.py +93 -12
- tooluniverse/smcp_server.py +100 -21
- tooluniverse/space/__init__.py +46 -0
- tooluniverse/space/loader.py +133 -0
- tooluniverse/space/validator.py +353 -0
- tooluniverse/tool_finder_embedding.py +5 -3
- tooluniverse/tool_finder_keyword.py +12 -10
- tooluniverse/tool_finder_llm.py +12 -8
- tooluniverse/tools/{UCSC_get_genes_by_region.py → BLAST_nucleotide_search.py} +22 -26
- tooluniverse/tools/BLAST_protein_search.py +63 -0
- tooluniverse/tools/ClinVar_search_variants.py +26 -15
- tooluniverse/tools/CodeQualityAnalyzer.py +3 -3
- tooluniverse/tools/EMDB_get_structure.py +46 -0
- tooluniverse/tools/GtoPdb_get_targets.py +52 -0
- tooluniverse/tools/InterPro_get_domain_details.py +46 -0
- tooluniverse/tools/InterPro_get_protein_domains.py +49 -0
- tooluniverse/tools/InterPro_search_domains.py +52 -0
- tooluniverse/tools/JASPAR_get_transcription_factors.py +52 -0
- tooluniverse/tools/MPD_get_phenotype_data.py +59 -0
- tooluniverse/tools/PRIDE_search_proteomics.py +52 -0
- tooluniverse/tools/PackageAnalyzer.py +55 -0
- tooluniverse/tools/Paleobiology_get_fossils.py +52 -0
- tooluniverse/tools/PyPIPackageInspector.py +59 -0
- tooluniverse/tools/ReMap_get_transcription_factor_binding.py +59 -0
- tooluniverse/tools/ReferenceInfoAnalyzer.py +55 -0
- tooluniverse/tools/RegulomeDB_query_variant.py +46 -0
- tooluniverse/tools/SCREEN_get_regulatory_elements.py +59 -0
- tooluniverse/tools/{ArgumentDescriptionOptimizer.py → TestResultsAnalyzer.py} +13 -13
- tooluniverse/tools/ToolDiscover.py +11 -11
- tooluniverse/tools/UniProt_id_mapping.py +63 -0
- tooluniverse/tools/UniProt_search.py +63 -0
- tooluniverse/tools/UnifiedToolGenerator.py +59 -0
- tooluniverse/tools/WoRMS_search_species.py +49 -0
- tooluniverse/tools/XMLToolOptimizer.py +55 -0
- tooluniverse/tools/__init__.py +119 -29
- tooluniverse/tools/_shared_client.py +3 -3
- tooluniverse/tools/alphafold_get_annotations.py +3 -3
- tooluniverse/tools/alphafold_get_prediction.py +3 -3
- tooluniverse/tools/alphafold_get_summary.py +3 -3
- tooluniverse/tools/cBioPortal_get_cancer_studies.py +46 -0
- tooluniverse/tools/cBioPortal_get_mutations.py +52 -0
- tooluniverse/tools/{gnomAD_query_variant.py → clinvar_get_clinical_significance.py} +8 -11
- tooluniverse/tools/clinvar_get_variant_details.py +49 -0
- tooluniverse/tools/dbSNP_get_variant_by_rsid.py +7 -7
- tooluniverse/tools/dbsnp_get_frequencies.py +46 -0
- tooluniverse/tools/dbsnp_search_by_gene.py +52 -0
- tooluniverse/tools/download_binary_file.py +66 -0
- tooluniverse/tools/download_file.py +71 -0
- tooluniverse/tools/download_text_content.py +55 -0
- tooluniverse/tools/dynamic_package_discovery.py +59 -0
- tooluniverse/tools/ensembl_get_sequence.py +52 -0
- tooluniverse/tools/{Ensembl_lookup_gene_by_symbol.py → ensembl_get_variants.py} +11 -11
- tooluniverse/tools/ensembl_lookup_gene.py +46 -0
- tooluniverse/tools/geo_get_dataset_info.py +46 -0
- tooluniverse/tools/geo_get_sample_info.py +46 -0
- tooluniverse/tools/geo_search_datasets.py +67 -0
- tooluniverse/tools/gnomad_get_gene_constraints.py +49 -0
- tooluniverse/tools/kegg_find_genes.py +52 -0
- tooluniverse/tools/kegg_get_gene_info.py +46 -0
- tooluniverse/tools/kegg_get_pathway_info.py +46 -0
- tooluniverse/tools/kegg_list_organisms.py +44 -0
- tooluniverse/tools/kegg_search_pathway.py +46 -0
- tooluniverse/tools/ols_find_similar_terms.py +63 -0
- tooluniverse/tools/{get_hyperopt_info.py → ols_get_ontology_info.py} +13 -10
- tooluniverse/tools/ols_get_term_ancestors.py +67 -0
- tooluniverse/tools/ols_get_term_children.py +67 -0
- tooluniverse/tools/{TestCaseGenerator.py → ols_get_term_info.py} +12 -9
- tooluniverse/tools/{CodeOptimizer.py → ols_search_ontologies.py} +22 -14
- tooluniverse/tools/ols_search_terms.py +71 -0
- tooluniverse/tools/python_code_executor.py +79 -0
- tooluniverse/tools/python_script_runner.py +79 -0
- tooluniverse/tools/web_api_documentation_search.py +63 -0
- tooluniverse/tools/web_search.py +71 -0
- tooluniverse/uniprot_tool.py +219 -16
- tooluniverse/url_tool.py +19 -1
- tooluniverse/uspto_tool.py +1 -1
- tooluniverse/utils.py +12 -12
- tooluniverse/web_search_tool.py +229 -0
- tooluniverse/worms_tool.py +64 -0
- {tooluniverse-1.0.9.1.dist-info → tooluniverse-1.0.11.dist-info}/METADATA +8 -3
- {tooluniverse-1.0.9.1.dist-info → tooluniverse-1.0.11.dist-info}/RECORD +184 -92
- tooluniverse/data/genomics_tools.json +0 -174
- tooluniverse/tools/ToolDescriptionOptimizer.py +0 -67
- tooluniverse/tools/ToolImplementationGenerator.py +0 -67
- tooluniverse/tools/ToolOptimizer.py +0 -59
- tooluniverse/tools/ToolSpecificationGenerator.py +0 -67
- tooluniverse/tools/ToolSpecificationOptimizer.py +0 -63
- tooluniverse/ucsc_tool.py +0 -60
- {tooluniverse-1.0.9.1.dist-info → tooluniverse-1.0.11.dist-info}/WHEEL +0 -0
- {tooluniverse-1.0.9.1.dist-info → tooluniverse-1.0.11.dist-info}/entry_points.txt +0 -0
- {tooluniverse-1.0.9.1.dist-info → tooluniverse-1.0.11.dist-info}/licenses/LICENSE +0 -0
- {tooluniverse-1.0.9.1.dist-info → tooluniverse-1.0.11.dist-info}/top_level.txt +0 -0
tooluniverse/execute_function.py
CHANGED
@@ -33,8 +33,11 @@ import os
 import time
 import hashlib
 import warnings
+import threading
 from pathlib import Path
 from contextlib import nullcontext
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from dataclasses import dataclass, field
 from typing import Any, Dict, List, Optional
 from .utils import read_json_list, evaluate_function_call, extract_function_call_json
 from .exceptions import (
@@ -96,6 +99,26 @@ for _tool_name, _tool_class in sorted(tool_type_mappings.items()):
     debug(f" - {_tool_name}: {_tool_class.__name__}")


+@dataclass
+class _BatchCacheInfo:
+    namespace: str
+    version: str
+    cache_key: str
+
+
+@dataclass
+class _BatchJob:
+    signature: str
+    call: Dict[str, Any]
+    function_name: str
+    arguments: Dict[str, Any]
+    indices: List[int] = field(default_factory=list)
+    tool_instance: Any = None
+    cache_info: Optional[_BatchCacheInfo] = None
+    cache_key_composed: Optional[str] = None
+    skip_execution: bool = False
+
+
 class ToolCallable:
     """
     A callable wrapper for a tool that validates kwargs and calls run_one_function.
@@ -334,36 +357,102 @@ class ToolUniverse:
         # Initialize dynamic tools namespace
         self.tools = ToolNamespace(self)

-    def register_custom_tool(
+    def register_custom_tool(
+        self,
+        tool_class,
+        tool_name=None,
+        tool_config=None,
+        instantiate=False,
+        tool_instance=None,
+    ):
         """
-        Register a custom tool class at runtime.
+        Register a custom tool class or instance at runtime.

         Args:
-            tool_class: The tool class to register
+            tool_class: The tool class to register (required if tool_instance is None)
             tool_name (str, optional): Name to register under. Uses class name if None.
             tool_config (dict, optional): Tool configuration dictionary to add to all_tools
+            instantiate (bool, optional): If True, immediately instantiate and cache the tool.
+                Defaults to False for backward compatibility.
+            tool_instance (optional): Pre-instantiated tool object. If provided, tool_class
+                is inferred from the instance.

         Returns:
             str: The name the tool was registered under
+
+        Examples:
+            # Register tool class only (lazy instantiation)
+            tu.register_custom_tool(MyTool, tool_config={...})
+
+            # Register and immediately instantiate
+            tu.register_custom_tool(MyTool, tool_config={...}, instantiate=True)
+
+            # Register pre-instantiated tool
+            instance = MyTool({...})
+            tu.register_custom_tool(tool_class=MyTool, tool_instance=instance, tool_config={...})
         """
+        # If tool_instance is provided, infer tool_class from it
+        if tool_instance is not None:
+            tool_class = tool_instance.__class__
+        elif tool_class is None:
+            raise ValueError("Either tool_class or tool_instance must be provided")
+
         name = tool_name or tool_class.__name__

-        # Register the tool class
+        # Register the tool class to global registry
         register_external_tool(name, tool_class)

         # Update the global tool_type_mappings
         global tool_type_mappings
         tool_type_mappings = get_tool_registry()

-        #
+        # Process tool_config if provided
         if tool_config:
            # Ensure the config has the correct type
            if "type" not in tool_config:
                tool_config["type"] = name

            self.all_tools.append(tool_config)
-
-
+            tool_name_in_config = tool_config.get("name", name)
+            self.all_tool_dict[tool_name_in_config] = tool_config
+
+            # Handle tool instantiation
+            if tool_instance is not None:
+                # Use provided instance
+                self.callable_functions[tool_name_in_config] = tool_instance
+                self.logger.debug(
+                    f"Registered pre-instantiated tool '{tool_name_in_config}'"
+                )
+            elif instantiate:
+                # Instantiate now
+                try:
+                    # Use the same logic as _get_or_initialize_tool (line 2318)
+                    # Try to instantiate with tool_config parameter
+                    try:
+                        instance = tool_class(
+                            tool_config=tool_config
+                        )  # ✅ use keyword arguments
+                    except TypeError:
+                        # If tool doesn't accept tool_config, try without parameters
+                        instance = tool_class()
+
+                    self.callable_functions[tool_name_in_config] = instance
+                    self.logger.debug(
+                        f"Instantiated and cached tool '{tool_name_in_config}'"
+                    )
+                except Exception as e:
+                    self.logger.error(
+                        f"Failed to instantiate tool '{tool_name_in_config}': {e}"
+                    )
+                    raise
+            # else: lazy instantiation (existing behavior)
+
+            # Add to category for proper organization
+            category = tool_config.get("category", "custom")
+            if category not in self.tool_category_dicts:
+                self.tool_category_dicts[category] = []
+            if tool_name_in_config not in self.tool_category_dicts[category]:
+                self.tool_category_dicts[category].append(tool_name_in_config)

         self.logger.info(f"Custom tool '{name}' registered successfully!")
         return name
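Note: the register_custom_tool changes above add an instantiate flag and a tool_instance parameter. Below is a minimal usage sketch; the EchoTool class and its config fields are hypothetical and only illustrate the two new paths (real tools normally subclass BaseTool).

from tooluniverse import ToolUniverse


class EchoTool:
    """Hypothetical custom tool used only for illustration."""

    def __init__(self, tool_config=None):
        self.tool_config = tool_config or {}

    def run(self, arguments):
        return {"echo": arguments}


tu = ToolUniverse()

# Register the class and instantiate it immediately (new instantiate=True path)
tu.register_custom_tool(
    EchoTool,
    tool_config={"name": "echo_tool", "type": "EchoTool"},
    instantiate=True,
)

# Register a pre-built instance (new tool_instance parameter), mirroring the docstring example
instance = EchoTool({"name": "echo_tool_2", "type": "EchoTool"})
tu.register_custom_tool(
    tool_class=EchoTool,
    tool_instance=instance,
    tool_config={"name": "echo_tool_2", "type": "EchoTool"},
)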
@@ -474,8 +563,9 @@ class ToolUniverse:
             for key in sorted(all_missing_keys):
                 f.write(f"{key}=your_api_key_here\n\n")

-        self.logger.info(
-
+        self.logger.info(
+            f"Generated API key template: {output_file}. Copy this file to .env and fill in your API keys"
+        )

     def _create_hook_config_from_type(self, hook_type):
         """
@@ -877,8 +967,10 @@ class ToolUniverse:

         # Generate template for missing API keys
         if len(all_missing_keys) > 0:
-            warning(
-
+            warning(
+                f"Some tools will not be loaded due to missing API keys: {', '.join(all_missing_keys)}"
+            )
+            # info("Generating .env.template file with missing API keys...")
             self.generate_env_template(all_missing_keys)

     def _load_auto_discovered_configs(self):
@@ -899,7 +991,9 @@ class ToolUniverse:
         for _tool_type, config in discovered_configs.items():
             # Add to all_tools if not already present
             if "name" in config and config["name"] not in [
-                tool.get("name")
+                tool.get("name")
+                for tool in self.all_tools
+                if isinstance(tool, dict)
             ]:
                 self.all_tools.append(config)
                 self.logger.debug(f"Added auto-discovered config: {config['name']}")
@@ -1674,6 +1768,198 @@ class ToolUniverse:
         """
         return copy.deepcopy(self.all_tools)

+    def _execute_function_call_list(
+        self,
+        function_calls: List[Dict[str, Any]],
+        stream_callback=None,
+        use_cache: bool = False,
+        max_workers: Optional[int] = None,
+    ) -> List[Any]:
+        """Execute a list of function calls, optionally in parallel.
+
+        Args:
+            function_calls: Ordered list of function call dictionaries.
+            stream_callback: Optional streaming callback.
+            use_cache: Whether to enable cache lookups for each call.
+            max_workers: Maximum parallel workers; values <=1 fall back to sequential execution.
+
+        Returns:
+            List of results aligned with ``function_calls`` order.
+        """
+
+        if not function_calls:
+            return []
+
+        if stream_callback is not None and max_workers and max_workers > 1:
+            # Streaming multiple calls concurrently is ambiguous; fall back to sequential execution.
+            self.logger.warning(
+                "stream_callback is not supported with parallel batch execution; falling back to sequential mode"
+            )
+            max_workers = 1
+
+        jobs = self._build_batch_jobs(function_calls)
+        results: List[Any] = [None] * len(function_calls)
+
+        jobs_to_run = self._prime_batch_cache(jobs, use_cache, results)
+        if not jobs_to_run:
+            return results
+
+        self._execute_batch_jobs(
+            jobs_to_run,
+            results,
+            stream_callback=stream_callback,
+            use_cache=use_cache,
+            max_workers=max_workers,
+        )
+
+        return results
+
+    def _build_batch_jobs(
+        self, function_calls: List[Dict[str, Any]]
+    ) -> List[_BatchJob]:
+        signature_to_job: Dict[str, _BatchJob] = {}
+        jobs: List[_BatchJob] = []
+
+        for idx, call in enumerate(function_calls):
+            function_name = call.get("name", "")
+            arguments = call.get("arguments", {})
+            if not isinstance(arguments, dict):
+                arguments = {}
+
+            signature = json.dumps(
+                {"name": function_name, "arguments": arguments}, sort_keys=True
+            )
+
+            job = signature_to_job.get(signature)
+            if job is None:
+                job = _BatchJob(
+                    signature=signature,
+                    call=call,
+                    function_name=function_name,
+                    arguments=arguments,
+                )
+                signature_to_job[signature] = job
+                jobs.append(job)
+
+            job.indices.append(idx)
+
+        return jobs
+
+    def _prime_batch_cache(
+        self,
+        jobs: List[_BatchJob],
+        use_cache: bool,
+        results: List[Any],
+    ) -> List[_BatchJob]:
+        if not (
+            use_cache and self.cache_manager is not None and self.cache_manager.enabled
+        ):
+            return jobs
+
+        cache_requests: List[Dict[str, str]] = []
+        for job in jobs:
+            if not job.function_name:
+                continue
+
+            tool_instance = self._ensure_tool_instance(job)
+            if not tool_instance or not tool_instance.supports_caching():
+                continue
+
+            cache_key = tool_instance.get_cache_key(job.arguments or {})
+            cache_info = _BatchCacheInfo(
+                namespace=tool_instance.get_cache_namespace(),
+                version=tool_instance.get_cache_version(),
+                cache_key=cache_key,
+            )
+            job.cache_info = cache_info
+            job.cache_key_composed = self.cache_manager.compose_key(
+                cache_info.namespace, cache_info.version, cache_info.cache_key
+            )
+            cache_requests.append(
+                {
+                    "namespace": cache_info.namespace,
+                    "version": cache_info.version,
+                    "cache_key": cache_info.cache_key,
+                }
+            )
+
+        if cache_requests:
+            cache_hits = self.cache_manager.bulk_get(cache_requests)
+            if cache_hits:
+                for job in jobs:
+                    if job.cache_key_composed and job.cache_key_composed in cache_hits:
+                        cached_value = cache_hits[job.cache_key_composed]
+                        for idx in job.indices:
+                            results[idx] = cached_value
+                        job.skip_execution = True
+
+        return [job for job in jobs if not job.skip_execution]
+
+    def _execute_batch_jobs(
+        self,
+        jobs_to_run: List[_BatchJob],
+        results: List[Any],
+        *,
+        stream_callback,
+        use_cache: bool,
+        max_workers: Optional[int],
+    ) -> None:
+        if not jobs_to_run:
+            return
+
+        tool_semaphores: Dict[str, Optional[threading.Semaphore]] = {}
+
+        def run_job(job: _BatchJob):
+            semaphore = self._get_tool_semaphore(job, tool_semaphores)
+            if semaphore:
+                semaphore.acquire()
+            try:
+                result = self.run_one_function(
+                    job.call,
+                    stream_callback=stream_callback,
+                    use_cache=use_cache,
+                )
+            finally:
+                if semaphore:
+                    semaphore.release()
+
+            for idx in job.indices:
+                results[idx] = result
+
+        if max_workers and max_workers > 1:
+            with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                futures = [executor.submit(run_job, job) for job in jobs_to_run]
+                for future in as_completed(futures):
+                    future.result()
+        else:
+            for job in jobs_to_run:
+                run_job(job)
+
+    def _ensure_tool_instance(self, job: _BatchJob):
+        if job.tool_instance is None and job.function_name:
+            job.tool_instance = self._get_tool_instance(job.function_name, cache=True)
+        return job.tool_instance
+
+    def _get_tool_semaphore(
+        self,
+        job: _BatchJob,
+        tool_semaphores: Dict[str, Optional[threading.Semaphore]],
+    ) -> Optional[threading.Semaphore]:
+        if job.function_name not in tool_semaphores:
+            tool_instance = self._ensure_tool_instance(job)
+            limit = (
+                tool_instance.get_batch_concurrency_limit()
+                if tool_instance is not None
+                else 0
+            )
+            self.logger.debug("Batch concurrency for %s: %s", job.function_name, limit)
+            if limit and limit > 0:
+                tool_semaphores[job.function_name] = threading.Semaphore(limit)
+            else:
+                tool_semaphores[job.function_name] = None
+
+        return tool_semaphores[job.function_name]
+
     def run(
         self,
         fcall_str,
@@ -1681,6 +1967,8 @@ class ToolUniverse:
         verbose=True,
         format="llama",
         stream_callback=None,
+        use_cache: bool = False,
+        max_workers: Optional[int] = None,
     ):
         """
         Execute function calls from input string or data.
@@ -1711,14 +1999,18 @@ class ToolUniverse:
         message = ""  # Initialize message for cases where return_message=False
         if function_call_json is not None:
             if isinstance(function_call_json, list):
-                #
+                # Execute the batch (optionally in parallel) and attach call IDs to maintain downstream compatibility.
+                batch_results = self._execute_function_call_list(
+                    function_call_json,
+                    stream_callback=stream_callback,
+                    use_cache=use_cache,
+                    max_workers=max_workers,
+                )
+
                 call_results = []
-                for
-                    call_result = self.run_one_function(
-                        function_call_json[i], stream_callback=stream_callback
-                    )
+                for idx, call_result in enumerate(batch_results):
                     call_id = self.call_id_gen()
-                    function_call_json[
+                    function_call_json[idx]["call_id"] = call_id
                     call_results.append(
                         {
                             "role": "tool",
@@ -1737,7 +2029,9 @@ class ToolUniverse:
                 return revised_messages
             else:
                 return self.run_one_function(
-                    function_call_json,
+                    function_call_json,
+                    stream_callback=stream_callback,
+                    use_cache=use_cache,
                 )
         else:
             error("Not a function call")
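Note: the new use_cache and max_workers parameters on run() feed the batch helpers added above: identical (name, arguments) calls are collapsed into one job, cached results are filled in via the cache manager's bulk_get, and the remaining jobs run on a ThreadPoolExecutor bounded per tool by get_batch_concurrency_limit(). A minimal sketch of a parallel batch call follows; the tool names appear in this release's data files, but the argument names are illustrative assumptions, and the sketch assumes run() accepts an already-parsed list of call dictionaries as handled by the list branch above.

from tooluniverse import ToolUniverse

tu = ToolUniverse()
tu.load_tools()

out = tu.run(
    [
        {"name": "UniProt_search", "arguments": {"query": "TP53"}},
        {"name": "kegg_get_gene_info", "arguments": {"gene_id": "hsa:7157"}},
        # Duplicate of the first call: collapsed into one job, result reused for both positions
        {"name": "UniProt_search", "arguments": {"query": "TP53"}},
    ],
    use_cache=True,   # consult the result cache before executing
    max_workers=4,    # None or <=1 keeps the previous sequential behavior
)
# For list input, run() builds tool-role call results and returns the revised messages (see the hunks above).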
@@ -1846,16 +2140,42 @@ class ToolUniverse:
                     tool_instance, arguments, stream_callback, use_cache, validate
                 )
             else:
-
-
-
-
-
-
-
-
+                # Try to auto-load tools if dictionary is empty
+                if not self._auto_load_tools_if_empty(function_name):
+                    error_msg = "Failed to auto-load tools"
+                    return self._create_dual_format_error(
+                        ToolUnavailableError(
+                            error_msg,
+                            next_steps=[
+                                "Manually run tu.load_tools()",
+                                "Check tool configuration",
+                            ],
+                        )
+                    )
+
+                # Try to get the tool instance again after loading
+                tool_instance = self._get_tool_instance(function_name, cache=True)
+                if tool_instance:
+                    result, tool_arguments = self._execute_tool_with_stream(
+                        tool_instance,
+                        arguments,
+                        stream_callback,
+                        use_cache,
+                        validate,
+                    )
+                else:
+                    error_msg = (
+                        f"Tool '{function_name}' not found even after loading tools"
+                    )
+                    return self._create_dual_format_error(
+                        ToolUnavailableError(
+                            error_msg,
+                            next_steps=[
+                                "Check tool name spelling",
+                                "Verify tool is available in loaded categories",
+                            ],
+                        )
                     )
-                )
         except Exception as e:
             # Classify and return structured error
             classified_error = self._classify_exception(e, function_name, arguments)
@@ -2065,6 +2385,29 @@ class ToolUniverse:

         return None

+    def _auto_load_tools_if_empty(self, function_name: str = None) -> bool:
+        """
+        Automatically load tools if the tools dictionary is empty.
+
+        Args:
+            function_name: Optional tool name to check after loading
+
+        Returns:
+            bool: True if tools were loaded successfully, False otherwise
+        """
+        if not self.all_tool_dict:
+            print(
+                "⚠️ Warning: No tools loaded. Automatically running tu.load_tools()..."
+            )
+            try:
+                self.load_tools()
+                print("✅ Tools loaded successfully.")
+                return True
+            except Exception as load_error:
+                print(f"❌ Failed to auto-load tools: {load_error}")
+                return False
+        return True
+
     def _make_cache_key(self, function_name: str, arguments: dict) -> str:
         """Generate cache key by delegating to BaseTool."""
         tool_instance = self._get_tool_instance(function_name, cache=False)
@@ -2083,7 +2426,15 @@ class ToolUniverse:
     ) -> Optional[ToolError]:
         """Validate parameters by delegating to BaseTool."""
         if function_name not in self.all_tool_dict:
-
+            # Try to auto-load tools if dictionary is empty
+            if not self._auto_load_tools_if_empty(function_name):
+                return ToolUnavailableError("Failed to auto-load tools")
+
+            # Check again after loading
+            if function_name not in self.all_tool_dict:
+                return ToolUnavailableError(
+                    f"Tool '{function_name}' not found even after loading tools"
+                )

         tool_instance = self._get_tool_instance(function_name, cache=False)
         if not tool_instance:
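Note: with the _auto_load_tools_if_empty helper above, the execution and validation paths patched in these hunks call load_tools() automatically when no tools have been loaded, instead of failing immediately. A minimal sketch under that assumption (tool name and argument are illustrative):

from tooluniverse import ToolUniverse

tu = ToolUniverse()  # note: no explicit tu.load_tools()

# An empty tool dictionary triggers the auto-load branch added above, then the call proceeds
result = tu.run_one_function({"name": "UniProt_search", "arguments": {"query": "BRCA1"}})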
@@ -2658,3 +3009,217 @@ class ToolUniverse:
         original_count = len(self.all_tools)
         self.load_tools(include_tools=tool_names)
         return len(self.all_tools) - original_count
+
+    def load_space(self, uri: str, **kwargs) -> Dict[str, Any]:
+        """
+        Load Space configuration and apply it to the ToolUniverse instance.
+
+        This is a high-level method that loads a Space configuration from various
+        sources (HuggingFace, local files, HTTP URLs) and applies the tool settings
+        to the current instance.
+
+        Args:
+            uri: Space URI (e.g., "hf:user/repo", "./config.yaml", "https://example.com/config.yaml")
+            **kwargs: Additional parameters to override Space configuration
+                (e.g., exclude_tools=["tool1"], include_tools=["tool2"])
+
+        Returns:
+            dict: The loaded Space configuration
+
+        Examples:
+            # Load from HuggingFace
+            config = tu.load_space("hf:community/proteomics-toolkit")
+
+            # Load local file with overrides
+            config = tu.load_space("./my-config.yaml", exclude_tools=["slow_tool"])
+
+            # Load from HTTP URL
+            config = tu.load_space("https://example.com/config.yaml")
+        """
+        # Lazy import to avoid circular import issues
+        from .space import SpaceLoader
+
+        # Load Space configuration
+        loader = SpaceLoader()
+        config = loader.load(uri)
+
+        # Extract tool configuration
+        tools_config = config.get("tools", {})
+
+        # Merge with override parameters
+        tool_type = kwargs.get("tool_type") or tools_config.get("categories")
+        exclude_tools = kwargs.get("exclude_tools") or tools_config.get(
+            "exclude_tools", []
+        )
+        exclude_categories = kwargs.get("exclude_categories") or tools_config.get(
+            "exclude_categories", []
+        )
+        include_tools = kwargs.get("include_tools") or tools_config.get(
+            "include_tools", []
+        )
+        include_tool_types = kwargs.get("include_tool_types") or tools_config.get(
+            "include_tool_types", []
+        )
+        exclude_tool_types = kwargs.get("exclude_tool_types") or tools_config.get(
+            "exclude_tool_types", []
+        )
+
+        # Load tools with merged configuration
+        self.load_tools(
+            tool_type=tool_type,
+            exclude_tools=exclude_tools,
+            exclude_categories=exclude_categories,
+            include_tools=include_tools,
+            include_tool_types=include_tool_types,
+            exclude_tool_types=exclude_tool_types,
+        )
+
+        # Store the configuration for reference
+        self._current_space_config = config
+
+        # Apply additional configurations (LLM, hooks, etc.)
+        try:
+            # Apply LLM configuration if present
+            llm_config = config.get("llm_config")
+            if llm_config:
+                self._apply_llm_config(llm_config)
+
+            # Apply hooks configuration if present
+            hooks_config = config.get("hooks")
+            if hooks_config:
+                self._apply_hooks_config(hooks_config)
+
+            # Store metadata
+            self._store_space_metadata(config)
+
+        except Exception as e:
+            # Use print since logging might not be available
+            print(f"⚠️ Failed to apply Space configurations: {e}")
+
+        return config
+
+    def _apply_llm_config(self, llm_config: Dict[str, Any]):
+        """
+        Apply LLM configuration from Space.
+
+        Args:
+            llm_config: LLM configuration dictionary
+        """
+        try:
+            import os
+
+            # Store LLM configuration
+            self._space_llm_config = llm_config
+
+            # Set environment variables for LLM configuration
+            # Set configuration mode
+            mode = llm_config.get("mode", "default")
+            os.environ["TOOLUNIVERSE_LLM_CONFIG_MODE"] = mode
+
+            # Set default provider
+            if "default_provider" in llm_config:
+                os.environ["TOOLUNIVERSE_LLM_DEFAULT_PROVIDER"] = llm_config[
+                    "default_provider"
+                ]
+
+            # Set model mappings
+            models = llm_config.get("models", {})
+            for task, model in models.items():
+                env_var = f"TOOLUNIVERSE_LLM_MODEL_{task.upper()}"
+                os.environ[env_var] = model
+
+            # Set temperature
+            temperature = llm_config.get("temperature")
+            if temperature is not None:
+                os.environ["TOOLUNIVERSE_LLM_TEMPERATURE"] = str(temperature)
+
+            # Note: max_tokens is handled by LLM client automatically, not needed here
+
+            print(
+                f"🤖 LLM configuration applied: {llm_config.get('default_provider', 'unknown')}"
+            )
+
+        except Exception as e:
+            print(f"⚠️ Failed to apply LLM configuration: {e}")
+
+    def _apply_hooks_config(self, hooks_config: List[Dict[str, Any]]):
+        """
+        Apply hooks configuration from Space.
+
+        Args:
+            hooks_config: Hooks configuration list
+        """
+        try:
+            # Convert Space hooks format to ToolUniverse hook_config format
+            hook_config = {
+                "hooks": hooks_config,
+                "global_settings": {
+                    "default_timeout": 30,
+                    "max_hook_depth": 3,
+                    "enable_hook_caching": True,
+                    "hook_execution_order": "priority_desc",
+                },
+            }
+
+            # Enable hooks if not already enabled
+            if not self.hooks_enabled:
+                self.toggle_hooks(True)
+
+            # Update hook manager configuration
+            if self.hook_manager:
+                self.hook_manager.config = hook_config
+                self.hook_manager._load_hooks()
+                print(f"🔗 Hooks configuration applied: {len(hooks_config)} hooks")
+            else:
+                print("⚠️ Hook manager not available")
+
+        except Exception as e:
+            print(f"⚠️ Failed to apply hooks configuration: {e}")
+
+    def _store_space_metadata(self, config: Dict[str, Any]):
+        """
+        Store Space metadata for reference.
+
+        Args:
+            config: Space configuration dictionary
+        """
+        try:
+            # Store metadata
+            self._space_metadata = {
+                "name": config.get("name"),
+                "version": config.get("version"),
+                "description": config.get("description"),
+                "tags": config.get("tags", []),
+                "required_env": config.get("required_env", []),
+            }
+
+            # Check for missing environment variables
+            if config.get("required_env"):
+                import os
+
+                missing_env = [
+                    env for env in config["required_env"] if not os.getenv(env)
+                ]
+                if missing_env:
+                    print(f"⚠️ Missing environment variables: {', '.join(missing_env)}")
+
+        except Exception as e:
+            print(f"⚠️ Failed to store Space metadata: {e}")
+
+    def get_space_llm_config(self) -> Optional[Dict[str, Any]]:
+        """
+        Get the current Space LLM configuration.
+
+        Returns:
+            LLM configuration dictionary or None if not set
+        """
+        return getattr(self, "_space_llm_config", None)
+
+    def get_space_metadata(self) -> Optional[Dict[str, Any]]:
+        """
+        Get the current Space metadata.
+
+        Returns:
+            Space metadata dictionary or None if not set
+        """
+        return getattr(self, "_space_metadata", None)
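Note: load_space() and the two getters added above are the public entry points for the new tooluniverse/space module (loader.py and validator.py in the file list). A minimal sketch based on the docstring; the file name and excluded tool name are placeholders.

from tooluniverse import ToolUniverse

tu = ToolUniverse()

# Load a Space config from "hf:user/repo", a local YAML path, or an HTTP(S) URL,
# overriding its tool selection at call time
config = tu.load_space("./my-space.yaml", exclude_tools=["slow_tool"])

print(tu.get_space_metadata())    # name / version / description / tags / required_env
print(tu.get_space_llm_config())  # None unless the Space defined llm_config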