droidrun 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,143 @@
1
import importlib
import logging
from typing import Any
from llama_index.core.llms.llm import LLM

# Configure logging: attach a NullHandler so this library stays silent
# unless the embedding application installs its own handlers.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
8
+
9
def _module_part_for(provider_name: str) -> str:
    """Map a provider class name to its ``llama_index.llms`` submodule name."""
    # "OpenAILike".lower() would give "openailike", but the real module
    # is "openai_like" — handle it explicitly.
    if provider_name == "OpenAILike":
        return "openai_like"
    lower_provider_name = provider_name.lower()
    # Special case common variations like HuggingFaceLLM -> huggingface module
    if lower_provider_name.endswith("llm"):
        lower_provider_name = lower_provider_name[:-3]
    # Handle hyphens so the result is a valid module path component.
    return lower_provider_name.replace("-", "_")


def load_llm(provider_name: str, **kwargs: Any) -> LLM:
    """
    Dynamically loads and initializes a LlamaIndex LLM.

    Imports `llama_index.llms.<provider_name_lower>`, finds the class named
    `provider_name` within that module, verifies it's an LLM subclass,
    and initializes it with kwargs.

    Args:
        provider_name: The case-sensitive name of the provider and the class
            (e.g., "OpenAI", "Ollama", "HuggingFaceLLM").
        **kwargs: Keyword arguments for the LLM class constructor.

    Returns:
        An initialized LLM instance.

    Raises:
        ValueError: If provider_name is empty.
        ModuleNotFoundError: If the provider's module cannot be found.
        AttributeError: If the class `provider_name` is not found in the module.
        TypeError: If the found class is not a subclass of LLM or if kwargs are invalid.
        RuntimeError: For other initialization errors.
    """
    if not provider_name:
        raise ValueError("provider_name cannot be empty.")

    module_provider_part = _module_part_for(provider_name)
    module_path = f"llama_index.llms.{module_provider_part}"
    install_package_name = f"llama-index-llms-{module_provider_part.replace('_', '-')}"

    try:
        logger.info("Attempting to import module: %s", module_path)
        llm_module = importlib.import_module(module_path)
        logger.info("Successfully imported module: %s", module_path)
    except ModuleNotFoundError:
        logger.error("Module '%s' not found. Try: pip install %s", module_path, install_package_name)
        raise ModuleNotFoundError(
            f"Could not import '{module_path}'. Is '{install_package_name}' installed?"
        ) from None

    # Resolve the class in its own narrow try-block so that an
    # AttributeError raised later, inside the provider's constructor, is
    # not misreported as "class not found in module".
    try:
        logger.info("Attempting to get class '%s' from module %s", provider_name, module_path)
        llm_class = getattr(llm_module, provider_name)
    except AttributeError:
        logger.error("Class '%s' not found in module '%s'.", provider_name, module_path)
        raise AttributeError(
            f"Could not find class '{provider_name}' in module '{module_path}'. Check spelling and capitalization."
        ) from None
    logger.info("Found class: %s", llm_class.__name__)

    # Verify the class is a subclass of LLM before calling it.
    if not isinstance(llm_class, type) or not issubclass(llm_class, LLM):
        raise TypeError(f"Class '{provider_name}' found in '{module_path}' is not a valid LLM subclass.")

    try:
        logger.info("Initializing %s with kwargs: %s", llm_class.__name__, list(kwargs.keys()))
        llm_instance = llm_class(**kwargs)
    except TypeError as e:
        # Invalid kwargs for the provider's constructor.
        logger.error("Error initializing %s: %s", provider_name, e)
        raise
    except Exception as e:
        logger.error("An unexpected error occurred initializing %s: %s", provider_name, e)
        raise RuntimeError(f"Failed to initialize LLM '{provider_name}'.") from e

    logger.info("Successfully loaded and initialized LLM: %s", provider_name)
    return llm_instance
85
+
86
# --- Example Usage ---
if __name__ == "__main__":
    # Install the specific LLM integrations you want to test:
    # pip install \
    #   llama-index-llms-anthropic \
    #   llama-index-llms-deepseek \
    #   llama-index-llms-gemini \
    #   llama-index-llms-openai
    #
    # Each provider reads its credentials from the environment unless
    # passed as a kwarg (ANTHROPIC_API_KEY, DEEPSEEK_API_KEY,
    # GOOGLE_APPLICATION_CREDENTIALS, OPENAI_API_KEY).
    examples = [
        ("Anthropic", {"model": "claude-3-7-sonnet-latest"}),
        ("DeepSeek", {"model": "deepseek-reasoner", "api_key": "your api"}),
        # NOTE: fixed model-name typos — "gemini-2.0-fash" and "gp-4o"
        # are not valid model identifiers.
        ("Gemini", {"model": "gemini-2.0-flash"}),
        ("OpenAI", {"model": "gpt-4o", "temperature": 0.5}),
    ]

    for provider, llm_kwargs in examples:
        print(f"\n--- Loading {provider} ---")
        try:
            llm = load_llm(provider, **llm_kwargs)
            print(f"Loaded LLM: {type(llm)}")
            print(f"Model: {llm.metadata}")
        except Exception as e:
            print(f"Failed to load {provider}: {e}")