droidrun 0.1.0-py3-none-any.whl → 0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- droidrun/__init__.py +15 -8
- droidrun/__main__.py +2 -3
- droidrun/adb/device.py +1 -1
- droidrun/agent/codeact/__init__.py +13 -0
- droidrun/agent/codeact/codeact_agent.py +334 -0
- droidrun/agent/codeact/events.py +36 -0
- droidrun/agent/codeact/prompts.py +78 -0
- droidrun/agent/droid/__init__.py +13 -0
- droidrun/agent/droid/droid_agent.py +418 -0
- droidrun/agent/planner/__init__.py +15 -0
- droidrun/agent/planner/events.py +20 -0
- droidrun/agent/planner/prompts.py +144 -0
- droidrun/agent/planner/task_manager.py +355 -0
- droidrun/agent/planner/workflow.py +371 -0
- droidrun/agent/utils/async_utils.py +56 -0
- droidrun/agent/utils/chat_utils.py +92 -0
- droidrun/agent/utils/executer.py +97 -0
- droidrun/agent/utils/llm_picker.py +143 -0
- droidrun/cli/main.py +422 -107
- droidrun/tools/__init__.py +4 -25
- droidrun/tools/actions.py +767 -783
- droidrun/tools/device.py +1 -1
- droidrun/tools/loader.py +60 -0
- {droidrun-0.1.0.dist-info → droidrun-0.2.0.dist-info}/METADATA +134 -37
- droidrun-0.2.0.dist-info/RECORD +32 -0
- droidrun/agent/__init__.py +0 -16
- droidrun/agent/llm_reasoning.py +0 -567
- droidrun/agent/react_agent.py +0 -556
- droidrun/llm/__init__.py +0 -24
- droidrun-0.1.0.dist-info/RECORD +0 -20
- {droidrun-0.1.0.dist-info → droidrun-0.2.0.dist-info}/WHEEL +0 -0
- {droidrun-0.1.0.dist-info → droidrun-0.2.0.dist-info}/entry_points.txt +0 -0
- {droidrun-0.1.0.dist-info → droidrun-0.2.0.dist-info}/licenses/LICENSE +0 -0
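The hunk below is the new droidrun/agent/utils/llm_picker.py (+143 lines):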
@@ -0,0 +1,143 @@
+import importlib
+import logging
+from typing import Any
+from llama_index.core.llms.llm import LLM
+# Configure logging
+logger = logging.getLogger(__name__)
+logger.addHandler(logging.NullHandler())
+
+def load_llm(provider_name: str, **kwargs: Any) -> LLM:
+    """
+    Dynamically loads and initializes a LlamaIndex LLM.
+
+    Imports `llama_index.llms.<provider_name_lower>`, finds the class named
+    `provider_name` within that module, verifies it's an LLM subclass,
+    and initializes it with kwargs.
+
+    Args:
+        provider_name: The case-sensitive name of the provider and the class
+                       (e.g., "OpenAI", "Ollama", "HuggingFaceLLM").
+        **kwargs: Keyword arguments for the LLM class constructor.
+
+    Returns:
+        An initialized LLM instance.
+
+    Raises:
+        ModuleNotFoundError: If the provider's module cannot be found.
+        AttributeError: If the class `provider_name` is not found in the module.
+        TypeError: If the found class is not a subclass of LLM or if kwargs are invalid.
+        RuntimeError: For other initialization errors.
+    """
+    if not provider_name:
+        raise ValueError("provider_name cannot be empty.")
+    if provider_name == "OpenAILike":
+        module_provider_part = "openai_like"
+    else:
+        # Use lowercase for module path, handle hyphens for package name suggestion
+        lower_provider_name = provider_name.lower()
+        # Special case common variations like HuggingFaceLLM -> huggingface module
+        if lower_provider_name.endswith("llm"):
+            module_provider_part = lower_provider_name[:-3].replace("-", "_")
+        else:
+            module_provider_part = lower_provider_name.replace("-", "_")
+    module_path = f"llama_index.llms.{module_provider_part}"
+    install_package_name = f"llama-index-llms-{module_provider_part.replace('_', '-')}"
+
+    try:
+        logger.info(f"Attempting to import module: {module_path}")
+        llm_module = importlib.import_module(module_path)
+        logger.info(f"Successfully imported module: {module_path}")
+
+    except ModuleNotFoundError:
+        logger.error(f"Module '{module_path}' not found. Try: pip install {install_package_name}")
+        raise ModuleNotFoundError(
+            f"Could not import '{module_path}'. Is '{install_package_name}' installed?"
+        ) from None
+
+    try:
+        logger.info(f"Attempting to get class '{provider_name}' from module {module_path}")
+        llm_class = getattr(llm_module, provider_name)
+        logger.info(f"Found class: {llm_class.__name__}")
+
+        # Verify the class is a subclass of LLM
+        if not isinstance(llm_class, type) or not issubclass(llm_class, LLM):
+            raise TypeError(f"Class '{provider_name}' found in '{module_path}' is not a valid LLM subclass.")
+
+        # Initialize
+        logger.info(f"Initializing {llm_class.__name__} with kwargs: {list(kwargs.keys())}")
+        llm_instance = llm_class(**kwargs)
+        logger.info(f"Successfully loaded and initialized LLM: {provider_name}")
+        if not llm_instance:
+            raise RuntimeError(f"Failed to initialize LLM instance for {provider_name}.")
+        return llm_instance
+
+    except AttributeError:
+        logger.error(f"Class '{provider_name}' not found in module '{module_path}'.")
+        raise AttributeError(
+            f"Could not find class '{provider_name}' in module '{module_path}'. Check spelling and capitalization."
+        ) from None
+    except TypeError as e:
+        logger.error(f"Error initializing {provider_name}: {e}")
+        raise  # Re-raise TypeError (could be from issubclass check or __init__)
+    except Exception as e:
+        logger.error(f"An unexpected error occurred initializing {provider_name}: {e}")
+        raise RuntimeError(f"Failed to initialize LLM '{provider_name}'.") from e
+
+# --- Example Usage ---
+if __name__ == "__main__":
+    # Install the specific LLM integrations you want to test:
+    # pip install \
+    #     llama-index-llms-anthropic \
+    #     llama-index-llms-deepseek \
+    #     llama-index-llms-gemini \
+    #     llama-index-llms-openai
+
+    # Example 1: Load Anthropic (requires ANTHROPIC_API_KEY env var or kwarg)
+    print("\n--- Loading Anthropic ---")
+    try:
+        anthropic_llm = load_llm(
+            "Anthropic",
+            model="claude-3-7-sonnet-latest",
+        )
+        print(f"Loaded LLM: {type(anthropic_llm)}")
+        print(f"Model: {anthropic_llm.metadata}")
+    except Exception as e:
+        print(f"Failed to load Anthropic: {e}")
+
+    # Example 2: Load DeepSeek (requires DEEPSEEK_API_KEY env var or kwarg)
+    print("\n--- Loading DeepSeek ---")
+    try:
+        deepseek_llm = load_llm(
+            "DeepSeek",
+            model="deepseek-reasoner",
+            api_key="your-api-key",  # or set DEEPSEEK_API_KEY
+        )
+        print(f"Loaded LLM: {type(deepseek_llm)}")
+        print(f"Model: {deepseek_llm.metadata}")
+    except Exception as e:
+        print(f"Failed to load DeepSeek: {e}")
+
+    # Example 3: Load Gemini (requires GOOGLE_APPLICATION_CREDENTIALS or kwarg)
+    print("\n--- Loading Gemini ---")
+    try:
+        gemini_llm = load_llm(
+            "Gemini",
+            model="gemini-2.0-flash",
+        )
+        print(f"Loaded LLM: {type(gemini_llm)}")
+        print(f"Model: {gemini_llm.metadata}")
+    except Exception as e:
+        print(f"Failed to load Gemini: {e}")
+
+    # Example 4: Load OpenAI (requires OPENAI_API_KEY env var or kwarg)
+    print("\n--- Loading OpenAI ---")
+    try:
+        openai_llm = load_llm(
+            "OpenAI",
+            model="gpt-4o",
+            temperature=0.5,
+        )
+        print(f"Loaded LLM: {type(openai_llm)}")
+        print(f"Model: {openai_llm.metadata}")
+    except Exception as e:
+        print(f"Failed to load OpenAI: {e}")
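The part of load_llm most likely to trip users up is the provider-name mangling, since it decides both the import path and the pip package to install. The standalone sketch below (the module_part helper is hypothetical, not shipped in droidrun) mirrors the rules in the hunk above, so you can predict the mapping for any provider name:

def module_part(provider_name: str) -> str:
    # Mirrors load_llm's special cases: "OpenAILike" -> "openai_like",
    # a trailing "LLM" suffix is dropped, and hyphens become underscores.
    if provider_name == "OpenAILike":
        return "openai_like"
    lower = provider_name.lower()
    if lower.endswith("llm"):
        lower = lower[:-3]  # e.g. "HuggingFaceLLM" -> "huggingface"
    return lower.replace("-", "_")

for name in ("OpenAI", "OpenAILike", "HuggingFaceLLM", "DeepSeek"):
    part = module_part(name)
    print(f"{name}: imports llama_index.llms.{part}, "
          f"install llama-index-llms-{part.replace('_', '-')}")

# Expected output:
# OpenAI: imports llama_index.llms.openai, install llama-index-llms-openai
# OpenAILike: imports llama_index.llms.openai_like, install llama-index-llms-openai-like
# HuggingFaceLLM: imports llama_index.llms.huggingface, install llama-index-llms-huggingface
# DeepSeek: imports llama_index.llms.deepseek, install llama-index-llms-deepseek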