minitap-mobile-use 3.3.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- minitap/mobile_use/__init__.py +0 -0
- minitap/mobile_use/agents/contextor/contextor.md +55 -0
- minitap/mobile_use/agents/contextor/contextor.py +175 -0
- minitap/mobile_use/agents/contextor/types.py +36 -0
- minitap/mobile_use/agents/cortex/cortex.md +135 -0
- minitap/mobile_use/agents/cortex/cortex.py +152 -0
- minitap/mobile_use/agents/cortex/types.py +15 -0
- minitap/mobile_use/agents/executor/executor.md +42 -0
- minitap/mobile_use/agents/executor/executor.py +87 -0
- minitap/mobile_use/agents/executor/tool_node.py +152 -0
- minitap/mobile_use/agents/hopper/hopper.md +15 -0
- minitap/mobile_use/agents/hopper/hopper.py +44 -0
- minitap/mobile_use/agents/orchestrator/human.md +12 -0
- minitap/mobile_use/agents/orchestrator/orchestrator.md +21 -0
- minitap/mobile_use/agents/orchestrator/orchestrator.py +134 -0
- minitap/mobile_use/agents/orchestrator/types.py +11 -0
- minitap/mobile_use/agents/outputter/human.md +25 -0
- minitap/mobile_use/agents/outputter/outputter.py +85 -0
- minitap/mobile_use/agents/outputter/test_outputter.py +167 -0
- minitap/mobile_use/agents/planner/human.md +14 -0
- minitap/mobile_use/agents/planner/planner.md +126 -0
- minitap/mobile_use/agents/planner/planner.py +101 -0
- minitap/mobile_use/agents/planner/types.py +51 -0
- minitap/mobile_use/agents/planner/utils.py +70 -0
- minitap/mobile_use/agents/summarizer/summarizer.py +35 -0
- minitap/mobile_use/agents/video_analyzer/__init__.py +5 -0
- minitap/mobile_use/agents/video_analyzer/human.md +5 -0
- minitap/mobile_use/agents/video_analyzer/video_analyzer.md +37 -0
- minitap/mobile_use/agents/video_analyzer/video_analyzer.py +111 -0
- minitap/mobile_use/clients/browserstack_client.py +477 -0
- minitap/mobile_use/clients/idb_client.py +429 -0
- minitap/mobile_use/clients/ios_client.py +332 -0
- minitap/mobile_use/clients/ios_client_config.py +141 -0
- minitap/mobile_use/clients/ui_automator_client.py +330 -0
- minitap/mobile_use/clients/wda_client.py +526 -0
- minitap/mobile_use/clients/wda_lifecycle.py +367 -0
- minitap/mobile_use/config.py +413 -0
- minitap/mobile_use/constants.py +3 -0
- minitap/mobile_use/context.py +106 -0
- minitap/mobile_use/controllers/__init__.py +0 -0
- minitap/mobile_use/controllers/android_controller.py +524 -0
- minitap/mobile_use/controllers/controller_factory.py +46 -0
- minitap/mobile_use/controllers/device_controller.py +182 -0
- minitap/mobile_use/controllers/ios_controller.py +436 -0
- minitap/mobile_use/controllers/platform_specific_commands_controller.py +199 -0
- minitap/mobile_use/controllers/types.py +106 -0
- minitap/mobile_use/controllers/unified_controller.py +193 -0
- minitap/mobile_use/graph/graph.py +160 -0
- minitap/mobile_use/graph/state.py +115 -0
- minitap/mobile_use/main.py +309 -0
- minitap/mobile_use/sdk/__init__.py +12 -0
- minitap/mobile_use/sdk/agent.py +1294 -0
- minitap/mobile_use/sdk/builders/__init__.py +10 -0
- minitap/mobile_use/sdk/builders/agent_config_builder.py +307 -0
- minitap/mobile_use/sdk/builders/index.py +15 -0
- minitap/mobile_use/sdk/builders/task_request_builder.py +236 -0
- minitap/mobile_use/sdk/constants.py +1 -0
- minitap/mobile_use/sdk/examples/README.md +83 -0
- minitap/mobile_use/sdk/examples/__init__.py +1 -0
- minitap/mobile_use/sdk/examples/app_lock_messaging.py +54 -0
- minitap/mobile_use/sdk/examples/platform_manual_task_example.py +67 -0
- minitap/mobile_use/sdk/examples/platform_minimal_example.py +48 -0
- minitap/mobile_use/sdk/examples/simple_photo_organizer.py +76 -0
- minitap/mobile_use/sdk/examples/smart_notification_assistant.py +225 -0
- minitap/mobile_use/sdk/examples/video_transcription_example.py +117 -0
- minitap/mobile_use/sdk/services/cloud_mobile.py +656 -0
- minitap/mobile_use/sdk/services/platform.py +434 -0
- minitap/mobile_use/sdk/types/__init__.py +51 -0
- minitap/mobile_use/sdk/types/agent.py +84 -0
- minitap/mobile_use/sdk/types/exceptions.py +138 -0
- minitap/mobile_use/sdk/types/platform.py +183 -0
- minitap/mobile_use/sdk/types/task.py +269 -0
- minitap/mobile_use/sdk/utils.py +29 -0
- minitap/mobile_use/services/accessibility.py +100 -0
- minitap/mobile_use/services/llm.py +247 -0
- minitap/mobile_use/services/telemetry.py +421 -0
- minitap/mobile_use/tools/index.py +67 -0
- minitap/mobile_use/tools/mobile/back.py +52 -0
- minitap/mobile_use/tools/mobile/erase_one_char.py +56 -0
- minitap/mobile_use/tools/mobile/focus_and_clear_text.py +317 -0
- minitap/mobile_use/tools/mobile/focus_and_input_text.py +153 -0
- minitap/mobile_use/tools/mobile/launch_app.py +86 -0
- minitap/mobile_use/tools/mobile/long_press_on.py +169 -0
- minitap/mobile_use/tools/mobile/open_link.py +62 -0
- minitap/mobile_use/tools/mobile/press_key.py +83 -0
- minitap/mobile_use/tools/mobile/stop_app.py +62 -0
- minitap/mobile_use/tools/mobile/swipe.py +156 -0
- minitap/mobile_use/tools/mobile/tap.py +154 -0
- minitap/mobile_use/tools/mobile/video_recording.py +177 -0
- minitap/mobile_use/tools/mobile/wait_for_delay.py +81 -0
- minitap/mobile_use/tools/scratchpad.py +147 -0
- minitap/mobile_use/tools/test_utils.py +413 -0
- minitap/mobile_use/tools/tool_wrapper.py +16 -0
- minitap/mobile_use/tools/types.py +35 -0
- minitap/mobile_use/tools/utils.py +336 -0
- minitap/mobile_use/utils/app_launch_utils.py +173 -0
- minitap/mobile_use/utils/cli_helpers.py +37 -0
- minitap/mobile_use/utils/cli_selection.py +143 -0
- minitap/mobile_use/utils/conversations.py +31 -0
- minitap/mobile_use/utils/decorators.py +124 -0
- minitap/mobile_use/utils/errors.py +6 -0
- minitap/mobile_use/utils/file.py +13 -0
- minitap/mobile_use/utils/logger.py +183 -0
- minitap/mobile_use/utils/media.py +186 -0
- minitap/mobile_use/utils/recorder.py +52 -0
- minitap/mobile_use/utils/requests_utils.py +37 -0
- minitap/mobile_use/utils/shell_utils.py +20 -0
- minitap/mobile_use/utils/test_ui_hierarchy.py +178 -0
- minitap/mobile_use/utils/time.py +6 -0
- minitap/mobile_use/utils/ui_hierarchy.py +132 -0
- minitap/mobile_use/utils/video.py +281 -0
- minitap_mobile_use-3.3.0.dist-info/METADATA +329 -0
- minitap_mobile_use-3.3.0.dist-info/RECORD +115 -0
- minitap_mobile_use-3.3.0.dist-info/WHEEL +4 -0
- minitap_mobile_use-3.3.0.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,100 @@
+import asyncio
+import logging
+import re
+
+# Set up basic logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+async def run_subprocess(command: str) -> tuple[str, str]:
+    """
+    Executes a shell command in a subprocess.
+
+    Args:
+        command: The command to execute.
+
+    Returns:
+        A tuple containing the stdout and stderr of the command.
+    """
+    process = await asyncio.create_subprocess_shell(
+        command,
+        stdout=asyncio.subprocess.PIPE,
+        stderr=asyncio.subprocess.PIPE,
+    )
+    stdout, stderr = await process.communicate()
+    return stdout.decode(errors="ignore"), stderr.decode(errors="ignore")
+
+
+async def get_accessibility_tree(device_id: str | None = None) -> str:
+    """
+    Retrieves the UI accessibility tree from an Android device as an XML string.
+
+    This function uses `uiautomator` to dump the current UI hierarchy.
+
+    Args:
+        device_id: The optional ID of the target device. If not provided,
+            the command will run on the only connected device.
+
+    Returns:
+        The UI hierarchy as an XML string.
+        Returns an empty string if the command fails.
+    """
+    adb_command = "adb"
+    if device_id:
+        adb_command = f"adb -s {device_id}"
+
+    # The '/dev/tty' trick is used to get the raw XML output directly.
+    # On some devices, '/dev/null' or a temporary file might be needed.
+    command = f"{adb_command} shell uiautomator dump /dev/tty"
+
+    logger.info(f"Executing command: {command}")
+
+    try:
+        stdout, stderr = await run_subprocess(command)
+
+        if "UI hierchary dumped to" in stderr:  # Mispelling is in the original tool
+            # The XML is often in stdout, but sometimes mixed with stderr
+            # We'll clean it up to ensure we only get the XML part.
+            xml_output = re.sub(r"UI hierchary dumped to.*", "", stderr, flags=re.DOTALL).strip()
+            if not xml_output.startswith("<?xml"):
+                xml_output = stdout
+
+            # Clean up potential non-XML text at the beginning
+            xml_start_index = xml_output.find("<?xml")
+            if xml_start_index != -1:
+                return xml_output[xml_start_index:].strip()
+            else:
+                logger.error("Could not find XML content in the output.")
+                return ""
+
+        elif "ERROR" in stderr:
+            logger.error(f"Failed to get accessibility tree: {stderr.strip()}")
+            return ""
+
+        return stdout.strip()
+
+    except Exception as e:
+        logger.error(f"An exception occurred while getting the accessibility tree: {e}")
+        return ""
+
+
+# Example of how to run this function
+async def main():
+    print("Attempting to retrieve accessibility tree from the connected device...")
+    # You can specify a device_id like "emulator-5554" if you have multiple devices
+    accessibility_tree = await get_accessibility_tree()
+
+    if accessibility_tree:
+        print("\n--- Accessibility Tree XML ---")
+        print(accessibility_tree)
+        print("\n----------------------------")
+    else:
+        print("\nFailed to retrieve the accessibility tree.")
+
+
+if __name__ == "__main__":
+    # To run this example, save it as a Python file (e.g., `get_tree.py`)
+    # and run `python get_tree.py` in your terminal.
+    # Make sure you have an Android device connected with ADB enabled.
+    asyncio.run(main())
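The hunk above adds a 100-line module which, judging by the file list, corresponds to minitap/mobile_use/services/accessibility.py: it shells out to `adb shell uiautomator dump /dev/tty` and returns the raw UI-hierarchy XML. As a rough illustration of how that XML might be consumed, here is a minimal sketch that extracts clickable nodes with the standard library. It assumes the usual `uiautomator dump` attribute names (`clickable`, `bounds`, `text`, `resource-id`) and an inferred import path; it is not the parser the package itself ships.

```python
import asyncio
import re
import xml.etree.ElementTree as ET

# Assumed import path, inferred from the file list (+100 lines -> services/accessibility.py).
from minitap.mobile_use.services.accessibility import get_accessibility_tree

# Bounds come back as "[x1,y1][x2,y2]" in uiautomator dumps.
BOUNDS_RE = re.compile(r"\[(\d+),(\d+)\]\[(\d+),(\d+)\]")


def clickable_nodes(xml_dump: str) -> list[dict]:
    """List clickable nodes with their text, resource-id and tap point (center of bounds)."""
    nodes = []
    for node in ET.fromstring(xml_dump).iter("node"):
        if node.get("clickable") != "true":
            continue
        match = BOUNDS_RE.match(node.get("bounds", ""))
        if not match:
            continue
        x1, y1, x2, y2 = map(int, match.groups())
        nodes.append(
            {
                "text": node.get("text", ""),
                "resource_id": node.get("resource-id", ""),
                "center": ((x1 + x2) // 2, (y1 + y2) // 2),
            }
        )
    return nodes


async def demo() -> None:
    xml_dump = await get_accessibility_tree()  # empty string on failure
    if not xml_dump:
        print("No hierarchy retrieved.")
        return
    for entry in clickable_nodes(xml_dump):
        print(entry)


if __name__ == "__main__":
    asyncio.run(demo())
```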
@@ -0,0 +1,247 @@
+import asyncio
+import logging
+from collections.abc import Awaitable, Callable, Coroutine
+from typing import Any, Literal, TypeVar, overload
+
+from langchain_core.language_models.chat_models import BaseChatModel
+from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain_google_vertexai import ChatVertexAI
+from langchain_openai import ChatOpenAI
+from pydantic import SecretStr
+
+from minitap.mobile_use.config import (
+    AgentNode,
+    AgentNodeWithFallback,
+    LLMUtilsNode,
+    LLMUtilsNodeWithFallback,
+    LLMWithFallback,
+    settings,
+)
+from minitap.mobile_use.context import MobileUseContext
+from minitap.mobile_use.utils.logger import get_logger
+
+# Logger for internal messages (ex: fallback)
+llm_logger = logging.getLogger(__name__)
+# Logger for user messages
+user_messages_logger = get_logger(__name__)
+
+
+async def invoke_llm_with_timeout_message[T](
+    llm_call: Coroutine[Any, Any, T],
+    timeout_seconds: int = 10,
+) -> T:
+    """
+    Send a LLM call and display a timeout message if it takes too long.
+
+    Args:
+        llm_call: The coroutine of the LLM call to execute.
+        timeout_seconds: The delay in seconds before displaying the message.
+
+    Returns:
+        The result of the LLM call.
+    """
+    llm_task = asyncio.create_task(llm_call)
+    waiter_task = asyncio.create_task(asyncio.sleep(timeout_seconds))
+
+    done, _ = await asyncio.wait({llm_task, waiter_task}, return_when=asyncio.FIRST_COMPLETED)
+
+    if llm_task in done:
+        # The LLM call has finished before the timeout, cancel the timer
+        waiter_task.cancel()
+        return llm_task.result()
+    else:
+        # The timeout has been reached, display the message and wait for the call to finish
+        user_messages_logger.info("Waiting for LLM call response...")
+        return await llm_task
+
+
+def get_minitap_llm(
+    trace_id: str,
+    remote_tracing: bool = False,
+    model: str = "google/gemini-2.5-pro",
+    temperature: float | None = None,
+    max_retries: int | None = None,
+    api_key: str | None = None,
+) -> ChatOpenAI:
+    if api_key:
+        effective_api_key = SecretStr(api_key)
+    elif settings.MINITAP_API_KEY:
+        effective_api_key = settings.MINITAP_API_KEY
+    else:
+        raise ValueError("MINITAP_API_KEY must be provided or set in environment")
+
+    if settings.MINITAP_BASE_URL is None:
+        raise ValueError("MINITAP_BASE_URL must be set in environment")
+
+    llm_base_url = f"{settings.MINITAP_BASE_URL}/api/v1"
+
+    if max_retries is None and model.startswith("google/"):
+        max_retries = 2
+    client = ChatOpenAI(
+        model=model,
+        temperature=temperature,
+        max_retries=max_retries,
+        api_key=effective_api_key,
+        base_url=llm_base_url,
+        default_query={
+            "sessionId": trace_id,
+            "traceOnlyUsage": remote_tracing,
+        },
+    )
+    return client
+
+
+def get_google_llm(
+    model_name: str = "gemini-2.5-pro",
+    temperature: float = 0.7,
+) -> ChatGoogleGenerativeAI:
+    assert settings.GOOGLE_API_KEY is not None
+    client = ChatGoogleGenerativeAI(
+        model=model_name,
+        max_tokens=None,
+        temperature=temperature,
+        api_key=settings.GOOGLE_API_KEY,
+        max_retries=2,
+    )
+    return client
+
+
+def get_vertex_llm(
+    model_name: str = "gemini-2.5-pro",
+    temperature: float = 0.7,
+) -> ChatVertexAI:
+    client = ChatVertexAI(
+        model_name=model_name,
+        max_tokens=None,
+        temperature=temperature,
+        max_retries=2,
+    )
+    return client
+
+
+def get_openai_llm(
+    model_name: str = "o3",
+    temperature: float = 1,
+) -> ChatOpenAI:
+    assert settings.OPENAI_API_KEY is not None
+    client = ChatOpenAI(
+        model=model_name,
+        api_key=settings.OPENAI_API_KEY,
+        base_url=settings.OPENAI_BASE_URL,
+        temperature=temperature,
+    )
+    return client
+
+
+def get_openrouter_llm(model_name: str, temperature: float = 1):
+    assert settings.OPEN_ROUTER_API_KEY is not None
+    client = ChatOpenAI(
+        model=model_name,
+        temperature=temperature,
+        api_key=settings.OPEN_ROUTER_API_KEY,
+        base_url="https://openrouter.ai/api/v1",
+    )
+    return client
+
+
+def get_grok_llm(model_name: str, temperature: float = 1) -> ChatOpenAI:
+    assert settings.XAI_API_KEY is not None
+    client = ChatOpenAI(
+        model=model_name,
+        api_key=settings.XAI_API_KEY,
+        temperature=temperature,
+        base_url="https://api.x.ai/v1",
+    )
+    return client
+
+
+@overload
+def get_llm(
+    ctx: MobileUseContext,
+    name: AgentNodeWithFallback,
+    *,
+    use_fallback: bool = False,
+    temperature: float = 1,
+) -> BaseChatModel: ...
+
+
+@overload
+def get_llm(
+    ctx: MobileUseContext,
+    name: LLMUtilsNode,
+    *,
+    is_utils: Literal[True],
+    temperature: float = 1,
+) -> BaseChatModel: ...
+
+
+@overload
+def get_llm(
+    ctx: MobileUseContext,
+    name: LLMUtilsNodeWithFallback,
+    *,
+    is_utils: Literal[True],
+    use_fallback: bool = False,
+    temperature: float = 1,
+) -> BaseChatModel: ...
+
+
+def get_llm(
+    ctx: MobileUseContext,
+    name: AgentNode | LLMUtilsNode | AgentNodeWithFallback,
+    is_utils: bool = False,
+    use_fallback: bool = False,
+    temperature: float = 1,
+) -> BaseChatModel:
+    llm = (
+        ctx.llm_config.get_utils(name)  # type: ignore
+        if is_utils
+        else ctx.llm_config.get_agent(name)  # type: ignore
+    )
+    if use_fallback:
+        if isinstance(llm, LLMWithFallback):
+            llm = llm.fallback
+        else:
+            raise ValueError("LLM has no fallback!")
+    if llm.provider == "openai":
+        return get_openai_llm(llm.model, temperature)
+    elif llm.provider == "google":
+        return get_google_llm(llm.model, temperature)
+    elif llm.provider == "vertexai":
+        return get_vertex_llm(llm.model, temperature)
+    elif llm.provider == "openrouter":
+        return get_openrouter_llm(llm.model, temperature)
+    elif llm.provider == "xai":
+        return get_grok_llm(llm.model, temperature)
+    elif llm.provider == "minitap":
+        remote_tracing = False
+        if ctx.execution_setup:
+            remote_tracing = ctx.execution_setup.enable_remote_tracing
+        return get_minitap_llm(
+            trace_id=ctx.trace_id,
+            remote_tracing=remote_tracing,
+            model=llm.model,
+            temperature=temperature,
+            api_key=ctx.minitap_api_key,
+        )
+    else:
+        raise ValueError(f"Unsupported provider: {llm.provider}")
+
+
+T = TypeVar("T")
+
+
+async def with_fallback(
+    main_call: Callable[[], Awaitable[T]],
+    fallback_call: Callable[[], Awaitable[T]],
+    none_should_fallback: bool = True,
+) -> T:
+    try:
+        result = await main_call()
+        if result is None and none_should_fallback:
+            llm_logger.warning("Main LLM inference returned None. Falling back...")
+            return await fallback_call()
+        return result
+    except Exception as e:
+        llm_logger.warning(f"❗ Main LLM inference failed: {e}. Falling back...")
+        return await fallback_call()
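This second hunk adds the 247-line LLM service module (per the file list, minitap/mobile_use/services/llm.py), with per-provider factories, a timeout-message wrapper, and a generic `with_fallback` helper. The sketch below shows one hypothetical way to combine `invoke_llm_with_timeout_message` and `with_fallback` around a primary and a fallback model; the `ask_with_fallback` helper, the OpenRouter model names, and the prompt are illustrative assumptions rather than call sites from the package, and `OPEN_ROUTER_API_KEY` must be configured for it to run.

```python
import asyncio

from langchain_core.language_models.chat_models import BaseChatModel

# Assumed import path, inferred from the file list (+247 lines -> services/llm.py).
from minitap.mobile_use.services.llm import (
    get_openrouter_llm,
    invoke_llm_with_timeout_message,
    with_fallback,
)


async def ask_with_fallback(primary: BaseChatModel, fallback: BaseChatModel, prompt: str) -> str:
    async def main_call():
        # Logs "Waiting for LLM call response..." if the primary model takes longer than 10s.
        return await invoke_llm_with_timeout_message(primary.ainvoke(prompt))

    async def fallback_call():
        return await invoke_llm_with_timeout_message(fallback.ainvoke(prompt))

    # Falls back on exceptions and, by default, on a None result.
    response = await with_fallback(main_call, fallback_call)
    return str(response.content)


async def demo() -> None:
    # Hypothetical model choices; requires OPEN_ROUTER_API_KEY in the settings.
    primary = get_openrouter_llm("openai/gpt-4o-mini")
    fallback = get_openrouter_llm("google/gemini-2.5-flash")
    print(await ask_with_fallback(primary, fallback, "Say hello in one word."))


if __name__ == "__main__":
    asyncio.run(demo())
```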