lybic-guiagents 0.2.2__py3-none-any.whl → 0.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lybic-guiagents might be problematic. Click here for more details.
- gui_agents/__init__.py +1 -1
- gui_agents/agents/Backend/LybicBackend.py +25 -19
- gui_agents/agents/agent_s.py +292 -97
- gui_agents/agents/grounding.py +43 -6
- gui_agents/agents/manager.py +113 -18
- gui_agents/agents/stream_manager.py +163 -0
- gui_agents/agents/worker.py +60 -35
- gui_agents/cli_app.py +16 -5
- gui_agents/core/knowledge.py +36 -5
- gui_agents/grpc_app.py +784 -0
- gui_agents/proto/__init__.py +3 -0
- gui_agents/proto/pb/__init__.py +4 -0
- gui_agents/tools/model.md +351 -0
- gui_agents/tools/tools.py +80 -39
- gui_agents/tools/tools_config.json +101 -0
- gui_agents/tools/tools_config_cn.json +101 -0
- gui_agents/tools/tools_config_en.json +101 -0
- {lybic_guiagents-0.2.2.dist-info → lybic_guiagents-0.2.3.dist-info}/METADATA +86 -8
- {lybic_guiagents-0.2.2.dist-info → lybic_guiagents-0.2.3.dist-info}/RECORD +23 -16
- lybic_guiagents-0.2.3.dist-info/entry_points.txt +3 -0
- gui_agents/lybic_client/__init__.py +0 -0
- gui_agents/lybic_client/lybic_client.py +0 -88
- {lybic_guiagents-0.2.2.dist-info → lybic_guiagents-0.2.3.dist-info}/WHEEL +0 -0
- {lybic_guiagents-0.2.2.dist-info → lybic_guiagents-0.2.3.dist-info}/licenses/LICENSE +0 -0
- {lybic_guiagents-0.2.2.dist-info → lybic_guiagents-0.2.3.dist-info}/top_level.txt +0 -0
gui_agents/grpc_app.py
ADDED
|
@@ -0,0 +1,784 @@
|
|
|
1
|
+
# !/usr/bin/env python3
import os
from pathlib import Path
import logging

from dotenv import load_dotenv

# Load environment configuration before anything else: try a .env next to
# this file first, then one directory up; warn if neither exists.
env_path = Path(os.path.dirname(os.path.abspath(__file__))) / '.env'
if env_path.exists():
    load_dotenv(dotenv_path=env_path)
else:
    parent_env_path = Path(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) / '.env'
    if parent_env_path.exists():
        load_dotenv(dotenv_path=parent_env_path)
    else:
        print("Warning: no .env file found")

# Configure logging from LOG_LEVEL (default INFO) before importing the
# agent stack, so import-time messages are formatted consistently.
logger = logging.getLogger(__name__)
level = os.environ.get("LOG_LEVEL", "INFO")
logging.basicConfig(
    level=level,
    format='%(asctime)s.%(msecs)03d - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
)
logger.info("Initializing Agent server")

# NOTE: these imports are deliberately placed after the .env/logging setup
# above — the agent modules read environment variables at import time.
import asyncio
import platform
from concurrent import futures
import grpc
import uuid

from lybic import LybicClient, LybicAuth, Sandbox
import gui_agents.cli_app as app
from gui_agents.proto import agent_pb2, agent_pb2_grpc
from gui_agents.agents.stream_manager import stream_manager, StreamMessage
from gui_agents.agents.agent_s import load_config
from gui_agents.proto.pb.agent_pb2 import LLMConfig, StageModelConfig, CommonConfig, Authorization, InstanceMode
from gui_agents import Registry, GlobalState, AgentS2, HardwareInterface, __version__
|
+
class AgentServicer(agent_pb2_grpc.AgentServicer):
|
|
43
|
+
"""
|
|
44
|
+
Implements the Agent gRPC service.
|
|
45
|
+
"""
|
|
46
|
+
|
|
47
|
+
def __init__(self, max_concurrent_task_num = 1):
    """
    Set up the servicer's concurrency controls and runtime state.

    Parameters:
        max_concurrent_task_num (int): Upper bound on simultaneously
            active agent tasks; defaults to 1.
    """
    # Concurrency control and the in-memory task registry.
    self.max_concurrent_task_num = max_concurrent_task_num
    self.task_lock = asyncio.Lock()
    self.tasks = {}
    # Lybic connection state, populated lazily from incoming configs.
    self.lybic_auth: LybicAuth | None = None
    self.lybic_client: LybicClient | None = None
    self.sandbox: Sandbox | None = None
    # Server-wide defaults, overridable per request.
    self.global_common_config = agent_pb2.CommonConfig(id="global")
|
+
async def GetAgentTaskStream(self, request, context):
    """
    Stream TaskStream payloads for an existing task to the caller.

    Sets NOT_FOUND on the gRPC context when the task id is unknown, and
    INTERNAL when streaming fails unexpectedly; client cancellation is
    logged and ends the stream quietly.
    """
    task_id = request.taskId
    logger.info(f"Received GetAgentTaskStream request for taskId: {task_id}")

    # Snapshot the task entry under the lock; streaming happens without it.
    async with self.task_lock:
        task_info = self.tasks.get(task_id)

    if not task_info:
        context.set_code(grpc.StatusCode.NOT_FOUND)
        context.set_details(f"Task with ID {task_id} not found.")
        return

    try:
        async for msg in stream_manager.get_message_stream(task_id):
            payload = agent_pb2.TaskStream(
                taskId=task_id,
                stage=msg.stage,
                message=msg.message,
                timestamp=msg.timestamp,
            )
            yield agent_pb2.GetAgentTaskStreamResponse(taskStream=payload)
    except asyncio.CancelledError:
        logger.info(f"GetAgentTaskStream for {task_id} cancelled by client.")
    except Exception as e:
        logger.exception(f"Error in GetAgentTaskStream for task {task_id}")
        context.set_code(grpc.StatusCode.INTERNAL)
        context.set_details(f"An error occurred during streaming: {e}")
|
+
async def GetAgentInfo(self, request, context):
    """
    Report server metadata.

    Returns:
        agent_pb2.AgentInfo: package version, configured concurrency
        limit, current log level, and this host's node name.
    """
    info = agent_pb2.AgentInfo(
        version=__version__,
        maxConcurrentTasks=self.max_concurrent_task_num,
        log_level=level,
        domain=platform.node(),
    )
    return info
|
+
async def _run_task(self, task_id: str, backend_kwargs):
    """
    Drive one agent task end to end: mark it running, execute the agent,
    record the final state, emit lifecycle stream messages, and
    unregister the task from the stream manager.

    Parameters:
        task_id (str): Key of the task entry in self.tasks.
        backend_kwargs (dict): Keyword args for HardwareInterface
            (platform, precreate_sid, optional lybic credentials, and an
            optional "mode").

    Notes:
        - Exceptions are caught, the task status is set to "error", and
          an error message is emitted on the stream.
    """
    async with self.task_lock:
        self.tasks[task_id]["status"] = "running"
        agent = self.tasks[task_id]["agent"]
        steps = self.tasks[task_id]["max_steps"]
        query = self.tasks[task_id]["query"]

    # Register task with stream manager so clients can subscribe.
    await stream_manager.register_task(task_id)

    try:
        await stream_manager.add_message(task_id, "starting", "Task starting")

        # Let the agent emit stream messages itself when supported.
        if hasattr(agent, 'set_task_id'):
            agent.set_task_id(task_id)

        hwi = HardwareInterface(backend='lybic', **backend_kwargs)

        agent.reset()

        # BUGFIX: "mode" is only present in backend_kwargs when the request
        # carried a runningConfig — indexing raised KeyError otherwise.
        mode: InstanceMode | None = backend_kwargs.get("mode")
        # Run the blocking agent loop in a thread so the event loop stays free.
        if mode and mode == InstanceMode.NORMAL:
            await asyncio.to_thread(app.run_agent_normal, agent, query, hwi, steps, False)
        else:
            await asyncio.to_thread(app.run_agent_fast, agent, query, hwi, steps, False)

        global_state: GlobalState = Registry.get("GlobalStateStore")  # type: ignore
        final_state = global_state.get_running_state()

        async with self.task_lock:
            self.tasks[task_id]["final_state"] = final_state
            self.tasks[task_id]["status"] = "finished"

        if final_state and final_state == "completed":
            await stream_manager.add_message(task_id, "finished", "Task completed successfully")
        else:
            status = final_state if final_state else 'unknown'
            await stream_manager.add_message(task_id, "finished", f"Task finished with status: {status}")

    except Exception as e:
        logger.exception(f"Error during task execution for {task_id}: {e}")
        async with self.task_lock:
            self.tasks[task_id]["status"] = "error"
        await stream_manager.add_message(task_id, "error", f"An error occurred: {e}")
    finally:
        logger.info(f"Task {task_id} processing finished.")
        await stream_manager.unregister_task(task_id)
|
+
async def _make_backend_kwargs(self, request):
    """
    Build the HardwareInterface kwargs (platform, sandbox id, credentials)
    from the request and the servicer's global configuration.

    Fix: the "mode" key was only set when the request carried a
    runningConfig, which made downstream `backend_kwargs["mode"]` lookups
    raise KeyError; it is now seeded with None so it always exists.

    Parameters:
        request: gRPC request with optional `runningConfig` and `sandbox`
            fields. A present `runningConfig.authorizationInfo` replaces
            this servicer's Lybic authorization.

    Returns:
        dict: always contains "mode", "platform" and "precreate_sid"; for
        the "lybic" backend also "org_id", "api_key" and optionally
        "endpoint".

    Raises:
        ValueError: lybic backend selected without a usable orgID/apiKey.
    """
    # BUGFIX: seed "mode" so the key exists even without a runningConfig.
    backend_kwargs = {"mode": None}
    platform_map = {
        agent_pb2.SandboxOS.WINDOWS: "Windows",
        agent_pb2.SandboxOS.LINUX: "Ubuntu",
    }
    backend = "lybic"
    shape = "beijing-2c-4g-cpu"  # default shape # todo: check shape exist by using lybic sdk >=0.8.0b3
    if request.HasField("runningConfig"):
        if request.runningConfig.backend:
            backend = request.runningConfig.backend
        if request.runningConfig.HasField("authorizationInfo"):
            self.lybic_auth = LybicAuth(
                org_id=request.runningConfig.authorizationInfo.orgID,
                api_key=request.runningConfig.authorizationInfo.apiKey,
                endpoint=request.runningConfig.authorizationInfo.apiEndpoint or "https://api.lybic.cn/"
            )
        backend_kwargs["mode"] = request.runningConfig.mode

    platform_str = platform.system()
    sid = ''
    if backend == 'lybic':
        if request.HasField("sandbox"):
            shape = request.sandbox.shapeName
            sid = request.sandbox.id
            if sid:
                logger.info(f"Using existing sandbox with id: {sid}")
            else:
                sid, platform_str = await self._create_sandbox(shape)

            # An OS explicitly given in the request wins over detection.
            if request.sandbox.os != agent_pb2.SandboxOS.OSUNDEFINED:
                platform_str = platform_map.get(request.sandbox.os, platform.system())
        else:
            sid, platform_str = await self._create_sandbox(shape)

    else:
        platform_str = platform_map.get(request.sandbox.os, platform.system())

    backend_kwargs["platform"] = platform_str
    backend_kwargs["precreate_sid"] = sid

    # Add Lybic authorization info if available
    if backend == 'lybic':
        auth_info = (request.runningConfig.authorizationInfo
                     if request.HasField("runningConfig") and request.runningConfig.HasField("authorizationInfo")
                     else self.global_common_config.authorizationInfo)
        if not auth_info or not auth_info.orgID or not auth_info.apiKey:
            raise ValueError("Lybic backend requires valid authorization (orgID and apiKey)")
        if auth_info.orgID:
            backend_kwargs['org_id'] = auth_info.orgID
        if auth_info.apiKey:
            backend_kwargs['api_key'] = auth_info.apiKey
        if auth_info.apiEndpoint:
            backend_kwargs['endpoint'] = auth_info.apiEndpoint

    return backend_kwargs
|
+
async def _make_agent(self, request):
    """
    Build an AgentS2 for this request, overlaying per-request or global
    stage model settings onto the loaded tool configuration.

    Fixes:
    - The "Applying global model configurations" log line previously ran
      unconditionally (even when task-level config was chosen); it now
      only fires in the global-config branch.
    - 'endpoint_url' set by apply_config was never merged back into
      tools_config; it is now part of the sync key list.

    Parameters:
        request: gRPC request that may carry runningConfig.stageModelConfig,
            which takes precedence over the global common config.

    Returns:
        AgentS2: configured for the Windows sandbox at 1280x720, with
        takeover and search disabled.

    Raises:
        Exception: neither the request nor the global config contains a
        StageModelConfig.
    """
    # todo: add max_steps support (currently handled by the callers)
    tools_config, tools_dict = load_config()

    stage_config: StageModelConfig
    if request.HasField("runningConfig") and request.runningConfig.HasField("stageModelConfig"):
        stage_config = request.runningConfig.stageModelConfig
        logger.info("Applying task model configurations to this task.")
    elif self.global_common_config.HasField("stageModelConfig"):
        stage_config = self.global_common_config.stageModelConfig
        # BUGFIX: this log line used to run after the branch, even when
        # task-level configuration was selected above.
        logger.info("Applying global model configurations to this task.")
    else:
        raise Exception("No model configurations found.")

    def apply_config(tool_name, llm_config: LLMConfig):
        """Overlay provider/model/api_key/base_url from llm_config onto one tool entry."""
        if tool_name in tools_dict and llm_config.modelName:
            tool_cfg = tools_dict[tool_name]
            tool_cfg['provider'] = llm_config.provider
            tool_cfg['model_name'] = llm_config.modelName
            tool_cfg['model'] = llm_config.modelName

            # IMPORTANT Override api key and endpoint
            if llm_config.apiKey:
                tool_cfg['api_key'] = llm_config.apiKey
                logger.info(f"Override api_key for tool '{tool_name}'")
            if llm_config.apiEndpoint:
                tool_cfg['base_url'] = llm_config.apiEndpoint
                tool_cfg['endpoint_url'] = llm_config.apiEndpoint  # for some engines that use endpoint_url
                logger.info(f"Override base_url for tool '{tool_name}': {llm_config.apiEndpoint}")

            logger.info(f"Override tool '{tool_name}' with model '{llm_config.modelName}'.")

    if stage_config.HasField("embeddingModel"):
        apply_config('embedding', stage_config.embeddingModel)

    if stage_config.HasField("groundingModel"):
        apply_config('grounding', stage_config.groundingModel)

    if stage_config.HasField("actionGeneratorModel"):
        common_llm_config = stage_config.actionGeneratorModel
        # Apply common config to all other LLM-based tools
        for tool_name in tools_dict:
            if tool_name not in ['embedding', 'grounding']:
                apply_config(tool_name, common_llm_config)

    # Merge tools_dict changes back into tools_config.
    # BUGFIX: 'endpoint_url' added to the sync list so it survives the merge.
    for tool_entry in tools_config['tools']:
        tool_name = tool_entry['tool_name']
        if tool_name in tools_dict:
            modified_data = tools_dict[tool_name]
            for key, value in modified_data.items():
                if key in ['provider', 'model_name', 'api_key', 'base_url', 'endpoint_url', 'model']:
                    tool_entry[key] = value

    return AgentS2(
        platform="windows",  # Sandbox system
        screen_size=[1280, 720],
        enable_takeover=False,
        enable_search=False,
        tools_config=tools_config,
    )
|
+
async def RunAgentInstruction(self, request, context):
    """
    Start a new agent task and stream its TaskStream messages until it ends.

    Sets RESOURCE_EXHAUSTED when the concurrent-task limit is reached and
    INTERNAL on unexpected streaming errors; cancels the background task
    if the client cancels the stream.

    Yields:
        agent_pb2.TaskStream: messages with taskId, stage, message, timestamp.
    """
    task_id = str(uuid.uuid4())
    logger.info(f"Received RunAgentInstruction request, assigning taskId: {task_id}")

    task_future = None

    async with self.task_lock:
        # Capacity check and registration are done atomically under the lock.
        active_tasks = sum(1 for t in self.tasks.values() if t['status'] in ['pending', 'running'])
        if active_tasks >= self.max_concurrent_task_num:
            context.set_code(grpc.StatusCode.RESOURCE_EXHAUSTED)
            context.set_details(f"Max concurrent tasks ({self.max_concurrent_task_num}) reached.")
            return

        queue = asyncio.Queue()
        agent = await self._make_agent(request)
        backend_kwargs = await self._make_backend_kwargs(request)
        max_steps = 50
        if request.HasField("runningConfig") and request.runningConfig.steps:
            max_steps = request.runningConfig.steps

        self.tasks[task_id] = {
            "request": request,
            "status": "pending",
            "final_state": None,
            "queue": queue,
            "future": None,
            "query": request.instruction,
            "agent": agent,
            "max_steps": max_steps,
        }

        # _run_task re-acquires this lock itself, so it is created here but
        # only proceeds once this block releases it.
        task_future = asyncio.create_task(self._run_task(task_id, backend_kwargs))
        self.tasks[task_id]["future"] = task_future
    # Streaming happens outside the lock (holding it here would deadlock
    # with _run_task, which acquires the same lock on startup).
    try:
        async for msg in stream_manager.get_message_stream(task_id):
            yield agent_pb2.TaskStream(
                taskId=task_id,
                stage=msg.stage,
                message=msg.message,
                timestamp=msg.timestamp
            )
    except asyncio.CancelledError:
        logger.info(f"RunAgentInstruction stream for {task_id} cancelled by client.")
        if task_future:
            task_future.cancel()
    except Exception as e:
        logger.exception(f"Error in RunAgentInstruction stream for task {task_id}")
        context.set_code(grpc.StatusCode.INTERNAL)
        context.set_details(f"An error occurred during streaming: {e}")
|
+
async def RunAgentInstructionAsync(self, request, context):
    """
    Start an agent task in the background and return its taskId immediately.

    Sets RESOURCE_EXHAUSTED on the gRPC context (and returns nothing) when
    the configured concurrent-task limit has been reached.

    Returns:
        agent_pb2.RunAgentInstructionAsyncResponse: carries the generated taskId.
    """
    task_id = str(uuid.uuid4())
    logger.info(f"Received RunAgentInstructionAsync request, assigning taskId: {task_id}")

    async with self.task_lock:
        # Capacity check and registration are done atomically under the lock.
        active_tasks = sum(1 for t in self.tasks.values() if t['status'] in ['pending', 'running'])
        if active_tasks >= self.max_concurrent_task_num:
            context.set_code(grpc.StatusCode.RESOURCE_EXHAUSTED)
            context.set_details(f"Max concurrent tasks ({self.max_concurrent_task_num}) reached.")
            return

        agent = await self._make_agent(request=request)
        backend_kwargs = await self._make_backend_kwargs(request)
        max_steps = 50
        if request.HasField("runningConfig") and request.runningConfig.steps:
            max_steps = request.runningConfig.steps

        # Create queue for this task
        queue = asyncio.Queue()

        self.tasks[task_id] = {
            "request": request,
            "status": "pending",
            "final_state": None,
            "queue": queue,
            "future": None,
            "query": request.instruction,
            "agent": agent,
            "max_steps": max_steps,
        }

        # Start the task in background; _run_task waits on this same lock
        # before touching self.tasks, so creating it here is safe.
        task_future = asyncio.create_task(self._run_task(task_id, backend_kwargs))

        self.tasks[task_id]["future"] = task_future

    return agent_pb2.RunAgentInstructionAsyncResponse(taskId=task_id)
|
+
async def QueryTaskStatus(self, request, context):
    """
    Report the current status of one task.

    Unknown ids yield TaskStatus.NOT_FOUND. Internal states map as:
    "finished" -> SUCCESS (message includes final_state when set),
    "error" -> FAILURE, and pending/running -> their direct equivalents.

    Returns:
        agent_pb2.QueryTaskStatusResponse: taskId, mapped status, a short
        message, an (empty) result string, and the sandbox echoed from
        the original request.
    """
    task_id = request.taskId

    async with self.task_lock:
        task_info = self.tasks.get(task_id)

        if not task_info:
            return agent_pb2.QueryTaskStatusResponse(
                taskId=task_id,
                status=agent_pb2.TaskStatus.NOT_FOUND,
                message=f"Task with ID {task_id} not found."
            )

        status = task_info["status"]
        final_state = task_info.get("final_state")

        status_map = {
            "pending": agent_pb2.TaskStatus.PENDING,
            "running": agent_pb2.TaskStatus.RUNNING,
            "fulfilled": agent_pb2.TaskStatus.SUCCESS,
            "rejected": agent_pb2.TaskStatus.FAILURE,
        }

        result = ""
        if status == "finished":
            task_status = agent_pb2.TaskStatus.SUCCESS
            message = f"Task finished with status: {final_state}" if final_state else "Task finished."
        elif status == "error":
            task_status = agent_pb2.TaskStatus.FAILURE
            message = "Task failed with an exception."
        else:
            # pending or running
            task_status = status_map.get(status, agent_pb2.TaskStatus.TASKSTATUSUNDEFINED)
            message = "Task is running."

        return agent_pb2.QueryTaskStatusResponse(
            taskId=task_id,
            status=task_status,
            message=message,
            result=result,
            sandbox=task_info["request"].sandbox
        )
|
+
def _mask_config_secrets(self, config: CommonConfig) -> CommonConfig:
    """
    Deep-copy a CommonConfig and replace its API keys with "********".

    Masks authorizationInfo.apiKey plus the apiKey of every LLMConfig
    field found inside stageModelConfig.

    Parameters:
        config (CommonConfig): configuration that may carry secrets.

    Returns:
        CommonConfig: a copy with discovered API keys masked.
    """
    masked = CommonConfig()
    masked.CopyFrom(config)

    if masked.HasField("authorizationInfo") and masked.authorizationInfo.apiKey:
        masked.authorizationInfo.apiKey = "********"

    if masked.HasField("stageModelConfig"):
        stage = masked.stageModelConfig

        # Every LLMConfig-typed field of StageModelConfig (names mirror the
        # proto, including the "memoryRetrival" spelling).
        llm_fields = (
            "contextFusionModel", "subtaskPlannerModel", "trajReflectorModel",
            "memoryRetrivalModel", "groundingModel", "taskEvaluatorModel",
            "actionGeneratorModel", "actionGeneratorWithTakeoverModel",
            "fastActionGeneratorModel", "fastActionGeneratorWithTakeoverModel",
            "dagTranslatorModel", "embeddingModel", "queryFormulatorModel",
            "narrativeSummarizationModel", "textSpanModel", "episodeSummarizationModel",
        )

        for name in llm_fields:
            if stage.HasField(name):
                cfg = getattr(stage, name)
                if cfg and cfg.apiKey:
                    cfg.apiKey = "********"

    return masked
|
+
def _mask_llm_config_secrets(self, llm_config: LLMConfig) -> LLMConfig:
    """
    Return a copy of llm_config whose apiKey, when set, is replaced by "********".

    Parameters:
        llm_config (LLMConfig): original configuration to mask.

    Returns:
        LLMConfig: masked copy; the input is left untouched.
    """
    masked = LLMConfig()
    masked.CopyFrom(llm_config)
    if masked.apiKey:
        masked.apiKey = "********"
    return masked
|
+
async def GetGlobalCommonConfig(self, request, context):
    """
    Return the global common configuration with secrets masked.

    Returns:
        CommonConfig: deep copy of the global config where sensitive
        values (API keys) are replaced with asterisks.
    """
    masked = self._mask_config_secrets(self.global_common_config)
    logger.debug("Returned masked global common config")
    return masked
|
+
async def GetCommonConfig(self, request, context):
    """
    Fetch the masked CommonConfig saved for a task id, or the global
    config when request.id == "global".

    Parameters:
        request: RPC request carrying `id` of the task (or "global").
        context: gRPC context; set to NOT_FOUND for unknown task ids.

    Returns:
        agent_pb2.CommonConfig: masked copy of the task's config, or an
        empty CommonConfig (with NOT_FOUND set) when the id is unknown.
    """
    async with self.task_lock:
        if request.id == "global":
            return await self.GetGlobalCommonConfig(request, context)

        task_info = self.tasks.get(request.id)
        if task_info and task_info.get("request"):
            saved_request = task_info["request"]
            if saved_request.HasField("runningConfig"):
                masked = self._mask_config_secrets(saved_request.runningConfig)
            else:
                masked = agent_pb2.CommonConfig(id=request.id)

            logger.debug(f"Returned masked config for task {request.id}")
            return masked

    context.set_code(grpc.StatusCode.NOT_FOUND)
    context.set_details(f"Config for task {request.id} not found.")
    return agent_pb2.CommonConfig()
|
+
async def _new_lybic_client(self):
    """
    Rebuild the Lybic client from the servicer's current credentials.

    Constructs a fresh LybicClient from `self.lybic_auth` and stores it on
    `self.lybic_client`, discarding any previously held client instance.
    """
    fresh_client = LybicClient(self.lybic_auth)
    self.lybic_client = fresh_client
|
|
626
|
+
|
|
627
|
+
async def SetGlobalCommonConfig(self, request, context):
    """
    Install the server-wide common configuration and refresh Lybic credentials.

    Forces the incoming config's id to "global" before storing it as
    `self.global_common_config`. When the config carries authorizationInfo,
    `self.lybic_auth` is rebuilt from its org id, API key, and endpoint
    (falling back to "https://api.lybic.cn/" when the endpoint is empty).

    Parameters:
        request: gRPC request whose `commonConfig` becomes the global config.
        context: gRPC context (unused).

    Returns:
        agent_pb2.SetCommonConfigResponse: success flag plus the stored id.
    """
    logger.info("Setting new global common config.")
    incoming = request.commonConfig
    incoming.id = "global"
    self.global_common_config = incoming

    if incoming.HasField("authorizationInfo"):  # Lybic credentials supplied
        auth_info = incoming.authorizationInfo
        # Empty endpoint string falls back to the public Lybic API.
        endpoint = auth_info.apiEndpoint if auth_info.apiEndpoint else "https://api.lybic.cn/"
        self.lybic_auth = LybicAuth(
            org_id=auth_info.orgID,
            api_key=auth_info.apiKey,
            endpoint=endpoint,
        )

    return agent_pb2.SetCommonConfigResponse(success=True, id=incoming.id)
|
|
651
|
+
|
|
652
|
+
async def SetGlobalCommonLLMConfig(self, request, context):
    """
    Store the request's LLM config as the global action-generator model.

    Materializes `stageModelConfig` on the global common config when absent,
    then copies `request.llmConfig` into its `actionGeneratorModel` field.

    Returns:
        The `LLMConfig` message that was applied.
    """
    stage_cfg = self.global_common_config.stageModelConfig
    if not self.global_common_config.HasField("stageModelConfig"):
        # Mark the submessage present so the copy lands in a real field.
        stage_cfg.SetInParent()
    stage_cfg.actionGeneratorModel.CopyFrom(request.llmConfig)
    logger.info(f"Global common LLM config updated to: {request.llmConfig.modelName}")
    return request.llmConfig
|
|
666
|
+
|
|
667
|
+
async def SetGlobalGroundingLLMConfig(self, request, context):
    """
    Store the request's LLM config as the global grounding model.

    Materializes `stageModelConfig` on the global common config when absent,
    then copies `request.llmConfig` into its `groundingModel` field.

    Parameters:
        request: RPC request carrying the `llmConfig` to apply.
        context: gRPC context (unused).

    Returns:
        The `LLMConfig` message that was applied.
    """
    stage_cfg = self.global_common_config.stageModelConfig
    if not self.global_common_config.HasField("stageModelConfig"):
        # Mark the submessage present so the copy lands in a real field.
        stage_cfg.SetInParent()
    stage_cfg.groundingModel.CopyFrom(request.llmConfig)
    logger.info(f"Global grounding LLM config updated to: {request.llmConfig.modelName}")
    return request.llmConfig
|
|
686
|
+
|
|
687
|
+
async def SetGlobalEmbeddingLLMConfig(self, request, context):
    """
    Store the request's LLM config as the global embedding model.

    Materializes `stageModelConfig` on the global common config when absent,
    then copies `request.llmConfig` into its `embeddingModel` field.

    Parameters:
        request: RPC request carrying the `llmConfig` to apply.

    Returns:
        The `LLMConfig` message that was applied.
    """
    stage_cfg = self.global_common_config.stageModelConfig
    if not self.global_common_config.HasField("stageModelConfig"):
        # Mark the submessage present so the copy lands in a real field.
        stage_cfg.SetInParent()
    stage_cfg.embeddingModel.CopyFrom(request.llmConfig)
    logger.info(f"Global embedding LLM config updated to: {request.llmConfig.modelName}")
    return request.llmConfig
|
|
702
|
+
|
|
703
|
+
async def _create_sandbox(self, shape: str):
    """
    Create a Lybic sandbox of the requested shape and report its id and OS.

    A fresh LybicClient is built from the current credentials on every call;
    the Sandbox helper is created lazily on first use.

    Parameters:
        shape (str): Provider-specific sandbox shape (size/OS configuration).

    Returns:
        tuple: (sandbox_id, platform_os) for the newly created sandbox.

    Raises:
        Exception: When Lybic authorization has not been configured yet
            (SetGlobalCommonConfig must run first).
    """
    if not self.lybic_auth:
        raise Exception("Lybic client not initialized. Please call SetGlobalCommonConfig before")

    await self._new_lybic_client()
    if not self.sandbox:
        self.sandbox = Sandbox(self.lybic_client)

    created = await self.sandbox.create(shape=shape)
    # Re-fetch to obtain the full sandbox record, including its OS.
    details = await self.sandbox.get(created.id)
    box = details.sandbox
    return box.id, box.shape.os
|
|
725
|
+
|
|
726
|
+
async def serve():
    """
    Start the Agent gRPC server and block until it terminates.

    Builds an aio gRPC server backed by a thread pool, registers the
    AgentServicer, points the stream_manager at the running event loop,
    then starts the server and awaits shutdown.

    Environment variables:
        GRPC_PORT: port to listen on (default "50051").
        GRPC_MAX_WORKER_THREADS: thread-pool size for the server (default "100").
    """
    # Normalize both env values to int so `port` has a consistent type
    # regardless of whether the variable is set (env values are strings).
    port = int(os.environ.get("GRPC_PORT", "50051"))
    max_workers = int(os.environ.get("GRPC_MAX_WORKER_THREADS", "100"))
    # task_num = int(os.environ.get("TASK_MAX_TASKS", 5))
    servicer = AgentServicer(max_concurrent_task_num=1)
    server = grpc.aio.server(futures.ThreadPoolExecutor(max_workers=max_workers))
    agent_pb2_grpc.add_AgentServicer_to_server(servicer, server)
    server.add_insecure_port(f'[::]:{port}')
    logger.info(f"Agent gRPC server started on port {port}")

    # Hand the running loop to the stream manager so it can schedule coroutines.
    stream_manager.set_loop(asyncio.get_running_loop())

    await server.start()
    await server.wait_for_termination()
|
|
749
|
+
|
|
750
|
+
def main():
    """Entry point: validate the backend, prepare run directories, register the global state store, and launch the gRPC server."""
    has_display, pyautogui_available, _ = app.check_display_environment()
    ok_backends, bad_backends = app.get_compatible_backends(has_display, pyautogui_available)
    app.validate_backend_compatibility('lybic', ok_backends, bad_backends)

    timestamp_dir = os.path.join(app.log_dir, app.datetime_str)
    cache_dir = os.path.join(timestamp_dir, "cache", "screens")
    state_dir = os.path.join(timestamp_dir, "state")
    for directory in (cache_dir, state_dir):
        os.makedirs(directory, exist_ok=True)

    def in_state(filename):
        # Resolve a JSON state file inside this run's state directory.
        return os.path.join(state_dir, filename)

    Registry.register(
        "GlobalStateStore",
        GlobalState(
            screenshot_dir=cache_dir,
            tu_path=in_state("tu.json"),
            search_query_path=in_state("search_query.json"),
            completed_subtasks_path=in_state("completed_subtasks.json"),
            failed_subtasks_path=in_state("failed_subtasks.json"),
            remaining_subtasks_path=in_state("remaining_subtasks.json"),
            termination_flag_path=in_state("termination_flag.json"),
            running_state_path=in_state("running_state.json"),
            display_info_path=os.path.join(timestamp_dir, "display.json"),
            agent_log_path=os.path.join(timestamp_dir, "agent_log.json"),
        )
    )
    asyncio.run(serve())
|
|
782
|
+
|
|
783
|
+
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
|