openhands-agent-server 1.3.0__tar.gz → 1.8.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/PKG-INFO +2 -1
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/api.py +64 -25
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/config.py +4 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/conversation_router.py +19 -7
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/conversation_service.py +97 -8
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/docker/Dockerfile +16 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/docker/build.py +34 -12
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/env_parser.py +34 -4
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/event_service.py +156 -12
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/logging_config.py +4 -46
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/models.py +48 -3
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/sockets.py +7 -18
- openhands_agent_server-1.8.1/openhands/agent_server/tool_preload_service.py +76 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/tool_router.py +2 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/PKG-INFO +2 -1
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/SOURCES.txt +2 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/requires.txt +1 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/pyproject.toml +2 -1
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/__init__.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/__main__.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/bash_router.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/bash_service.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/dependencies.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/desktop_router.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/desktop_service.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/docker/wallpaper.svg +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/event_router.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/file_router.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/git_router.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/middleware.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/openapi.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/pub_sub.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/py.typed +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/server_details_router.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/utils.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_extensions/openhands-settings/extension.js +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_extensions/openhands-settings/package.json +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_router.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_service.py +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/dependency_links.txt +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/entry_points.txt +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/top_level.txt +0 -0
- {openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/setup.cfg +0 -0
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/PKG-INFO RENAMED
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: openhands-agent-server
-Version: 1.3.0
+Version: 1.8.1
 Summary: OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent
 Requires-Python: >=3.12
 Requires-Dist: aiosqlite>=0.19
 Requires-Dist: alembic>=1.13
 Requires-Dist: docker<8,>=7.1
 Requires-Dist: fastapi>=0.104
+Requires-Dist: openhands-sdk
 Requires-Dist: pydantic>=2
 Requires-Dist: sqlalchemy>=2
 Requires-Dist: uvicorn>=0.31.1
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/api.py RENAMED
@@ -1,3 +1,4 @@
+import asyncio
 import traceback
 from collections.abc import AsyncIterator
 from contextlib import asynccontextmanager
@@ -28,6 +29,7 @@ from openhands.agent_server.server_details_router import (
     server_details_router,
 )
 from openhands.agent_server.sockets import sockets_router
+from openhands.agent_server.tool_preload_service import get_tool_preload_service
 from openhands.agent_server.tool_router import tool_router
 from openhands.agent_server.vscode_router import vscode_router
 from openhands.agent_server.vscode_service import get_vscode_service
@@ -42,28 +44,50 @@ async def api_lifespan(api: FastAPI) -> AsyncIterator[None]:
     service = get_default_conversation_service()
     vscode_service = get_vscode_service()
     desktop_service = get_desktop_service()
-    …
-    if …
-    …
+    tool_preload_service = get_tool_preload_service()
+
+    # Define async functions for starting each service
+    async def start_vscode_service():
+        if vscode_service is not None:
+            vscode_started = await vscode_service.start()
+            if vscode_started:
+                logger.info("VSCode service started successfully")
+            else:
+                logger.warning(
+                    "VSCode service failed to start, continuing without VSCode"
+                )
         else:
-            logger.…
-            …
+            logger.info("VSCode service is disabled")
+
+    async def start_desktop_service():
+        if desktop_service is not None:
+            desktop_started = await desktop_service.start()
+            if desktop_started:
+                logger.info("Desktop service started successfully")
+            else:
+                logger.warning(
+                    "Desktop service failed to start, continuing without desktop"
+                )
         else:
-            logger.…
-            …
+            logger.info("Desktop service is disabled")
+
+    async def start_tool_preload_service():
+        if tool_preload_service is not None:
+            tool_preload_started = await tool_preload_service.start()
+            if tool_preload_started:
+                logger.info("Tool preload service started successfully")
+            else:
+                logger.warning("Tool preload service failed to start - skipping")
+        else:
+            logger.info("Tool preload service is disabled")
+
+    # Start all services concurrently
+    await asyncio.gather(
+        start_vscode_service(),
+        start_desktop_service(),
+        start_tool_preload_service(),
+        return_exceptions=True,
+    )
 
     async with service:
         # Store the initialized service in app state for dependency injection
@@ -71,11 +95,26 @@ async def api_lifespan(api: FastAPI) -> AsyncIterator[None]:
         try:
             yield
         finally:
-            # …
-            …
+            # Define async functions for stopping each service
+            async def stop_vscode_service():
+                if vscode_service is not None:
+                    await vscode_service.stop()
+
+            async def stop_desktop_service():
+                if desktop_service is not None:
+                    await desktop_service.stop()
+
+            async def stop_tool_preload_service():
+                if tool_preload_service is not None:
+                    await tool_preload_service.stop()
+
+            # Stop all services concurrently
+            await asyncio.gather(
+                stop_vscode_service(),
+                stop_desktop_service(),
+                stop_tool_preload_service(),
+                return_exceptions=True,
+            )
 
 
 def _create_fastapi_instance() -> FastAPI:
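The lifespan hook above now brings up the optional VSCode, desktop, and tool-preload services concurrently rather than one after another, and `return_exceptions=True` keeps one failing service from aborting the others. A minimal, self-contained sketch of that startup pattern (the service names here are placeholders, not the real agent-server services):

```python
import asyncio


async def start_service(name: str) -> bool:
    # Stand-in for vscode_service.start() / desktop_service.start() / preload.start()
    await asyncio.sleep(0)
    return True


async def startup() -> None:
    # return_exceptions=True collects failures instead of cancelling the siblings.
    results = await asyncio.gather(
        start_service("vscode"),
        start_service("desktop"),
        start_service("tool-preload"),
        return_exceptions=True,
    )
    for name, result in zip(["vscode", "desktop", "tool-preload"], results):
        if isinstance(result, Exception):
            print(f"{name} failed to start: {result}")


asyncio.run(startup())
```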
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/config.py RENAMED
@@ -126,6 +126,10 @@ class Config(BaseModel):
         default=False,
         description="Whether to enable VNC desktop functionality",
     )
+    preload_tools: bool = Field(
+        default=True,
+        description="Whether to preload tools",
+    )
     secret_key: SecretStr | None = Field(
         default_factory=_default_secret_key,
         description=(
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/conversation_router.py RENAMED
@@ -24,9 +24,10 @@ from openhands.agent_server.models import (
     UpdateConversationRequest,
     UpdateSecretsRequest,
 )
-from openhands.sdk import LLM, Agent, TextContent…
+from openhands.sdk import LLM, Agent, TextContent
 from openhands.sdk.conversation.state import ConversationExecutionStatus
 from openhands.sdk.workspace import LocalWorkspace
+from openhands.tools.preset.default import get_default_tools
 
 
 conversation_router = APIRouter(prefix="/conversations", tags=["Conversations"])
@@ -41,17 +42,13 @@ START_CONVERSATION_EXAMPLES = [
             model="your-model-provider/your-model-name",
             api_key=SecretStr("your-api-key-here"),
         ),
-        tools=[
-            Tool(name="TerminalTool"),
-            Tool(name="FileEditorTool"),
-            Tool(name="TaskTrackerTool"),
-        ],
+        tools=get_default_tools(enable_browser=True),
     ),
     workspace=LocalWorkspace(working_dir="workspace/project"),
     initial_message=SendMessageRequest(
         role="user", content=[TextContent(text="Flip a coin!")]
     ),
-).model_dump(exclude_defaults=True)
+).model_dump(exclude_defaults=True, mode="json")
 ]
 
 
@@ -307,3 +304,18 @@ async def ask_agent(
     if response is None:
         raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
     return AskAgentResponse(response=response)
+
+
+@conversation_router.post(
+    "/{conversation_id}/condense",
+    responses={404: {"description": "Item not found"}},
+)
+async def condense_conversation(
+    conversation_id: UUID,
+    conversation_service: ConversationService = Depends(get_conversation_service),
+) -> Success:
+    """Force condensation of the conversation history."""
+    success = await conversation_service.condense(conversation_id)
+    if not success:
+        raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Conversation not found")
+    return Success()
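For reference, a client can trigger the new condensation route with a plain HTTP POST. The sketch below assumes a server reachable at http://localhost:8000 with the conversation router mounted at the prefix shown above; the host, port, and UUID are placeholders, only the route path comes from the diff:

```python
import httpx

conversation_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
# Path from the router above: prefix "/conversations" + "/{conversation_id}/condense".
resp = httpx.post(f"http://localhost:8000/conversations/{conversation_id}/condense")
if resp.status_code == 404:
    print("Conversation not found")
else:
    resp.raise_for_status()
    print(resp.json())  # Success payload
```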
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/conversation_service.py RENAMED
@@ -1,4 +1,5 @@
 import asyncio
+import importlib
 import logging
 from dataclasses import dataclass, field
 from pathlib import Path
@@ -165,14 +166,30 @@ class ConversationService:
         if not self._conversation_webhook_subscribers:
             return
 
-        # Send notifications to all conversation webhook subscribers
-        …
+        # Send notifications to all conversation webhook subscribers in the background
+        async def _notify_and_log_errors():
+            results = await asyncio.gather(
+                *[
+                    subscriber.post_conversation_info(conversation_info)
+                    for subscriber in self._conversation_webhook_subscribers
+                ],
+                return_exceptions=True,  # Don't fail if one webhook fails
+            )
+
+            # Log any exceptions that occurred
+            for i, result in enumerate(results):
+                if isinstance(result, Exception):
+                    subscriber = self._conversation_webhook_subscribers[i]
+                    logger.error(
+                        (
+                            f"Failed to notify conversation webhook "
+                            f"{subscriber.spec.base_url}: {result}"
+                        ),
+                        exc_info=result,
+                    )
+
+        # Create task to run in background without awaiting
+        asyncio.create_task(_notify_and_log_errors())
 
     # Write Methods
 
@@ -192,6 +209,32 @@ class ConversationService:
             )
             return conversation_info, False
 
+        # Dynamically register tools from client's registry
+        if request.tool_module_qualnames:
+            import importlib
+
+            for tool_name, module_qualname in request.tool_module_qualnames.items():
+                try:
+                    # Import the module to trigger tool auto-registration
+                    importlib.import_module(module_qualname)
+                    logger.debug(
+                        f"Tool '{tool_name}' registered via module '{module_qualname}'"
+                    )
+                except ImportError as e:
+                    logger.warning(
+                        f"Failed to import module '{module_qualname}' for tool "
+                        f"'{tool_name}': {e}. Tool will not be available."
+                    )
+                    # Continue even if some tools fail to register
+                    # The agent will fail gracefully if it tries to use unregistered
+                    # tools
+        if request.tool_module_qualnames:
+            logger.info(
+                f"Dynamically registered {len(request.tool_module_qualnames)} "
+                f"tools for conversation {conversation_id}: "
+                f"{list(request.tool_module_qualnames.keys())}"
+            )
+
         stored = StoredConversation(id=conversation_id, **request.model_dump())
         event_service = await self._start_event_service(stored)
         initial_message = request.initial_message
@@ -240,6 +283,9 @@ class ConversationService:
                 conversation_info = _compose_conversation_info(
                     event_service.stored, state
                 )
+                conversation_info.execution_status = (
+                    ConversationExecutionStatus.DELETING
+                )
                 await self._notify_conversation_webhooks(conversation_info)
             except Exception as e:
                 logger.warning(
@@ -332,6 +378,18 @@ class ConversationService:
         response = await event_service.ask_agent(question)
         return response
 
+    async def condense(self, conversation_id: UUID) -> bool:
+        """Force condensation of the conversation history."""
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service is None:
+            return False
+
+        # Delegate to EventService to avoid accessing private conversation internals
+        await event_service.condense()
+        return True
+
     async def __aenter__(self):
         self.conversations_dir.mkdir(parents=True, exist_ok=True)
         self._event_services = {}
@@ -347,6 +405,34 @@ class ConversationService:
                         "cipher": self.cipher,
                     },
                 )
+                # Dynamically register tools when resuming persisted conversations
+                if stored.tool_module_qualnames:
+                    for (
+                        tool_name,
+                        module_qualname,
+                    ) in stored.tool_module_qualnames.items():
+                        try:
+                            # Import the module to trigger tool auto-registration
+                            importlib.import_module(module_qualname)
+                            logger.debug(
+                                f"Tool '{tool_name}' registered via module "
+                                f"'{module_qualname}' when resuming conversation "
+                                f"{stored.id}"
+                            )
+                        except ImportError as e:
+                            logger.warning(
+                                f"Failed to import module '{module_qualname}' for "
+                                f"tool '{tool_name}' when resuming conversation "
+                                f"{stored.id}: {e}. Tool will not be available."
+                            )
+                            # Continue even if some tools fail to register
+                if stored.tool_module_qualnames:
+                    logger.info(
+                        f"Dynamically registered "
+                        f"{len(stored.tool_module_qualnames)} tools when "
+                        f"resuming conversation {stored.id}: "
+                        f"{list(stored.tool_module_qualnames.keys())}"
+                    )
                 await self._start_event_service(stored)
             except Exception:
                 logger.exception(
@@ -416,6 +502,9 @@ class ConversationService:
 
         try:
             await event_service.start()
+            # Save metadata immediately after successful start to ensure persistence
+            # even if the system is not shut down gracefully
+            await event_service.save_meta()
         except Exception:
             # Clean up the event service if startup fails
             await event_service.close()
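The dynamic registration above relies on import side effects: importing a tool's module is enough to add it to the SDK registry, and a failed import only logs a warning. A standalone sketch of that import-for-side-effect loop (the module qualname below is a hypothetical example, not a real package):

```python
import importlib
import logging

logger = logging.getLogger(__name__)

tool_module_qualnames = {
    "MyCustomTool": "my_package.my_custom_tool",  # hypothetical client-supplied entry
}

for tool_name, module_qualname in tool_module_qualnames.items():
    try:
        importlib.import_module(module_qualname)  # registration happens at import time
        logger.debug("Tool %r registered via %r", tool_name, module_qualname)
    except ImportError as exc:
        # Mirror the server behaviour: warn and keep going with the other tools.
        logger.warning("Could not import %r for tool %r: %s", module_qualname, tool_name, exc)
```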
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/docker/Dockerfile RENAMED
@@ -156,6 +156,18 @@ RUN set -eux; \
 RUN mkdir -p /etc/docker && \
     echo '{"mtu": 1450}' > /etc/docker/daemon.json
 
+# --- GitHub CLI ---
+RUN set -eux; \
+    mkdir -p -m 755 /etc/apt/keyrings; \
+    wget -nv -O /etc/apt/keyrings/githubcli-archive-keyring.gpg \
+        https://cli.github.com/packages/githubcli-archive-keyring.gpg; \
+    chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg; \
+    echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \
+        > /etc/apt/sources.list.d/github-cli.list; \
+    apt-get update; \
+    apt-get install -y gh; \
+    apt-get clean; \
+    rm -rf /var/lib/apt/lists/*
 
 # --- VNC + Desktop + noVNC ---
 RUN set -eux; \
@@ -218,10 +230,14 @@ ARG USERNAME
 
 COPY --chown=${USERNAME}:${USERNAME} --from=binary-builder /agent-server/dist/openhands-agent-server /usr/local/bin/openhands-agent-server
 RUN chmod +x /usr/local/bin/openhands-agent-server
+# Fix library path to use system GCC libraries instead of bundled ones
+ENV LD_LIBRARY_PATH=/usr/lib/aarch64-linux-gnu:/usr/lib:/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
 ENTRYPOINT ["/usr/local/bin/openhands-agent-server"]
 
 FROM base-image-minimal AS binary-minimal
 ARG USERNAME
 COPY --chown=${USERNAME}:${USERNAME} --from=binary-builder /agent-server/dist/openhands-agent-server /usr/local/bin/openhands-agent-server
 RUN chmod +x /usr/local/bin/openhands-agent-server
+# Fix library path to use system GCC libraries instead of bundled ones
+ENV LD_LIBRARY_PATH=/usr/lib/aarch64-linux-gnu:/usr/lib:/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
 ENTRYPOINT ["/usr/local/bin/openhands-agent-server"]
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/docker/build.py RENAMED
@@ -4,7 +4,7 @@ Single-entry build helper for agent-server images.
 
 - Targets: binary | binary-minimal | source | source-minimal
 - Multi-tagging via CUSTOM_TAGS (comma-separated)
-- Versioned …
+- Versioned tags for custom tags: {SDK_VERSION}-{CUSTOM_TAG}
 - Branch-scoped cache keys
 - CI (push) vs local (load) behavior
 - sdist-based builds: Uses `uv build` to create clean build contexts
@@ -26,18 +26,16 @@ import threading
 import tomllib
 from contextlib import chdir
 from pathlib import Path
-from typing import Literal
 
 from pydantic import BaseModel, Field, field_validator
 
 from openhands.sdk.logger import IN_CI, get_logger, rolling_log_view
+from openhands.sdk.workspace import PlatformType, TargetType
 
 
 logger = get_logger(__name__)
 
 VALID_TARGETS = {"binary", "binary-minimal", "source", "source-minimal"}
-TargetType = Literal["binary", "binary-minimal", "source", "source-minimal"]
-PlatformType = Literal["linux/amd64", "linux/arm64"]
 
 
 # --- helpers ---
@@ -379,8 +377,12 @@ class BuildOptions(BaseModel):
         return _base_slug(self.base_image)
 
     @property
-    def …
-    …
+    def versioned_tags(self) -> list[str]:
+        """
+        Generate simple version tags for each custom tag variant.
+        Returns tags like: 1.2.0-python, 1.2.0-java, 1.2.0-golang
+        """
+        return [f"{self.sdk_version}-{t}" for t in self.custom_tag_list]
 
     @property
     def base_tag(self) -> str:
@@ -412,7 +414,8 @@ class BuildOptions(BaseModel):
         if self.include_base_tag:
             tags.append(f"{self.image}:{self.base_tag}{arch_suffix}")
         if self.include_versioned_tag:
-            …
+            for versioned_tag in self.versioned_tags:
+                tags.append(f"{self.image}:{versioned_tag}{arch_suffix}")
 
         # Append target suffix for clarity (binary is default, no suffix needed)
         if self.target != "binary":
@@ -734,7 +737,11 @@ def main(argv: list[str]) -> int:
             fh.write(f"build_context={ctx}\n")
             fh.write(f"dockerfile={ctx / 'Dockerfile'}\n")
             fh.write(f"tags_csv={','.join(opts.all_tags)}\n")
-            …
+            # Only output versioned tags if they're being used
+            if opts.include_versioned_tag:
+                fh.write(f"versioned_tags_csv={','.join(opts.versioned_tags)}\n")
+            else:
+                fh.write("versioned_tags_csv=\n")
             fh.write(f"base_image_slug={opts.base_image_slug}\n")
         logger.info("[build] Wrote outputs to $GITHUB_OUTPUT")
 
@@ -773,13 +780,18 @@ def main(argv: list[str]) -> int:
 
     # --- expose outputs for GitHub Actions ---
     def _write_gha_outputs(
-        image: str,…
+        image: str,
+        short_sha: str,
+        versioned_tags: list[str],
+        tags_list: list[str],
+        include_versioned_tag: bool,
     ) -> None:
         """
        If running in GitHub Actions, append step outputs to $GITHUB_OUTPUT.
        - image: repo/name (no tag)
        - short_sha: 7-char SHA
-        - …
+        - versioned_tags_csv: comma-separated list of versioned tags
+          (empty if not enabled)
        - tags: multiline output (one per line)
        - tags_csv: single-line, comma-separated
        """
@@ -789,13 +801,23 @@ def main(argv: list[str]) -> int:
         with open(out_path, "a", encoding="utf-8") as fh:
             fh.write(f"image={image}\n")
             fh.write(f"short_sha={short_sha}\n")
-            …
+            # Only output versioned tags if they're being used
+            if include_versioned_tag:
+                fh.write(f"versioned_tags_csv={','.join(versioned_tags)}\n")
+            else:
+                fh.write("versioned_tags_csv=\n")
             fh.write(f"tags_csv={','.join(tags_list)}\n")
             fh.write("tags<<EOF\n")
             fh.write("\n".join(tags_list) + "\n")
             fh.write("EOF\n")
 
-    _write_gha_outputs(…
+    _write_gha_outputs(
+        opts.image,
+        opts.short_sha,
+        opts.versioned_tags,
+        tags,
+        opts.include_versioned_tag,
+    )
     return 0
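The new `versioned_tags` property simply pairs the SDK version with each custom tag. A standalone sketch with example values (the image name and tags are illustrative):

```python
sdk_version = "1.8.1"
custom_tag_list = ["python", "java", "golang"]
image = "ghcr.io/example/agent-server"  # hypothetical image name

versioned_tags = [f"{sdk_version}-{tag}" for tag in custom_tag_list]
print([f"{image}:{tag}" for tag in versioned_tags])
# ['ghcr.io/example/agent-server:1.8.1-python',
#  'ghcr.io/example/agent-server:1.8.1-java',
#  'ghcr.io/example/agent-server:1.8.1-golang']
```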
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/env_parser.py RENAMED
@@ -17,7 +17,10 @@ from uuid import UUID
 
 from pydantic import BaseModel, SecretStr, TypeAdapter
 
-from openhands.sdk.utils.models import …
+from openhands.sdk.utils.models import (
+    DiscriminatedUnionMixin,
+    get_known_concrete_subclasses,
+)
 
 
 # Define Missing type
@@ -268,6 +271,26 @@ class UnionEnvParser(EnvParser):
         output.write("\n")
 
 
+@dataclass
+class DiscriminatedUnionEnvParser(EnvParser):
+    parsers: dict[str, EnvParser]
+
+    def from_env(self, key: str) -> JsonType:
+        kind = os.environ.get(f"{key}_KIND", MISSING)
+        if kind is MISSING:
+            return MISSING
+        assert isinstance(kind, str)
+        parser = self.parsers[kind]
+        parser_result = parser.from_env(key)
+        assert isinstance(parser_result, dict)
+        parser_result["kind"] = kind
+        return parser_result
+
+    def to_env(self, key: str, value: Any, output: IO):
+        parser = self.parsers[value.kind]
+        parser.to_env(key, value, output)
+
+
 @dataclass
 class DelayedParser(EnvParser):
     """Delayed parser for circular dependencies"""
@@ -341,9 +364,16 @@ def get_env_parser(target_type: type, parsers: dict[type, EnvParser]) -> EnvParser:
     if issubclass(target_type, DiscriminatedUnionMixin) and (
         inspect.isabstract(target_type) or ABC in target_type.__bases__
     ):
-        …
+        delayed = DelayedParser()
+        parsers[target_type] = delayed  # Prevent circular dependency
+        sub_parsers = {
+            c.__name__: get_env_parser(c, parsers)
+            for c in get_known_concrete_subclasses(target_type)
+        }
+        parser = DiscriminatedUnionEnvParser(sub_parsers)
+        delayed.parser = parser
+        parsers[target_type] = parser
+        return parser
     if issubclass(target_type, BaseModel):  # type: ignore
         delayed = DelayedParser()
         parsers[target_type] = delayed  # Prevent circular dependency
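The new `DiscriminatedUnionEnvParser` picks the concrete subclass from a companion `<KEY>_KIND` environment variable, delegates the remaining fields to that subclass's parser, and injects the discriminator back into the result. A hedged sketch of the environment layout this implies (the variable and kind names below are illustrative only, not the agent server's actual configuration keys):

```python
import os

os.environ["ANALYZER_KIND"] = "ExampleAnalyzer"   # selects the concrete subclass
os.environ["ANALYZER_THRESHOLD"] = "0.5"          # a field of that subclass


def read_discriminated(key: str) -> dict | None:
    kind = os.environ.get(f"{key}_KIND")
    if kind is None:
        return None  # corresponds to the MISSING sentinel above
    # A real sub-parser would know the subclass's fields; we fake one field here.
    result = {"threshold": float(os.environ[f"{key}_THRESHOLD"])}
    result["kind"] = kind  # the discriminator is merged back into the payload
    return result


print(read_discriminated("ANALYZER"))
# {'threshold': 0.5, 'kind': 'ExampleAnalyzer'}
```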
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/event_service.py RENAMED
@@ -19,6 +19,7 @@ from openhands.sdk.conversation.state import (
     ConversationExecutionStatus,
     ConversationState,
 )
+from openhands.sdk.event import AgentErrorEvent
 from openhands.sdk.event.conversation_state import ConversationStateUpdateEvent
 from openhands.sdk.event.llm_completion_log import LLMCompletionLogEvent
 from openhands.sdk.security.analyzer import SecurityAnalyzerBase
@@ -44,6 +45,7 @@ class EventService:
     _conversation: LocalConversation | None = field(default=None, init=False)
     _pub_sub: PubSub[Event] = field(default_factory=lambda: PubSub[Event](), init=False)
     _run_task: asyncio.Task | None = field(default=None, init=False)
+    _run_lock: asyncio.Lock = field(default_factory=asyncio.Lock, init=False)
 
     @property
     def conversation_dir(self):
@@ -74,7 +76,8 @@ class EventService:
             raise ValueError("inactive_service")
         return self._conversation
 
-    …
+    def _get_event_sync(self, event_id: str) -> Event | None:
+        """Private sync function to get event with state lock."""
         if not self._conversation:
             raise ValueError("inactive_service")
         with self._conversation._state as state:
@@ -82,7 +85,13 @@ class EventService:
             event = state.events[index]
             return event
 
-    async def …
+    async def get_event(self, event_id: str) -> Event | None:
+        if not self._conversation:
+            raise ValueError("inactive_service")
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, self._get_event_sync, event_id)
+
+    def _search_events_sync(
         self,
         page_id: str | None = None,
         limit: int = 100,
@@ -93,6 +102,7 @@ class EventService:
         timestamp__gte: datetime | None = None,
         timestamp__lt: datetime | None = None,
     ) -> EventPage:
+        """Private sync function to search events with state lock."""
         if not self._conversation:
             raise ValueError("inactive_service")
 
@@ -161,7 +171,34 @@ class EventService:
 
         return EventPage(items=items, next_page_id=next_page_id)
 
-    async def …
+    async def search_events(
+        self,
+        page_id: str | None = None,
+        limit: int = 100,
+        kind: str | None = None,
+        source: str | None = None,
+        body: str | None = None,
+        sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
+        timestamp__gte: datetime | None = None,
+        timestamp__lt: datetime | None = None,
+    ) -> EventPage:
+        if not self._conversation:
+            raise ValueError("inactive_service")
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(
+            None,
+            self._search_events_sync,
+            page_id,
+            limit,
+            kind,
+            source,
+            body,
+            sort_order,
+            timestamp__gte,
+            timestamp__lt,
+        )
+
+    def _count_events_sync(
         self,
         kind: str | None = None,
         source: str | None = None,
@@ -169,7 +206,7 @@ class EventService:
         timestamp__gte: datetime | None = None,
         timestamp__lt: datetime | None = None,
     ) -> int:
-        """…
+        """Private sync function to count events with state lock."""
         if not self._conversation:
             raise ValueError("inactive_service")
 
@@ -210,6 +247,28 @@ class EventService:
 
         return count
 
+    async def count_events(
+        self,
+        kind: str | None = None,
+        source: str | None = None,
+        body: str | None = None,
+        timestamp__gte: datetime | None = None,
+        timestamp__lt: datetime | None = None,
+    ) -> int:
+        """Count events matching the given filters."""
+        if not self._conversation:
+            raise ValueError("inactive_service")
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(
+            None,
+            self._count_events_sync,
+            kind,
+            source,
+            body,
+            timestamp__gte,
+            timestamp__lt,
+        )
+
     def _event_matches_body(self, event: Event, body: str) -> bool:
         """Check if event's message content matches body filter (case-insensitive)."""
         # Import here to avoid circular imports
@@ -350,7 +409,9 @@ class EventService:
         workspace = self.stored.workspace
         assert isinstance(workspace, LocalWorkspace)
         Path(workspace.working_dir).mkdir(parents=True, exist_ok=True)
-        agent = Agent.model_validate(…
+        agent = Agent.model_validate(
+            self.stored.agent.model_dump(context={"expose_secrets": True}),
+        )
 
         conversation = LocalConversation(
             agent=agent,
@@ -379,23 +440,95 @@ class EventService:
         # Setup stats streaming for remote execution
         self._setup_stats_streaming(self._conversation.agent)
 
+        # If the execution_status was "running" while serialized, then the
+        # conversation can't possibly be running - something is wrong
+        state = self._conversation.state
+        if state.execution_status == ConversationExecutionStatus.RUNNING:
+            state.execution_status = ConversationExecutionStatus.ERROR
+            # Add error event for the first unmatched action to inform the agent
+            unmatched_actions = ConversationState.get_unmatched_actions(state.events)
+            if unmatched_actions:
+                first_action = unmatched_actions[0]
+                error_event = AgentErrorEvent(
+                    tool_name=first_action.tool_name,
+                    tool_call_id=first_action.tool_call_id,
+                    error=(
+                        "A restart occurred while this tool was in progress. "
+                        "This may indicate a fatal memory error or system crash. "
+                        "The tool execution was interrupted and did not complete."
+                    ),
+                )
+                self._conversation._on_event(error_event)
+
         # Publish initial state update
         await self._publish_state_update()
 
     async def run(self):
-        """Run the conversation asynchronously.
+        """Run the conversation asynchronously in the background.
+
+        This method starts the conversation run in a background task and returns
+        immediately. The conversation status can be monitored via the
+        GET /api/conversations/{id} endpoint or WebSocket events.
+
+        Raises:
+            ValueError: If the service is inactive or conversation is already running.
+        """
         if not self._conversation:
             raise ValueError("inactive_service")
-        …
+
+        # Use lock to make check-and-set atomic, preventing race conditions
+        async with self._run_lock:
+            # Check if already running
+            with self._conversation._state as state:
+                if state.execution_status == ConversationExecutionStatus.RUNNING:
+                    raise ValueError("conversation_already_running")
+
+            # Check if there's already a running task
+            if self._run_task is not None and not self._run_task.done():
+                raise ValueError("conversation_already_running")
+
+            # Capture conversation reference for the closure
+            conversation = self._conversation
+
+            # Start run in background
+            loop = asyncio.get_running_loop()
+
+            async def _run_and_publish():
+                try:
+                    await loop.run_in_executor(None, conversation.run)
+                except Exception as e:
+                    logger.error(f"Error during conversation run: {e}")
+                finally:
+                    # Clear task reference and publish state update
+                    self._run_task = None
+                    await self._publish_state_update()
+
+            # Create task but don't await it - runs in background
+            self._run_task = asyncio.create_task(_run_and_publish())
 
     async def respond_to_confirmation(self, request: ConfirmationResponseRequest):
         if request.accept:
-            …
+            try:
+                await self.run()
+            except ValueError as e:
+                # Treat "already running" as a no-op success
+                if str(e) == "conversation_already_running":
+                    logger.debug(
+                        "Confirmation accepted but conversation already running"
+                    )
+                else:
+                    raise
         else:
-            await self.…
+            await self.reject_pending_actions(request.reason)
+
+    async def reject_pending_actions(self, reason: str):
+        """Reject all pending actions and publish updated state."""
+        if not self._conversation:
+            raise ValueError("inactive_service")
+        loop = asyncio.get_running_loop()
+        await loop.run_in_executor(
+            None, self._conversation.reject_pending_actions, reason
+        )
 
     async def pause(self):
         if self._conversation:
@@ -474,6 +607,17 @@ class EventService:
         loop = asyncio.get_running_loop()
         return await loop.run_in_executor(None, self._conversation.ask_agent, question)
 
+    async def condense(self) -> None:
+        """Force condensation of the conversation history.
+
+        Delegates to LocalConversation in an executor to avoid blocking the event loop.
+        """
+        if not self._conversation:
+            raise ValueError("inactive_service")
+
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, self._conversation.condense)
+
     async def get_state(self) -> ConversationState:
         if not self._conversation:
             raise ValueError("inactive_service")
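The reworked `run()` combines three ingredients: an `asyncio.Lock` to make the running check atomic, `run_in_executor` to keep the blocking `conversation.run` off the event loop, and `asyncio.create_task` so the caller returns immediately. A minimal sketch of that combination, with a placeholder standing in for the blocking conversation run:

```python
import asyncio
import time


def blocking_work() -> None:
    time.sleep(0.1)  # stand-in for the blocking LocalConversation.run


class BackgroundRunner:
    def __init__(self) -> None:
        self._lock = asyncio.Lock()
        self._task: asyncio.Task | None = None

    async def run(self) -> None:
        async with self._lock:  # atomic check-and-set
            if self._task is not None and not self._task.done():
                raise ValueError("conversation_already_running")
            loop = asyncio.get_running_loop()

            async def _run_and_finish() -> None:
                try:
                    await loop.run_in_executor(None, blocking_work)
                finally:
                    self._task = None  # allow a later run

            self._task = asyncio.create_task(_run_and_finish())


async def main() -> None:
    runner = BackgroundRunner()
    await runner.run()        # returns right away
    await asyncio.sleep(0.2)  # give the background task time to finish


asyncio.run(main())
```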
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/logging_config.py RENAMED
@@ -3,26 +3,7 @@
 import logging
 from typing import Any
 
-from openhands.sdk.logger import (
-    ENV_JSON,
-    ENV_LOG_LEVEL,
-    IN_CI,
-)
-
-
-class UvicornAccessFormatter(logging.Formatter):
-    """Custom formatter for uvicorn access logs."""
-
-    def format(self, record):
-        # Set default values for uvicorn-specific fields if they don't exist
-        if not hasattr(record, "client_addr"):
-            record.client_addr = "-"
-        if not hasattr(record, "request_line"):
-            record.request_line = record.getMessage()
-        if not hasattr(record, "status_code"):
-            record.status_code = "-"
-
-        return super().format(record)
+from openhands.sdk.logger import ENV_LOG_LEVEL
 
 
 def get_uvicorn_logging_config() -> dict[str, Any]:
@@ -47,29 +28,6 @@ def get_uvicorn_logging_config() -> dict[str, Any]:
         "loggers": {},
     }
 
-    # Add formatters based on SDK settings
-    if ENV_JSON or IN_CI:
-        # JSON formatter for access logs
-        config["formatters"]["access"] = {
-            "()": "pythonjsonlogger.json.JsonFormatter",
-            "fmt": "%(asctime)s %(levelname)s %(name)s "
-            "%(client_addr)s %(request_line)s %(status_code)s",
-        }
-    else:
-        # Custom formatter for access logs that handles missing fields
-        config["formatters"]["access"] = {
-            "()": UvicornAccessFormatter,
-            "format": '%(asctime)s - %(name)s - %(levelname)s - "'
-            '"%(client_addr)s - "%(request_line)s" %(status_code)s',
-        }
-
-    # Access handler - always separate for proper formatting
-    config["handlers"]["access"] = {
-        "formatter": "access",
-        "class": "logging.StreamHandler",
-        "stream": "ext://sys.stdout",
-    }
-
     # Configure uvicorn loggers
     config["loggers"] = {
         # Main uvicorn logger - propagate to root
@@ -84,11 +42,11 @@ def get_uvicorn_logging_config() -> dict[str, Any]:
             "level": logging.getLevelName(ENV_LOG_LEVEL),
             "propagate": True,
         },
-        # Access logger - …
+        # Access logger - propagate to root
         "uvicorn.access": {
-            "handlers": […
+            "handlers": [],
             "level": logging.getLevelName(ENV_LOG_LEVEL),
-            "propagate": …
+            "propagate": True,
        },
    }
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/models.py RENAMED
@@ -1,19 +1,19 @@
 from abc import ABC
 from datetime import datetime
 from enum import Enum
-from typing import Literal
+from typing import Any, Literal
 from uuid import uuid4
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, field_validator
 
 from openhands.agent_server.utils import OpenHandsUUID, utc_now
 from openhands.sdk import LLM, AgentBase, Event, ImageContent, Message, TextContent
-from openhands.sdk.conversation.secret_source import SecretSource
 from openhands.sdk.conversation.state import (
     ConversationExecutionStatus,
     ConversationState,
 )
 from openhands.sdk.llm.utils.metrics import MetricsSnapshot
+from openhands.sdk.secret import SecretSource
 from openhands.sdk.security.analyzer import SecurityAnalyzerBase
 from openhands.sdk.security.confirmation_policy import (
     ConfirmationPolicyBase,
@@ -98,6 +98,14 @@ class StartConversationRequest(BaseModel):
         default_factory=dict,
         description="Secrets available in the conversation",
     )
+    tool_module_qualnames: dict[str, str] = Field(
+        default_factory=dict,
+        description=(
+            "Mapping of tool names to their module qualnames from the client's "
+            "registry. These modules will be dynamically imported on the server "
+            "to register the tools for this conversation."
+        ),
+    )
 
 
 class StoredConversation(StartConversationRequest):
@@ -159,6 +167,43 @@ class UpdateSecretsRequest(BaseModel):
         description="Dictionary mapping secret keys to values"
     )
 
+    @field_validator("secrets", mode="before")
+    @classmethod
+    def convert_string_secrets(cls, v: dict[str, Any]) -> dict[str, Any]:
+        """Convert plain string secrets to StaticSecret objects.
+
+        This validator enables backward compatibility by automatically converting:
+        - Plain strings: "secret-value" → StaticSecret(value=SecretStr("secret-value"))
+        - Dict with value field: {"value": "secret-value"} → StaticSecret dict format
+        - Proper SecretSource objects: passed through unchanged
+        """
+        if not isinstance(v, dict):
+            return v
+
+        converted = {}
+        for key, value in v.items():
+            if isinstance(value, str):
+                # Convert plain string to StaticSecret dict format
+                converted[key] = {
+                    "kind": "StaticSecret",
+                    "value": value,
+                }
+            elif isinstance(value, dict):
+                if "value" in value and "kind" not in value:
+                    # Convert dict with value field to StaticSecret dict format
+                    converted[key] = {
+                        "kind": "StaticSecret",
+                        "value": value["value"],
+                    }
+                else:
+                    # Keep existing SecretSource objects or properly formatted dicts
+                    converted[key] = value
+            else:
+                # Keep other types as-is (will likely fail validation later)
+                converted[key] = value
+
+        return converted
+
 
 class SetConfirmationPolicyRequest(BaseModel):
     """Payload to set confirmation policy for a conversation."""
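The `convert_string_secrets` validator lets clients keep sending plain strings or `{"value": ...}` dicts while newer SecretSource payloads pass through untouched. A standalone sketch of just the normalization step (the actual SecretSource validation happens later in the SDK and is omitted here):

```python
def normalize_secrets(secrets: dict) -> dict:
    converted = {}
    for key, value in secrets.items():
        if isinstance(value, str):
            converted[key] = {"kind": "StaticSecret", "value": value}
        elif isinstance(value, dict) and "value" in value and "kind" not in value:
            converted[key] = {"kind": "StaticSecret", "value": value["value"]}
        else:
            converted[key] = value  # already a SecretSource-shaped payload
    return converted


print(normalize_secrets({"API_KEY": "plain-string"}))
# {'API_KEY': {'kind': 'StaticSecret', 'value': 'plain-string'}}
print(normalize_secrets({"TOKEN": {"value": "t0ken"}}))
# {'TOKEN': {'kind': 'StaticSecret', 'value': 't0ken'}}
```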
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/sockets.py RENAMED
@@ -26,6 +26,7 @@ from openhands.agent_server.conversation_service import (
 from openhands.agent_server.models import BashEventBase, ExecuteBashRequest
 from openhands.agent_server.pub_sub import Subscriber
 from openhands.sdk import Event, Message
+from openhands.sdk.utils.paging import page_iterator
 
 
 sockets_router = APIRouter(prefix="/sockets", tags=["WebSockets"])
@@ -65,14 +66,8 @@ async def events_socket(
         # Resend all existing events if requested
         if resend_all:
             logger.info(f"Resending events: {conversation_id}")
-            …
-            …
-            page = await event_service.search_events(page_id=page_id)
-            for event in page.items:
-                await _send_event(event, websocket)
-            page_id = page.next_page_id
-            if not page_id:
-                break
+            async for event in page_iterator(event_service.search_events):
+                await _send_event(event, websocket)
 
         # Listen for messages over the socket
         while True:
@@ -118,14 +113,8 @@ async def bash_events_socket(
         # Resend all existing events if requested
         if resend_all:
             logger.info("Resending bash events")
-            …
-            …
-            page = await bash_event_service.search_bash_events(page_id=page_id)
-            for event in page.items:
-                await _send_bash_event(event, websocket)
-            page_id = page.next_page_id
-            if not page_id:
-                break
+            async for event in page_iterator(bash_event_service.search_bash_events):
+                await _send_bash_event(event, websocket)
 
         while True:
             try:
@@ -150,7 +139,7 @@ async def bash_events_socket(
 
 async def _send_event(event: Event, websocket: WebSocket):
     try:
-        dumped = event.model_dump()
+        dumped = event.model_dump(mode="json")
         await websocket.send_json(dumped)
     except Exception:
         logger.exception("error_sending_event:{event}", stack_info=True)
@@ -168,7 +157,7 @@ class _WebSocketSubscriber(Subscriber):
 
 async def _send_bash_event(event: BashEventBase, websocket: WebSocket):
     try:
-        dumped = event.model_dump()
+        dumped = event.model_dump(mode="json")
         await websocket.send_json(dumped)
     except Exception:
         logger.exception("error_sending_event:{event}", stack_info=True)
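Both socket handlers now drain history through `page_iterator` instead of hand-rolled pagination. The helper is assumed to behave roughly like the sketch below: call the async search function page by page, yield each item, and stop when there is no `next_page_id`.

```python
from dataclasses import dataclass


@dataclass
class Page:
    items: list
    next_page_id: str | None


async def iterate_pages(search):
    """Rough stand-in for openhands.sdk.utils.paging.page_iterator."""
    page_id = None
    while True:
        page = await search(page_id=page_id)
        for item in page.items:
            yield item
        page_id = page.next_page_id
        if not page_id:
            break
```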
openhands_agent_server-1.8.1/openhands/agent_server/tool_preload_service.py ADDED
@@ -0,0 +1,76 @@
+"""Service which preloads chromium."""
+
+from __future__ import annotations
+
+import sys
+
+from openhands.agent_server.config import get_default_config
+from openhands.sdk.logger import get_logger
+from openhands.sdk.tool.schema import Action
+from openhands.sdk.tool.tool import create_action_type_with_risk
+from openhands.sdk.utils.models import get_known_concrete_subclasses
+
+
+_logger = get_logger(__name__)
+
+
+class ToolPreloadService:
+    """Service which preloads tools / chromium, reducing time to
+    start the first conversation."""
+
+    running: bool = False
+
+    async def start(self) -> bool:
+        """Preload tools"""
+
+        # Skip if already running
+        if self.running:
+            return True
+
+        self.running = True
+        try:
+            if sys.platform == "win32":
+                from openhands.tools.browser_use.impl_windows import (
+                    WindowsBrowserToolExecutor as BrowserToolExecutor,
+                )
+            else:
+                from openhands.tools.browser_use.impl import BrowserToolExecutor
+
+            # Creating an instance here to preload chromium
+            BrowserToolExecutor()
+
+            # Pre-creating all these classes prevents processing which costs
+            # significant time per tool on the first conversation invocation.
+            for action_type in get_known_concrete_subclasses(Action):
+                create_action_type_with_risk(action_type)
+
+            _logger.debug(f"Loaded {BrowserToolExecutor}")
+            return True
+        except Exception:
+            _logger.exception("Error preloading chromium")
+            return False
+
+    async def stop(self) -> None:
+        """Stop the tool preload process."""
+        self.running = False
+
+    def is_running(self) -> bool:
+        """Check if tool preload is running."""
+        return self.running
+
+
+_tool_preload_service: ToolPreloadService | None = None
+
+
+def get_tool_preload_service() -> ToolPreloadService | None:
+    """Get the tool preload service instance if preload is enabled."""
+    global _tool_preload_service
+    config = get_default_config()
+
+    if not config.preload_tools:
+        _logger.info("Tool preload is disabled in configuration")
+        return None
+
+    if _tool_preload_service is None:
+        _tool_preload_service = ToolPreloadService()
+    return _tool_preload_service
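The accessor keeps a single preload service per process and returns None when `preload_tools` is off, which is what the lifespan code checks before logging "Tool preload service is disabled". A stripped-down sketch of that lazy-singleton pattern (the class and flag below are placeholders, not the real service):

```python
class PreloadService:
    running: bool = False

    async def start(self) -> bool:
        self.running = True
        return True


_service: "PreloadService | None" = None


def get_service(preload_enabled: bool = True) -> "PreloadService | None":
    """Return the shared instance, or None when preloading is disabled."""
    global _service
    if not preload_enabled:
        return None
    if _service is None:
        _service = PreloadService()
    return _service
```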
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/tool_router.py RENAMED
@@ -4,11 +4,13 @@ from fastapi import APIRouter
 
 from openhands.sdk.tool.registry import list_registered_tools
 from openhands.tools.preset.default import register_default_tools
+from openhands.tools.preset.gemini import register_gemini_tools
 from openhands.tools.preset.planning import register_planning_tools
 
 
 tool_router = APIRouter(prefix="/tools", tags=["Tools"])
 register_default_tools(enable_browser=True)
+register_gemini_tools(enable_browser=True)
 register_planning_tools()
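Registering a preset is an import-plus-call affair, and the tool router then serves whatever was registered at import time. The snippet below reuses exactly the calls from the diff; it requires the openhands SDK and tools packages to be installed, and the returned list depends on which presets are available in that environment:

```python
from openhands.sdk.tool.registry import list_registered_tools
from openhands.tools.preset.default import register_default_tools
from openhands.tools.preset.gemini import register_gemini_tools
from openhands.tools.preset.planning import register_planning_tools

register_default_tools(enable_browser=True)
register_gemini_tools(enable_browser=True)
register_planning_tools()

print(list_registered_tools())  # names served by the /tools router
```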
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/PKG-INFO RENAMED
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: openhands-agent-server
-Version: 1.3.0
+Version: 1.8.1
 Summary: OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent
 Requires-Python: >=3.12
 Requires-Dist: aiosqlite>=0.19
 Requires-Dist: alembic>=1.13
 Requires-Dist: docker<8,>=7.1
 Requires-Dist: fastapi>=0.104
+Requires-Dist: openhands-sdk
 Requires-Dist: pydantic>=2
 Requires-Dist: sqlalchemy>=2
 Requires-Dist: uvicorn>=0.31.1
|
|
|
23
23
|
./openhands/agent_server/py.typed
|
|
24
24
|
./openhands/agent_server/server_details_router.py
|
|
25
25
|
./openhands/agent_server/sockets.py
|
|
26
|
+
./openhands/agent_server/tool_preload_service.py
|
|
26
27
|
./openhands/agent_server/tool_router.py
|
|
27
28
|
./openhands/agent_server/utils.py
|
|
28
29
|
./openhands/agent_server/vscode_router.py
|
|
@@ -56,6 +57,7 @@ openhands/agent_server/pub_sub.py
|
|
|
56
57
|
openhands/agent_server/py.typed
|
|
57
58
|
openhands/agent_server/server_details_router.py
|
|
58
59
|
openhands/agent_server/sockets.py
|
|
60
|
+
openhands/agent_server/tool_preload_service.py
|
|
59
61
|
openhands/agent_server/tool_router.py
|
|
60
62
|
openhands/agent_server/utils.py
|
|
61
63
|
openhands/agent_server/vscode_router.py
|
|
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/pyproject.toml RENAMED
@@ -1,6 +1,6 @@
 [project]
 name = "openhands-agent-server"
-version = "1.3.0"
+version = "1.8.1"
 description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent"
 
 requires-python = ">=3.12"
@@ -9,6 +9,7 @@ dependencies = [
     "alembic>=1.13",
     "docker>=7.1,<8",
     "fastapi>=0.104",
+    "openhands-sdk",
     "pydantic>=2",
     "sqlalchemy>=2",
     "uvicorn>=0.31.1",
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/__init__.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/__main__.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/bash_router.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/bash_service.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/dependencies.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/desktop_router.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/desktop_service.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/docker/wallpaper.svg RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/event_router.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/file_router.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/git_router.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/middleware.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/openapi.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/pub_sub.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/py.typed RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/server_details_router.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/utils.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_extensions/openhands-settings/extension.js RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_extensions/openhands-settings/package.json RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_router.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands/agent_server/vscode_service.py RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/dependency_links.txt RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/entry_points.txt RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/openhands_agent_server.egg-info/top_level.txt RENAMED - File without changes
{openhands_agent_server-1.3.0 → openhands_agent_server-1.8.1}/setup.cfg RENAMED - File without changes