a2a-adapter 0.1.4__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- a2a_adapter/__init__.py +1 -1
- a2a_adapter/client.py +57 -13
- a2a_adapter/integrations/__init__.py +5 -0
- a2a_adapter/integrations/openclaw.py +1297 -0
- a2a_adapter/loader.py +31 -1
- {a2a_adapter-0.1.4.dist-info → a2a_adapter-0.1.5.dist-info}/METADATA +39 -12
- a2a_adapter-0.1.5.dist-info/RECORD +16 -0
- {a2a_adapter-0.1.4.dist-info → a2a_adapter-0.1.5.dist-info}/WHEEL +1 -1
- a2a_adapter-0.1.4.dist-info/RECORD +0 -15
- {a2a_adapter-0.1.4.dist-info → a2a_adapter-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {a2a_adapter-0.1.4.dist-info → a2a_adapter-0.1.5.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1297 @@
|
|
|
1
|
+
"""
|
|
2
|
+
OpenClaw adapter for A2A Protocol.
|
|
3
|
+
|
|
4
|
+
This adapter enables OpenClaw agents to be exposed as A2A-compliant agents
|
|
5
|
+
by wrapping the OpenClaw CLI as a subprocess.
|
|
6
|
+
|
|
7
|
+
Supports two modes:
|
|
8
|
+
- Synchronous (async_mode=False): Blocks until command completes, returns Message
|
|
9
|
+
- Async Task Mode (default): Returns Task immediately, processes in background, supports polling
|
|
10
|
+
|
|
11
|
+
Push Notifications (A2A-compliant):
|
|
12
|
+
- When push_notification_config is provided in MessageSendParams, the adapter will
|
|
13
|
+
POST task updates to the configured webhook URL using StreamResponse format
|
|
14
|
+
- Payload contains full Task object (including artifacts) per A2A spec section 4.3.3
|
|
15
|
+
- Supports Bearer token authentication for webhook calls
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
import asyncio
|
|
19
|
+
from asyncio.subprocess import PIPE, Process as AsyncProcess
|
|
20
|
+
import json
|
|
21
|
+
import logging
|
|
22
|
+
import re
|
|
23
|
+
import time
|
|
24
|
+
import uuid
|
|
25
|
+
from datetime import datetime, timezone
|
|
26
|
+
from typing import Any, Dict
|
|
27
|
+
|
|
28
|
+
import httpx
|
|
29
|
+
|
|
30
|
+
from a2a.types import (
|
|
31
|
+
Artifact,
|
|
32
|
+
FilePart,
|
|
33
|
+
FileWithUri,
|
|
34
|
+
Message,
|
|
35
|
+
MessageSendParams,
|
|
36
|
+
Part,
|
|
37
|
+
PushNotificationConfig,
|
|
38
|
+
Role,
|
|
39
|
+
Task,
|
|
40
|
+
TaskState,
|
|
41
|
+
TaskStatus,
|
|
42
|
+
TextPart,
|
|
43
|
+
)
|
|
44
|
+
|
|
45
|
+
from ..adapter import BaseAgentAdapter
|
|
46
|
+
|
|
47
|
+
# Lazy import for TaskStore to avoid hard dependency
# (_HAS_TASK_STORE is checked in __init__ before enabling async task mode)
try:
    from a2a.server.tasks import InMemoryTaskStore, TaskStore

    _HAS_TASK_STORE = True
except ImportError:
    _HAS_TASK_STORE = False
    TaskStore = None  # type: ignore
    InMemoryTaskStore = None  # type: ignore

# Module-level logger; callers configure handlers/levels externally.
logger = logging.getLogger(__name__)

# Valid thinking levels for OpenClaw (validated in __init__)
VALID_THINKING_LEVELS = {"off", "minimal", "low", "medium", "high", "xhigh"}

# Regex for sanitizing session IDs (matches OpenClaw's VALID_ID_RE pattern)
# OpenClaw session IDs must be alphanumeric with underscores/hyphens, max 64 chars
# Used by _context_id_to_session_id() to map A2A context ids to session ids.
_INVALID_SESSION_CHARS_RE = re.compile(r"[^a-z0-9_-]+")
_LEADING_TRAILING_DASH_RE = re.compile(r"^-+|-+$")
_SESSION_ID_MAX_LEN = 64
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class OpenClawAgentAdapter(BaseAgentAdapter):
|
|
70
|
+
"""
|
|
71
|
+
Adapter for integrating OpenClaw agents as A2A agents.
|
|
72
|
+
|
|
73
|
+
This adapter wraps the OpenClaw CLI (`openclaw agent --local --json`) as a
|
|
74
|
+
subprocess and translates between A2A protocol messages and OpenClaw's
|
|
75
|
+
JSON output format.
|
|
76
|
+
|
|
77
|
+
Supports two execution modes:
|
|
78
|
+
|
|
79
|
+
1. **Async Task Mode** (default, async_mode=True):
|
|
80
|
+
- Returns a Task with state="working" immediately
|
|
81
|
+
- Processes the command in the background
|
|
82
|
+
- Clients can poll get_task() for status updates
|
|
83
|
+
- Best for typical OpenClaw operations (can take minutes)
|
|
84
|
+
- Tasks time out after timeout seconds (default: 300)
|
|
85
|
+
|
|
86
|
+
2. **Synchronous Mode** (async_mode=False):
|
|
87
|
+
- Blocks until the OpenClaw command completes
|
|
88
|
+
- Returns a Message with the response
|
|
89
|
+
- Best for quick operations or testing
|
|
90
|
+
|
|
91
|
+
**Requirements**:
|
|
92
|
+
- OpenClaw CLI must be installed and in PATH (or provide custom path)
|
|
93
|
+
- OpenClaw must be configured with API keys (ANTHROPIC_API_KEY, etc.)
|
|
94
|
+
- Valid OpenClaw configuration at ~/.openclaw/config.yaml
|
|
95
|
+
|
|
96
|
+
**Memory Considerations (Async Mode)**:
|
|
97
|
+
|
|
98
|
+
When using InMemoryTaskStore (the default), completed tasks are automatically
|
|
99
|
+
cleaned up after `task_ttl_seconds` (default: 1 hour). You can also:
|
|
100
|
+
|
|
101
|
+
1. Call delete_task() after retrieving completed tasks to free memory immediately
|
|
102
|
+
2. Use DatabaseTaskStore for persistent storage with external cleanup
|
|
103
|
+
3. Set task_ttl_seconds=None to disable auto-cleanup (manual cleanup only)
|
|
104
|
+
|
|
105
|
+
Example:
|
|
106
|
+
>>> adapter = OpenClawAgentAdapter(
|
|
107
|
+
... session_id="my-session",
|
|
108
|
+
... agent_id="main",
|
|
109
|
+
... thinking="low",
|
|
110
|
+
... )
|
|
111
|
+
>>> task = await adapter.handle(params) # Returns Task immediately
|
|
112
|
+
>>> # Poll for completion
|
|
113
|
+
>>> completed = await adapter.get_task(task.id)
|
|
114
|
+
"""
|
|
115
|
+
|
|
116
|
+
def __init__(
    self,
    session_id: str | None = None,
    agent_id: str | None = None,
    thinking: str = "low",
    timeout: int = 600,
    openclaw_path: str = "openclaw",
    working_directory: str | None = None,
    env_vars: Dict[str, str] | None = None,
    async_mode: bool = True,
    task_store: "TaskStore | None" = None,
    task_ttl_seconds: int | None = 3600,
    cleanup_interval_seconds: int = 300,
):
    """
    Initialize the OpenClaw adapter.

    Args:
        session_id: Session ID for conversation continuity. If not provided,
                    auto-generates a unique session ID.
        agent_id: OpenClaw agent ID (from `openclaw agents list`). If not
                  provided, uses the default agent.
        thinking: Thinking level for the agent. Valid values:
                  off, minimal, low, medium, high, xhigh. Default: "low".
        timeout: Command timeout in seconds (default: 600).
        openclaw_path: Path to the openclaw binary (default: "openclaw").
        working_directory: Working directory for the subprocess. If not
                           provided, uses the current directory.
        env_vars: Additional environment variables to pass to the subprocess.
        async_mode: If True (default), return Task immediately and process
                    in background. If False, block until command completes.
        task_store: Optional TaskStore for persisting task state. If not
                    provided and async_mode is True, uses InMemoryTaskStore.
        task_ttl_seconds: Time-to-live for completed tasks in seconds. After
                          this duration, completed/failed/canceled tasks are
                          automatically deleted. Set to None to disable
                          auto-cleanup. Default: 3600 (1 hour).
        cleanup_interval_seconds: How often to run the cleanup routine in
                                  seconds. Default: 300 (5 minutes).

    Raises:
        ValueError: If `thinking` is not one of VALID_THINKING_LEVELS.
        ImportError: If async_mode is True but the A2A SDK task support
                     (TaskStore) is not importable.
    """
    # Validate thinking level up front so misconfiguration fails fast.
    if thinking not in VALID_THINKING_LEVELS:
        raise ValueError(
            f"Invalid thinking level: {thinking}. "
            f"Valid values: {', '.join(sorted(VALID_THINKING_LEVELS))}"
        )

    # Generate session ID if not provided (namespaced with 'a2a-')
    self.session_id = session_id or f"a2a-{uuid.uuid4().hex[:12]}"
    self.agent_id = agent_id
    self.thinking = thinking
    self.timeout = timeout
    self.openclaw_path = openclaw_path
    self.working_directory = working_directory
    # Copy to avoid sharing the caller's mutable dict.
    self.env_vars = dict(env_vars) if env_vars else {}

    # Async task mode configuration
    self.async_mode = async_mode
    # task_id -> background coroutine executing the command
    self._background_tasks: Dict[str, asyncio.Task[None]] = {}
    # task_id -> live subprocess (tracked so cancellation can kill it)
    self._background_processes: Dict[str, AsyncProcess] = {}
    # task_ids explicitly cancelled; checked to avoid overwriting state
    self._cancelled_tasks: set[str] = set()

    # Push notification configuration per task
    self._push_configs: Dict[str, PushNotificationConfig] = {}
    self._http_client: httpx.AsyncClient | None = None

    # TTL-based cleanup configuration
    self._task_ttl = task_ttl_seconds
    self._cleanup_interval = cleanup_interval_seconds
    self._task_completion_times: Dict[str, float] = {}  # task_id -> completion timestamp
    self._cleanup_task: asyncio.Task[None] | None = None

    # Initialize task store for async mode
    if async_mode:
        if not _HAS_TASK_STORE:
            raise ImportError(
                "Async task mode requires the A2A SDK with task support. "
                "Install with: pip install a2a-sdk"
            )
        self.task_store: "TaskStore" = task_store or InMemoryTaskStore()
        # Note: cleanup task is started lazily on first handle() call
        # to avoid requiring a running event loop at init time
    else:
        # Sync mode: store may legitimately be None (no task persistence).
        self.task_store = task_store  # type: ignore
|
|
200
|
+
|
|
201
|
+
async def handle(self, params: MessageSendParams) -> Message | Task:
|
|
202
|
+
"""
|
|
203
|
+
Handle a non-streaming A2A message request.
|
|
204
|
+
|
|
205
|
+
In async mode (default): Returns Task immediately, processes in background.
|
|
206
|
+
In sync mode: Blocks until command completes, returns Message.
|
|
207
|
+
"""
|
|
208
|
+
if self.async_mode:
|
|
209
|
+
return await self._handle_async(params)
|
|
210
|
+
else:
|
|
211
|
+
return await self._handle_sync(params)
|
|
212
|
+
|
|
213
|
+
async def _handle_sync(self, params: MessageSendParams) -> Message:
    """
    Run the OpenClaw command to completion and return a Message.

    Blocks for the full duration of the subprocess. If the conversion
    pipeline yields a Task instead of a Message, the task's status
    message is surfaced; failing that, a generic completion Message is
    synthesized so callers always receive a Message.
    """
    payload = await self.to_framework(params)
    raw_output = await self.call_framework(payload, params)
    result = await self.from_framework(raw_output, params)

    if not isinstance(result, Task):
        return result

    # A Task came back in sync mode: prefer its embedded status message.
    status = result.status
    if status and status.message:
        return status.message

    # Last resort: wrap the completion in a minimal agent Message.
    return Message(
        role=Role.agent,
        message_id=str(uuid.uuid4()),
        context_id=result.context_id,
        parts=[Part(root=TextPart(text="Task completed"))],
    )
|
|
231
|
+
|
|
232
|
+
async def _handle_async(self, params: MessageSendParams) -> Task:
    """
    Handle request asynchronously - returns Task immediately, processes in background.

    1. Creates a Task with state="working"
    2. Saves the task to the TaskStore
    3. Stores push notification config if provided
    4. Starts a background coroutine to execute the command
    5. Returns the Task immediately

    Args:
        params: A2A message parameters; `params.message` (if present) seeds
                the task history, and `params.configuration` may carry a
                push notification webhook config.

    Returns:
        The newly created Task in "working" state.
    """
    # Start cleanup loop lazily (requires running event loop)
    self._ensure_cleanup_task_started()

    # Generate IDs; reuse the caller's context_id when the message has one
    task_id = str(uuid.uuid4())
    context_id = self._extract_context_id(params) or str(uuid.uuid4())

    # Extract the initial message for history
    initial_message = None
    if hasattr(params, "message") and params.message:
        initial_message = params.message

    # Extract and store push notification config if provided
    # (delivered later by _send_push_notification on completion/failure)
    push_config = getattr(params, "configuration", None)
    if push_config and hasattr(push_config, "push_notification_config"):
        pn_config = push_config.push_notification_config
        if pn_config and pn_config.url:
            self._push_configs[task_id] = pn_config
            logger.debug("Stored push notification config for task %s: %s", task_id, pn_config.url)

    # Create initial task with "working" state
    now = datetime.now(timezone.utc).isoformat()
    task = Task(
        id=task_id,
        context_id=context_id,
        status=TaskStatus(
            state=TaskState.working,
            timestamp=now,
        ),
        history=[initial_message] if initial_message else None,
    )

    # Save initial task state so clients can poll immediately
    await self.task_store.save(task)
    logger.debug("Created async task %s with state=working", task_id)

    # Start background processing with timeout
    bg_task = asyncio.create_task(
        self._execute_command_with_timeout(task_id, context_id, params)
    )
    self._background_tasks[task_id] = bg_task

    # Clean up background task reference when done and handle exceptions
    def _on_task_done(t: asyncio.Task[None]) -> None:
        # Drop all per-task bookkeeping; the TaskStore keeps the result.
        self._background_tasks.pop(task_id, None)
        self._background_processes.pop(task_id, None)
        self._cancelled_tasks.discard(task_id)
        # Clean up push config after task completes
        self._push_configs.pop(task_id, None)
        # Check for unhandled exceptions (shouldn't happen, but log if they do)
        if not t.cancelled():
            exc = t.exception()
            if exc:
                logger.error(
                    "Unhandled exception in background task %s: %s",
                    task_id,
                    exc,
                )

    bg_task.add_done_callback(_on_task_done)

    # Return immediately; callers poll get_task() for progress.
    return task
|
|
304
|
+
|
|
305
|
+
# ---------- TTL-based Cleanup ----------
|
|
306
|
+
|
|
307
|
+
def _ensure_cleanup_task_started(self) -> None:
|
|
308
|
+
"""Start the cleanup task if TTL is enabled and not already running."""
|
|
309
|
+
if (
|
|
310
|
+
self.async_mode
|
|
311
|
+
and self._task_ttl is not None
|
|
312
|
+
and self._task_ttl > 0
|
|
313
|
+
and self._cleanup_task is None
|
|
314
|
+
):
|
|
315
|
+
self._cleanup_task = asyncio.create_task(self._cleanup_loop())
|
|
316
|
+
|
|
317
|
+
async def _cleanup_loop(self) -> None:
    """
    Background sweeper that periodically evicts expired tasks.

    Sleeps for `cleanup_interval_seconds` between sweeps and then removes
    tasks that have sat in a terminal state longer than `task_ttl_seconds`.
    Ordinary errors are logged and swallowed so one bad sweep cannot kill
    the loop; cancellation terminates it cleanly.
    """
    while True:
        try:
            await asyncio.sleep(self._cleanup_interval)
            await self._cleanup_expired_tasks()
        except asyncio.CancelledError:
            logger.debug("Cleanup loop cancelled")
            return
        except Exception as err:
            # A failed sweep must not crash the loop; record and continue.
            logger.error("Error in cleanup loop: %s", err)
|
|
334
|
+
|
|
335
|
+
async def _cleanup_expired_tasks(self) -> None:
|
|
336
|
+
"""Remove tasks that have exceeded their TTL."""
|
|
337
|
+
if self._task_ttl is None:
|
|
338
|
+
return
|
|
339
|
+
|
|
340
|
+
now = time.time()
|
|
341
|
+
expired_task_ids = [
|
|
342
|
+
task_id
|
|
343
|
+
for task_id, completion_time in list(self._task_completion_times.items())
|
|
344
|
+
if now - completion_time > self._task_ttl
|
|
345
|
+
]
|
|
346
|
+
|
|
347
|
+
if not expired_task_ids:
|
|
348
|
+
return
|
|
349
|
+
|
|
350
|
+
deleted_count = 0
|
|
351
|
+
for task_id in expired_task_ids:
|
|
352
|
+
try:
|
|
353
|
+
await self.task_store.delete(task_id)
|
|
354
|
+
self._task_completion_times.pop(task_id, None)
|
|
355
|
+
deleted_count += 1
|
|
356
|
+
logger.debug("Auto-deleted expired task %s", task_id)
|
|
357
|
+
except Exception as e:
|
|
358
|
+
# Task may already be deleted or store may have issues
|
|
359
|
+
logger.debug("Failed to delete expired task %s: %s", task_id, e)
|
|
360
|
+
# Still remove from tracking to avoid repeated attempts
|
|
361
|
+
self._task_completion_times.pop(task_id, None)
|
|
362
|
+
|
|
363
|
+
if deleted_count > 0:
|
|
364
|
+
logger.info(
|
|
365
|
+
"Task cleanup: removed %d expired tasks, %d remaining",
|
|
366
|
+
deleted_count,
|
|
367
|
+
len(self._task_completion_times),
|
|
368
|
+
)
|
|
369
|
+
|
|
370
|
+
def _record_task_completion(self, task_id: str) -> None:
|
|
371
|
+
"""Record the completion time of a task for TTL tracking."""
|
|
372
|
+
if self._task_ttl is not None:
|
|
373
|
+
self._task_completion_times[task_id] = time.time()
|
|
374
|
+
|
|
375
|
+
async def _execute_command_with_timeout(
    self,
    task_id: str,
    context_id: str,
    params: MessageSendParams,
) -> None:
    """
    Execute the command with a timeout wrapper.

    This ensures that long-running commands don't hang indefinitely.
    On timeout, the subprocess (if still running) is killed and reaped,
    the task is marked failed in the store (unless it was explicitly
    cancelled), and a push notification is sent if configured.

    Args:
        task_id: ID of the task being processed.
        context_id: A2A context the task belongs to.
        params: Original request parameters, forwarded to the executor.
    """
    try:
        await asyncio.wait_for(
            self._execute_command_background(task_id, context_id, params),
            timeout=self.timeout,
        )
    except asyncio.TimeoutError:
        # Kill the subprocess if still running to prevent zombie processes
        proc = self._background_processes.get(task_id)
        if proc and proc.returncode is None:
            logger.debug("Killing subprocess for task %s due to timeout", task_id)
            proc.kill()
            try:
                await proc.wait()  # Reap the zombie process
            except Exception:
                pass  # Best effort cleanup

        # Check if task was cancelled (don't overwrite canceled state)
        if task_id in self._cancelled_tasks:
            logger.debug("Task %s was cancelled, not marking as failed", task_id)
            return

        logger.error("Task %s timed out after %s seconds", task_id, self.timeout)
        now = datetime.now(timezone.utc).isoformat()
        # Build a human-readable failure message for the task status.
        error_message = Message(
            role=Role.agent,
            message_id=str(uuid.uuid4()),
            context_id=context_id,
            parts=[
                Part(
                    root=TextPart(
                        text=f"OpenClaw command timed out after {self.timeout} seconds"
                    )
                )
            ],
        )

        timeout_task = Task(
            id=task_id,
            context_id=context_id,
            status=TaskStatus(
                state=TaskState.failed,
                message=error_message,
                timestamp=now,
            ),
        )
        # Persist the failed state so pollers see the timeout.
        await self.task_store.save(timeout_task)

        # Record completion time for TTL cleanup
        self._record_task_completion(task_id)

        # Send push notification for timeout failure
        await self._send_push_notification(task_id, timeout_task)
|
|
438
|
+
|
|
439
|
+
async def _execute_command_background(
    self,
    task_id: str,
    context_id: str,
    params: MessageSendParams,
) -> None:
    """
    Execute the OpenClaw command in the background and update task state.

    This runs as a background coroutine after the initial Task is returned.
    Sends push notifications on completion/failure if configured.

    Args:
        task_id: ID of the task being processed.
        context_id: A2A context the task belongs to.
        params: Original request parameters (source of the user message).

    Note:
        `_create_response_message`, `_create_response_artifact` and
        `_send_push_notification` are defined elsewhere in this module.
    """
    try:
        logger.debug("Starting background execution for task %s", task_id)

        # Execute the command (this may take a while)
        framework_input = await self.to_framework(params)
        framework_output = await self._call_framework_with_tracking(
            framework_input, params, task_id
        )

        # Check if task was cancelled during execution
        if task_id in self._cancelled_tasks:
            logger.debug(
                "Task %s was cancelled during execution, not updating state", task_id
            )
            return

        # Convert to message for history
        response_message = self._create_response_message(framework_output, context_id)

        # Build history (Messages for conversation tracking):
        # original user message (if any) followed by the agent's reply.
        history = []
        if hasattr(params, "message") and params.message:
            history.append(params.message)
        history.append(response_message)

        # Create artifact for the response (A2A spec: task outputs go in artifacts)
        response_artifact = self._create_response_artifact(framework_output)

        # Update task to completed state
        now = datetime.now(timezone.utc).isoformat()
        completed_task = Task(
            id=task_id,
            context_id=context_id,
            status=TaskStatus(
                state=TaskState.completed,
                timestamp=now,
            ),
            artifacts=[response_artifact],  # A2A-compliant: outputs in artifacts
            history=history,
        )

        await self.task_store.save(completed_task)
        logger.debug("Task %s completed successfully", task_id)

        # Record completion time for TTL cleanup
        self._record_task_completion(task_id)

        # Send push notification for completion
        await self._send_push_notification(task_id, completed_task)

    except asyncio.CancelledError:
        # Task was cancelled - don't update state, cancel_task() handles it
        logger.debug("Task %s was cancelled", task_id)
        raise  # Re-raise to properly cancel the task

    except Exception as e:
        # Check if task was cancelled (don't overwrite canceled state)
        if task_id in self._cancelled_tasks:
            logger.debug("Task %s was cancelled, not marking as failed", task_id)
            return

        # Update task to failed state
        logger.error("Task %s failed: %s", task_id, e)
        now = datetime.now(timezone.utc).isoformat()
        error_message = Message(
            role=Role.agent,
            message_id=str(uuid.uuid4()),
            context_id=context_id,
            parts=[Part(root=TextPart(text=f"OpenClaw command failed: {str(e)}"))],
        )

        failed_task = Task(
            id=task_id,
            context_id=context_id,
            status=TaskStatus(
                state=TaskState.failed,
                message=error_message,
                timestamp=now,
            ),
        )

        await self.task_store.save(failed_task)

        # Record completion time for TTL cleanup
        self._record_task_completion(task_id)

        # Send push notification for failure
        await self._send_push_notification(task_id, failed_task)
|
|
539
|
+
|
|
540
|
+
async def _call_framework_with_tracking(
    self,
    framework_input: Dict[str, Any],
    params: MessageSendParams,
    task_id: str,
) -> Dict[str, Any]:
    """
    Execute the OpenClaw command with process tracking for cancellation.

    This is similar to call_framework but tracks the subprocess for
    cancellation support.

    Args:
        framework_input: Command inputs produced by to_framework().
        params: Original A2A parameters (unused here; kept for parity
                with call_framework).
        task_id: Task whose subprocess is being tracked.

    Returns:
        Parsed JSON output from OpenClaw.

    Raises:
        RuntimeError: On non-zero exit, empty output, or unparseable JSON.
    """
    cmd = self._build_command(framework_input)
    logger.debug("Executing OpenClaw command: %s", " ".join(cmd))

    # Prepare environment: inherit the parent env, overlay adapter-specific vars
    import os

    env = os.environ.copy()
    env.update(self.env_vars)

    # Create subprocess (argv list, no shell — safe against injection)
    proc = await asyncio.create_subprocess_exec(
        *cmd,
        stdout=PIPE,
        stderr=PIPE,
        cwd=self.working_directory,
        env=env,
    )

    # Track the process for cancellation (cancel_task can kill it by task_id)
    self._background_processes[task_id] = proc

    try:
        stdout, stderr = await proc.communicate()
    finally:
        # Remove from tracking
        self._background_processes.pop(task_id, None)

    # Check return code
    if proc.returncode != 0:
        stderr_text = stderr.decode("utf-8", errors="replace").strip()
        raise RuntimeError(
            f"OpenClaw command failed with exit code {proc.returncode}: {stderr_text}"
        )

    # Parse JSON output
    stdout_text = stdout.decode("utf-8", errors="replace").strip()
    logger.debug("OpenClaw raw stdout length: %d chars", len(stdout_text))
    logger.debug("OpenClaw raw stdout (first 500 chars): %s", stdout_text[:500])
    if not stdout_text:
        raise RuntimeError("OpenClaw command returned empty output")

    try:
        parsed = json.loads(stdout_text)
        logger.debug("OpenClaw parsed JSON keys: %s", list(parsed.keys()) if isinstance(parsed, dict) else type(parsed))
        # NOTE(review): assumes each entry in "payloads" is a dict — confirm
        # against OpenClaw's --json output schema.
        if isinstance(parsed, dict) and "payloads" in parsed:
            logger.debug("OpenClaw payloads count: %d", len(parsed.get("payloads", [])))
            for i, p in enumerate(parsed.get("payloads", [])):
                text_preview = (p.get("text", "") or "")[:100]
                logger.debug("OpenClaw payload[%d] text preview: %s", i, text_preview)
        return parsed
    except json.JSONDecodeError as e:
        logger.error("OpenClaw JSON parse error. Raw output: %s", stdout_text[:1000])
        raise RuntimeError(f"Failed to parse OpenClaw JSON output: {e}") from e
|
|
605
|
+
|
|
606
|
+
def _extract_context_id(self, params: MessageSendParams) -> str | None:
|
|
607
|
+
"""Extract context_id from MessageSendParams."""
|
|
608
|
+
if hasattr(params, "message") and params.message:
|
|
609
|
+
return getattr(params.message, "context_id", None)
|
|
610
|
+
return None
|
|
611
|
+
|
|
612
|
+
def _context_id_to_session_id(self, context_id: str | None) -> str:
|
|
613
|
+
"""
|
|
614
|
+
Convert A2A context_id to a valid OpenClaw session_id.
|
|
615
|
+
|
|
616
|
+
OpenClaw session IDs must match the pattern: ^[a-z0-9][a-z0-9_-]{0,63}$
|
|
617
|
+
This method sanitizes the A2A context_id to conform to that format.
|
|
618
|
+
|
|
619
|
+
If context_id is provided, it's sanitized and prefixed with 'a2a-' to
|
|
620
|
+
namespace it. If context_id is None or empty, falls back to the
|
|
621
|
+
adapter's default session_id.
|
|
622
|
+
|
|
623
|
+
Args:
|
|
624
|
+
context_id: The A2A context_id to convert
|
|
625
|
+
|
|
626
|
+
Returns:
|
|
627
|
+
A valid OpenClaw session_id
|
|
628
|
+
"""
|
|
629
|
+
if not context_id:
|
|
630
|
+
return self.session_id
|
|
631
|
+
|
|
632
|
+
# Lowercase and replace invalid characters with hyphen
|
|
633
|
+
sanitized = _INVALID_SESSION_CHARS_RE.sub("-", context_id.lower())
|
|
634
|
+
# Remove leading/trailing hyphens
|
|
635
|
+
sanitized = _LEADING_TRAILING_DASH_RE.sub("", sanitized)
|
|
636
|
+
|
|
637
|
+
if not sanitized:
|
|
638
|
+
return self.session_id
|
|
639
|
+
|
|
640
|
+
# Prefix with 'a2a-' to namespace and truncate to max length
|
|
641
|
+
# Account for 'a2a-' prefix (4 chars) in the max length
|
|
642
|
+
max_suffix_len = _SESSION_ID_MAX_LEN - 4
|
|
643
|
+
sanitized = sanitized[:max_suffix_len]
|
|
644
|
+
|
|
645
|
+
return f"a2a-{sanitized}"
|
|
646
|
+
|
|
647
|
+
# ---------- Input mapping ----------
|
|
648
|
+
|
|
649
|
+
async def to_framework(self, params: MessageSendParams) -> Dict[str, Any]:
|
|
650
|
+
"""
|
|
651
|
+
Convert A2A message parameters to OpenClaw command input.
|
|
652
|
+
|
|
653
|
+
Extracts the user's message text for passing to the OpenClaw CLI.
|
|
654
|
+
Maps A2A context_id to OpenClaw session_id for conversation continuity.
|
|
655
|
+
|
|
656
|
+
Args:
|
|
657
|
+
params: A2A message parameters
|
|
658
|
+
|
|
659
|
+
Returns:
|
|
660
|
+
Dictionary with command input data
|
|
661
|
+
"""
|
|
662
|
+
user_message = ""
|
|
663
|
+
|
|
664
|
+
# Extract message from A2A params (new format with message.parts)
|
|
665
|
+
if hasattr(params, "message") and params.message:
|
|
666
|
+
msg = params.message
|
|
667
|
+
if hasattr(msg, "parts") and msg.parts:
|
|
668
|
+
text_parts = []
|
|
669
|
+
for part in msg.parts:
|
|
670
|
+
# Handle Part(root=TextPart(...)) structure
|
|
671
|
+
if hasattr(part, "root") and hasattr(part.root, "text"):
|
|
672
|
+
text_parts.append(part.root.text)
|
|
673
|
+
# Handle direct TextPart
|
|
674
|
+
elif hasattr(part, "text"):
|
|
675
|
+
text_parts.append(part.text)
|
|
676
|
+
user_message = self._join_text_parts(text_parts)
|
|
677
|
+
|
|
678
|
+
# Legacy support for messages array (deprecated)
|
|
679
|
+
elif getattr(params, "messages", None):
|
|
680
|
+
last = params.messages[-1]
|
|
681
|
+
content = getattr(last, "content", "")
|
|
682
|
+
if isinstance(content, str):
|
|
683
|
+
user_message = content.strip()
|
|
684
|
+
elif isinstance(content, list):
|
|
685
|
+
text_parts = []
|
|
686
|
+
for item in content:
|
|
687
|
+
txt = getattr(item, "text", None)
|
|
688
|
+
if txt and isinstance(txt, str) and txt.strip():
|
|
689
|
+
text_parts.append(txt.strip())
|
|
690
|
+
user_message = self._join_text_parts(text_parts)
|
|
691
|
+
|
|
692
|
+
# Map A2A context_id to OpenClaw session_id
|
|
693
|
+
# This enables conversation continuity: each A2A context gets its own
|
|
694
|
+
# OpenClaw session, so the agent remembers previous messages in that context
|
|
695
|
+
context_id = self._extract_context_id(params)
|
|
696
|
+
effective_session_id = self._context_id_to_session_id(context_id)
|
|
697
|
+
|
|
698
|
+
return {
|
|
699
|
+
"message": user_message,
|
|
700
|
+
"session_id": effective_session_id,
|
|
701
|
+
"agent_id": self.agent_id,
|
|
702
|
+
"thinking": self.thinking,
|
|
703
|
+
}
|
|
704
|
+
|
|
705
|
+
@staticmethod
|
|
706
|
+
def _join_text_parts(parts: list[str]) -> str:
|
|
707
|
+
"""Join text parts into a single string."""
|
|
708
|
+
if not parts:
|
|
709
|
+
return ""
|
|
710
|
+
text = " ".join(p.strip() for p in parts if p)
|
|
711
|
+
return text.strip()
|
|
712
|
+
|
|
713
|
+
# ---------- Framework call ----------
|
|
714
|
+
|
|
715
|
+
def _build_command(self, framework_input: Dict[str, Any]) -> list[str]:
|
|
716
|
+
"""Build the OpenClaw CLI command."""
|
|
717
|
+
cmd = [
|
|
718
|
+
self.openclaw_path,
|
|
719
|
+
"agent",
|
|
720
|
+
"--local", # CRITICAL: Run embedded, not via gateway
|
|
721
|
+
"--message",
|
|
722
|
+
framework_input["message"],
|
|
723
|
+
"--json",
|
|
724
|
+
"--session-id",
|
|
725
|
+
framework_input["session_id"],
|
|
726
|
+
"--thinking",
|
|
727
|
+
framework_input["thinking"],
|
|
728
|
+
]
|
|
729
|
+
|
|
730
|
+
# Add agent ID if specified
|
|
731
|
+
if framework_input.get("agent_id"):
|
|
732
|
+
cmd.extend(["--agent", framework_input["agent_id"]])
|
|
733
|
+
|
|
734
|
+
return cmd
|
|
735
|
+
|
|
736
|
+
async def call_framework(
|
|
737
|
+
self, framework_input: Dict[str, Any], params: MessageSendParams
|
|
738
|
+
) -> Dict[str, Any]:
|
|
739
|
+
"""
|
|
740
|
+
Execute the OpenClaw CLI command.
|
|
741
|
+
|
|
742
|
+
Args:
|
|
743
|
+
framework_input: Input dictionary from to_framework()
|
|
744
|
+
params: Original A2A parameters (for context)
|
|
745
|
+
|
|
746
|
+
Returns:
|
|
747
|
+
Parsed JSON output from OpenClaw
|
|
748
|
+
|
|
749
|
+
Raises:
|
|
750
|
+
RuntimeError: If command execution fails
|
|
751
|
+
FileNotFoundError: If openclaw binary is not found
|
|
752
|
+
"""
|
|
753
|
+
import os
|
|
754
|
+
|
|
755
|
+
cmd = self._build_command(framework_input)
|
|
756
|
+
logger.debug("Executing OpenClaw command: %s", " ".join(cmd))
|
|
757
|
+
|
|
758
|
+
# Prepare environment
|
|
759
|
+
env = os.environ.copy()
|
|
760
|
+
env.update(self.env_vars)
|
|
761
|
+
|
|
762
|
+
try:
|
|
763
|
+
proc = await asyncio.create_subprocess_exec(
|
|
764
|
+
*cmd,
|
|
765
|
+
stdout=PIPE,
|
|
766
|
+
stderr=PIPE,
|
|
767
|
+
cwd=self.working_directory,
|
|
768
|
+
env=env,
|
|
769
|
+
)
|
|
770
|
+
|
|
771
|
+
stdout, stderr = await asyncio.wait_for(
|
|
772
|
+
proc.communicate(), timeout=self.timeout
|
|
773
|
+
)
|
|
774
|
+
|
|
775
|
+
except FileNotFoundError:
|
|
776
|
+
raise FileNotFoundError(
|
|
777
|
+
f"OpenClaw binary not found at '{self.openclaw_path}'. "
|
|
778
|
+
"Ensure OpenClaw is installed and in PATH."
|
|
779
|
+
)
|
|
780
|
+
except asyncio.TimeoutError:
|
|
781
|
+
# Kill the process if it times out
|
|
782
|
+
proc.kill()
|
|
783
|
+
await proc.wait()
|
|
784
|
+
raise RuntimeError(
|
|
785
|
+
f"OpenClaw command timed out after {self.timeout} seconds"
|
|
786
|
+
)
|
|
787
|
+
|
|
788
|
+
# Check return code
|
|
789
|
+
if proc.returncode != 0:
|
|
790
|
+
stderr_text = stderr.decode("utf-8", errors="replace").strip()
|
|
791
|
+
raise RuntimeError(
|
|
792
|
+
f"OpenClaw command failed with exit code {proc.returncode}: {stderr_text}"
|
|
793
|
+
)
|
|
794
|
+
|
|
795
|
+
# Parse JSON output
|
|
796
|
+
stdout_text = stdout.decode("utf-8", errors="replace").strip()
|
|
797
|
+
logger.debug("OpenClaw sync raw stdout length: %d chars", len(stdout_text))
|
|
798
|
+
logger.debug("OpenClaw sync raw stdout (first 500 chars): %s", stdout_text[:500])
|
|
799
|
+
if not stdout_text:
|
|
800
|
+
raise RuntimeError("OpenClaw command returned empty output")
|
|
801
|
+
|
|
802
|
+
try:
|
|
803
|
+
parsed = json.loads(stdout_text)
|
|
804
|
+
logger.debug("OpenClaw sync parsed JSON keys: %s", list(parsed.keys()) if isinstance(parsed, dict) else type(parsed))
|
|
805
|
+
if isinstance(parsed, dict) and "payloads" in parsed:
|
|
806
|
+
logger.debug("OpenClaw sync payloads count: %d", len(parsed.get("payloads", [])))
|
|
807
|
+
for i, p in enumerate(parsed.get("payloads", [])):
|
|
808
|
+
text_preview = (p.get("text", "") or "")[:100]
|
|
809
|
+
logger.debug("OpenClaw sync payload[%d] text preview: %s", i, text_preview)
|
|
810
|
+
return parsed
|
|
811
|
+
except json.JSONDecodeError as e:
|
|
812
|
+
logger.error("OpenClaw sync JSON parse error. Raw output: %s", stdout_text[:1000])
|
|
813
|
+
raise RuntimeError(f"Failed to parse OpenClaw JSON output: {e}") from e
|
|
814
|
+
|
|
815
|
+
# ---------- Output mapping ----------
|
|
816
|
+
|
|
817
|
+
async def from_framework(
|
|
818
|
+
self, framework_output: Dict[str, Any], params: MessageSendParams
|
|
819
|
+
) -> Message | Task:
|
|
820
|
+
"""
|
|
821
|
+
Convert OpenClaw JSON output to A2A Message.
|
|
822
|
+
|
|
823
|
+
Handles the OpenClaw JSON output format:
|
|
824
|
+
{
|
|
825
|
+
"payloads": [
|
|
826
|
+
{
|
|
827
|
+
"text": "Response text",
|
|
828
|
+
"mediaUrl": null,
|
|
829
|
+
"mediaUrls": ["https://..."]
|
|
830
|
+
}
|
|
831
|
+
],
|
|
832
|
+
"meta": {...}
|
|
833
|
+
}
|
|
834
|
+
|
|
835
|
+
Args:
|
|
836
|
+
framework_output: JSON output from OpenClaw
|
|
837
|
+
params: Original A2A parameters
|
|
838
|
+
|
|
839
|
+
Returns:
|
|
840
|
+
A2A Message with the response
|
|
841
|
+
"""
|
|
842
|
+
context_id = self._extract_context_id(params)
|
|
843
|
+
return self._create_response_message(framework_output, context_id)
|
|
844
|
+
|
|
845
|
+
def _create_response_message(
|
|
846
|
+
self, framework_output: Dict[str, Any], context_id: str | None
|
|
847
|
+
) -> Message:
|
|
848
|
+
"""Create a response Message from OpenClaw output."""
|
|
849
|
+
logger.debug("_create_response_message called with framework_output keys: %s",
|
|
850
|
+
list(framework_output.keys()) if isinstance(framework_output, dict) else type(framework_output))
|
|
851
|
+
parts: list[Part] = []
|
|
852
|
+
|
|
853
|
+
# Extract payloads
|
|
854
|
+
payloads = framework_output.get("payloads", [])
|
|
855
|
+
logger.debug("Extracting from %d payloads", len(payloads))
|
|
856
|
+
for i, payload in enumerate(payloads):
|
|
857
|
+
# Extract text
|
|
858
|
+
text = payload.get("text", "")
|
|
859
|
+
logger.debug("Payload[%d] text length: %d, preview: %s", i, len(text) if text else 0, (text or "")[:100])
|
|
860
|
+
if text:
|
|
861
|
+
parts.append(Part(root=TextPart(text=text)))
|
|
862
|
+
|
|
863
|
+
# Extract media URLs
|
|
864
|
+
media_urls = payload.get("mediaUrls") or []
|
|
865
|
+
if payload.get("mediaUrl"):
|
|
866
|
+
media_urls.append(payload["mediaUrl"])
|
|
867
|
+
|
|
868
|
+
for url in media_urls:
|
|
869
|
+
# Detect MIME type from URL extension
|
|
870
|
+
mime_type = self._detect_mime_type(url)
|
|
871
|
+
parts.append(
|
|
872
|
+
Part(
|
|
873
|
+
root=FilePart(
|
|
874
|
+
file=FileWithUri(uri=url, mimeType=mime_type),
|
|
875
|
+
)
|
|
876
|
+
)
|
|
877
|
+
)
|
|
878
|
+
|
|
879
|
+
# Fallback if no parts extracted
|
|
880
|
+
if not parts:
|
|
881
|
+
logger.warning("No parts extracted from OpenClaw output, using empty fallback")
|
|
882
|
+
parts.append(Part(root=TextPart(text="")))
|
|
883
|
+
|
|
884
|
+
logger.debug("Created Message with %d parts", len(parts))
|
|
885
|
+
for i, part in enumerate(parts):
|
|
886
|
+
if hasattr(part, 'root') and hasattr(part.root, 'text'):
|
|
887
|
+
logger.debug("Part[%d] text length: %d", i, len(part.root.text) if part.root.text else 0)
|
|
888
|
+
|
|
889
|
+
return Message(
|
|
890
|
+
role=Role.agent,
|
|
891
|
+
message_id=str(uuid.uuid4()),
|
|
892
|
+
context_id=context_id,
|
|
893
|
+
parts=parts,
|
|
894
|
+
)
|
|
895
|
+
|
|
896
|
+
def _create_response_artifact(self, framework_output: Dict[str, Any]) -> Artifact:
|
|
897
|
+
"""
|
|
898
|
+
Create an Artifact from OpenClaw output.
|
|
899
|
+
|
|
900
|
+
Per A2A spec, task outputs should be stored in artifacts, not messages.
|
|
901
|
+
Messages are for conversation history; artifacts are for task results.
|
|
902
|
+
"""
|
|
903
|
+
parts: list[Part] = []
|
|
904
|
+
|
|
905
|
+
# Extract payloads
|
|
906
|
+
payloads = framework_output.get("payloads", [])
|
|
907
|
+
for payload in payloads:
|
|
908
|
+
# Extract text
|
|
909
|
+
text = payload.get("text", "")
|
|
910
|
+
if text:
|
|
911
|
+
parts.append(Part(root=TextPart(text=text)))
|
|
912
|
+
|
|
913
|
+
# Extract media URLs
|
|
914
|
+
media_urls = payload.get("mediaUrls") or []
|
|
915
|
+
if payload.get("mediaUrl"):
|
|
916
|
+
media_urls.append(payload["mediaUrl"])
|
|
917
|
+
|
|
918
|
+
for url in media_urls:
|
|
919
|
+
mime_type = self._detect_mime_type(url)
|
|
920
|
+
parts.append(
|
|
921
|
+
Part(
|
|
922
|
+
root=FilePart(
|
|
923
|
+
file=FileWithUri(uri=url, mimeType=mime_type),
|
|
924
|
+
)
|
|
925
|
+
)
|
|
926
|
+
)
|
|
927
|
+
|
|
928
|
+
# Fallback if no parts extracted
|
|
929
|
+
if not parts:
|
|
930
|
+
parts.append(Part(root=TextPart(text="")))
|
|
931
|
+
|
|
932
|
+
return Artifact(
|
|
933
|
+
artifact_id=str(uuid.uuid4()),
|
|
934
|
+
name="response",
|
|
935
|
+
description="OpenClaw agent response",
|
|
936
|
+
parts=parts,
|
|
937
|
+
)
|
|
938
|
+
|
|
939
|
+
@staticmethod
|
|
940
|
+
def _detect_mime_type(url: str) -> str:
|
|
941
|
+
"""Detect MIME type from URL extension."""
|
|
942
|
+
url_lower = url.lower()
|
|
943
|
+
if url_lower.endswith(".png"):
|
|
944
|
+
return "image/png"
|
|
945
|
+
elif url_lower.endswith((".jpg", ".jpeg")):
|
|
946
|
+
return "image/jpeg"
|
|
947
|
+
elif url_lower.endswith(".gif"):
|
|
948
|
+
return "image/gif"
|
|
949
|
+
elif url_lower.endswith(".webp"):
|
|
950
|
+
return "image/webp"
|
|
951
|
+
elif url_lower.endswith(".svg"):
|
|
952
|
+
return "image/svg+xml"
|
|
953
|
+
elif url_lower.endswith(".pdf"):
|
|
954
|
+
return "application/pdf"
|
|
955
|
+
elif url_lower.endswith(".mp4"):
|
|
956
|
+
return "video/mp4"
|
|
957
|
+
elif url_lower.endswith(".webm"):
|
|
958
|
+
return "video/webm"
|
|
959
|
+
elif url_lower.endswith(".mp3"):
|
|
960
|
+
return "audio/mpeg"
|
|
961
|
+
elif url_lower.endswith(".wav"):
|
|
962
|
+
return "audio/wav"
|
|
963
|
+
else:
|
|
964
|
+
return "application/octet-stream"
|
|
965
|
+
|
|
966
|
+
# ---------- Push Notification Support ----------
|
|
967
|
+
|
|
968
|
+
def supports_push_notifications(self) -> bool:
|
|
969
|
+
"""Check if this adapter supports push notifications."""
|
|
970
|
+
return self.async_mode
|
|
971
|
+
|
|
972
|
+
async def _get_http_client(self) -> httpx.AsyncClient:
|
|
973
|
+
"""Get or create the HTTP client for push notifications."""
|
|
974
|
+
if self._http_client is None:
|
|
975
|
+
self._http_client = httpx.AsyncClient(timeout=30.0)
|
|
976
|
+
return self._http_client
|
|
977
|
+
|
|
978
|
+
async def _send_push_notification(self, task_id: str, task: Task) -> bool:
|
|
979
|
+
"""
|
|
980
|
+
Send a push notification for a task status update.
|
|
981
|
+
|
|
982
|
+
Per A2A spec section 4.3.3, push notifications use StreamResponse format.
|
|
983
|
+
We send the full Task object (including artifacts) wrapped in StreamResponse.
|
|
984
|
+
|
|
985
|
+
Args:
|
|
986
|
+
task_id: The task ID
|
|
987
|
+
task: The updated Task object
|
|
988
|
+
|
|
989
|
+
Returns:
|
|
990
|
+
True if notification was sent successfully, False otherwise
|
|
991
|
+
"""
|
|
992
|
+
push_config = self._push_configs.get(task_id)
|
|
993
|
+
if not push_config or not push_config.url:
|
|
994
|
+
return False
|
|
995
|
+
|
|
996
|
+
try:
|
|
997
|
+
client = await self._get_http_client()
|
|
998
|
+
|
|
999
|
+
# A2A-compliant: Send full Task wrapped in StreamResponse format
|
|
1000
|
+
# This ensures artifacts (with response content) are included
|
|
1001
|
+
payload = {
|
|
1002
|
+
"task": task.model_dump(mode="json")
|
|
1003
|
+
}
|
|
1004
|
+
|
|
1005
|
+
# Build headers
|
|
1006
|
+
headers = {"Content-Type": "application/json"}
|
|
1007
|
+
|
|
1008
|
+
# Add Bearer token if provided
|
|
1009
|
+
if push_config.token:
|
|
1010
|
+
headers["Authorization"] = f"Bearer {push_config.token}"
|
|
1011
|
+
|
|
1012
|
+
# Send the notification
|
|
1013
|
+
response = await client.post(
|
|
1014
|
+
push_config.url,
|
|
1015
|
+
json=payload,
|
|
1016
|
+
headers=headers,
|
|
1017
|
+
)
|
|
1018
|
+
|
|
1019
|
+
if response.status_code in (200, 201, 202, 204):
|
|
1020
|
+
logger.info(
|
|
1021
|
+
"Push notification sent for task %s to %s (status=%s)",
|
|
1022
|
+
task_id,
|
|
1023
|
+
push_config.url,
|
|
1024
|
+
task.status.state,
|
|
1025
|
+
)
|
|
1026
|
+
return True
|
|
1027
|
+
else:
|
|
1028
|
+
logger.warning(
|
|
1029
|
+
"Push notification failed for task %s: HTTP %s - %s",
|
|
1030
|
+
task_id,
|
|
1031
|
+
response.status_code,
|
|
1032
|
+
response.text[:200],
|
|
1033
|
+
)
|
|
1034
|
+
return False
|
|
1035
|
+
|
|
1036
|
+
except Exception as e:
|
|
1037
|
+
logger.error(
|
|
1038
|
+
"Failed to send push notification for task %s: %s",
|
|
1039
|
+
task_id,
|
|
1040
|
+
e,
|
|
1041
|
+
)
|
|
1042
|
+
return False
|
|
1043
|
+
|
|
1044
|
+
async def set_push_notification_config(
|
|
1045
|
+
self, task_id: str, config: PushNotificationConfig
|
|
1046
|
+
) -> bool:
|
|
1047
|
+
"""
|
|
1048
|
+
Set or update push notification config for a task.
|
|
1049
|
+
|
|
1050
|
+
Args:
|
|
1051
|
+
task_id: The task ID
|
|
1052
|
+
config: The push notification configuration
|
|
1053
|
+
|
|
1054
|
+
Returns:
|
|
1055
|
+
True if config was set successfully
|
|
1056
|
+
"""
|
|
1057
|
+
if not self.async_mode:
|
|
1058
|
+
raise RuntimeError(
|
|
1059
|
+
"Push notifications are only available in async mode. "
|
|
1060
|
+
"Initialize adapter with async_mode=True"
|
|
1061
|
+
)
|
|
1062
|
+
|
|
1063
|
+
task = await self.task_store.get(task_id)
|
|
1064
|
+
if not task:
|
|
1065
|
+
return False
|
|
1066
|
+
|
|
1067
|
+
self._push_configs[task_id] = config
|
|
1068
|
+
logger.debug("Set push notification config for task %s: %s", task_id, config.url)
|
|
1069
|
+
return True
|
|
1070
|
+
|
|
1071
|
+
async def get_push_notification_config(
|
|
1072
|
+
self, task_id: str
|
|
1073
|
+
) -> PushNotificationConfig | None:
|
|
1074
|
+
"""
|
|
1075
|
+
Get push notification config for a task.
|
|
1076
|
+
|
|
1077
|
+
Args:
|
|
1078
|
+
task_id: The task ID
|
|
1079
|
+
|
|
1080
|
+
Returns:
|
|
1081
|
+
The push notification config, or None if not set
|
|
1082
|
+
"""
|
|
1083
|
+
return self._push_configs.get(task_id)
|
|
1084
|
+
|
|
1085
|
+
async def delete_push_notification_config(self, task_id: str) -> bool:
|
|
1086
|
+
"""
|
|
1087
|
+
Delete push notification config for a task.
|
|
1088
|
+
|
|
1089
|
+
Args:
|
|
1090
|
+
task_id: The task ID
|
|
1091
|
+
|
|
1092
|
+
Returns:
|
|
1093
|
+
True if config was deleted, False if not found
|
|
1094
|
+
"""
|
|
1095
|
+
if task_id in self._push_configs:
|
|
1096
|
+
del self._push_configs[task_id]
|
|
1097
|
+
logger.debug("Deleted push notification config for task %s", task_id)
|
|
1098
|
+
return True
|
|
1099
|
+
return False
|
|
1100
|
+
|
|
1101
|
+
# ---------- Async Task Support ----------
|
|
1102
|
+
|
|
1103
|
+
def supports_async_tasks(self) -> bool:
|
|
1104
|
+
"""Check if this adapter supports async task execution."""
|
|
1105
|
+
return self.async_mode
|
|
1106
|
+
|
|
1107
|
+
async def get_task(self, task_id: str) -> Task | None:
|
|
1108
|
+
"""
|
|
1109
|
+
Get the current status of a task by ID.
|
|
1110
|
+
|
|
1111
|
+
This method is used for polling task status in async task execution mode.
|
|
1112
|
+
|
|
1113
|
+
Args:
|
|
1114
|
+
task_id: The ID of the task to retrieve
|
|
1115
|
+
|
|
1116
|
+
Returns:
|
|
1117
|
+
The Task object with current status, or None if not found
|
|
1118
|
+
|
|
1119
|
+
Raises:
|
|
1120
|
+
RuntimeError: If async mode is not enabled
|
|
1121
|
+
"""
|
|
1122
|
+
if not self.async_mode:
|
|
1123
|
+
raise RuntimeError(
|
|
1124
|
+
"get_task() is only available in async mode. "
|
|
1125
|
+
"Initialize adapter with async_mode=True"
|
|
1126
|
+
)
|
|
1127
|
+
|
|
1128
|
+
task = await self.task_store.get(task_id)
|
|
1129
|
+
if task:
|
|
1130
|
+
logger.debug("Retrieved task %s with state=%s", task_id, task.status.state)
|
|
1131
|
+
else:
|
|
1132
|
+
logger.debug("Task %s not found", task_id)
|
|
1133
|
+
return task
|
|
1134
|
+
|
|
1135
|
+
async def delete_task(self, task_id: str) -> bool:
|
|
1136
|
+
"""
|
|
1137
|
+
Delete a task from the task store.
|
|
1138
|
+
|
|
1139
|
+
This can be used to clean up completed/failed tasks to prevent memory leaks
|
|
1140
|
+
when using InMemoryTaskStore. Only tasks in terminal states (completed,
|
|
1141
|
+
failed, canceled) should be deleted.
|
|
1142
|
+
|
|
1143
|
+
Args:
|
|
1144
|
+
task_id: The ID of the task to delete
|
|
1145
|
+
|
|
1146
|
+
Returns:
|
|
1147
|
+
True if the task was deleted, False if not found or still running
|
|
1148
|
+
|
|
1149
|
+
Raises:
|
|
1150
|
+
RuntimeError: If async mode is not enabled
|
|
1151
|
+
ValueError: If the task is still running (not in a terminal state)
|
|
1152
|
+
"""
|
|
1153
|
+
if not self.async_mode:
|
|
1154
|
+
raise RuntimeError(
|
|
1155
|
+
"delete_task() is only available in async mode. "
|
|
1156
|
+
"Initialize adapter with async_mode=True"
|
|
1157
|
+
)
|
|
1158
|
+
|
|
1159
|
+
task = await self.task_store.get(task_id)
|
|
1160
|
+
if not task:
|
|
1161
|
+
return False
|
|
1162
|
+
|
|
1163
|
+
# Only allow deletion of tasks in terminal states
|
|
1164
|
+
terminal_states = {TaskState.completed, TaskState.failed, TaskState.canceled}
|
|
1165
|
+
if task.status.state not in terminal_states:
|
|
1166
|
+
raise ValueError(
|
|
1167
|
+
f"Cannot delete task {task_id} with state={task.status.state}. "
|
|
1168
|
+
f"Only tasks in terminal states ({', '.join(s.value for s in terminal_states)}) can be deleted."
|
|
1169
|
+
)
|
|
1170
|
+
|
|
1171
|
+
await self.task_store.delete(task_id)
|
|
1172
|
+
logger.debug("Deleted task %s", task_id)
|
|
1173
|
+
return True
|
|
1174
|
+
|
|
1175
|
+
    async def cancel_task(self, task_id: str) -> Task | None:
        """
        Attempt to cancel a running task.

        This cancels the background asyncio task and kills the subprocess if
        it's still running, then persists the canceled state and notifies any
        configured push-notification webhook.

        Args:
            task_id: The ID of the task to cancel

        Returns:
            The updated Task object with state="canceled", or None if not found

        Raises:
            RuntimeError: If async mode is not enabled
        """
        if not self.async_mode:
            raise RuntimeError(
                "cancel_task() is only available in async mode. "
                "Initialize adapter with async_mode=True"
            )

        # Mark task as cancelled FIRST to prevent race conditions: the
        # background runner can check this set before writing a late
        # completed/failed state.
        self._cancelled_tasks.add(task_id)

        # Kill the subprocess if still running (returncode is None while alive)
        proc = self._background_processes.get(task_id)
        if proc and proc.returncode is None:
            logger.debug("Killing subprocess for task %s", task_id)
            proc.kill()

        # Cancel the background task if still running and wait for it
        bg_task = self._background_tasks.get(task_id)
        if bg_task and not bg_task.done():
            bg_task.cancel()
            logger.debug("Cancelling background task for %s", task_id)
            # Wait for the task to actually finish so no state update races us
            try:
                await bg_task
            except asyncio.CancelledError:
                pass  # Expected when task is cancelled
            except Exception:
                pass  # Task may have failed, we're cancelling anyway

        # Update task state to canceled (new Task object; history preserved)
        task = await self.task_store.get(task_id)
        if task:
            now = datetime.now(timezone.utc).isoformat()
            canceled_task = Task(
                id=task_id,
                context_id=task.context_id,
                status=TaskStatus(
                    state=TaskState.canceled,
                    timestamp=now,
                ),
                history=task.history,
            )
            await self.task_store.save(canceled_task)
            logger.debug("Task %s marked as canceled", task_id)

            # Record completion time for TTL cleanup
            self._record_task_completion(task_id)

            # Send push notification for cancellation
            await self._send_push_notification(task_id, canceled_task)

            return canceled_task

        return None
|
|
1241
|
+
|
|
1242
|
+
# ---------- Lifecycle ----------
|
|
1243
|
+
|
|
1244
|
+
    async def close(self) -> None:
        """Close the adapter and cancel pending background tasks.

        Shutdown order matters: stop the TTL cleanup loop first, flag every
        tracked task as cancelled (so runners skip late state writes), kill
        live subprocesses, cancel and await the background asyncio tasks,
        clear all bookkeeping, and finally release the shared HTTP client.
        """
        # Cancel cleanup task first
        if self._cleanup_task and not self._cleanup_task.done():
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                pass  # Expected

        # Mark all tasks as cancelled to prevent state updates
        for task_id in self._background_tasks:
            self._cancelled_tasks.add(task_id)

        # Kill all running subprocesses (returncode is None while alive)
        for task_id, proc in list(self._background_processes.items()):
            if proc.returncode is None:
                logger.debug("Killing subprocess %s during close", task_id)
                proc.kill()

        # Cancel all pending background tasks
        tasks_to_cancel = []
        for task_id, bg_task in list(self._background_tasks.items()):
            if not bg_task.done():
                bg_task.cancel()
                tasks_to_cancel.append(bg_task)
                logger.debug("Cancelling background task %s during close", task_id)

        # Wait for all cancelled tasks to complete; return_exceptions=True
        # keeps one failing task from aborting the rest of the shutdown
        if tasks_to_cancel:
            await asyncio.gather(*tasks_to_cancel, return_exceptions=True)

        self._background_tasks.clear()
        self._background_processes.clear()
        self._cancelled_tasks.clear()
        self._push_configs.clear()
        self._task_completion_times.clear()

        # Close HTTP client
        if self._http_client:
            await self._http_client.aclose()
            self._http_client = None
|
|
1286
|
+
|
|
1287
|
+
async def __aenter__(self):
|
|
1288
|
+
"""Async context manager entry."""
|
|
1289
|
+
return self
|
|
1290
|
+
|
|
1291
|
+
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
|
1292
|
+
"""Async context manager exit."""
|
|
1293
|
+
await self.close()
|
|
1294
|
+
|
|
1295
|
+
def supports_streaming(self) -> bool:
|
|
1296
|
+
"""This adapter does not support streaming responses."""
|
|
1297
|
+
return False
|