a2a_adapter-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- a2a_adapter/__init__.py +42 -0
- a2a_adapter/adapter.py +185 -0
- a2a_adapter/client.py +236 -0
- a2a_adapter/integrations/__init__.py +33 -0
- a2a_adapter/integrations/callable.py +172 -0
- a2a_adapter/integrations/crewai.py +142 -0
- a2a_adapter/integrations/langchain.py +171 -0
- a2a_adapter/integrations/n8n.py +787 -0
- a2a_adapter/loader.py +131 -0
- a2a_adapter-0.1.0.dist-info/METADATA +604 -0
- a2a_adapter-0.1.0.dist-info/RECORD +14 -0
- a2a_adapter-0.1.0.dist-info/WHEEL +5 -0
- a2a_adapter-0.1.0.dist-info/licenses/LICENSE +201 -0
- a2a_adapter-0.1.0.dist-info/top_level.txt +1 -0
a2a_adapter/integrations/n8n.py
@@ -0,0 +1,787 @@
"""
n8n adapter for A2A Protocol.

This adapter enables n8n workflows to be exposed as A2A-compliant agents
by forwarding A2A messages to n8n webhooks.

Supports two modes:
- Synchronous (default): Blocks until n8n workflow completes, returns Message
- Async Task Mode: Returns Task immediately, processes in background, supports polling
"""

import json
import asyncio
import logging
import time
import uuid
from datetime import datetime, timezone
from typing import Any, Dict

import httpx
from httpx import HTTPStatusError, ConnectError, ReadTimeout

from a2a.types import (
    Message,
    MessageSendParams,
    Task,
    TaskState,
    TaskStatus,
    TextPart,
    Role,
    Part,
)
from ..adapter import BaseAgentAdapter

# Lazy import for TaskStore to avoid hard dependency
try:
    from a2a.server.tasks import TaskStore, InMemoryTaskStore
    _HAS_TASK_STORE = True
except ImportError:
    _HAS_TASK_STORE = False
    TaskStore = None  # type: ignore
    InMemoryTaskStore = None  # type: ignore

logger = logging.getLogger(__name__)


class N8nAgentAdapter(BaseAgentAdapter):
    """
    Adapter for integrating n8n workflows as A2A agents.

    This adapter forwards A2A message requests to an n8n webhook URL and
    translates the response back to A2A format.

    Supports two execution modes:

    1. **Synchronous Mode** (default):
       - Blocks until the n8n workflow completes
       - Returns a Message with the workflow result
       - Best for quick workflows (< 30 seconds)

    2. **Async Task Mode** (async_mode=True):
       - Returns a Task with state="working" immediately
       - Processes the workflow in the background
       - Clients can poll get_task() for status updates
       - Best for long-running workflows
       - Tasks time out after async_timeout seconds (default: 300)

    **Memory Considerations (Async Mode)**:

    When using InMemoryTaskStore (the default), completed tasks remain in memory
    indefinitely. For production use, either:

    1. Call delete_task() after retrieving completed tasks to free memory
    2. Use DatabaseTaskStore for persistent storage with external cleanup
    3. Implement a periodic cleanup routine for old completed tasks

    Example cleanup pattern::

        task = await adapter.get_task(task_id)
        if task and task.status.state in ("completed", "failed", "canceled"):
            # Process the result...
            await adapter.delete_task(task_id)  # Free memory
    """

    def __init__(
        self,
        webhook_url: str,
        timeout: int = 30,
        headers: Dict[str, str] | None = None,
        max_retries: int = 2,
        backoff: float = 0.25,
        payload_template: Dict[str, Any] | None = None,
        message_field: str = "message",
        async_mode: bool = False,
        task_store: "TaskStore | None" = None,
        async_timeout: int = 300,
    ):
        """
        Initialize the n8n adapter.

        Args:
            webhook_url: The n8n webhook URL to send requests to.
            timeout: HTTP request timeout in seconds (default: 30).
            headers: Optional additional HTTP headers to include in requests.
            max_retries: Number of retry attempts for transient failures (default: 2).
            backoff: Base backoff seconds; multiplied by 2**attempt between retries.
            payload_template: Optional base payload dict to merge with message.
                Use this to add static fields your n8n workflow expects.
            message_field: Field name for the user message (default: "message").
                Change this if your n8n workflow expects a different field name.
            async_mode: If True, return Task immediately and process in background.
                If False (default), block until workflow completes.
            task_store: Optional TaskStore for persisting task state. If not provided
                and async_mode is True, uses InMemoryTaskStore.
            async_timeout: Timeout for async task execution in seconds (default: 300).
        """
        self.webhook_url = webhook_url
        self.timeout = timeout
        self.headers = dict(headers) if headers else {}
        self.max_retries = max(0, int(max_retries))
        self.backoff = float(backoff)
        self.payload_template = dict(payload_template) if payload_template else {}
        self.message_field = message_field
        self._client: httpx.AsyncClient | None = None

        # Async task mode configuration
        self.async_mode = async_mode
        self.async_timeout = async_timeout
        self._background_tasks: Dict[str, "asyncio.Task[None]"] = {}
        self._cancelled_tasks: set[str] = set()  # Track cancelled task IDs

        # Initialize task store for async mode
        if async_mode:
            if not _HAS_TASK_STORE:
                raise ImportError(
                    "Async task mode requires the A2A SDK with task support. "
                    "Install with: pip install a2a-sdk"
                )
            self.task_store: "TaskStore" = task_store or InMemoryTaskStore()
        else:
            self.task_store = task_store  # type: ignore

    async def _get_client(self) -> httpx.AsyncClient:
        """Get or create the HTTP client."""
        if self._client is None:
            # Use async_timeout for async mode since workflows may take longer
            timeout = self.async_timeout if self.async_mode else self.timeout
            self._client = httpx.AsyncClient(timeout=timeout)
        return self._client

    async def handle(self, params: MessageSendParams) -> Message | Task:
        """
        Handle a non-streaming A2A message request.

        In sync mode (default): Blocks until workflow completes, returns Message.
        In async mode: Returns Task immediately, processes in background.
        """
        if self.async_mode:
            return await self._handle_async(params)
        else:
            return await self._handle_sync(params)

    async def _handle_sync(self, params: MessageSendParams) -> Message:
        """Handle request synchronously - blocks until workflow completes."""
        framework_input = await self.to_framework(params)
        framework_output = await self.call_framework(framework_input, params)
        result = await self.from_framework(framework_output, params)
        # In sync mode, always return Message
        if isinstance(result, Task):
            # Extract message from completed task if needed
            if result.status and result.status.message:
                return result.status.message
            # Fallback: create a message from task
            return Message(
                role=Role.agent,
                message_id=str(uuid.uuid4()),
                context_id=result.context_id,
                parts=[Part(root=TextPart(text="Task completed"))],
            )
        return result

    async def _handle_async(self, params: MessageSendParams) -> Task:
        """
        Handle request asynchronously - returns Task immediately, processes in background.

        1. Creates a Task with state="working"
        2. Saves the task to the TaskStore
        3. Starts a background coroutine to execute the workflow
        4. Returns the Task immediately
        """
        # Generate IDs
        task_id = str(uuid.uuid4())
        context_id = self._extract_context_id(params) or str(uuid.uuid4())

        # Extract the initial message for history
        initial_message = None
        if hasattr(params, "message") and params.message:
            initial_message = params.message

        # Create initial task with "working" state
        now = datetime.now(timezone.utc).isoformat()
        task = Task(
            id=task_id,
            context_id=context_id,
            status=TaskStatus(
                state=TaskState.working,
                timestamp=now,
            ),
            history=[initial_message] if initial_message else None,
        )

        # Save initial task state
        await self.task_store.save(task)
        logger.debug("Created async task %s with state=working", task_id)

        # Start background processing with timeout
        bg_task = asyncio.create_task(
            self._execute_workflow_with_timeout(task_id, context_id, params)
        )
        self._background_tasks[task_id] = bg_task

        # Clean up background task reference when done and handle exceptions
        def _on_task_done(t: "asyncio.Task[None]") -> None:
            self._background_tasks.pop(task_id, None)
            self._cancelled_tasks.discard(task_id)
            # Check for unhandled exceptions (shouldn't happen, but log if they do)
            if not t.cancelled():
                exc = t.exception()
                if exc:
                    logger.error(
                        "Unhandled exception in background task %s: %s",
                        task_id,
                        exc,
                    )

        bg_task.add_done_callback(_on_task_done)

        return task

    async def _execute_workflow_with_timeout(
        self,
        task_id: str,
        context_id: str,
        params: MessageSendParams,
    ) -> None:
        """
        Execute the workflow with a timeout wrapper.

        This ensures that long-running workflows don't hang indefinitely.
        """
        try:
            await asyncio.wait_for(
                self._execute_workflow_background(task_id, context_id, params),
                timeout=self.async_timeout,
            )
        except asyncio.TimeoutError:
            # Check if task was cancelled (don't overwrite canceled state)
            if task_id in self._cancelled_tasks:
                logger.debug("Task %s was cancelled, not marking as failed", task_id)
                return

            logger.error("Task %s timed out after %s seconds", task_id, self.async_timeout)
            now = datetime.now(timezone.utc).isoformat()
            error_message = Message(
                role=Role.agent,
                message_id=str(uuid.uuid4()),
                context_id=context_id,
                parts=[Part(root=TextPart(text=f"Workflow timed out after {self.async_timeout} seconds"))],
            )

            timeout_task = Task(
                id=task_id,
                context_id=context_id,
                status=TaskStatus(
                    state=TaskState.failed,
                    message=error_message,
                    timestamp=now,
                ),
            )
            await self.task_store.save(timeout_task)

    async def _execute_workflow_background(
        self,
        task_id: str,
        context_id: str,
        params: MessageSendParams,
    ) -> None:
        """
        Execute the n8n workflow in the background and update task state.

        This runs as a background coroutine after the initial Task is returned.
        """
        try:
            logger.debug("Starting background execution for task %s", task_id)

            # Execute the workflow (this may take a while)
            framework_input = await self.to_framework(params)
            framework_output = await self.call_framework(framework_input, params)

            # Check if task was cancelled during execution
            if task_id in self._cancelled_tasks:
                logger.debug("Task %s was cancelled during execution, not updating state", task_id)
                return

            # Convert to message
            response_text = self._extract_response_text(framework_output)
            response_message = Message(
                role=Role.agent,
                message_id=str(uuid.uuid4()),
                context_id=context_id,
                parts=[Part(root=TextPart(text=response_text))],
            )

            # Build history
            history = []
            if hasattr(params, "message") and params.message:
                history.append(params.message)
            history.append(response_message)

            # Update task to completed state
            now = datetime.now(timezone.utc).isoformat()
            completed_task = Task(
                id=task_id,
                context_id=context_id,
                status=TaskStatus(
                    state=TaskState.completed,
                    message=response_message,
                    timestamp=now,
                ),
                history=history,
            )

            await self.task_store.save(completed_task)
            logger.debug("Task %s completed successfully", task_id)

        except asyncio.CancelledError:
            # Task was cancelled - don't update state, cancel_task() handles it
            logger.debug("Task %s was cancelled", task_id)
            raise  # Re-raise to properly cancel the task

        except Exception as e:
            # Check if task was cancelled (don't overwrite canceled state)
            if task_id in self._cancelled_tasks:
                logger.debug("Task %s was cancelled, not marking as failed", task_id)
                return

            # Update task to failed state
            logger.error("Task %s failed: %s", task_id, e)
            now = datetime.now(timezone.utc).isoformat()
            error_message = Message(
                role=Role.agent,
                message_id=str(uuid.uuid4()),
                context_id=context_id,
                parts=[Part(root=TextPart(text=f"Workflow failed: {str(e)}"))],
            )

            failed_task = Task(
                id=task_id,
                context_id=context_id,
                status=TaskStatus(
                    state=TaskState.failed,
                    message=error_message,
                    timestamp=now,
                ),
            )

            await self.task_store.save(failed_task)

    def _extract_context_id(self, params: MessageSendParams) -> str | None:
        """Extract context_id from MessageSendParams."""
        if hasattr(params, "message") and params.message:
            return getattr(params.message, "context_id", None)
        return None

    def _extract_response_text(self, framework_output: Dict[str, Any] | list) -> str:
        """Extract response text from n8n webhook output."""
        if isinstance(framework_output, list):
            if len(framework_output) == 0:
                return ""
            elif len(framework_output) == 1:
                return self._extract_text_from_item(framework_output[0])
            else:
                texts = []
                for item in framework_output:
                    if isinstance(item, dict):
                        text = self._extract_text_from_item(item)
                        if text:
                            texts.append(text)
                return "\n".join(texts) if texts else json.dumps(framework_output, indent=2)
        elif isinstance(framework_output, dict):
            return self._extract_text_from_item(framework_output)
        else:
            return str(framework_output)

    # ---------- Input mapping ----------

    async def to_framework(self, params: MessageSendParams) -> Dict[str, Any]:
        """
        Build the n8n webhook payload from A2A params.

        Extracts the latest user message text and constructs a JSON-serializable
        payload for posting to an n8n webhook. Supports custom payload templates
        and message field names for flexibility with different n8n workflows.

        Args:
            params: A2A message parameters.

        Returns:
            dict with the user message and any configured template fields.
        """
        user_message = ""

        # Extract message from A2A params (new format with message.parts)
        if hasattr(params, "message") and params.message:
            msg = params.message
            if hasattr(msg, "parts") and msg.parts:
                text_parts = []
                for part in msg.parts:
                    # Handle Part(root=TextPart(...)) structure
                    if hasattr(part, "root") and hasattr(part.root, "text"):
                        text_parts.append(part.root.text)
                    # Handle direct TextPart
                    elif hasattr(part, "text"):
                        text_parts.append(part.text)
                user_message = self._join_text_parts(text_parts)

        # Legacy support for messages array
        elif getattr(params, "messages", None):
            last = params.messages[-1]
            content = getattr(last, "content", "")
            if isinstance(content, str):
                user_message = content.strip()
            elif isinstance(content, list):
                text_parts: list[str] = []
                for item in content:
                    txt = getattr(item, "text", None)
                    if txt and isinstance(txt, str) and txt.strip():
                        text_parts.append(txt.strip())
                user_message = self._join_text_parts(text_parts)

        # Extract context_id from the message (used for multi-turn conversation tracking)
        context_id = None
        if hasattr(params, "message") and params.message:
            context_id = getattr(params.message, "context_id", None)

        # Build payload with custom template support
        payload: Dict[str, Any] = {
            **self.payload_template,  # Start with template (e.g., {"name": "A2A Agent"})
            self.message_field: user_message,  # Add message with custom field name
        }

        # Add metadata only if not using custom template
        if not self.payload_template:
            payload["metadata"] = {
                "context_id": context_id,
            }
        else:
            # With custom template, add context_id at root if not already present
            if "context_id" not in payload:
                payload["context_id"] = context_id

        return payload

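    # Illustrative payload shapes produced by to_framework() (comments only, not
    # executed, and not part of any n8n documentation). With the defaults
    # (message_field="message", no payload_template), the webhook receives:
    #
    #     {"message": "Hello", "metadata": {"context_id": "<ctx-or-None>"}}
    #
    # With a custom template and field name, e.g. payload_template={"name": "A2A Agent"}
    # and message_field="chatInput" (both values are assumptions for illustration),
    # the webhook instead receives:
    #
    #     {"name": "A2A Agent", "chatInput": "Hello", "context_id": "<ctx-or-None>"}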
    @staticmethod
    def _join_text_parts(parts: list[str]) -> str:
        """
        Join text parts into a single string.
        """
        if not parts:
            return ""
        text = " ".join(p.strip() for p in parts if p)
        return text.strip()

    # ---------- Framework call ----------

    async def call_framework(
        self, framework_input: Dict[str, Any], params: MessageSendParams
    ) -> Dict[str, Any] | list:
        """
        Execute the n8n workflow by POSTing to the webhook URL with retries/backoff.

        Error policy:
        - 4xx: no retry, raise ValueError with a concise message (likely bad request/user/config).
        - 5xx / network timeouts / connect errors: retry with exponential backoff, then raise RuntimeError.
        """
        client = await self._get_client()
        req_id = str(uuid.uuid4())
        headers = {
            "Content-Type": "application/json",
            "X-Request-Id": req_id,
            **self.headers,
        }

        for attempt in range(self.max_retries + 1):
            start = time.monotonic()
            try:
                resp = await client.post(
                    self.webhook_url,
                    json=framework_input,
                    headers=headers,
                )
                dur_ms = int((time.monotonic() - start) * 1000)

                # Explicitly surface 4xx without retry.
                if 400 <= resp.status_code < 500:
                    text = (await resp.aread()).decode(errors="ignore")
                    raise ValueError(
                        f"n8n webhook returned {resp.status_code} "
                        f"(req_id={req_id}, {dur_ms}ms): {text[:512]}"
                    )

                # For 5xx, httpx will raise in raise_for_status().
                resp.raise_for_status()
                return resp.json()

            except HTTPStatusError as e:
                # Only 5xx should reach here (4xx is handled above).
                if attempt < self.max_retries:
                    await asyncio.sleep(self.backoff * (2**attempt))
                    continue
                raise RuntimeError(
                    f"n8n upstream 5xx after retries (req_id={req_id}): {e}"
                ) from e

            except (ConnectError, ReadTimeout) as e:
                if attempt < self.max_retries:
                    await asyncio.sleep(self.backoff * (2**attempt))
                    continue
                raise RuntimeError(
                    f"n8n upstream unavailable/timeout after retries (req_id={req_id}): {e}"
                ) from e

        # Should never reach here, but keeps type-checkers happy.
        raise RuntimeError("Unexpected error in call_framework retry loop.")

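    # Retry timing sketch (comments only), worked out from the defaults above
    # (max_retries=2, backoff=0.25). A request that keeps failing with a 5xx,
    # connect error, or read timeout is attempted 3 times in total, sleeping
    # backoff * 2**attempt between attempts:
    #
    #     attempt 0 fails -> sleep 0.25 s
    #     attempt 1 fails -> sleep 0.50 s
    #     attempt 2 fails -> RuntimeError raised
    #
    # A 4xx response never retries; it raises ValueError on the first attempt.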
    # ---------- Output mapping ----------

    async def from_framework(
        self, framework_output: Dict[str, Any] | list, params: MessageSendParams
    ) -> Message | Task:
        """
        Convert n8n webhook response to A2A Message.

        Handles both n8n response formats:
        - Single object: {"output": "..."} (first entry only)
        - Array of objects: [{"output": "..."}, ...] (all entries)

        Args:
            framework_output: JSON response from n8n (dict or list).
            params: Original A2A parameters.

        Returns:
            A2A Message with the n8n response text.
        """
        # Handle array format (all entries from last node)
        if isinstance(framework_output, list):
            if len(framework_output) == 0:
                response_text = ""
            elif len(framework_output) == 1:
                # Single item in array - extract it
                response_text = self._extract_text_from_item(framework_output[0])
            else:
                # Multiple items - combine all outputs
                texts = []
                for item in framework_output:
                    if isinstance(item, dict):
                        text = self._extract_text_from_item(item)
                        if text:
                            texts.append(text)
                response_text = "\n".join(texts) if texts else json.dumps(framework_output, indent=2)
        elif isinstance(framework_output, dict):
            # Handle single object format (first entry only)
            response_text = self._extract_text_from_item(framework_output)
        else:
            # Fallback for unexpected types
            response_text = str(framework_output)

        # Preserve context_id from the request for multi-turn conversation tracking
        context_id = None
        if hasattr(params, "message") and params.message:
            context_id = getattr(params.message, "context_id", None)

        return Message(
            role=Role.agent,
            message_id=str(uuid.uuid4()),
            context_id=context_id,
            parts=[Part(root=TextPart(text=response_text))],
        )

    def _extract_text_from_item(self, item: Dict[str, Any]) -> str:
        """
        Extract text content from a single n8n output item.

        Checks common field names in order of priority.

        Args:
            item: A dictionary from n8n workflow output.

        Returns:
            Extracted text string.
        """
        if not isinstance(item, dict):
            return str(item)
        if "output" in item:
            return str(item["output"])
        elif "result" in item:
            return str(item["result"])
        elif "message" in item:
            return str(item["message"])
        elif "text" in item:
            return str(item["text"])
        elif "response" in item:
            return str(item["response"])
        elif "content" in item:
            return str(item["content"])
        else:
            # Fallback: serialize entire item as JSON
            return json.dumps(item, indent=2)

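    # Illustrative response mappings (comments only; the n8n outputs shown are
    # hypothetical). How from_framework()/_extract_text_from_item() reduce the
    # webhook response to the Message text:
    #
    #     {"output": "Hi there"}                        -> "Hi there"
    #     [{"output": "Part 1"}, {"text": "Part 2"}]    -> "Part 1\nPart 2"
    #     {"weather": "sunny", "temp": 21}              -> JSON dump of the dict
    #
    # Field lookup order per item is: output, result, message, text, response, content.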
    # ---------- Async Task Support ----------

    def supports_async_tasks(self) -> bool:
        """Check if this adapter supports async task execution."""
        return self.async_mode

    async def get_task(self, task_id: str) -> Task | None:
        """
        Get the current status of a task by ID.

        This method is used for polling task status in async task execution mode.

        Args:
            task_id: The ID of the task to retrieve

        Returns:
            The Task object with current status, or None if not found

        Raises:
            RuntimeError: If async mode is not enabled
        """
        if not self.async_mode:
            raise RuntimeError(
                "get_task() is only available in async mode. "
                "Initialize adapter with async_mode=True"
            )

        task = await self.task_store.get(task_id)
        if task:
            logger.debug("Retrieved task %s with state=%s", task_id, task.status.state)
        else:
            logger.debug("Task %s not found", task_id)
        return task

    async def delete_task(self, task_id: str) -> bool:
        """
        Delete a task from the task store.

        This can be used to clean up completed/failed tasks to prevent memory leaks
        when using InMemoryTaskStore. Only tasks in terminal states (completed,
        failed, canceled) should be deleted.

        Args:
            task_id: The ID of the task to delete

        Returns:
            True if the task was deleted, False if not found or still running

        Raises:
            RuntimeError: If async mode is not enabled
            ValueError: If the task is still running (not in a terminal state)
        """
        if not self.async_mode:
            raise RuntimeError(
                "delete_task() is only available in async mode. "
                "Initialize adapter with async_mode=True"
            )

        task = await self.task_store.get(task_id)
        if not task:
            return False

        # Only allow deletion of tasks in terminal states
        terminal_states = {TaskState.completed, TaskState.failed, TaskState.canceled}
        if task.status.state not in terminal_states:
            raise ValueError(
                f"Cannot delete task {task_id} with state={task.status.state}. "
                f"Only tasks in terminal states ({', '.join(s.value for s in terminal_states)}) can be deleted."
            )

        await self.task_store.delete(task_id)
        logger.debug("Deleted task %s", task_id)
        return True

    async def cancel_task(self, task_id: str) -> Task | None:
        """
        Attempt to cancel a running task.

        Note: This only cancels the background asyncio task. If the HTTP request
        to n8n is already in flight, it cannot be cancelled on the n8n side.

        Args:
            task_id: The ID of the task to cancel

        Returns:
            The updated Task object with state="canceled", or None if not found
        """
        if not self.async_mode:
            raise RuntimeError(
                "cancel_task() is only available in async mode. "
                "Initialize adapter with async_mode=True"
            )

        # Mark task as cancelled to prevent race conditions
        self._cancelled_tasks.add(task_id)

        # Cancel the background task if still running and wait for it
        bg_task = self._background_tasks.get(task_id)
        if bg_task and not bg_task.done():
            bg_task.cancel()
            logger.debug("Cancelling background task for %s", task_id)
            # Wait for the task to actually finish
            try:
                await bg_task
            except asyncio.CancelledError:
                pass  # Expected when task is cancelled
            except Exception:
                pass  # Task may have failed, we're cancelling anyway

        # Update task state to canceled
        task = await self.task_store.get(task_id)
        if task:
            now = datetime.now(timezone.utc).isoformat()
            canceled_task = Task(
                id=task_id,
                context_id=task.context_id,
                status=TaskStatus(
                    state=TaskState.canceled,
                    timestamp=now,
                ),
                history=task.history,
            )
            await self.task_store.save(canceled_task)
            logger.debug("Task %s marked as canceled", task_id)
            return canceled_task

        return None

    # ---------- Lifecycle ----------

    async def close(self) -> None:
        """Close the HTTP client and cancel pending background tasks."""
        # Mark all tasks as cancelled to prevent state updates
        for task_id in self._background_tasks:
            self._cancelled_tasks.add(task_id)

        # Cancel all pending background tasks
        tasks_to_cancel = []
        for task_id, bg_task in list(self._background_tasks.items()):
            if not bg_task.done():
                bg_task.cancel()
                tasks_to_cancel.append(bg_task)
                logger.debug("Cancelling background task %s during close", task_id)

        # Wait for all cancelled tasks to complete
        if tasks_to_cancel:
            await asyncio.gather(*tasks_to_cancel, return_exceptions=True)

        self._background_tasks.clear()
        self._cancelled_tasks.clear()

        if self._client is not None:
            await self._client.aclose()
            self._client = None

    async def __aenter__(self):
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
        await self.close()

    def supports_streaming(self) -> bool:
        """This adapter does not support streaming responses."""
        return False
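Below is a minimal synchronous usage sketch, illustrative only and not part of the packaged wheel: the webhook URL and prompt text are assumptions, and the Message/MessageSendParams construction simply mirrors the a2a.types usage seen in the adapter above.

import asyncio
import uuid

from a2a.types import Message, MessageSendParams, Part, Role, TextPart
from a2a_adapter.integrations.n8n import N8nAgentAdapter


async def main() -> None:
    # Hypothetical n8n webhook URL; replace with your workflow's webhook.
    adapter = N8nAgentAdapter(webhook_url="http://localhost:5678/webhook/chat")

    params = MessageSendParams(
        message=Message(
            role=Role.user,
            message_id=str(uuid.uuid4()),
            parts=[Part(root=TextPart(text="What is the weather in Berlin?"))],
        )
    )

    # Sync mode blocks until the n8n workflow finishes and returns a Message.
    async with adapter:
        reply = await adapter.handle(params)
    print(reply.parts[0].root.text)


asyncio.run(main())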
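And a sketch of async task mode with polling and cleanup, again illustrative: the URL, prompt, and one-second poll interval are assumptions, while get_task(), delete_task(), and the terminal-state check follow the adapter's own docstrings.

import asyncio
import uuid

from a2a.types import Message, MessageSendParams, Part, Role, TaskState, TextPart
from a2a_adapter.integrations.n8n import N8nAgentAdapter


async def run_async_task() -> None:
    # Hypothetical webhook URL; async_mode=True makes handle() return a Task immediately.
    adapter = N8nAgentAdapter(
        webhook_url="http://localhost:5678/webhook/long-report",
        async_mode=True,
        async_timeout=600,
    )
    params = MessageSendParams(
        message=Message(
            role=Role.user,
            message_id=str(uuid.uuid4()),
            parts=[Part(root=TextPart(text="Generate the monthly report"))],
        )
    )

    async with adapter:
        task = await adapter.handle(params)  # Returned with state="working".

        terminal = {TaskState.completed, TaskState.failed, TaskState.canceled}
        while True:
            current = await adapter.get_task(task.id)
            if current and current.status.state in terminal:
                break
            await asyncio.sleep(1.0)  # Poll interval chosen arbitrarily.

        if current.status.message:
            print(current.status.message.parts[0].root.text)

        await adapter.delete_task(task.id)  # Free InMemoryTaskStore memory.


asyncio.run(run_async_task())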