a2a-adapter 0.1.3__tar.gz → 0.1.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/PKG-INFO +79 -26
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/README.md +78 -25
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter/__init__.py +1 -1
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter/integrations/__init__.py +5 -1
- a2a_adapter-0.1.4/a2a_adapter/integrations/callable.py +286 -0
- a2a_adapter-0.1.4/a2a_adapter/integrations/crewai.py +585 -0
- a2a_adapter-0.1.4/a2a_adapter/integrations/langchain.py +329 -0
- a2a_adapter-0.1.4/a2a_adapter/integrations/langgraph.py +756 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter/loader.py +71 -28
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter.egg-info/PKG-INFO +79 -26
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter.egg-info/SOURCES.txt +1 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/pyproject.toml +5 -1
- a2a_adapter-0.1.3/a2a_adapter/integrations/callable.py +0 -172
- a2a_adapter-0.1.3/a2a_adapter/integrations/crewai.py +0 -142
- a2a_adapter-0.1.3/a2a_adapter/integrations/langchain.py +0 -171
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/LICENSE +0 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter/adapter.py +0 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter/client.py +0 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter/integrations/n8n.py +0 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter.egg-info/dependency_links.txt +0 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter.egg-info/requires.txt +0 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/a2a_adapter.egg-info/top_level.txt +0 -0
- {a2a_adapter-0.1.3 → a2a_adapter-0.1.4}/setup.cfg +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: a2a-adapter
|
|
3
|
-
Version: 0.1.
|
|
3
|
+
Version: 0.1.4
|
|
4
4
|
Summary: A2A Protocol Adapter SDK for integrating various agent frameworks
|
|
5
5
|
Author-email: HYBRO AI <info@hybro.ai>
|
|
6
6
|
License: Apache-2.0
|
|
@@ -52,20 +52,24 @@ Dynamic: license-file
|
|
|
52
52
|
|
|
53
53
|
**🚀 Open Source A2A Protocol Adapter SDK - Make Any Agent Framework A2A-Compatible in 3 Lines**
|
|
54
54
|
|
|
55
|
-
A Python SDK that enables seamless integration of various agent frameworks (n8n, CrewAI, LangChain, etc.) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
|
|
55
|
+
A Python SDK that enables seamless integration of various agent frameworks (n8n, LangGraph, CrewAI, LangChain, etc.) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
|
|
56
56
|
|
|
57
57
|
**✨ Key Benefits:**
|
|
58
58
|
|
|
59
59
|
- 🔌 **3-line setup** - Expose any agent as A2A-compliant
|
|
60
|
-
- 🌐 **Framework agnostic** - Works with n8n, CrewAI, LangChain, and more
|
|
60
|
+
- 🌐 **Framework agnostic** - Works with n8n, LangGraph, CrewAI, LangChain, and more
|
|
61
61
|
- 🌊 **Streaming support** - Built-in streaming for real-time responses
|
|
62
62
|
- 🎯 **Production ready** - Type-safe, well-tested, and actively maintained
|
|
63
63
|
|
|
64
|
+
**▶️ Demo: n8n → A2A Agent**
|
|
65
|
+
|
|
66
|
+
[](https://youtu.be/rHWi7tLQ444)
|
|
67
|
+
|
|
64
68
|
## Features
|
|
65
69
|
|
|
66
|
-
✨ **Framework Agnostic**: Integrate n8n workflows, CrewAI crews, LangChain chains, and more
|
|
70
|
+
✨ **Framework Agnostic**: Integrate n8n workflows, LangGraph workflows, CrewAI crews, LangChain chains, and more
|
|
67
71
|
🔌 **Simple API**: 3-line setup to expose any agent as A2A-compliant
|
|
68
|
-
🌊 **Streaming Support**: Built-in streaming for LangChain and custom adapters
|
|
72
|
+
🌊 **Streaming Support**: Built-in streaming for LangGraph, LangChain, and custom adapters
|
|
69
73
|
🎯 **Type Safe**: Leverages official A2A SDK types
|
|
70
74
|
🔧 **Extensible**: Easy to add custom adapters for new frameworks
|
|
71
75
|
📦 **Minimal Dependencies**: Optional dependencies per framework
|
|
@@ -81,9 +85,10 @@ A Python SDK that enables seamless integration of various agent frameworks (n8n,
|
|
|
81
85
|
┌─────────────────┐
|
|
82
86
|
│ A2A Adapter │ (This SDK)
|
|
83
87
|
│ - N8n │
|
|
88
|
+
│ - LangGraph │
|
|
84
89
|
│ - CrewAI │
|
|
85
90
|
│ - LangChain │
|
|
86
|
-
│ -
|
|
91
|
+
│ - Callable │
|
|
87
92
|
└────────┬────────┘
|
|
88
93
|
│
|
|
89
94
|
▼
|
|
@@ -198,6 +203,17 @@ adapter = await load_a2a_agent({
|
|
|
198
203
|
})
|
|
199
204
|
```
|
|
200
205
|
|
|
206
|
+
### LangGraph Workflow → A2A Agent (with Streaming)
|
|
207
|
+
|
|
208
|
+
```python
|
|
209
|
+
adapter = await load_a2a_agent({
|
|
210
|
+
"adapter": "langgraph",
|
|
211
|
+
"graph": your_compiled_graph,
|
|
212
|
+
"input_key": "messages",
|
|
213
|
+
"output_key": "output"
|
|
214
|
+
})
|
|
215
|
+
```
|
|
216
|
+
|
|
201
217
|
### Custom Function → A2A Agent
|
|
202
218
|
|
|
203
219
|
```python
|
|
@@ -271,9 +287,35 @@ class StreamingAdapter(BaseAgentAdapter):
|
|
|
271
287
|
return True
|
|
272
288
|
```
|
|
273
289
|
|
|
274
|
-
###
|
|
290
|
+
### LangGraph Workflow as A2A Server
|
|
275
291
|
|
|
276
|
-
|
|
292
|
+
Expose a LangGraph workflow as an A2A server:
|
|
293
|
+
|
|
294
|
+
```python
|
|
295
|
+
from langgraph.graph import StateGraph, END
|
|
296
|
+
|
|
297
|
+
# Build your workflow
|
|
298
|
+
builder = StateGraph(YourState)
|
|
299
|
+
builder.add_node("process", process_node)
|
|
300
|
+
builder.set_entry_point("process")
|
|
301
|
+
builder.add_edge("process", END)
|
|
302
|
+
graph = builder.compile()
|
|
303
|
+
|
|
304
|
+
# Expose as A2A agent
|
|
305
|
+
adapter = await load_a2a_agent({
|
|
306
|
+
"adapter": "langgraph",
|
|
307
|
+
"graph": graph,
|
|
308
|
+
"input_key": "messages",
|
|
309
|
+
"output_key": "output"
|
|
310
|
+
})
|
|
311
|
+
serve_agent(agent_card=card, adapter=adapter, port=9002)
|
|
312
|
+
```
|
|
313
|
+
|
|
314
|
+
See [examples/07_langgraph_server.py](https://github.com/hybroai/a2a-adapter/blob/main/examples/07_langgraph_server.py) for complete example.
|
|
315
|
+
|
|
316
|
+
### Using A2A Agents from LangGraph
|
|
317
|
+
|
|
318
|
+
Call A2A agents from within a LangGraph workflow:
|
|
277
319
|
|
|
278
320
|
```python
|
|
279
321
|
from langgraph.graph import StateGraph
|
|
@@ -332,6 +374,19 @@ See [examples/06_langgraph_single_agent.py](https://github.com/hybroai/a2a-adapt
|
|
|
332
374
|
}
|
|
333
375
|
```
|
|
334
376
|
|
|
377
|
+
### LangGraph Adapter
|
|
378
|
+
|
|
379
|
+
```python
|
|
380
|
+
{
|
|
381
|
+
"adapter": "langgraph",
|
|
382
|
+
"graph": compiled_graph, # Required: CompiledGraph from StateGraph.compile()
|
|
383
|
+
"input_key": "messages", # Optional, default: "messages" (for chat) or "input"
|
|
384
|
+
"output_key": None, # Optional, extracts specific key from final state
|
|
385
|
+
"async_mode": False, # Optional, enables async task execution
|
|
386
|
+
"async_timeout": 300 # Optional, timeout for async mode (default: 300s)
|
|
387
|
+
}
|
|
388
|
+
```
|
|
389
|
+
|
|
335
390
|
### Callable Adapter
|
|
336
391
|
|
|
337
392
|
```python
|
|
@@ -351,7 +406,8 @@ The `examples/` directory contains complete working examples:
|
|
|
351
406
|
- **03_single_langchain_agent.py** - LangChain streaming agent
|
|
352
407
|
- **04_single_agent_client.py** - A2A client for testing
|
|
353
408
|
- **05_custom_adapter.py** - Custom adapter implementations
|
|
354
|
-
- **06_langgraph_single_agent.py** -
|
|
409
|
+
- **06_langgraph_single_agent.py** - Calling A2A agents from LangGraph
|
|
410
|
+
- **07_langgraph_server.py** - LangGraph workflow as A2A server
|
|
355
411
|
|
|
356
412
|
Run any example:
|
|
357
413
|
|
|
@@ -457,11 +513,13 @@ Check if this adapter supports streaming responses.
|
|
|
457
513
|
|
|
458
514
|
## Framework Support
|
|
459
515
|
|
|
460
|
-
| Framework | Adapter
|
|
461
|
-
| ------------- |
|
|
462
|
-
| **n8n** | `N8nAgentAdapter`
|
|
463
|
-
| **
|
|
464
|
-
| **
|
|
516
|
+
| Framework | Adapter | Non-Streaming | Streaming | Async Tasks | Status |
|
|
517
|
+
| ------------- | ------------------------ | ------------- | --------- | ----------- | --------- |
|
|
518
|
+
| **n8n** | `N8nAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
|
|
519
|
+
| **LangGraph** | `LangGraphAgentAdapter` | ✅ | ✅ | ✅ | ✅ Stable |
|
|
520
|
+
| **CrewAI** | `CrewAIAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
|
|
521
|
+
| **LangChain** | `LangChainAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
|
|
522
|
+
| **Callable** | `CallableAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
|
|
465
523
|
|
|
466
524
|
## 🤝 Contributing
|
|
467
525
|
|
|
@@ -488,12 +546,13 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
|
|
|
488
546
|
## Roadmap
|
|
489
547
|
|
|
490
548
|
- [x] Core adapter abstraction
|
|
491
|
-
- [x] N8n adapter
|
|
492
|
-
- [
|
|
493
|
-
- [
|
|
494
|
-
- [
|
|
495
|
-
- [
|
|
496
|
-
- [
|
|
549
|
+
- [x] N8n adapter (with async task support)
|
|
550
|
+
- [x] LangGraph adapter (with streaming and async tasks)
|
|
551
|
+
- [x] CrewAI adapter (with async task support)
|
|
552
|
+
- [x] LangChain adapter (with streaming)
|
|
553
|
+
- [x] Callable adapter (with streaming)
|
|
554
|
+
- [x] Comprehensive examples
|
|
555
|
+
- [x] Task support (async execution pattern)
|
|
497
556
|
- [ ] Artifact support (file uploads/downloads)
|
|
498
557
|
- [ ] AutoGen adapter
|
|
499
558
|
- [ ] Semantic Kernel adapter
|
|
@@ -528,12 +587,6 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
|
|
|
528
587
|
|
|
529
588
|
Apache-2.0 License - see [LICENSE](https://github.com/hybroai/a2a-adapter/blob/main/LICENSE) file for details.
|
|
530
589
|
|
|
531
|
-
## Credits
|
|
532
|
-
|
|
533
|
-
Built with ❤️ by [HYBRO AI](https://hybro.ai)
|
|
534
|
-
|
|
535
|
-
Powered by the [A2A Protocol](https://github.com/a2aproject/A2A)
|
|
536
|
-
|
|
537
590
|
## 💬 Community & Support
|
|
538
591
|
|
|
539
592
|
- 📚 **[Full Documentation](https://github.com/hybroai/a2a-adapter/blob/main/README.md)** - Complete API reference and guides
|
|
@@ -7,20 +7,24 @@
|
|
|
7
7
|
|
|
8
8
|
**🚀 Open Source A2A Protocol Adapter SDK - Make Any Agent Framework A2A-Compatible in 3 Lines**
|
|
9
9
|
|
|
10
|
-
A Python SDK that enables seamless integration of various agent frameworks (n8n, CrewAI, LangChain, etc.) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
|
|
10
|
+
A Python SDK that enables seamless integration of various agent frameworks (n8n, LangGraph, CrewAI, LangChain, etc.) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
|
|
11
11
|
|
|
12
12
|
**✨ Key Benefits:**
|
|
13
13
|
|
|
14
14
|
- 🔌 **3-line setup** - Expose any agent as A2A-compliant
|
|
15
|
-
- 🌐 **Framework agnostic** - Works with n8n, CrewAI, LangChain, and more
|
|
15
|
+
- 🌐 **Framework agnostic** - Works with n8n, LangGraph, CrewAI, LangChain, and more
|
|
16
16
|
- 🌊 **Streaming support** - Built-in streaming for real-time responses
|
|
17
17
|
- 🎯 **Production ready** - Type-safe, well-tested, and actively maintained
|
|
18
18
|
|
|
19
|
+
**▶️ Demo: n8n → A2A Agent**
|
|
20
|
+
|
|
21
|
+
[](https://youtu.be/rHWi7tLQ444)
|
|
22
|
+
|
|
19
23
|
## Features
|
|
20
24
|
|
|
21
|
-
✨ **Framework Agnostic**: Integrate n8n workflows, CrewAI crews, LangChain chains, and more
|
|
25
|
+
✨ **Framework Agnostic**: Integrate n8n workflows, LangGraph workflows, CrewAI crews, LangChain chains, and more
|
|
22
26
|
🔌 **Simple API**: 3-line setup to expose any agent as A2A-compliant
|
|
23
|
-
🌊 **Streaming Support**: Built-in streaming for LangChain and custom adapters
|
|
27
|
+
🌊 **Streaming Support**: Built-in streaming for LangGraph, LangChain, and custom adapters
|
|
24
28
|
🎯 **Type Safe**: Leverages official A2A SDK types
|
|
25
29
|
🔧 **Extensible**: Easy to add custom adapters for new frameworks
|
|
26
30
|
📦 **Minimal Dependencies**: Optional dependencies per framework
|
|
@@ -36,9 +40,10 @@ A Python SDK that enables seamless integration of various agent frameworks (n8n,
|
|
|
36
40
|
┌─────────────────┐
|
|
37
41
|
│ A2A Adapter │ (This SDK)
|
|
38
42
|
│ - N8n │
|
|
43
|
+
│ - LangGraph │
|
|
39
44
|
│ - CrewAI │
|
|
40
45
|
│ - LangChain │
|
|
41
|
-
│ -
|
|
46
|
+
│ - Callable │
|
|
42
47
|
└────────┬────────┘
|
|
43
48
|
│
|
|
44
49
|
▼
|
|
@@ -153,6 +158,17 @@ adapter = await load_a2a_agent({
|
|
|
153
158
|
})
|
|
154
159
|
```
|
|
155
160
|
|
|
161
|
+
### LangGraph Workflow → A2A Agent (with Streaming)
|
|
162
|
+
|
|
163
|
+
```python
|
|
164
|
+
adapter = await load_a2a_agent({
|
|
165
|
+
"adapter": "langgraph",
|
|
166
|
+
"graph": your_compiled_graph,
|
|
167
|
+
"input_key": "messages",
|
|
168
|
+
"output_key": "output"
|
|
169
|
+
})
|
|
170
|
+
```
|
|
171
|
+
|
|
156
172
|
### Custom Function → A2A Agent
|
|
157
173
|
|
|
158
174
|
```python
|
|
@@ -226,9 +242,35 @@ class StreamingAdapter(BaseAgentAdapter):
|
|
|
226
242
|
return True
|
|
227
243
|
```
|
|
228
244
|
|
|
229
|
-
###
|
|
245
|
+
### LangGraph Workflow as A2A Server
|
|
230
246
|
|
|
231
|
-
|
|
247
|
+
Expose a LangGraph workflow as an A2A server:
|
|
248
|
+
|
|
249
|
+
```python
|
|
250
|
+
from langgraph.graph import StateGraph, END
|
|
251
|
+
|
|
252
|
+
# Build your workflow
|
|
253
|
+
builder = StateGraph(YourState)
|
|
254
|
+
builder.add_node("process", process_node)
|
|
255
|
+
builder.set_entry_point("process")
|
|
256
|
+
builder.add_edge("process", END)
|
|
257
|
+
graph = builder.compile()
|
|
258
|
+
|
|
259
|
+
# Expose as A2A agent
|
|
260
|
+
adapter = await load_a2a_agent({
|
|
261
|
+
"adapter": "langgraph",
|
|
262
|
+
"graph": graph,
|
|
263
|
+
"input_key": "messages",
|
|
264
|
+
"output_key": "output"
|
|
265
|
+
})
|
|
266
|
+
serve_agent(agent_card=card, adapter=adapter, port=9002)
|
|
267
|
+
```
|
|
268
|
+
|
|
269
|
+
See [examples/07_langgraph_server.py](https://github.com/hybroai/a2a-adapter/blob/main/examples/07_langgraph_server.py) for complete example.
|
|
270
|
+
|
|
271
|
+
### Using A2A Agents from LangGraph
|
|
272
|
+
|
|
273
|
+
Call A2A agents from within a LangGraph workflow:
|
|
232
274
|
|
|
233
275
|
```python
|
|
234
276
|
from langgraph.graph import StateGraph
|
|
@@ -287,6 +329,19 @@ See [examples/06_langgraph_single_agent.py](https://github.com/hybroai/a2a-adapt
|
|
|
287
329
|
}
|
|
288
330
|
```
|
|
289
331
|
|
|
332
|
+
### LangGraph Adapter
|
|
333
|
+
|
|
334
|
+
```python
|
|
335
|
+
{
|
|
336
|
+
"adapter": "langgraph",
|
|
337
|
+
"graph": compiled_graph, # Required: CompiledGraph from StateGraph.compile()
|
|
338
|
+
"input_key": "messages", # Optional, default: "messages" (for chat) or "input"
|
|
339
|
+
"output_key": None, # Optional, extracts specific key from final state
|
|
340
|
+
"async_mode": False, # Optional, enables async task execution
|
|
341
|
+
"async_timeout": 300 # Optional, timeout for async mode (default: 300s)
|
|
342
|
+
}
|
|
343
|
+
```
|
|
344
|
+
|
|
290
345
|
### Callable Adapter
|
|
291
346
|
|
|
292
347
|
```python
|
|
@@ -306,7 +361,8 @@ The `examples/` directory contains complete working examples:
|
|
|
306
361
|
- **03_single_langchain_agent.py** - LangChain streaming agent
|
|
307
362
|
- **04_single_agent_client.py** - A2A client for testing
|
|
308
363
|
- **05_custom_adapter.py** - Custom adapter implementations
|
|
309
|
-
- **06_langgraph_single_agent.py** -
|
|
364
|
+
- **06_langgraph_single_agent.py** - Calling A2A agents from LangGraph
|
|
365
|
+
- **07_langgraph_server.py** - LangGraph workflow as A2A server
|
|
310
366
|
|
|
311
367
|
Run any example:
|
|
312
368
|
|
|
@@ -412,11 +468,13 @@ Check if this adapter supports streaming responses.
|
|
|
412
468
|
|
|
413
469
|
## Framework Support
|
|
414
470
|
|
|
415
|
-
| Framework | Adapter
|
|
416
|
-
| ------------- |
|
|
417
|
-
| **n8n** | `N8nAgentAdapter`
|
|
418
|
-
| **
|
|
419
|
-
| **
|
|
471
|
+
| Framework | Adapter | Non-Streaming | Streaming | Async Tasks | Status |
|
|
472
|
+
| ------------- | ------------------------ | ------------- | --------- | ----------- | --------- |
|
|
473
|
+
| **n8n** | `N8nAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
|
|
474
|
+
| **LangGraph** | `LangGraphAgentAdapter` | ✅ | ✅ | ✅ | ✅ Stable |
|
|
475
|
+
| **CrewAI** | `CrewAIAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
|
|
476
|
+
| **LangChain** | `LangChainAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
|
|
477
|
+
| **Callable** | `CallableAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
|
|
420
478
|
|
|
421
479
|
## 🤝 Contributing
|
|
422
480
|
|
|
@@ -443,12 +501,13 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
|
|
|
443
501
|
## Roadmap
|
|
444
502
|
|
|
445
503
|
- [x] Core adapter abstraction
|
|
446
|
-
- [x] N8n adapter
|
|
447
|
-
- [
|
|
448
|
-
- [
|
|
449
|
-
- [
|
|
450
|
-
- [
|
|
451
|
-
- [
|
|
504
|
+
- [x] N8n adapter (with async task support)
|
|
505
|
+
- [x] LangGraph adapter (with streaming and async tasks)
|
|
506
|
+
- [x] CrewAI adapter (with async task support)
|
|
507
|
+
- [x] LangChain adapter (with streaming)
|
|
508
|
+
- [x] Callable adapter (with streaming)
|
|
509
|
+
- [x] Comprehensive examples
|
|
510
|
+
- [x] Task support (async execution pattern)
|
|
452
511
|
- [ ] Artifact support (file uploads/downloads)
|
|
453
512
|
- [ ] AutoGen adapter
|
|
454
513
|
- [ ] Semantic Kernel adapter
|
|
@@ -483,12 +542,6 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
|
|
|
483
542
|
|
|
484
543
|
Apache-2.0 License - see [LICENSE](https://github.com/hybroai/a2a-adapter/blob/main/LICENSE) file for details.
|
|
485
544
|
|
|
486
|
-
## Credits
|
|
487
|
-
|
|
488
|
-
Built with ❤️ by [HYBRO AI](https://hybro.ai)
|
|
489
|
-
|
|
490
|
-
Powered by the [A2A Protocol](https://github.com/a2aproject/A2A)
|
|
491
|
-
|
|
492
545
|
## 💬 Community & Support
|
|
493
546
|
|
|
494
547
|
- 📚 **[Full Documentation](https://github.com/hybroai/a2a-adapter/blob/main/README.md)** - Complete API reference and guides
|
|
@@ -5,6 +5,7 @@ This package contains concrete adapter implementations for various agent framewo
|
|
|
5
5
|
- n8n: HTTP webhook-based workflows
|
|
6
6
|
- CrewAI: Multi-agent collaboration framework
|
|
7
7
|
- LangChain: LLM application framework with LCEL support
|
|
8
|
+
- LangGraph: Stateful workflow orchestration framework
|
|
8
9
|
- Callable: Generic Python async function adapter
|
|
9
10
|
"""
|
|
10
11
|
|
|
@@ -12,6 +13,7 @@ __all__ = [
|
|
|
12
13
|
"N8nAgentAdapter",
|
|
13
14
|
"CrewAIAgentAdapter",
|
|
14
15
|
"LangChainAgentAdapter",
|
|
16
|
+
"LangGraphAgentAdapter",
|
|
15
17
|
"CallableAgentAdapter",
|
|
16
18
|
]
|
|
17
19
|
|
|
@@ -26,8 +28,10 @@ def __getattr__(name: str):
|
|
|
26
28
|
elif name == "LangChainAgentAdapter":
|
|
27
29
|
from .langchain import LangChainAgentAdapter
|
|
28
30
|
return LangChainAgentAdapter
|
|
31
|
+
elif name == "LangGraphAgentAdapter":
|
|
32
|
+
from .langgraph import LangGraphAgentAdapter
|
|
33
|
+
return LangGraphAgentAdapter
|
|
29
34
|
elif name == "CallableAgentAdapter":
|
|
30
35
|
from .callable import CallableAgentAdapter
|
|
31
36
|
return CallableAgentAdapter
|
|
32
37
|
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
|
33
|
-
|
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Generic callable adapter for A2A Protocol.
|
|
3
|
+
|
|
4
|
+
This adapter allows any async Python function to be exposed as an A2A-compliant
|
|
5
|
+
agent, providing maximum flexibility for custom implementations.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
import uuid
|
|
11
|
+
from typing import Any, AsyncIterator, Callable, Dict
|
|
12
|
+
|
|
13
|
+
from a2a.types import (
|
|
14
|
+
Message,
|
|
15
|
+
MessageSendParams,
|
|
16
|
+
Task,
|
|
17
|
+
TextPart,
|
|
18
|
+
Role,
|
|
19
|
+
Part,
|
|
20
|
+
)
|
|
21
|
+
from ..adapter import BaseAgentAdapter
|
|
22
|
+
|
|
23
|
+
logger = logging.getLogger(__name__)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class CallableAgentAdapter(BaseAgentAdapter):
    """
    Adapter that exposes a custom async function as an A2A agent.

    Any async callable (plain coroutine function or async generator) can be
    wrapped; the adapter takes care of translating between the A2A protocol
    types and a plain ``dict`` in / ``str``-or-``dict`` out contract.

    Non-streaming functions accept a ``Dict[str, Any]`` and return a string
    or a dictionary. Streaming functions are async generators that yield
    string chunks and must be registered with ``supports_streaming=True``.

    Example (non-streaming):
        >>> async def my_agent(inputs: dict) -> str:
        ...     message = inputs["message"]
        ...     return f"Processed: {message}"
        >>>
        >>> adapter = CallableAgentAdapter(func=my_agent)

    Example (streaming):
        >>> async def my_streaming_agent(inputs: dict):
        ...     message = inputs["message"]
        ...     for word in message.split():
        ...         yield word + " "
        >>>
        >>> adapter = CallableAgentAdapter(
        ...     func=my_streaming_agent,
        ...     supports_streaming=True
        ... )
    """

    def __init__(
        self,
        func: Callable,
        supports_streaming: bool = False,
    ):
        """
        Initialize the callable adapter.

        Args:
            func: The async callable implementing the agent logic.
                Non-streaming: accepts Dict[str, Any], returns str or Dict.
                Streaming: an async generator yielding str chunks.
            supports_streaming: Whether ``func`` streams (default: False)
        """
        self.func = func
        self._supports_streaming = supports_streaming

    # ---------- Input mapping ----------

    async def to_framework(self, params: MessageSendParams) -> Dict[str, Any]:
        """
        Translate A2A message parameters into the dict handed to the callable.

        Pulls the user's text out of ``params.message.parts`` (current A2A
        format) or, failing that, the deprecated ``params.messages`` array,
        and bundles it with the context id and the raw params.

        Args:
            params: A2A message parameters

        Returns:
            Dictionary with input data for the callable
        """
        extracted = ""
        message = getattr(params, "message", None)

        if message:
            # Current format: Message with a list of Part objects.
            pieces = []
            for part in getattr(message, "parts", None) or []:
                root = getattr(part, "root", None)
                if root is not None and hasattr(root, "text"):
                    # Part(root=TextPart(...)) wrapper shape
                    pieces.append(root.text)
                elif hasattr(part, "text"):
                    # Bare TextPart shape
                    pieces.append(part.text)
            extracted = self._join_text_parts(pieces)
        elif getattr(params, "messages", None):
            # Deprecated shape: take the last entry of a messages array.
            content = getattr(params.messages[-1], "content", "")
            if isinstance(content, str):
                extracted = content.strip()
            elif isinstance(content, list):
                pieces = [
                    txt.strip()
                    for txt in (getattr(item, "text", None) for item in content)
                    if txt and isinstance(txt, str) and txt.strip()
                ]
                extracted = self._join_text_parts(pieces)

        return {
            "message": extracted,
            "context_id": self._extract_context_id(params),
            "params": params,  # Full params for advanced use cases
        }

    @staticmethod
    def _join_text_parts(parts: list[str]) -> str:
        """Collapse a list of text fragments into one space-separated string."""
        if not parts:
            return ""
        return " ".join(p.strip() for p in parts if p).strip()

    def _extract_context_id(self, params: MessageSendParams) -> str | None:
        """Return the message's context_id, or None when absent."""
        message = getattr(params, "message", None)
        if message:
            return getattr(message, "context_id", None)
        return None

    # ---------- Framework call ----------

    async def call_framework(
        self, framework_input: Dict[str, Any], params: MessageSendParams
    ) -> Any:
        """
        Execute the wrapped callable (non-streaming path).

        Args:
            framework_input: Input dictionary for the function
            params: Original A2A parameters (for context)

        Returns:
            Function execution output

        Raises:
            Exception: If function execution fails
        """
        logger.debug("Invoking callable with input keys: %s", list(framework_input.keys()))
        output = await self.func(framework_input)
        logger.debug("Callable returned: %s", type(output).__name__)
        return output

    # ---------- Output mapping ----------

    async def from_framework(
        self, framework_output: Any, params: MessageSendParams
    ) -> Message | Task:
        """
        Wrap the callable's result in an A2A agent Message.

        Args:
            framework_output: Output from the callable
            params: Original A2A parameters

        Returns:
            A2A Message carrying the function's response text
        """
        return Message(
            role=Role.agent,
            message_id=str(uuid.uuid4()),
            context_id=self._extract_context_id(params),
            parts=[Part(root=TextPart(text=self._extract_output_text(framework_output)))],
        )

    def _extract_output_text(self, framework_output: Any) -> str:
        """
        Reduce an arbitrary callable result to a plain string.

        Dict results are scanned for a conventional answer key; anything
        unrecognized is JSON-serialized, and non-dict results are stringified.

        Args:
            framework_output: Output from the callable

        Returns:
            Extracted text string
        """
        if isinstance(framework_output, dict):
            # Probe conventional answer keys in priority order.
            for key in ("response", "output", "result", "answer", "text", "message"):
                if key in framework_output:
                    return str(framework_output[key])
            # No known key — fall back to a JSON dump of the whole dict.
            return json.dumps(framework_output, indent=2)

        return str(framework_output)

    # ---------- Streaming support ----------

    async def handle_stream(
        self, params: MessageSendParams
    ) -> AsyncIterator[Dict[str, Any]]:
        """
        Handle a streaming A2A message request.

        The wrapped function must be an async generator for streaming to work.

        Args:
            params: A2A message parameters

        Yields:
            Server-Sent Events compatible dictionaries with streaming chunks

        Raises:
            NotImplementedError: If streaming is not enabled for this adapter
        """
        if not self._supports_streaming:
            raise NotImplementedError(
                "CallableAgentAdapter: streaming not enabled for this function. "
                "Initialize with supports_streaming=True and provide an async generator."
            )

        framework_input = await self.to_framework(params)
        context_id = self._extract_context_id(params)
        message_id = str(uuid.uuid4())

        logger.debug("Starting streaming call")

        # Buffer chunks; joined at the end for the completion event.
        collected: list[str] = []

        async for chunk in self.func(framework_input):
            piece = chunk if isinstance(chunk, str) else str(chunk)
            if not piece:
                continue
            collected.append(piece)
            # Per-chunk SSE content event.
            yield {
                "event": "message",
                "data": json.dumps({
                    "type": "content",
                    "content": piece,
                }),
            }

        # Final Message carrying the full accumulated response.
        final_message = Message(
            role=Role.agent,
            message_id=message_id,
            context_id=context_id,
            parts=[Part(root=TextPart(text="".join(collected)))],
        )

        # Terminal SSE event signalling completion.
        yield {
            "event": "done",
            "data": json.dumps({
                "status": "completed",
                "message": final_message.model_dump() if hasattr(final_message, "model_dump") else str(final_message),
            }),
        }

        logger.debug("Streaming call completed")

    def supports_streaming(self) -> bool:
        """
        Check if this adapter supports streaming.

        Returns:
            True if streaming is enabled, False otherwise
        """
        return self._supports_streaming