a2a-adapter 0.1.3__tar.gz → 0.1.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24) hide show
  1. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/PKG-INFO +108 -28
  2. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/README.md +107 -27
  3. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter/__init__.py +1 -1
  4. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter/client.py +57 -13
  5. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter/integrations/__init__.py +10 -1
  6. a2a_adapter-0.1.5/a2a_adapter/integrations/callable.py +286 -0
  7. a2a_adapter-0.1.5/a2a_adapter/integrations/crewai.py +585 -0
  8. a2a_adapter-0.1.5/a2a_adapter/integrations/langchain.py +329 -0
  9. a2a_adapter-0.1.5/a2a_adapter/integrations/langgraph.py +756 -0
  10. a2a_adapter-0.1.5/a2a_adapter/integrations/openclaw.py +1297 -0
  11. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter/loader.py +101 -28
  12. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter.egg-info/PKG-INFO +108 -28
  13. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter.egg-info/SOURCES.txt +3 -1
  14. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/pyproject.toml +5 -1
  15. a2a_adapter-0.1.3/a2a_adapter/integrations/callable.py +0 -172
  16. a2a_adapter-0.1.3/a2a_adapter/integrations/crewai.py +0 -142
  17. a2a_adapter-0.1.3/a2a_adapter/integrations/langchain.py +0 -171
  18. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/LICENSE +0 -0
  19. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter/adapter.py +0 -0
  20. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter/integrations/n8n.py +0 -0
  21. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter.egg-info/dependency_links.txt +0 -0
  22. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter.egg-info/requires.txt +0 -0
  23. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/a2a_adapter.egg-info/top_level.txt +0 -0
  24. {a2a_adapter-0.1.3 → a2a_adapter-0.1.5}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: a2a-adapter
3
- Version: 0.1.3
3
+ Version: 0.1.5
4
4
  Summary: A2A Protocol Adapter SDK for integrating various agent frameworks
5
5
  Author-email: HYBRO AI <info@hybro.ai>
6
6
  License: Apache-2.0
@@ -52,20 +52,24 @@ Dynamic: license-file
52
52
 
53
53
  **🚀 Open Source A2A Protocol Adapter SDK - Make Any Agent Framework A2A-Compatible in 3 Lines**
54
54
 
55
- A Python SDK that enables seamless integration of various agent frameworks (n8n, CrewAI, LangChain, etc.) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
55
+ A Python SDK that enables seamless integration of various agent frameworks (n8n, LangGraph, CrewAI, LangChain, etc.) and personal AI agents (OpenClaw) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
56
56
 
57
57
  **✨ Key Benefits:**
58
58
 
59
59
  - 🔌 **3-line setup** - Expose any agent as A2A-compliant
60
- - 🌐 **Framework agnostic** - Works with n8n, CrewAI, LangChain, and more
60
+ - 🌐 **Framework agnostic** - Works with n8n, LangGraph, CrewAI, LangChain, and more
61
61
  - 🌊 **Streaming support** - Built-in streaming for real-time responses
62
62
  - 🎯 **Production ready** - Type-safe, well-tested, and actively maintained
63
63
 
64
+ **▶️ Demo: n8n → A2A Agent**
65
+
66
+ [![A2A Adapter Demo](https://img.youtube.com/vi/rHWi7tLQ444/0.jpg)](https://youtu.be/rHWi7tLQ444)
67
+
64
68
  ## Features
65
69
 
66
- ✨ **Framework Agnostic**: Integrate n8n workflows, CrewAI crews, LangChain chains, and more
70
+ ✨ **Framework Agnostic**: Integrate n8n workflows, LangGraph workflows, CrewAI crews, LangChain chains, OpenClaw personal agents, and more
67
71
  🔌 **Simple API**: 3-line setup to expose any agent as A2A-compliant
68
- 🌊 **Streaming Support**: Built-in streaming for LangChain and custom adapters
72
+ 🌊 **Streaming Support**: Built-in streaming for LangGraph, LangChain, and custom adapters
69
73
  🎯 **Type Safe**: Leverages official A2A SDK types
70
74
  🔧 **Extensible**: Easy to add custom adapters for new frameworks
71
75
  📦 **Minimal Dependencies**: Optional dependencies per framework
@@ -81,14 +85,16 @@ A Python SDK that enables seamless integration of various agent frameworks (n8n,
81
85
  ┌─────────────────┐
82
86
  │ A2A Adapter │ (This SDK)
83
87
  │ - N8n │
88
+ │ - LangGraph │
84
89
  │ - CrewAI │
85
90
  │ - LangChain │
86
- │ - Custom │
91
+ │ - OpenClaw │
92
+ │ - Callable │
87
93
  └────────┬────────┘
88
94
 
89
95
 
90
96
  ┌─────────────────┐
91
- │ Your Agent │ (n8n workflow / CrewAI crew / Chain)
97
+ │ Your Agent │ (n8n workflow / CrewAI crew / Chain / OpenClaw)
92
98
  └─────────────────┘
93
99
  ```
94
100
 
@@ -198,6 +204,17 @@ adapter = await load_a2a_agent({
198
204
  })
199
205
  ```
200
206
 
207
+ ### LangGraph Workflow → A2A Agent (with Streaming)
208
+
209
+ ```python
210
+ adapter = await load_a2a_agent({
211
+ "adapter": "langgraph",
212
+ "graph": your_compiled_graph,
213
+ "input_key": "messages",
214
+ "output_key": "output"
215
+ })
216
+ ```
217
+
201
218
  ### Custom Function → A2A Agent
202
219
 
203
220
  ```python
@@ -210,6 +227,16 @@ adapter = await load_a2a_agent({
210
227
  })
211
228
  ```
212
229
 
230
+ ### OpenClaw Agent → A2A Agent
231
+
232
+ ```python
233
+ adapter = await load_a2a_agent({
234
+ "adapter": "openclaw",
235
+ "thinking": "low",
236
+ "async_mode": True
237
+ })
238
+ ```
239
+
213
240
  📚 **[View all examples →](https://github.com/hybroai/a2a-adapter/tree/main/examples/)**
214
241
 
215
242
  ## Advanced Usage
@@ -271,9 +298,35 @@ class StreamingAdapter(BaseAgentAdapter):
271
298
  return True
272
299
  ```
273
300
 
274
- ### Using with LangGraph
301
+ ### LangGraph Workflow as A2A Server
275
302
 
276
- Integrate A2A agents into LangGraph workflows:
303
+ Expose a LangGraph workflow as an A2A server:
304
+
305
+ ```python
306
+ from langgraph.graph import StateGraph, END
307
+
308
+ # Build your workflow
309
+ builder = StateGraph(YourState)
310
+ builder.add_node("process", process_node)
311
+ builder.set_entry_point("process")
312
+ builder.add_edge("process", END)
313
+ graph = builder.compile()
314
+
315
+ # Expose as A2A agent
316
+ adapter = await load_a2a_agent({
317
+ "adapter": "langgraph",
318
+ "graph": graph,
319
+ "input_key": "messages",
320
+ "output_key": "output"
321
+ })
322
+ serve_agent(agent_card=card, adapter=adapter, port=9002)
323
+ ```
324
+
325
+ See [examples/07_langgraph_server.py](https://github.com/hybroai/a2a-adapter/blob/main/examples/07_langgraph_server.py) for a complete example.
326
+
327
+ ### Using A2A Agents from LangGraph
328
+
329
+ Call A2A agents from within a LangGraph workflow:
277
330
 
278
331
  ```python
279
332
  from langgraph.graph import StateGraph
@@ -332,6 +385,19 @@ See [examples/06_langgraph_single_agent.py](https://github.com/hybroai/a2a-adapt
332
385
  }
333
386
  ```
334
387
 
388
+ ### LangGraph Adapter
389
+
390
+ ```python
391
+ {
392
+ "adapter": "langgraph",
393
+ "graph": compiled_graph, # Required: CompiledGraph from StateGraph.compile()
394
+ "input_key": "messages", # Optional, default: "messages" (for chat) or "input"
395
+ "output_key": None, # Optional, extracts specific key from final state
396
+ "async_mode": False, # Optional, enables async task execution
397
+ "async_timeout": 300 # Optional, timeout for async mode (default: 300s)
398
+ }
399
+ ```
400
+
335
401
  ### Callable Adapter
336
402
 
337
403
  ```python
@@ -342,6 +408,19 @@ See [examples/06_langgraph_single_agent.py](https://github.com/hybroai/a2a-adapt
342
408
  }
343
409
  ```
344
410
 
411
+ ### OpenClaw Adapter
412
+
413
+ ```python
414
+ {
415
+ "adapter": "openclaw",
416
+ "session_id": "my-session", # Optional, auto-generated if not provided
417
+ "agent_id": None, # Optional, use default agent
418
+ "thinking": "low", # Optional: off|minimal|low|medium|high|xhigh
419
+ "timeout": 600, # Optional, command timeout in seconds
420
+ "async_mode": True # Optional, return Task immediately (default: True)
421
+ }
422
+ ```
423
+
345
424
  ## Examples
346
425
 
347
426
  The `examples/` directory contains complete working examples:
@@ -351,7 +430,9 @@ The `examples/` directory contains complete working examples:
351
430
  - **03_single_langchain_agent.py** - LangChain streaming agent
352
431
  - **04_single_agent_client.py** - A2A client for testing
353
432
  - **05_custom_adapter.py** - Custom adapter implementations
354
- - **06_langgraph_single_agent.py** - LangGraph + A2A integration
433
+ - **06_langgraph_single_agent.py** - Calling A2A agents from LangGraph
434
+ - **07_langgraph_server.py** - LangGraph workflow as A2A server
435
+ - **08_openclaw_agent.py** - OpenClaw personal AI agent
355
436
 
356
437
  Run any example:
357
438
 
@@ -455,13 +536,16 @@ Convert framework output to A2A Message or Task.
455
536
 
456
537
  Check if this adapter supports streaming responses.
457
538
 
458
- ## Framework Support
539
+ ## Adapter Support
459
540
 
460
- | Framework | Adapter | Non-Streaming | Streaming | Status |
461
- | ------------- | ----------------------- | ------------- | ---------- | ---------- |
462
- | **n8n** | `N8nAgentAdapter` | ✅ | 🔜 Planned | ✅ Stable |
463
- | **CrewAI** | `CrewAIAgentAdapter` | 🔜 Planned | 🔜 Planned | 🔜 Planned |
464
- | **LangChain** | `LangChainAgentAdapter` | 🔜 Planned | 🔜 Planned | 🔜 Planned |
541
+ | Agent/Framework | Adapter | Non-Streaming | Streaming | Async Tasks | Status |
542
+ | --------------- | ------------------------ | ------------- | --------- | ----------- | --------- |
543
+ | **n8n** | `N8nAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
544
+ | **LangGraph** | `LangGraphAgentAdapter` | ✅ | ✅ | ✅ | ✅ Stable |
545
+ | **CrewAI** | `CrewAIAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
546
+ | **LangChain** | `LangChainAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
547
+ | **OpenClaw** | `OpenClawAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
548
+ | **Callable** | `CallableAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
465
549
 
466
550
  ## 🤝 Contributing
467
551
 
@@ -488,12 +572,14 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
488
572
  ## Roadmap
489
573
 
490
574
  - [x] Core adapter abstraction
491
- - [x] N8n adapter
492
- - [ ] CrewAI adapter
493
- - [ ] LangChain adapter with streaming
494
- - [ ] Callable adapter
495
- - [ ] Comprehensive examples
496
- - [ ] Task support (async execution pattern)
575
+ - [x] N8n adapter (with async task support)
576
+ - [x] LangGraph adapter (with streaming and async tasks)
577
+ - [x] CrewAI adapter (with async task support)
578
+ - [x] LangChain adapter (with streaming)
579
+ - [x] Callable adapter (with streaming)
580
+ - [x] OpenClaw adapter (with async tasks)
581
+ - [x] Comprehensive examples
582
+ - [x] Task support (async execution pattern)
497
583
  - [ ] Artifact support (file uploads/downloads)
498
584
  - [ ] AutoGen adapter
499
585
  - [ ] Semantic Kernel adapter
@@ -528,12 +614,6 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
528
614
 
529
615
  Apache-2.0 License - see [LICENSE](https://github.com/hybroai/a2a-adapter/blob/main/LICENSE) file for details.
530
616
 
531
- ## Credits
532
-
533
- Built with ❤️ by [HYBRO AI](https://hybro.ai)
534
-
535
- Powered by the [A2A Protocol](https://github.com/a2aproject/A2A)
536
-
537
617
  ## 💬 Community & Support
538
618
 
539
619
  - 📚 **[Full Documentation](https://github.com/hybroai/a2a-adapter/blob/main/README.md)** - Complete API reference and guides
@@ -7,20 +7,24 @@
7
7
 
8
8
  **🚀 Open Source A2A Protocol Adapter SDK - Make Any Agent Framework A2A-Compatible in 3 Lines**
9
9
 
10
- A Python SDK that enables seamless integration of various agent frameworks (n8n, CrewAI, LangChain, etc.) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
10
+ A Python SDK that enables seamless integration of various agent frameworks (n8n, LangGraph, CrewAI, LangChain, etc.) and personal AI agents (OpenClaw) with the [A2A (Agent-to-Agent) Protocol](https://github.com/a2aproject/A2A). Build interoperable AI agent systems that can communicate across different platforms and frameworks.
11
11
 
12
12
  **✨ Key Benefits:**
13
13
 
14
14
  - 🔌 **3-line setup** - Expose any agent as A2A-compliant
15
- - 🌐 **Framework agnostic** - Works with n8n, CrewAI, LangChain, and more
15
+ - 🌐 **Framework agnostic** - Works with n8n, LangGraph, CrewAI, LangChain, and more
16
16
  - 🌊 **Streaming support** - Built-in streaming for real-time responses
17
17
  - 🎯 **Production ready** - Type-safe, well-tested, and actively maintained
18
18
 
19
+ **▶️ Demo: n8n → A2A Agent**
20
+
21
+ [![A2A Adapter Demo](https://img.youtube.com/vi/rHWi7tLQ444/0.jpg)](https://youtu.be/rHWi7tLQ444)
22
+
19
23
  ## Features
20
24
 
21
- ✨ **Framework Agnostic**: Integrate n8n workflows, CrewAI crews, LangChain chains, and more
25
+ ✨ **Framework Agnostic**: Integrate n8n workflows, LangGraph workflows, CrewAI crews, LangChain chains, OpenClaw personal agents, and more
22
26
  🔌 **Simple API**: 3-line setup to expose any agent as A2A-compliant
23
- 🌊 **Streaming Support**: Built-in streaming for LangChain and custom adapters
27
+ 🌊 **Streaming Support**: Built-in streaming for LangGraph, LangChain, and custom adapters
24
28
  🎯 **Type Safe**: Leverages official A2A SDK types
25
29
  🔧 **Extensible**: Easy to add custom adapters for new frameworks
26
30
  📦 **Minimal Dependencies**: Optional dependencies per framework
@@ -36,14 +40,16 @@ A Python SDK that enables seamless integration of various agent frameworks (n8n,
36
40
  ┌─────────────────┐
37
41
  │ A2A Adapter │ (This SDK)
38
42
  │ - N8n │
43
+ │ - LangGraph │
39
44
  │ - CrewAI │
40
45
  │ - LangChain │
41
- │ - Custom │
46
+ │ - OpenClaw │
47
+ │ - Callable │
42
48
  └────────┬────────┘
43
49
 
44
50
 
45
51
  ┌─────────────────┐
46
- │ Your Agent │ (n8n workflow / CrewAI crew / Chain)
52
+ │ Your Agent │ (n8n workflow / CrewAI crew / Chain / OpenClaw)
47
53
  └─────────────────┘
48
54
  ```
49
55
 
@@ -153,6 +159,17 @@ adapter = await load_a2a_agent({
153
159
  })
154
160
  ```
155
161
 
162
+ ### LangGraph Workflow → A2A Agent (with Streaming)
163
+
164
+ ```python
165
+ adapter = await load_a2a_agent({
166
+ "adapter": "langgraph",
167
+ "graph": your_compiled_graph,
168
+ "input_key": "messages",
169
+ "output_key": "output"
170
+ })
171
+ ```
172
+
156
173
  ### Custom Function → A2A Agent
157
174
 
158
175
  ```python
@@ -165,6 +182,16 @@ adapter = await load_a2a_agent({
165
182
  })
166
183
  ```
167
184
 
185
+ ### OpenClaw Agent → A2A Agent
186
+
187
+ ```python
188
+ adapter = await load_a2a_agent({
189
+ "adapter": "openclaw",
190
+ "thinking": "low",
191
+ "async_mode": True
192
+ })
193
+ ```
194
+
168
195
  📚 **[View all examples →](https://github.com/hybroai/a2a-adapter/tree/main/examples/)**
169
196
 
170
197
  ## Advanced Usage
@@ -226,9 +253,35 @@ class StreamingAdapter(BaseAgentAdapter):
226
253
  return True
227
254
  ```
228
255
 
229
- ### Using with LangGraph
256
+ ### LangGraph Workflow as A2A Server
230
257
 
231
- Integrate A2A agents into LangGraph workflows:
258
+ Expose a LangGraph workflow as an A2A server:
259
+
260
+ ```python
261
+ from langgraph.graph import StateGraph, END
262
+
263
+ # Build your workflow
264
+ builder = StateGraph(YourState)
265
+ builder.add_node("process", process_node)
266
+ builder.set_entry_point("process")
267
+ builder.add_edge("process", END)
268
+ graph = builder.compile()
269
+
270
+ # Expose as A2A agent
271
+ adapter = await load_a2a_agent({
272
+ "adapter": "langgraph",
273
+ "graph": graph,
274
+ "input_key": "messages",
275
+ "output_key": "output"
276
+ })
277
+ serve_agent(agent_card=card, adapter=adapter, port=9002)
278
+ ```
279
+
280
+ See [examples/07_langgraph_server.py](https://github.com/hybroai/a2a-adapter/blob/main/examples/07_langgraph_server.py) for a complete example.
281
+
282
+ ### Using A2A Agents from LangGraph
283
+
284
+ Call A2A agents from within a LangGraph workflow:
232
285
 
233
286
  ```python
234
287
  from langgraph.graph import StateGraph
@@ -287,6 +340,19 @@ See [examples/06_langgraph_single_agent.py](https://github.com/hybroai/a2a-adapt
287
340
  }
288
341
  ```
289
342
 
343
+ ### LangGraph Adapter
344
+
345
+ ```python
346
+ {
347
+ "adapter": "langgraph",
348
+ "graph": compiled_graph, # Required: CompiledGraph from StateGraph.compile()
349
+ "input_key": "messages", # Optional, default: "messages" (for chat) or "input"
350
+ "output_key": None, # Optional, extracts specific key from final state
351
+ "async_mode": False, # Optional, enables async task execution
352
+ "async_timeout": 300 # Optional, timeout for async mode (default: 300s)
353
+ }
354
+ ```
355
+
290
356
  ### Callable Adapter
291
357
 
292
358
  ```python
@@ -297,6 +363,19 @@ See [examples/06_langgraph_single_agent.py](https://github.com/hybroai/a2a-adapt
297
363
  }
298
364
  ```
299
365
 
366
+ ### OpenClaw Adapter
367
+
368
+ ```python
369
+ {
370
+ "adapter": "openclaw",
371
+ "session_id": "my-session", # Optional, auto-generated if not provided
372
+ "agent_id": None, # Optional, use default agent
373
+ "thinking": "low", # Optional: off|minimal|low|medium|high|xhigh
374
+ "timeout": 600, # Optional, command timeout in seconds
375
+ "async_mode": True # Optional, return Task immediately (default: True)
376
+ }
377
+ ```
378
+
300
379
  ## Examples
301
380
 
302
381
  The `examples/` directory contains complete working examples:
@@ -306,7 +385,9 @@ The `examples/` directory contains complete working examples:
306
385
  - **03_single_langchain_agent.py** - LangChain streaming agent
307
386
  - **04_single_agent_client.py** - A2A client for testing
308
387
  - **05_custom_adapter.py** - Custom adapter implementations
309
- - **06_langgraph_single_agent.py** - LangGraph + A2A integration
388
+ - **06_langgraph_single_agent.py** - Calling A2A agents from LangGraph
389
+ - **07_langgraph_server.py** - LangGraph workflow as A2A server
390
+ - **08_openclaw_agent.py** - OpenClaw personal AI agent
310
391
 
311
392
  Run any example:
312
393
 
@@ -410,13 +491,16 @@ Convert framework output to A2A Message or Task.
410
491
 
411
492
  Check if this adapter supports streaming responses.
412
493
 
413
- ## Framework Support
494
+ ## Adapter Support
414
495
 
415
- | Framework | Adapter | Non-Streaming | Streaming | Status |
416
- | ------------- | ----------------------- | ------------- | ---------- | ---------- |
417
- | **n8n** | `N8nAgentAdapter` | ✅ | 🔜 Planned | ✅ Stable |
418
- | **CrewAI** | `CrewAIAgentAdapter` | 🔜 Planned | 🔜 Planned | 🔜 Planned |
419
- | **LangChain** | `LangChainAgentAdapter` | 🔜 Planned | 🔜 Planned | 🔜 Planned |
496
+ | Agent/Framework | Adapter | Non-Streaming | Streaming | Async Tasks | Status |
497
+ | --------------- | ------------------------ | ------------- | --------- | ----------- | --------- |
498
+ | **n8n** | `N8nAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
499
+ | **LangGraph** | `LangGraphAgentAdapter` | ✅ | ✅ | ✅ | ✅ Stable |
500
+ | **CrewAI** | `CrewAIAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
501
+ | **LangChain** | `LangChainAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
502
+ | **OpenClaw** | `OpenClawAgentAdapter` | ✅ | ❌ | ✅ | ✅ Stable |
503
+ | **Callable** | `CallableAgentAdapter` | ✅ | ✅ | ❌ | ✅ Stable |
420
504
 
421
505
  ## 🤝 Contributing
422
506
 
@@ -443,12 +527,14 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
443
527
  ## Roadmap
444
528
 
445
529
  - [x] Core adapter abstraction
446
- - [x] N8n adapter
447
- - [ ] CrewAI adapter
448
- - [ ] LangChain adapter with streaming
449
- - [ ] Callable adapter
450
- - [ ] Comprehensive examples
451
- - [ ] Task support (async execution pattern)
530
+ - [x] N8n adapter (with async task support)
531
+ - [x] LangGraph adapter (with streaming and async tasks)
532
+ - [x] CrewAI adapter (with async task support)
533
+ - [x] LangChain adapter (with streaming)
534
+ - [x] Callable adapter (with streaming)
535
+ - [x] OpenClaw adapter (with async tasks)
536
+ - [x] Comprehensive examples
537
+ - [x] Task support (async execution pattern)
452
538
  - [ ] Artifact support (file uploads/downloads)
453
539
  - [ ] AutoGen adapter
454
540
  - [ ] Semantic Kernel adapter
@@ -483,12 +569,6 @@ We welcome contributions from the community! Whether you're fixing bugs, adding
483
569
 
484
570
  Apache-2.0 License - see [LICENSE](https://github.com/hybroai/a2a-adapter/blob/main/LICENSE) file for details.
485
571
 
486
- ## Credits
487
-
488
- Built with ❤️ by [HYBRO AI](https://hybro.ai)
489
-
490
- Powered by the [A2A Protocol](https://github.com/a2aproject/A2A)
491
-
492
572
  ## 💬 Community & Support
493
573
 
494
574
  - 📚 **[Full Documentation](https://github.com/hybroai/a2a-adapter/blob/main/README.md)** - Complete API reference and guides
@@ -26,7 +26,7 @@ Example:
26
26
  >>> asyncio.run(main())
27
27
  """
28
28
 
29
- __version__ = "0.1.3"
29
+ __version__ = "0.1.5"
30
30
 
31
31
  from .adapter import BaseAgentAdapter
32
32
  from .client import build_agent_app, serve_agent
@@ -27,6 +27,7 @@ from a2a.types import (
27
27
  ListTaskPushNotificationConfigResponse,
28
28
  Message,
29
29
  MessageSendParams,
30
+ PushNotificationConfig,
30
31
  SetTaskPushNotificationConfigRequest,
31
32
  SetTaskPushNotificationConfigResponse,
32
33
  Task,
@@ -43,6 +44,11 @@ class AdapterRequestHandler(RequestHandler):
43
44
 
44
45
  This class bridges the gap between our adapter abstraction and the
45
46
  official A2A SDK's RequestHandler protocol.
47
+
48
+ Supports:
49
+ - Basic message send (sync and async)
50
+ - Task get/cancel for async adapters
51
+ - Push notification configuration for adapters that support it
46
52
  """
47
53
 
48
54
  def __init__(self, adapter: BaseAgentAdapter):
@@ -89,23 +95,37 @@ class AdapterRequestHandler(RequestHandler):
89
95
  async for event in self.adapter.handle_stream(params):
90
96
  yield event
91
97
 
92
- # Task-related methods (not supported by default)
98
+ # Task-related methods
93
99
 
94
100
  async def on_get_task(
95
101
  self,
96
102
  params: GetTaskRequest,
97
103
  context: ServerCallContext
98
104
  ) -> GetTaskResponse:
99
- """Get task status - not supported."""
100
- raise ServerError(error=UnsupportedOperationError())
105
+ """Get task status."""
106
+ if not self.adapter.supports_async_tasks():
107
+ raise ServerError(error=UnsupportedOperationError())
108
+
109
+ task = await self.adapter.get_task(params.id)
110
+ if task is None:
111
+ raise ServerError(error=UnsupportedOperationError(message=f"Task {params.id} not found"))
112
+
113
+ return GetTaskResponse(result=task)
101
114
 
102
115
  async def on_cancel_task(
103
116
  self,
104
117
  params: CancelTaskRequest,
105
118
  context: ServerCallContext
106
119
  ) -> CancelTaskResponse:
107
- """Cancel task - not supported."""
108
- raise ServerError(error=UnsupportedOperationError())
120
+ """Cancel task."""
121
+ if not self.adapter.supports_async_tasks():
122
+ raise ServerError(error=UnsupportedOperationError())
123
+
124
+ task = await self.adapter.cancel_task(params.id)
125
+ if task is None:
126
+ raise ServerError(error=UnsupportedOperationError(message=f"Task {params.id} not found"))
127
+
128
+ return CancelTaskResponse(result=task)
109
129
 
110
130
  async def on_resubscribe_to_task(
111
131
  self,
@@ -116,30 +136,47 @@ class AdapterRequestHandler(RequestHandler):
116
136
  raise ServerError(error=UnsupportedOperationError())
117
137
  yield # Make this an async generator
118
138
 
119
- # Push notification methods (not supported by default)
139
+ # Push notification methods
120
140
 
121
141
  async def on_set_task_push_notification_config(
122
142
  self,
123
143
  params: SetTaskPushNotificationConfigRequest,
124
144
  context: ServerCallContext
125
145
  ) -> SetTaskPushNotificationConfigResponse:
126
- """Set push notification config - not supported."""
127
- raise ServerError(error=UnsupportedOperationError())
146
+ """Set push notification config."""
147
+ if not hasattr(self.adapter, 'supports_push_notifications') or not self.adapter.supports_push_notifications():
148
+ raise ServerError(error=UnsupportedOperationError())
149
+
150
+ success = await self.adapter.set_push_notification_config(
151
+ params.taskId,
152
+ params.pushNotificationConfig
153
+ )
154
+ if not success:
155
+ raise ServerError(error=UnsupportedOperationError(message=f"Task {params.taskId} not found"))
156
+
157
+ return SetTaskPushNotificationConfigResponse(result=params.pushNotificationConfig)
128
158
 
129
159
  async def on_get_task_push_notification_config(
130
160
  self,
131
161
  params: GetTaskPushNotificationConfigParams,
132
162
  context: ServerCallContext
133
163
  ) -> GetTaskPushNotificationConfigResponse:
134
- """Get push notification config - not supported."""
135
- raise ServerError(error=UnsupportedOperationError())
164
+ """Get push notification config."""
165
+ if not hasattr(self.adapter, 'supports_push_notifications') or not self.adapter.supports_push_notifications():
166
+ raise ServerError(error=UnsupportedOperationError())
167
+
168
+ config = await self.adapter.get_push_notification_config(params.taskId)
169
+ if config is None:
170
+ raise ServerError(error=UnsupportedOperationError(message=f"No push config for task {params.taskId}"))
171
+
172
+ return GetTaskPushNotificationConfigResponse(result=config)
136
173
 
137
174
  async def on_list_task_push_notification_config(
138
175
  self,
139
176
  params: ListTaskPushNotificationConfigParams,
140
177
  context: ServerCallContext
141
178
  ) -> ListTaskPushNotificationConfigResponse:
142
- """List push notification configs - not supported."""
179
+ """List push notification configs - not supported (would need to track all configs)."""
143
180
  raise ServerError(error=UnsupportedOperationError())
144
181
 
145
182
  async def on_delete_task_push_notification_config(
@@ -147,8 +184,15 @@ class AdapterRequestHandler(RequestHandler):
147
184
  params: DeleteTaskPushNotificationConfigParams,
148
185
  context: ServerCallContext
149
186
  ) -> DeleteTaskPushNotificationConfigResponse:
150
- """Delete push notification config - not supported."""
151
- raise ServerError(error=UnsupportedOperationError())
187
+ """Delete push notification config."""
188
+ if not hasattr(self.adapter, 'supports_push_notifications') or not self.adapter.supports_push_notifications():
189
+ raise ServerError(error=UnsupportedOperationError())
190
+
191
+ success = await self.adapter.delete_push_notification_config(params.taskId)
192
+ if not success:
193
+ raise ServerError(error=UnsupportedOperationError(message=f"No push config for task {params.taskId}"))
194
+
195
+ return DeleteTaskPushNotificationConfigResponse(result={})
152
196
 
153
197
 
154
198
  def build_agent_app(
@@ -5,14 +5,18 @@ This package contains concrete adapter implementations for various agent framewo
5
5
  - n8n: HTTP webhook-based workflows
6
6
  - CrewAI: Multi-agent collaboration framework
7
7
  - LangChain: LLM application framework with LCEL support
8
+ - LangGraph: Stateful workflow orchestration framework
8
9
  - Callable: Generic Python async function adapter
10
+ - OpenClaw: Personal AI super agent CLI wrapper
9
11
  """
10
12
 
11
13
  __all__ = [
12
14
  "N8nAgentAdapter",
13
15
  "CrewAIAgentAdapter",
14
16
  "LangChainAgentAdapter",
17
+ "LangGraphAgentAdapter",
15
18
  "CallableAgentAdapter",
19
+ "OpenClawAgentAdapter",
16
20
  ]
17
21
 
18
22
  # Lazy imports to avoid requiring all optional dependencies
@@ -26,8 +30,13 @@ def __getattr__(name: str):
26
30
  elif name == "LangChainAgentAdapter":
27
31
  from .langchain import LangChainAgentAdapter
28
32
  return LangChainAgentAdapter
33
+ elif name == "LangGraphAgentAdapter":
34
+ from .langgraph import LangGraphAgentAdapter
35
+ return LangGraphAgentAdapter
29
36
  elif name == "CallableAgentAdapter":
30
37
  from .callable import CallableAgentAdapter
31
38
  return CallableAgentAdapter
39
+ elif name == "OpenClawAgentAdapter":
40
+ from .openclaw import OpenClawAgentAdapter
41
+ return OpenClawAgentAdapter
32
42
  raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
33
-