agent-mcp 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- agent_mcp/__init__.py +16 -0
- agent_mcp/crewai_mcp_adapter.py +281 -0
- agent_mcp/enhanced_mcp_agent.py +601 -0
- agent_mcp/heterogeneous_group_chat.py +424 -0
- agent_mcp/langchain_mcp_adapter.py +325 -0
- agent_mcp/langgraph_mcp_adapter.py +325 -0
- agent_mcp/mcp_agent.py +632 -0
- agent_mcp/mcp_decorator.py +257 -0
- agent_mcp/mcp_langgraph.py +733 -0
- agent_mcp/mcp_transaction.py +97 -0
- agent_mcp/mcp_transport.py +700 -0
- agent_mcp/mcp_transport_enhanced.py +46 -0
- agent_mcp/proxy_agent.py +24 -0
- agent_mcp-0.1.3.dist-info/METADATA +331 -0
- agent_mcp-0.1.3.dist-info/RECORD +18 -0
- agent_mcp-0.1.3.dist-info/top_level.txt +1 -0
- agent_mcp-0.1.1.dist-info/METADATA +0 -474
- agent_mcp-0.1.1.dist-info/RECORD +0 -5
- agent_mcp-0.1.1.dist-info/top_level.txt +0 -1
- {agent_mcp-0.1.1.dist-info → agent_mcp-0.1.3.dist-info}/WHEEL +0 -0
- {agent_mcp-0.1.1.dist-info → agent_mcp-0.1.3.dist-info}/entry_points.txt +0 -0
agent_mcp/langchain_mcp_adapter.py (new file)
@@ -0,0 +1,325 @@
"""
Adapter for Langchain agents to work with MCP.
"""

import asyncio
from typing import Dict, Any, Optional
from .mcp_agent import MCPAgent
from .mcp_transport import MCPTransport
from langchain.agents import AgentExecutor
from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
import traceback
import json

# --- Setup Logger ---
import logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- End Logger Setup ---

class LangchainMCPAdapter(MCPAgent):
    """Adapter for Langchain agents to work with MCP"""

    def __init__(self,
                 name: str,
                 transport: Optional[MCPTransport] = None,
                 client_mode: bool = False,
                 langchain_agent: OpenAIFunctionsAgent = None,
                 agent_executor: AgentExecutor = None,
                 system_message: str = "",
                 **kwargs):
        # Set default system message if none provided
        if not system_message:
            system_message = "I am a Langchain agent that can help with various tasks."

        # Initialize parent with system message
        super().__init__(name=name, system_message=system_message, **kwargs)

        # Set instance attributes
        self.transport = transport
        self.client_mode = client_mode
        self.langchain_agent = langchain_agent
        self.agent_executor = agent_executor
        self.task_queue = asyncio.Queue()
        self._task_processor = None
        self._message_processor = None
        self._processed_tasks = set()  # For idempotency check

    async def connect_to_server(self, server_url: str):
        """Connect to another agent's server"""
        if not self.client_mode or not self.transport:
            raise ValueError("Agent not configured for client mode")

        # Register with the server
        registration = {
            "type": "registration",
            "agent_id": self.mcp_id,
            "name": self.name,
            "capabilities": []
        }

        response = await self.transport.send_message(server_url, registration)
        if response.get("status") == "ok":
            print(f"Successfully connected to server at {server_url}")

    async def handle_incoming_message(self, message: Dict[str, Any], message_id: Optional[str] = None):
        """Handle incoming messages from other agents"""
        # First check if type is directly in the message
        msg_type = message.get("type")

        # If not, check if it's inside the content field
        if not msg_type and "content" in message and isinstance(message["content"], dict):
            msg_type = message["content"].get("type")

        sender = message.get("sender", "Unknown")
        task_id = message.get("task_id") or message.get("content", {}).get("task_id") if isinstance(message.get("content"), dict) else message.get("task_id")
        logger.info(f"[{self.name}] Received message (ID: {message_id}) of type '{msg_type}' from {sender} (Task ID: {task_id})")

        # --- Idempotency Check ---
        if not super()._should_process_message(message):
            # If skipped, acknowledge and stop
            if message_id and self.transport:
                asyncio.create_task(self.transport.acknowledge_message(self.name, message_id))
                logger.info(f"[{self.name}] Acknowledged duplicate task {task_id} (msg_id: {message_id})")
            return
        # --- End Idempotency Check ---

        if msg_type == "task":
            logger.info(f"[{self.name}] Queueing task {task_id} (message_id: {message_id}) from {sender}")
            content = message.get("content", {})
            task_id = content.get("task_id") or message.get("task_id")
            description = content.get("description") or message.get("description")
            reply_to = content.get("reply_to")

            if not task_id or not description:
                print(f"[ERROR] {self.name}: Task message missing required fields: {message}")
                return

            # Add message_id to task
            message['message_id'] = message_id

            # Queue task for async processing
            print(f"[DEBUG] {self.name}: Queueing task {task_id} with message_id {message_id} for processing")
            await self.task_queue.put(message)
            print(f"[DEBUG] {self.name}: Successfully queued task {task_id}")
        else:
            print(f"[WARN] {self.name}: Received unknown message type: {msg_type}")

    async def _handle_task(self, message: Dict[str, Any]):
        """Handle incoming task"""
        print(f"{self.name}: Received task: {message}")
        await self.task_queue.put(message)
        return {"status": "ok"}

    async def process_messages(self):
        print(f"[{self.name}] Message processor loop started.")
        while True:
            try:
                print(f"[{self.name}] Waiting for message from queue...")
                message, message_id = await self.transport.receive_message()
                print(f"{self.name}: Processing message {message_id}: {message}")

                # Skip None messages
                if message is None:
                    print(f"[{self.name}] Received None message, skipping...")
                    continue

                await self.handle_incoming_message(message, message_id)
            except asyncio.CancelledError:
                print(f"[{self.name}] Message processor cancelled.")
                break
            except Exception as e:
                print(f"[{self.name}] Error in message processor: {e}")
                traceback.print_exc()
                break
            except Exception as e:
                print(f"[{self.name}] Error in message processor: {e}")
                await asyncio.sleep(1)
        print(f"[{self.name}] Message processor loop finished.")

    async def process_tasks(self):
        print(f"[{self.name}] Task processor loop started.")
        while True:
            try:
                print(f"[{self.name}] Waiting for task from queue...")
                task = await self.task_queue.get()
                print(f"\n[{self.name}] Got task from queue: {task}")

                if not isinstance(task, dict):
                    print(f"[ERROR] {self.name}: Task is not a dictionary: {task}")
                    self.task_queue.task_done()
                    continue

                # Get task details from content field if present
                content = task.get("content", {})
                task_desc = content.get("description") or task.get("description")
                task_id = content.get("task_id") or task.get("task_id")
                task_type = content.get("type") or task.get("type")
                reply_to = content.get("reply_to") or task.get("reply_to")

                print(f"[DEBUG] {self.name}: Task details:")
                print(f"  - Type: {task_type}")
                print(f"  - Task ID: {task_id}")
                print(f"  - Reply To: {reply_to}")
                print(f"  - Description: {task_desc}")

                if not task_desc or not task_id:
                    print(f"[ERROR] {self.name}: Task is missing description or task_id")
                    self.task_queue.task_done()
                    continue

                if task_type != "task":
                    print(f"[ERROR] {self.name}: Invalid task type: {task_type}")
                    self.task_queue.task_done()
                    continue

                print(f"[DEBUG] {self.name}: Starting execution of task {task_id}")
                # Execute task using Langchain agent
                try:
                    print(f"[DEBUG] {self.name}: Calling agent_executor.arun with task description")
                    # Execute the task using the Langchain agent executor
                    result = await self.agent_executor.arun(task_desc)
                    print(f"[DEBUG] {self.name}: Agent execution completed. Result type: {type(result)}")
                except Exception as e:
                    print(f"[ERROR] {self.name}: Agent execution failed: {e}")
                    print(f"[ERROR] {self.name}: Error type: {type(e)}")
                    traceback.print_exc()  # Print the full traceback for detailed debugging
                    # Provide a user-friendly error message as the result
                    result = f"Agent execution failed due to an error: {str(e)}"

                # Ensure result is always a string before sending
                if not isinstance(result, str):
                    try:
                        result_str = json.dumps(result)  # Try serializing if complex type
                    except (TypeError, OverflowError):
                        result_str = str(result)  # Fallback to string conversion
                else:
                    result_str = result

                print(f"[DEBUG] {self.name}: Sending task result for task_id: {task_id}")
                # Send the result back
                if reply_to and self.transport:
                    try:
                        # --- FIX: Extract agent name from reply_to URL ---
                        try:
                            target_agent_name = reply_to.split('/')[-1]
                        except IndexError:
                            print(f"[ERROR] {self.name}: Could not extract agent name from reply_to URL: {reply_to}")
                            target_agent_name = reply_to  # Fallback, though likely wrong

                        print(f"[DEBUG] {self.name}: Sending result to target agent: {target_agent_name} (extracted from {reply_to})")
                        # --- END FIX ---

                        await self.transport.send_message(
                            target_agent_name,  # <<< Use extracted name, not full URL
                            {
                                "type": "task_result",
                                "task_id": task_id,
                                "result": result_str,
                                "sender": self.name,
                                "original_message_id": task.get('message_id')  # Include original message ID
                            }
                        )
                        print(f"[DEBUG] {self.name}: Result sent successfully")

                        # Acknowledge task completion using message_id
                        message_id = task.get('message_id')
                        if message_id:
                            await self.transport.acknowledge_message(self.name, message_id)
                            print(f"[DEBUG] {self.name}: Task {task_id} acknowledged with message_id {message_id}")
                        else:
                            print(f"[WARN] {self.name}: No message_id for task {task_id}, cannot acknowledge")
                    except Exception as send_error:
                        print(f"[ERROR] {self.name}: Failed to send result: {str(send_error)}")
                        traceback.print_exc()
                else:
                    print(f"[WARN] {self.name}: No reply_to URL in task {task_id}, cannot send result")

                super()._mark_task_completed(task_id)  # Call base class method

                self.task_queue.task_done()
                print(f"[DEBUG] {self.name}: Task {task_id} fully processed")

            except Exception as e:
                print(f"[ERROR] {self.name}: Error processing task: {e}")
                traceback.print_exc()
                await asyncio.sleep(1)
        print(f"[{self.name}] Task processor loop finished.")

    def _should_process_message(self, message: Dict[str, Any]) -> bool:
        """Check if a message should be processed based on idempotency"""
        task_id = message.get("content", {}).get("task_id") if isinstance(message.get("content"), dict) else message.get("task_id")
        if task_id in self._processed_tasks:
            logger.info(f"[{self.name}] Skipping duplicate task {task_id}")
            return False
        return True

    def _mark_task_completed(self, task_id: str) -> None:
        """Mark a task as completed for idempotency"""
        self._processed_tasks.add(task_id)
        logger.info(f"[{self.name}] Marked task {task_id} as completed")

    async def run(self):
        """Run the agent's main loop asynchronously."""
        print(f"[{self.name}] Starting agent run loop...")

        # Ensure transport is ready (polling should be started by HeterogeneousGroupChat)
        if not self.transport:
            print(f"[ERROR] {self.name}: Transport is not configured. Cannot run agent.")
            return

        # We no longer call connect_to_server here, as registration and polling start
        # are handled by HeterogeneousGroupChat._register_and_start_agent
        # if self.client_mode and hasattr(self.transport, 'connect'):
        #     print(f"[{self.name}] Client mode: connecting transport...")
        #     # Assuming connect handles polling start now
        #     await self.transport.connect(agent_name=self.name, token=self.transport.token)
        # else:
        #     print(f"[{self.name}] Not in client mode or transport does not support connect. Assuming ready.")

        # Start message and task processors as background tasks
        try:
            print(f"[{self.name}] Creating message and task processor tasks...")
            self._message_processor = asyncio.create_task(self.process_messages())
            self._task_processor = asyncio.create_task(self.process_tasks())
            print(f"[{self.name}] Processor tasks created.")

            # Wait for either task to complete (or be cancelled)
            # This keeps the agent alive while processors are running
            done, pending = await asyncio.wait(
                [self._message_processor, self._task_processor],
                return_when=asyncio.FIRST_COMPLETED,
            )

            print(f"[{self.name}] One of the processor tasks completed or was cancelled.")
            # Handle completion or cancellation if needed
            for task in done:
                try:
                    # Check if task raised an exception
                    exc = task.exception()
                    if exc:
                        print(f"[{self.name}] Processor task ended with error: {exc}")
                        # Optionally re-raise or handle
                except asyncio.CancelledError:
                    print(f"[{self.name}] Processor task was cancelled.")

            # Cancel any pending tasks to ensure clean shutdown
            for task in pending:
                print(f"[{self.name}] Cancelling pending processor task...")
                task.cancel()
                try:
                    await task  # Await cancellation
                except asyncio.CancelledError:
                    pass  # Expected

        except Exception as e:
            print(f"[ERROR] {self.name}: Unhandled exception in run loop: {e}")
            traceback.print_exc()
        finally:
            print(f"[{self.name}] Agent run loop finished.")
            # Ensure processors are stopped if they weren't already cancelled
            if self._message_processor and not self._message_processor.done():
                self._message_processor.cancel()
            if self._task_processor and not self._task_processor.done():
                self._task_processor.cancel()
            # Note: Transport disconnect should be handled by HeterogeneousGroupChat.shutdown()
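For orientation, here is a minimal sketch of how the new LangchainMCPAdapter could be wired up. It assumes an HTTPTransport constructor taking host/port (the diff only shows the transport's send/receive/acknowledge methods and its host/port attributes), and the AgentExecutor is assumed to be built elsewhere with standard LangChain tooling; this is not part of the package diff itself.

# Hedged usage sketch for LangchainMCPAdapter (not part of the diff).
# HTTPTransport(host=..., port=...) is an assumed constructor signature.
import asyncio
from langchain.agents import AgentExecutor
from agent_mcp.langchain_mcp_adapter import LangchainMCPAdapter
from agent_mcp.mcp_transport import HTTPTransport

async def main(agent_executor: AgentExecutor):
    transport = HTTPTransport(host="127.0.0.1", port=8001)  # assumed signature
    adapter = LangchainMCPAdapter(
        name="langchain_worker",
        transport=transport,
        client_mode=True,               # registration/polling is handled by HeterogeneousGroupChat
        agent_executor=agent_executor,  # executes each task via agent_executor.arun(...)
    )
    await adapter.run()                 # starts process_messages() and process_tasks()

# asyncio.run(main(my_executor))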
agent_mcp/langgraph_mcp_adapter.py (new file)
@@ -0,0 +1,325 @@
"""
LangGraph MCP Adapter - Adapt LangGraph agents to work with MCP.

This module provides an adapter that allows LangGraph agents to work within
the Model Context Protocol (MCP) framework, enabling them to collaborate
with agents from other frameworks like Autogen and CrewAI.

Supports both workflow-based and tool-based LangGraph agents:
1. Workflow-based: Uses StateGraph for defining agent behavior
2. Tool-based: Uses LangChain tools and agent executors
"""

import asyncio
import traceback
from typing import Dict, Any, Optional, Callable, List, Union
from langchain.tools import BaseTool
from langchain.agents import AgentExecutor
from langchain_openai import ChatOpenAI
from langchain.agents import create_openai_tools_agent
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langgraph.graph import StateGraph
from .mcp_agent import MCPAgent
from .mcp_transport import HTTPTransport
from fastapi import FastAPI, Request
import uvicorn
from threading import Thread
import time

class LangGraphMCPAdapter(MCPAgent):
    """
    Adapter for LangGraph agents to work with MCP.

    This adapter supports both:
    1. Workflow-based agents using StateGraph
    2. Tool-based agents using LangChain tools
    """

    def __init__(
        self,
        name: str,
        workflow: Optional[StateGraph] = None,
        tools: Optional[List[BaseTool]] = None,
        process_message: Optional[Callable] = None,
        transport: Optional[HTTPTransport] = None,
        client_mode: bool = True,
        state_type: Optional[type] = None,
        **kwargs
    ):
        """
        Initialize the LangGraph MCP adapter.

        Args:
            name: Name of the agent
            workflow: Optional StateGraph workflow for workflow-based agents
            tools: Optional list of tools for tool-based agents
            process_message: Optional custom message processing function
            transport: Optional transport layer
            client_mode: Whether to run in client mode
            **kwargs: Additional arguments to pass to MCPAgent
        """
        # Initialize MCPAgent with transport
        super().__init__(name=name, transport=transport, **kwargs)

        if workflow and tools:
            raise ValueError("Cannot specify both workflow and tools. Choose one pattern.")

        if workflow:
            # Workflow-based agent
            self.workflow = workflow
            self.state_type = state_type
            self.executor = None
        elif tools:
            # Tool-based agent
            llm = ChatOpenAI(temperature=0)
            prompt = ChatPromptTemplate.from_messages([
                ("system", "You are a helpful AI assistant that can use tools to accomplish tasks."),
                ("human", "{input}"),
                MessagesPlaceholder(variable_name="agent_scratchpad")
            ])
            agent = create_openai_tools_agent(llm, tools, prompt)
            self.executor = AgentExecutor.from_agent_and_tools(
                agent=agent,
                tools=tools,
                handle_parsing_errors=True
            )
            self.workflow = None
            self.state_type = None
        else:
            raise ValueError("Must specify either workflow or tools")

        self.custom_process_message = process_message
        self.transport = transport
        self.client_mode = client_mode
        self.task_queue = asyncio.Queue()
        self.state: Dict[str, Any] = {}
        self.server_ready = asyncio.Event()

        # Create FastAPI app for server mode
        self.app = FastAPI()

        @self.app.post("/message")
        async def handle_message(request: Request):
            return await self._handle_message(request)

        @self.app.on_event("startup")
        async def startup_event():
            self.server_ready.set()

        self.server_thread = None

    async def _handle_message(self, request: Request):
        """Handle incoming HTTP messages"""
        try:
            message = await request.json()
            await self.task_queue.put(message)
            return {"status": "ok"}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def process_messages(self):
        """Process incoming messages from the transport layer"""
        while True:
            try:
                message, message_id = await self.transport.receive_message()
                print(f"{self.name}: Received message {message_id}: {message}")

                if message and isinstance(message, dict):
                    # Add message_id to message for tracking
                    message['message_id'] = message_id

                    # Standardize message structure
                    if 'content' not in message and message.get('type') == 'task':
                        message = {
                            'type': 'task',
                            'content': {
                                'task_id': message.get('task_id'),
                                'description': message.get('description'),
                                'type': 'task'
                            },
                            'message_id': message_id,
                            'from': message.get('from', 'unknown')
                        }

                    # --- Idempotency Check ---
                    if not super()._should_process_message(message):
                        if message_id and self.transport:
                            asyncio.create_task(self.transport.acknowledge_message(self.name, message_id))
                            print(f"[{self.name}] Acknowledged duplicate task {message.get('task_id')} (msg_id: {message_id})")
                        continue

                    if message.get('type') == 'task':
                        print(f"{self.name}: Queueing task with message_id {message_id}")
                        await self.task_queue.put(message)
                    elif self.custom_process_message:
                        await self.custom_process_message(self, message)
                    else:
                        print(f"{self.name}: Unknown message type: {message.get('type')}")
                        if message_id and self.transport:
                            await self.transport.acknowledge_message(self.name, message_id)
                            print(f"{self.name}: Acknowledged unknown message {message_id}")
            except asyncio.CancelledError:
                print(f"{self.name}: Message processor cancelled")
                break
            except Exception as e:
                print(f"{self.name}: Error processing message: {e}")
                traceback.print_exc()
                await asyncio.sleep(1)

    async def execute_task(self, task: Dict[str, Any]):
        """Execute a task using either workflow or executor"""
        try:
            if self.workflow:
                # Always initialize state as a dictionary for LangGraph workflows here.
                # LangGraph itself handles the state type defined in StateGraph().
                state_dict = {"message": task, "result": None}

                # Run workflow
                try:
                    print(f"{self.name}: Running workflow with initial state_dict: {state_dict}")
                    # Compile the workflow if not already compiled
                    if not hasattr(self, '_compiled_workflow'):
                        self._compiled_workflow = self.workflow.compile()
                    # Pass the initial state dict directly
                    final_state = await self._compiled_workflow.ainvoke(state_dict)
                    # Use the workflow's final state
                    result = final_state
                    print(f"{self.name}: Workflow finished with final_state: {result}")
                    return {"result": result, "error": None}
                except Exception as e:
                    print(f"Error running workflow: {str(e)}")
                    import traceback
                    traceback.print_exc()
                    return {"result": None, "error": str(e)}
            elif self.executor:
                # Run with executor
                result = await self.executor.arun(task)
                return {"result": result, "error": None}
            else:
                return {"result": None, "error": "No workflow or executor configured"}
        except Exception as e:
            return {
                "result": f"[FROM EXECUTE_TASK] Error executing task: {str(e)}",
                "error": True
            }

    async def process_tasks(self):
        """Process tasks from the queue"""
        while True:
            try:
                task = await self.task_queue.get()

                # Extract task details
                task_content = task.get('content', task.get('task', {}))
                task_id = task.get('task_id') or task_content.get('task_id')
                task_description = task.get('description') or task_content.get('description')
                message_id = task.get('message_id')
                reply_to = task.get('reply_to')

                if not task_id or not task_description:
                    print(f"[ERROR] {self.name}: Task missing required fields: {task}")
                    self.task_queue.task_done()
                    continue

                print(f"\n{self.name}: Processing task {task_id} with message_id {message_id}")

                try:
                    # Execute the task
                    result = await self.execute_task(task_content)

                    # Mark task completed
                    super()._mark_task_completed(task_id)

                    # Send result back if reply_to is specified
                    if reply_to:
                        print(f"{self.name}: Sending result back to {reply_to}")
                        await self.transport.send_message(
                            reply_to,
                            {
                                "type": "task_result",
                                "task_id": task_id,
                                "result": result['result'],
                                "sender": self.name,
                                "original_message_id": message_id,
                                "error": result['error']
                            }
                        )
                        print(f"{self.name}: Result sent successfully")

                    # Acknowledge task completion
                    if message_id:
                        await self.transport.acknowledge_message(self.name, message_id)
                        print(f"{self.name}: Task {task_id} acknowledged with message_id {message_id}")
                    else:
                        print(f"{self.name}: No message_id for task {task_id}, cannot acknowledge")
                except Exception as e:
                    print(f"{self.name}: Error processing task: {e}")
                    traceback.print_exc()

                    if reply_to:
                        await self.transport.send_message(
                            reply_to,
                            {
                                "type": "task_result",
                                "task_id": task_id,
                                "result": f"Error: {str(e)}",
                                "sender": self.name,
                                "original_message_id": message_id,
                                "error": True
                            }
                        )

                self.task_queue.task_done()

            except Exception as e:
                print(f"{self.name}: Error processing task: {e}")
                traceback.print_exc()
                await asyncio.sleep(1)

    def run(self):
        """Start the message and task processors"""
        if not self.transport:
            raise ValueError(f"{self.name}: No transport configured")

        # Start the transport server if not in client mode
        if not self.client_mode:
            def run_server():
                config = uvicorn.Config(
                    app=self.app,
                    host=self.transport.host,
                    port=self.transport.port,
                    log_level="info"
                )
                server = uvicorn.Server(config)
                server.run()

            self.server_thread = Thread(target=run_server, daemon=True)
            self.server_thread.start()
        else:
            # In client mode, we're ready immediately
            self.server_ready.set()

        print(f"{self.name}: Starting message processor...")
        asyncio.create_task(self.process_messages())

        print(f"{self.name}: Starting task processor...")
        asyncio.create_task(self.process_tasks())

    async def connect_to_server(self, server_url: str):
        """Connect to a coordinator server"""
        if not self.client_mode:
            raise ValueError("Agent not configured for client mode")

        # Wait for server to be ready before connecting
        if not self.server_ready.is_set():
            await asyncio.wait_for(self.server_ready.wait(), timeout=10)

        # Register with the coordinator
        await self.transport.send_message(
            server_url,
            {
                "type": "register",
                "agent_name": self.name,
                "agent_url": self.transport.get_url()
            }
        )
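As a companion sketch, here is what a minimal workflow-based agent for the new LangGraphMCPAdapter could look like: a one-node StateGraph whose state mirrors the {"message": ..., "result": ...} dict that execute_task() passes to ainvoke(). The node body and all names are illustrative, not part of the diff, and a transport must still be attached before run().

# Hedged sketch of the workflow-based path (not part of the diff).
from typing import Any, Optional, TypedDict
from langgraph.graph import StateGraph, END
from agent_mcp.langgraph_mcp_adapter import LangGraphMCPAdapter

class TaskState(TypedDict):
    # Matches the initial state dict built in execute_task()
    message: Any
    result: Optional[str]

def answer(state: TaskState) -> TaskState:
    # Illustrative node: echo the task description back as the result
    desc = state["message"].get("description", "")
    return {"message": state["message"], "result": f"Handled: {desc}"}

graph = StateGraph(TaskState)
graph.add_node("answer", answer)
graph.set_entry_point("answer")
graph.add_edge("answer", END)

adapter = LangGraphMCPAdapter(
    name="langgraph_worker",
    workflow=graph,        # the adapter compiles it lazily in execute_task()
    state_type=TaskState,
    # transport=... must be supplied before run(); omitted in this sketch
)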