agent-mcp 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,410 @@
1
+ """
2
+ LlamaIndex MCP Adapter
3
+ Integration between LlamaIndex agents and Model Context Protocol
4
+
5
+ This adapter allows LlamaIndex agents to:
6
+ 1. Expose their tools as MCP tools
7
+ 2. Consume MCP tools from other agents
8
+ 3. Participate in multi-agent networks
9
+ """
10
+
11
+ import asyncio
12
+ import json
13
+ import uuid
14
+ from typing import Dict, Any, List, Optional, Callable
15
+ from dataclasses import dataclass, asdict
16
+ import logging
17
+
18
try:
    from llama_index.core.agent import Agent
    from llama_index.core.tools import BaseTool
    from llama_index.core.llms import LLM
    from llama_index.core.query_engine import QueryEngine
    from llama_index.core.indices import VectorStoreIndex
    from llama_index.core.readers import base as readers
    from llama_index.tools.mcp import McpToolSpec
    LLAMA_INDEX_AVAILABLE = True
except ImportError:
    # LlamaIndex is an optional dependency: fall back to None sentinels so this
    # module still imports and callers can check LLAMA_INDEX_AVAILABLE first.
    LLAMA_INDEX_AVAILABLE = False
    Agent = None
    BaseTool = None
    LLM = None
    # Bug fix: these names were previously left undefined on import failure,
    # so later references (e.g. McpToolSpec in LlamaIndexMCPBridge) raised
    # NameError instead of being handled gracefully.
    QueryEngine = None
    VectorStoreIndex = None
    readers = None
    McpToolSpec = None
32
+
33
+ from .mcp_transport import HTTPTransport
34
+
35
+ logger = logging.getLogger(__name__)
36
+
37
@dataclass
class LlamaIndexMCPConfig:
    """Configuration for LlamaIndex MCP integration"""
    # Unique id used as the agent's MCP identity when registering.
    agent_id: str
    # Human-readable name advertised to the MCP network.
    name: str
    # Short description of the agent's purpose.
    description: str
    # MCP server endpoint; defaults to the project's hosted Cloud Run server.
    server_url: str = "https://mcp-server-ixlfhxquwq-ew.a.run.app"
    # NOTE(review): the two flags below are not consulted anywhere in this
    # module — tool/index extraction currently always runs; confirm intent.
    auto_register_tools: bool = True
    expose_index_as_tool: bool = True
    index_query_mode: str = "default"  # "default", "tree", "sub_question"
47
+
48
class MCPLlamaIndexAgent:
    """LlamaIndex Agent with MCP capabilities"""

    def __init__(
        self,
        agent: Agent,
        config: LlamaIndexMCPConfig,
        transport: Optional[HTTPTransport] = None
    ):
        """Wrap a LlamaIndex agent so it can participate in an MCP network.

        Args:
            agent: The LlamaIndex agent whose tools/query engine are exposed.
            config: MCP registration settings (id, name, server URL, ...).
            transport: Optional pre-built transport; when None, one is created
                from ``config.server_url``.  (Fix: annotation was previously
                ``HTTPTransport = None``, which is not a valid type for None.)

        Raises:
            ImportError: If LlamaIndex is not installed.
        """
        if not LLAMA_INDEX_AVAILABLE:
            raise ImportError("LlamaIndex is not installed. Install with: pip install llama-index")

        self.llama_agent = agent
        self.config = config
        # tool name -> {"name", "description", "parameters", "function"}
        self.mcp_tools: Dict[str, Dict[str, Any]] = {}
        self.transport = transport or HTTPTransport.from_url(config.server_url)
        self.mcp_id = config.agent_id
        self.mcp_version = "0.1.0"

        # Mirror the agent's tools (and query engine, if any) as MCP tools.
        self._extract_tools_from_agent()
69
+
70
+ def _extract_tools_from_agent(self):
71
+ """Extract tools from LlamaIndex agent and register as MCP tools"""
72
+ try:
73
+ # Get tools from the agent if available
74
+ if hasattr(self.llama_agent, 'tools') and self.llama_agent.tools:
75
+ for tool in self.llama_agent.tools:
76
+ self._register_llama_tool_as_mcp(tool)
77
+
78
+ # Get tools from agent's query engine
79
+ if hasattr(self.llama_agent, 'query_engine') and self.llama_agent.query_engine:
80
+ self._register_query_engine_as_tool()
81
+
82
+ except Exception as e:
83
+ logger.error(f"Error extracting tools from LlamaIndex agent: {e}")
84
+
85
+ def _register_llama_tool_as_mcp(self, tool: BaseTool):
86
+ """Register a LlamaIndex tool as MCP tool"""
87
+ tool_name = tool.metadata.name
88
+ tool_description = tool.metadata.description
89
+
90
+ async def mcp_tool_wrapper(**kwargs):
91
+ """Wrapper to call LlamaIndex tool through MCP"""
92
+ try:
93
+ # Convert args to the format LlamaIndex expects
94
+ result = await tool.acall(**kwargs)
95
+ return {
96
+ "status": "success",
97
+ "result": result,
98
+ "tool_name": tool_name
99
+ }
100
+ except Exception as e:
101
+ logger.error(f"Error calling LlamaIndex tool {tool_name}: {e}")
102
+ return {
103
+ "status": "error",
104
+ "message": str(e),
105
+ "tool_name": tool_name
106
+ }
107
+
108
+ self.mcp_tools[tool_name] = {
109
+ "name": tool_name,
110
+ "description": tool_description,
111
+ "parameters": self._extract_tool_parameters(tool),
112
+ "function": mcp_tool_wrapper
113
+ }
114
+
115
+ def _register_query_engine_as_tool(self):
116
+ """Register the agent's query engine as an MCP tool"""
117
+ async def query_tool_wrapper(query: str, **kwargs) -> Dict[str, Any]:
118
+ """Wrapper to query the agent's knowledge base"""
119
+ try:
120
+ response = await self.llama_agent.query_engine.aquery(query)
121
+ return {
122
+ "status": "success",
123
+ "result": str(response),
124
+ "query": query,
125
+ "source_nodes": [
126
+ {"text": node.text, "score": node.score}
127
+ for node in getattr(response, 'source_nodes', [])
128
+ ]
129
+ }
130
+ except Exception as e:
131
+ logger.error(f"Error querying LlamaIndex agent: {e}")
132
+ return {
133
+ "status": "error",
134
+ "message": str(e),
135
+ "query": query
136
+ }
137
+
138
+ self.mcp_tools["query_knowledge_base"] = {
139
+ "name": "query_knowledge_base",
140
+ "description": f"Query the knowledge base of {self.config.name}",
141
+ "parameters": [
142
+ {
143
+ "name": "query",
144
+ "description": "The query to search for",
145
+ "type": "string",
146
+ "required": True
147
+ }
148
+ ],
149
+ "function": query_tool_wrapper
150
+ }
151
+
152
+ def _extract_tool_parameters(self, tool: BaseTool) -> List[Dict[str, Any]]:
153
+ """Extract parameter information from LlamaIndex tool"""
154
+ parameters = []
155
+
156
+ if hasattr(tool.metadata, 'fn_schema') and tool.metadata.fn_schema:
157
+ # Get parameters from function schema
158
+ schema = tool.metadata.fn_schema
159
+ if hasattr(schema, 'model_fields'):
160
+ for field_name, field_info in schema.model_fields.items():
161
+ param = {
162
+ "name": field_name,
163
+ "description": field_info.description or f"Parameter {field_name}",
164
+ "type": "string", # Default to string for compatibility
165
+ "required": field_info.default is None
166
+ }
167
+
168
+ # Try to determine type
169
+ if hasattr(field_info, 'annotation'):
170
+ type_str = str(field_info.annotation)
171
+ if "int" in type_str.lower():
172
+ param["type"] = "number"
173
+ elif "bool" in type_str.lower():
174
+ param["type"] = "boolean"
175
+ elif "list" in type_str.lower():
176
+ param["type"] = "array"
177
+
178
+ parameters.append(param)
179
+
180
+ return parameters
181
+
182
+ async def register_with_mcp_server(self) -> Dict[str, Any]:
183
+ """Register this LlamaIndex agent with MCP server"""
184
+ registration_data = {
185
+ "agent_id": self.mcp_id,
186
+ "info": {
187
+ "name": self.config.name,
188
+ "description": self.config.description,
189
+ "framework": "LlamaIndex",
190
+ "capabilities": list(self.mcp_tools.keys()),
191
+ "version": self.mcp_version,
192
+ "tools": [
193
+ {
194
+ "name": tool_info["name"],
195
+ "description": tool_info["description"],
196
+ "parameters": tool_info["parameters"]
197
+ }
198
+ for tool_info in self.mcp_tools.values()
199
+ ]
200
+ }
201
+ }
202
+
203
+ return await self.transport.register_agent(self)
204
+
205
+ async def execute_mcp_tool(self, tool_name: str, **kwargs) -> Dict[str, Any]:
206
+ """Execute an MCP tool (could be local or remote)"""
207
+ if tool_name in self.mcp_tools:
208
+ # Execute local tool
209
+ tool_func = self.mcp_tools[tool_name]["function"]
210
+ return await tool_func(**kwargs)
211
+ else:
212
+ # Try to execute remote tool via MCP transport
213
+ try:
214
+ message = {
215
+ "type": "tool_call",
216
+ "tool_name": tool_name,
217
+ "arguments": kwargs,
218
+ "sender": self.mcp_id
219
+ }
220
+
221
+ response = await self.transport.send_message("network", message)
222
+ return response
223
+ except Exception as e:
224
+ logger.error(f"Error executing remote MCP tool {tool_name}: {e}")
225
+ return {
226
+ "status": "error",
227
+ "message": str(e),
228
+ "tool_name": tool_name
229
+ }
230
+
231
    async def query_with_context(self, query: str, context: Dict[str, Any] = None) -> Dict[str, Any]:
        """Query using LlamaIndex with additional MCP context.

        When *context* is a non-empty dict it is JSON-serialized and prepended
        to the question; the combined prompt is sent to the wrapped agent's
        ``aquery``.  Returns a status dict with the answer text, the original
        query, and any retrieval source nodes; on failure returns an error dict.
        """
        try:
            # Add MCP context to the query if provided
            if context:
                # Create a contextual query string
                # NOTE(review): an empty dict ({}) is falsy, so it skips this
                # block while "context_used" below still reports True — confirm
                # that is intended.
                context_str = json.dumps(context, indent=2)
                enhanced_query = f"""
                Context: {context_str}

                Question: {query}
                """
            else:
                enhanced_query = query

            # Query using LlamaIndex
            response = await self.llama_agent.aquery(enhanced_query)

            return {
                "status": "success",
                "result": str(response),
                "query": query,
                "context_used": context is not None,
                "source_nodes": [
                    {"text": node.text, "score": node.score}
                    for node in getattr(response, 'source_nodes', [])
                ]
            }

        except Exception as e:
            logger.error(f"Error in query_with_context: {e}")
            return {
                "status": "error",
                "message": str(e),
                "query": query
            }
267
+
268
+ def get_mcp_tool_info(self) -> Dict[str, Any]:
269
+ """Get information about all available MCP tools"""
270
+ return {
271
+ "agent_id": self.mcp_id,
272
+ "framework": "LlamaIndex",
273
+ "tools": [
274
+ {
275
+ "name": tool_info["name"],
276
+ "description": tool_info["description"],
277
+ "parameters": tool_info["parameters"]
278
+ }
279
+ for tool_info in self.mcp_tools.values()
280
+ ]
281
+ }
282
+
283
class LlamaIndexMCPBridge:
    """Bridge to connect LlamaIndex agents with MCP network"""

    def __init__(self, server_url: str = "https://mcp-server-ixlfhxquwq-ew.a.run.app"):
        """Remember the MCP server endpoint and start with no connected agents."""
        self.server_url = server_url
        # agent_id -> MCPLlamaIndexAgent for every successfully registered agent.
        self.connected_agents = {}
289
+
290
+ async def create_llama_mcp_agent(
291
+ self,
292
+ agent: Agent,
293
+ agent_id: str,
294
+ name: str = None,
295
+ description: str = None
296
+ ) -> MCPLlamaIndexAgent:
297
+ """Create and register a LlamaIndex MCP agent"""
298
+
299
+ config = LlamaIndexMCPConfig(
300
+ agent_id=agent_id,
301
+ name=name or agent_id,
302
+ description=description or f"LlamaIndex agent {agent_id}",
303
+ server_url=self.server_url
304
+ )
305
+
306
+ mcp_agent = MCPLlamaIndexAgent(agent, config)
307
+
308
+ # Register with MCP server
309
+ registration_result = await mcp_agent.register_with_mcp_server()
310
+
311
+ if registration_result.get("status") == "registered":
312
+ logger.info(f"LlamaIndex agent {agent_id} registered with MCP server")
313
+ self.connected_agents[agent_id] = mcp_agent
314
+ else:
315
+ logger.error(f"Failed to register LlamaIndex agent {agent_id}: {registration_result}")
316
+
317
+ return mcp_agent
318
+
319
+ async def connect_mcp_tools_to_llama(
320
+ self,
321
+ agent: Agent,
322
+ mcp_server_url: str = None
323
+ ) -> List[BaseTool]:
324
+ """Connect MCP tools to a LlamaIndex agent"""
325
+
326
+ if not LLAMA_INDEX_AVAILABLE:
327
+ raise ImportError("LlamaIndex is not installed")
328
+
329
+ mcp_tools = []
330
+
331
+ # Use McpToolSpec if available
332
+ try:
333
+ if McpToolSpec:
334
+ mcp_spec = McpToolSpec(
335
+ mcp_server_url or self.server_url
336
+ )
337
+ mcp_tools = await mcp_spec.as_tool_list()
338
+ logger.info(f"Loaded {len(mcp_tools)} MCP tools for LlamaIndex agent")
339
+ except Exception as e:
340
+ logger.warning(f"Could not use McpToolSpec: {e}")
341
+
342
+ # Fallback: manually discover and create tools
343
+ mcp_tools = await self._create_manual_mcp_tools(mcp_server_url or self.server_url)
344
+
345
+ # Add tools to LlamaIndex agent
346
+ if hasattr(agent, 'update_tool_mapping'):
347
+ agent.update_tool_mapping(mcp_tools)
348
+ elif hasattr(agent, 'tools'):
349
+ agent.tools.extend(mcp_tools)
350
+
351
+ return mcp_tools
352
+
353
+ async def _create_manual_mcp_tools(self, server_url: str) -> List[BaseTool]:
354
+ """Manually create MCP tools for LlamaIndex"""
355
+ tools = []
356
+
357
+ try:
358
+ import aiohttp
359
+ async with aiohttp.ClientSession() as session:
360
+ async with session.get(f"{server_url}/tools") as response:
361
+ if response.status == 200:
362
+ tools_data = await response.json()
363
+
364
+ for tool_data in tools_data.get("tools", []):
365
+ # Create custom LlamaIndex tool
366
+ tool = self._create_llama_tool_from_mcp_data(tool_data, server_url)
367
+ tools.append(tool)
368
+
369
+ except Exception as e:
370
+ logger.error(f"Error creating manual MCP tools: {e}")
371
+
372
+ return tools
373
+
374
    def _create_llama_tool_from_mcp_data(self, tool_data: Dict[str, Any], server_url: str) -> BaseTool:
        """Create a LlamaIndex tool from MCP tool data.

        *tool_data* is expected to carry at least ``"name"`` and
        ``"description"`` keys (both are read below).
        """

        class CustomMCPTool(BaseTool):
            # NOTE(review): BaseTool is typically an abstract class whose
            # __init__ may not accept name/description keyword arguments, and
            # which may require more abstract methods than ``acall`` —
            # confirm against the installed llama-index version.
            def __init__(self, tool_data: Dict[str, Any], server_url: str):
                self._tool_data = tool_data
                self._server_url = server_url
                super().__init__(
                    name=tool_data["name"],
                    description=tool_data["description"]
                )

            async def acall(self, **kwargs):
                """Call the MCP tool"""
                # Local import keeps aiohttp an optional dependency.
                import aiohttp

                async with aiohttp.ClientSession() as session:
                    payload = {
                        "tool_name": self._tool_data["name"],
                        "arguments": kwargs
                    }

                    async with session.post(
                        f"{self._server_url}/execute_tool",
                        json=payload
                    ) as response:
                        result = await response.json()
                        # Fall back to the stringified envelope when the server
                        # response carries no "result" key.
                        return result.get("result", str(result))

        return CustomMCPTool(tool_data, server_url)
404
+
405
# Public API of this module: the config dataclass, the agent wrapper, and the
# bridge.  Module-level helpers/constants stay private to the package.
__all__ = [
    'LlamaIndexMCPConfig',
    'MCPLlamaIndexAgent',
    'LlamaIndexMCPBridge'
]