amd-gaia 0.15.0__py3-none-any.whl → 0.15.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (185)
  1. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/METADATA +222 -223
  2. amd_gaia-0.15.2.dist-info/RECORD +182 -0
  3. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/WHEEL +1 -1
  4. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/entry_points.txt +1 -0
  5. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/licenses/LICENSE.md +20 -20
  6. gaia/__init__.py +29 -29
  7. gaia/agents/__init__.py +19 -19
  8. gaia/agents/base/__init__.py +9 -9
  9. gaia/agents/base/agent.py +2132 -2177
  10. gaia/agents/base/api_agent.py +119 -120
  11. gaia/agents/base/console.py +1967 -1841
  12. gaia/agents/base/errors.py +237 -237
  13. gaia/agents/base/mcp_agent.py +86 -86
  14. gaia/agents/base/tools.py +88 -83
  15. gaia/agents/blender/__init__.py +7 -0
  16. gaia/agents/blender/agent.py +553 -556
  17. gaia/agents/blender/agent_simple.py +133 -135
  18. gaia/agents/blender/app.py +211 -211
  19. gaia/agents/blender/app_simple.py +41 -41
  20. gaia/agents/blender/core/__init__.py +16 -16
  21. gaia/agents/blender/core/materials.py +506 -506
  22. gaia/agents/blender/core/objects.py +316 -316
  23. gaia/agents/blender/core/rendering.py +225 -225
  24. gaia/agents/blender/core/scene.py +220 -220
  25. gaia/agents/blender/core/view.py +146 -146
  26. gaia/agents/chat/__init__.py +9 -9
  27. gaia/agents/chat/agent.py +809 -835
  28. gaia/agents/chat/app.py +1065 -1058
  29. gaia/agents/chat/session.py +508 -508
  30. gaia/agents/chat/tools/__init__.py +15 -15
  31. gaia/agents/chat/tools/file_tools.py +96 -96
  32. gaia/agents/chat/tools/rag_tools.py +1744 -1729
  33. gaia/agents/chat/tools/shell_tools.py +437 -436
  34. gaia/agents/code/__init__.py +7 -7
  35. gaia/agents/code/agent.py +549 -549
  36. gaia/agents/code/cli.py +377 -0
  37. gaia/agents/code/models.py +135 -135
  38. gaia/agents/code/orchestration/__init__.py +24 -24
  39. gaia/agents/code/orchestration/checklist_executor.py +1763 -1763
  40. gaia/agents/code/orchestration/checklist_generator.py +713 -713
  41. gaia/agents/code/orchestration/factories/__init__.py +9 -9
  42. gaia/agents/code/orchestration/factories/base.py +63 -63
  43. gaia/agents/code/orchestration/factories/nextjs_factory.py +118 -118
  44. gaia/agents/code/orchestration/factories/python_factory.py +106 -106
  45. gaia/agents/code/orchestration/orchestrator.py +841 -841
  46. gaia/agents/code/orchestration/project_analyzer.py +391 -391
  47. gaia/agents/code/orchestration/steps/__init__.py +67 -67
  48. gaia/agents/code/orchestration/steps/base.py +188 -188
  49. gaia/agents/code/orchestration/steps/error_handler.py +314 -314
  50. gaia/agents/code/orchestration/steps/nextjs.py +828 -828
  51. gaia/agents/code/orchestration/steps/python.py +307 -307
  52. gaia/agents/code/orchestration/template_catalog.py +469 -469
  53. gaia/agents/code/orchestration/workflows/__init__.py +14 -14
  54. gaia/agents/code/orchestration/workflows/base.py +80 -80
  55. gaia/agents/code/orchestration/workflows/nextjs.py +186 -186
  56. gaia/agents/code/orchestration/workflows/python.py +94 -94
  57. gaia/agents/code/prompts/__init__.py +11 -11
  58. gaia/agents/code/prompts/base_prompt.py +77 -77
  59. gaia/agents/code/prompts/code_patterns.py +2034 -2036
  60. gaia/agents/code/prompts/nextjs_prompt.py +40 -40
  61. gaia/agents/code/prompts/python_prompt.py +109 -109
  62. gaia/agents/code/schema_inference.py +365 -365
  63. gaia/agents/code/system_prompt.py +41 -41
  64. gaia/agents/code/tools/__init__.py +42 -42
  65. gaia/agents/code/tools/cli_tools.py +1138 -1138
  66. gaia/agents/code/tools/code_formatting.py +319 -319
  67. gaia/agents/code/tools/code_tools.py +769 -769
  68. gaia/agents/code/tools/error_fixing.py +1347 -1347
  69. gaia/agents/code/tools/external_tools.py +180 -180
  70. gaia/agents/code/tools/file_io.py +845 -845
  71. gaia/agents/code/tools/prisma_tools.py +190 -190
  72. gaia/agents/code/tools/project_management.py +1016 -1016
  73. gaia/agents/code/tools/testing.py +321 -321
  74. gaia/agents/code/tools/typescript_tools.py +122 -122
  75. gaia/agents/code/tools/validation_parsing.py +461 -461
  76. gaia/agents/code/tools/validation_tools.py +806 -806
  77. gaia/agents/code/tools/web_dev_tools.py +1758 -1758
  78. gaia/agents/code/validators/__init__.py +16 -16
  79. gaia/agents/code/validators/antipattern_checker.py +241 -241
  80. gaia/agents/code/validators/ast_analyzer.py +197 -197
  81. gaia/agents/code/validators/requirements_validator.py +145 -145
  82. gaia/agents/code/validators/syntax_validator.py +171 -171
  83. gaia/agents/docker/__init__.py +7 -7
  84. gaia/agents/docker/agent.py +643 -642
  85. gaia/agents/emr/__init__.py +8 -8
  86. gaia/agents/emr/agent.py +1504 -1506
  87. gaia/agents/emr/cli.py +1322 -1322
  88. gaia/agents/emr/constants.py +475 -475
  89. gaia/agents/emr/dashboard/__init__.py +4 -4
  90. gaia/agents/emr/dashboard/server.py +1972 -1974
  91. gaia/agents/jira/__init__.py +11 -11
  92. gaia/agents/jira/agent.py +894 -894
  93. gaia/agents/jira/jql_templates.py +299 -299
  94. gaia/agents/routing/__init__.py +7 -7
  95. gaia/agents/routing/agent.py +567 -570
  96. gaia/agents/routing/system_prompt.py +75 -75
  97. gaia/agents/summarize/__init__.py +11 -0
  98. gaia/agents/summarize/agent.py +885 -0
  99. gaia/agents/summarize/prompts.py +129 -0
  100. gaia/api/__init__.py +23 -23
  101. gaia/api/agent_registry.py +238 -238
  102. gaia/api/app.py +305 -305
  103. gaia/api/openai_server.py +575 -575
  104. gaia/api/schemas.py +186 -186
  105. gaia/api/sse_handler.py +373 -373
  106. gaia/apps/__init__.py +4 -4
  107. gaia/apps/llm/__init__.py +6 -6
  108. gaia/apps/llm/app.py +184 -169
  109. gaia/apps/summarize/app.py +116 -633
  110. gaia/apps/summarize/html_viewer.py +133 -133
  111. gaia/apps/summarize/pdf_formatter.py +284 -284
  112. gaia/audio/__init__.py +2 -2
  113. gaia/audio/audio_client.py +439 -439
  114. gaia/audio/audio_recorder.py +269 -269
  115. gaia/audio/kokoro_tts.py +599 -599
  116. gaia/audio/whisper_asr.py +432 -432
  117. gaia/chat/__init__.py +16 -16
  118. gaia/chat/app.py +428 -430
  119. gaia/chat/prompts.py +522 -522
  120. gaia/chat/sdk.py +1228 -1225
  121. gaia/cli.py +5659 -5632
  122. gaia/database/__init__.py +10 -10
  123. gaia/database/agent.py +176 -176
  124. gaia/database/mixin.py +290 -290
  125. gaia/database/testing.py +64 -64
  126. gaia/eval/batch_experiment.py +2332 -2332
  127. gaia/eval/claude.py +542 -542
  128. gaia/eval/config.py +37 -37
  129. gaia/eval/email_generator.py +512 -512
  130. gaia/eval/eval.py +3179 -3179
  131. gaia/eval/groundtruth.py +1130 -1130
  132. gaia/eval/transcript_generator.py +582 -582
  133. gaia/eval/webapp/README.md +167 -167
  134. gaia/eval/webapp/package-lock.json +875 -875
  135. gaia/eval/webapp/package.json +20 -20
  136. gaia/eval/webapp/public/app.js +3402 -3402
  137. gaia/eval/webapp/public/index.html +87 -87
  138. gaia/eval/webapp/public/styles.css +3661 -3661
  139. gaia/eval/webapp/server.js +415 -415
  140. gaia/eval/webapp/test-setup.js +72 -72
  141. gaia/installer/__init__.py +23 -0
  142. gaia/installer/init_command.py +1275 -0
  143. gaia/installer/lemonade_installer.py +619 -0
  144. gaia/llm/__init__.py +10 -2
  145. gaia/llm/base_client.py +60 -0
  146. gaia/llm/exceptions.py +12 -0
  147. gaia/llm/factory.py +70 -0
  148. gaia/llm/lemonade_client.py +3421 -3221
  149. gaia/llm/lemonade_manager.py +294 -294
  150. gaia/llm/providers/__init__.py +9 -0
  151. gaia/llm/providers/claude.py +108 -0
  152. gaia/llm/providers/lemonade.py +118 -0
  153. gaia/llm/providers/openai_provider.py +79 -0
  154. gaia/llm/vlm_client.py +382 -382
  155. gaia/logger.py +189 -189
  156. gaia/mcp/agent_mcp_server.py +245 -245
  157. gaia/mcp/blender_mcp_client.py +138 -138
  158. gaia/mcp/blender_mcp_server.py +648 -648
  159. gaia/mcp/context7_cache.py +332 -332
  160. gaia/mcp/external_services.py +518 -518
  161. gaia/mcp/mcp_bridge.py +811 -550
  162. gaia/mcp/servers/__init__.py +6 -6
  163. gaia/mcp/servers/docker_mcp.py +83 -83
  164. gaia/perf_analysis.py +361 -0
  165. gaia/rag/__init__.py +10 -10
  166. gaia/rag/app.py +293 -293
  167. gaia/rag/demo.py +304 -304
  168. gaia/rag/pdf_utils.py +235 -235
  169. gaia/rag/sdk.py +2194 -2194
  170. gaia/security.py +183 -163
  171. gaia/talk/app.py +287 -289
  172. gaia/talk/sdk.py +538 -538
  173. gaia/testing/__init__.py +87 -87
  174. gaia/testing/assertions.py +330 -330
  175. gaia/testing/fixtures.py +333 -333
  176. gaia/testing/mocks.py +493 -493
  177. gaia/util.py +46 -46
  178. gaia/utils/__init__.py +33 -33
  179. gaia/utils/file_watcher.py +675 -675
  180. gaia/utils/parsing.py +223 -223
  181. gaia/version.py +100 -100
  182. amd_gaia-0.15.0.dist-info/RECORD +0 -168
  183. gaia/agents/code/app.py +0 -266
  184. gaia/llm/llm_client.py +0 -723
  185. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/top_level.txt +0 -0
gaia/mcp/mcp_bridge.py CHANGED
@@ -1,550 +1,811 @@
1
- #!/usr/bin/env python
2
- #
3
- # Copyright(C) 2024-2025 Advanced Micro Devices, Inc. All rights reserved.
4
- # SPDX-License-Identifier: MIT
5
-
6
- """
7
- GAIA MCP Bridge - HTTP Native Implementation
8
- No WebSockets, just clean HTTP + JSON-RPC for maximum compatibility
9
- """
10
-
11
- import json
12
- import os
13
- import sys
14
- from http.server import BaseHTTPRequestHandler, HTTPServer
15
- from typing import Any, Dict
16
- from urllib.parse import urlparse
17
-
18
- # Add GAIA to path
19
- sys.path.insert(
20
- 0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
21
- )
22
-
23
- from gaia.agents.blender.agent import BlenderAgent
24
- from gaia.llm.llm_client import LLMClient
25
- from gaia.logger import get_logger
26
-
27
- logger = get_logger(__name__)
28
-
29
- # Global verbose flag for request logging
30
- VERBOSE = False
31
-
32
-
33
class GAIAMCPBridge:
    """HTTP-native MCP Bridge for GAIA - no WebSockets needed!

    Registers the available GAIA agents and the MCP tool catalog at
    construction time, then executes tool calls on behalf of the HTTP
    handler. An LLM client for direct queries is created lazily.
    """

    def __init__(
        self,
        host: str = "localhost",
        port: int = 8765,
        base_url: str = None,
        verbose: bool = False,
    ):
        """Create the bridge and eagerly register agents and tools.

        Args:
            host: Host name the server advertises (display/status only).
            port: Port the server advertises (display/status only).
            base_url: LLM backend URL; falls back to the local default.
            verbose: When True, request/response logging is enabled globally.
        """
        self.host = host
        self.port = port
        self.base_url = base_url or "http://localhost:8000/api/v1"
        self.agents = {}  # agent name -> registration metadata (plus lazy "instance")
        self.tools = {}  # tool name -> MCP tool descriptor
        self.llm_client = None  # created lazily on first gaia.query call
        self.verbose = verbose
        # Mirror the flag into the module-level global so the HTTP handler
        # class can honor it without holding a bridge reference.
        global VERBOSE
        VERBOSE = verbose

        # Initialize on creation
        self._initialize_agents()
        self._register_tools()

    def _initialize_agents(self):
        """Initialize all GAIA agents.

        Populates ``self.agents`` with registration metadata; class-based
        agents are instantiated lazily at first use (see _execute_jira).
        """
        try:
            # LLM agent
            self.agents["llm"] = {
                "module": "gaia.apps.llm.app",
                "function": "main",
                "description": "Direct LLM interaction",
                "capabilities": ["query", "stream", "model_selection"],
            }

            # Chat agent
            self.agents["chat"] = {
                "module": "gaia.chat.app",
                "function": "main",
                "description": "Interactive chat",
                "capabilities": ["conversation", "history", "context_management"],
            }

            # Blender agent
            # NOTE(review): BlenderAgent is imported at module load time, so
            # an ImportError cannot be raised by this dict construction; the
            # try/except below is effectively inert.
            try:
                self.agents["blender"] = {
                    "class": BlenderAgent,
                    "description": "3D content creation",
                    "capabilities": ["3d_modeling", "scene_manipulation", "rendering"],
                }
            except ImportError:
                logger.warning("Blender agent not available")

            # Jira agent - THE KEY ADDITION
            # Imported here (not at module top) so a missing Jira extra only
            # disables this agent instead of breaking the bridge.
            try:
                from gaia.agents.jira.agent import JiraAgent

                self.agents["jira"] = {
                    "class": JiraAgent,
                    "description": "Natural language Jira orchestration",
                    "capabilities": ["search", "create", "update", "bulk_operations"],
                    "init_params": {
                        "model_id": "Qwen3-Coder-30B-A3B-Instruct-GGUF",
                        "silent_mode": True,
                        "debug": False,
                    },
                }
                logger.info("✅ Jira agent registered")
            except ImportError as e:
                logger.warning(f"Jira agent not available: {e}")

            logger.info(f"Initialized {len(self.agents)} agents")

        except Exception as e:
            logger.error(f"Agent initialization error: {e}")

    def _register_tools(self):
        """Register available tools.

        Loads tool descriptors from an optional mcp.json next to this file,
        then guarantees the three core tools exist regardless.
        """
        # Load from mcp.json if available
        try:
            mcp_config_path = os.path.join(os.path.dirname(__file__), "mcp.json")
            if os.path.exists(mcp_config_path):
                with open(mcp_config_path, "r") as f:
                    config = json.load(f)
                tools_config = config.get("tools", {})
                # Convert tool config to proper MCP format with name field
                self.tools = {}
                for tool_name, tool_data in tools_config.items():
                    self.tools[tool_name] = {
                        "name": tool_name,
                        "description": tool_data.get("description", ""),
                        "servers": tool_data.get("servers", []),
                        "parameters": tool_data.get("parameters", {}),
                    }
                logger.info(f"Loaded {len(self.tools)} tools from mcp.json")
        except Exception as e:
            # Best-effort: a broken/missing config only costs the extra tools.
            logger.warning(f"Could not load mcp.json: {e}")

        # Ensure core tools are registered
        if "gaia.jira" not in self.tools:
            self.tools["gaia.jira"] = {
                "name": "gaia.jira",
                "description": "Natural language Jira operations",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "query": {"type": "string"},
                        "operation": {
                            "type": "string",
                            "enum": ["query", "create", "update"],
                        },
                    },
                },
            }

        if "gaia.chat" not in self.tools:
            self.tools["gaia.chat"] = {
                "name": "gaia.chat",
                "description": "Conversational chat with context",
                "inputSchema": {
                    "type": "object",
                    "properties": {"query": {"type": "string"}},
                },
            }

        if "gaia.query" not in self.tools:
            self.tools["gaia.query"] = {
                "name": "gaia.query",
                "description": "Direct LLM queries (no conversation context)",
                "inputSchema": {
                    "type": "object",
                    "properties": {"query": {"type": "string"}},
                },
            }

    def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Execute a tool and return results.

        Returns a result dict; failures are reported as {"error": ...}
        rather than raised, so HTTP callers always get a JSON body.
        """
        try:
            if tool_name == "gaia.jira":
                return self._execute_jira(arguments)
            elif tool_name == "gaia.query":
                return self._execute_query(arguments)
            elif tool_name == "gaia.chat":
                return self._execute_chat(arguments)
            elif tool_name == "gaia.blender.create":
                return self._execute_blender(arguments)
            else:
                return {"error": f"Tool not implemented: {tool_name}"}
        except Exception as e:
            logger.error(f"Tool execution error: {e}")
            return {"error": str(e)}

    def _execute_jira(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute Jira operations."""
        query = args.get("query", "")

        # Get or create agent
        agent_config = self.agents.get("jira")
        if not agent_config:
            return {"error": "Jira agent not available"}

        # Lazy initialization
        if "instance" not in agent_config:
            agent_class = agent_config["class"]
            init_params = agent_config.get("init_params", {})
            agent_config["instance"] = agent_class(**init_params)

        # Initialize Jira config discovery
        # NOTE(review): initialize() runs on every request, not just the
        # first — presumably to refresh project discovery; confirm intended.
        try:
            config = agent_config["instance"].initialize()
            logger.info(
                f"Jira initialized: {len(config.get('projects', []))} projects found"
            )
        except Exception as e:
            logger.warning(f"Jira config discovery failed: {e}")

        agent = agent_config["instance"]

        # Execute query
        result = agent.process_query(query, trace=False)

        return {
            "success": True,
            "result": result.get("final_answer", ""),
            "steps_taken": result.get("steps_taken", 0),
            "conversation": result.get("conversation", []),
        }

    def _execute_query(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute LLM query (stateless — no conversation context)."""
        if not self.llm_client:
            self.llm_client = LLMClient(base_url=self.base_url)

        response = self.llm_client.generate(
            prompt=args.get("query", ""),
            model=args.get("model"),
            max_tokens=args.get("max_tokens", 500),
        )

        return {"success": True, "result": response}

    def _execute_chat(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute chat interaction with conversation context."""
        try:
            from gaia.chat.sdk import ChatConfig, ChatSDK

            # Initialize chat SDK if not already done
            if not hasattr(self, "chat_sdk"):
                # ChatSDK uses the global LLM configuration, not a base_url
                config = ChatConfig()
                self.chat_sdk = ChatSDK(config=config)

            # Get the query
            query = args.get("query", "")

            # Send message and get response
            chat_response = self.chat_sdk.send(query)

            # Extract the text response
            # SDK response shape varies; probe common attributes before
            # falling back to str().
            if hasattr(chat_response, "text"):
                response = chat_response.text
            elif hasattr(chat_response, "content"):
                response = chat_response.content
            else:
                response = str(chat_response)

            return {"success": True, "result": response}
        except Exception as e:
            logger.error(f"Chat execution error: {e}")
            return {"success": False, "error": str(e)}

    def _execute_blender(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute Blender operations.

        NOTE(review): stub — always reports success without doing work.
        """
        # Implementation would go here
        return {"success": True, "result": "Blender operation completed"}
268
-
269
-
270
class MCPHTTPHandler(BaseHTTPRequestHandler):
    """HTTP handler for MCP protocol.

    Serves health/status/tool-listing GETs, convenience POST endpoints
    (/chat, /jira, /llm) and the JSON-RPC 2.0 MCP endpoint at "/".
    """

    def __init__(self, *args, bridge: GAIAMCPBridge = None, **kwargs):
        # A shared bridge is normally injected by start_server's factory;
        # constructing a fresh one per request is only a fallback.
        self.bridge = bridge or GAIAMCPBridge()
        super().__init__(*args, **kwargs)

    def log_request_details(self, method, path, body=None):
        """Log incoming request details if verbose mode is enabled."""
        if VERBOSE:
            client_addr = self.client_address[0] if self.client_address else "unknown"
            logger.info(f"MCP Request: {method} {path} from {client_addr}")
            if body:
                logger.debug(f"Request body: {json.dumps(body, indent=2)}")

    def do_GET(self):
        """Handle GET requests: /health, /tools (or /v1/tools), /status."""
        self.log_request_details("GET", self.path)
        parsed = urlparse(self.path)

        if parsed.path == "/health":
            self.send_json(
                200,
                {
                    "status": "healthy",
                    "service": "GAIA MCP Bridge (HTTP)",
                    "agents": len(self.bridge.agents),
                    "tools": len(self.bridge.tools),
                },
            )
        elif parsed.path == "/tools" or parsed.path == "/v1/tools":
            self.send_json(200, {"tools": list(self.bridge.tools.values())})
        elif parsed.path == "/status":
            # Comprehensive status endpoint with all details
            agents_info = {}
            for name, agent in self.bridge.agents.items():
                agents_info[name] = {
                    "description": agent.get("description", ""),
                    "capabilities": agent.get("capabilities", []),
                    "type": "class" if "class" in agent else "module",
                }

            tools_info = {}
            for name, tool in self.bridge.tools.items():
                tools_info[name] = {
                    "description": tool.get("description", ""),
                    "inputSchema": tool.get("inputSchema", {}),
                }

            self.send_json(
                200,
                {
                    "status": "healthy",
                    "service": "GAIA MCP Bridge (HTTP)",
                    "version": "2.0.0",
                    "host": self.bridge.host,
                    "port": self.bridge.port,
                    "llm_backend": self.bridge.base_url,
                    "agents": agents_info,
                    "tools": tools_info,
                    "endpoints": {
                        "health": "GET /health - Health check",
                        "status": "GET /status - Detailed status (this endpoint)",
                        "tools": "GET /tools - List available tools",
                        "chat": "POST /chat - Interactive chat",
                        "jira": "POST /jira - Jira operations",
                        "llm": "POST /llm - Direct LLM queries",
                        "jsonrpc": "POST / - JSON-RPC endpoint",
                    },
                },
            )
        else:
            self.send_json(404, {"error": "Not found"})

    def do_POST(self):
        """Handle POST requests - main MCP endpoint."""
        content_length = int(self.headers.get("Content-Length", 0))

        if content_length > 0:
            body = self.rfile.read(content_length)
            try:
                data = json.loads(body.decode("utf-8"))
                self.log_request_details("POST", self.path, data)
            except json.JSONDecodeError:
                self.log_request_details("POST", self.path)
                logger.error("Invalid JSON in request body")
                self.send_json(400, {"error": "Invalid JSON"})
                return
        else:
            # No body: treat as an empty argument set.
            data = {}
            self.log_request_details("POST", self.path)

        parsed = urlparse(self.path)

        # Handle different endpoints
        if parsed.path in ["/", "/v1/messages", "/rpc"]:
            # JSON-RPC endpoint
            self.handle_jsonrpc(data)
        elif parsed.path == "/chat":
            # Direct chat endpoint for conversations
            result = self.bridge.execute_tool("gaia.chat", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/jira":
            # Direct Jira endpoint for convenience
            result = self.bridge.execute_tool("gaia.jira", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/llm":
            # Direct LLM endpoint (no conversation context)
            result = self.bridge.execute_tool("gaia.query", data)
            self.send_json(200 if result.get("success") else 500, result)
        else:
            self.send_json(404, {"error": "Not found"})

    def handle_jsonrpc(self, data):
        """Handle JSON-RPC requests.

        Supports the MCP methods initialize, tools/list and tools/call;
        anything else gets a -32601 Method-not-found error.
        """
        # Validate JSON-RPC
        if "jsonrpc" not in data or data["jsonrpc"] != "2.0":
            self.send_json(
                400,
                {
                    "jsonrpc": "2.0",
                    "error": {"code": -32600, "message": "Invalid Request"},
                    "id": data.get("id"),
                },
            )
            return

        method = data.get("method")
        params = data.get("params", {})
        request_id = data.get("id")

        # Route methods
        if method == "initialize":
            result = {
                "protocolVersion": "1.0.0",
                "serverInfo": {"name": "GAIA MCP Bridge", "version": "2.0.0"},
                "capabilities": {"tools": True, "resources": True, "prompts": True},
            }
        elif method == "tools/list":
            result = {"tools": list(self.bridge.tools.values())}
        elif method == "tools/call":
            tool_name = params.get("name")
            arguments = params.get("arguments", {})
            tool_result = self.bridge.execute_tool(tool_name, arguments)
            # MCP expects tool output wrapped as text content blocks.
            result = {"content": [{"type": "text", "text": json.dumps(tool_result)}]}
        else:
            self.send_json(
                400,
                {
                    "jsonrpc": "2.0",
                    "error": {"code": -32601, "message": f"Method not found: {method}"},
                    "id": request_id,
                },
            )
            return

        # Send response
        self.send_json(200, {"jsonrpc": "2.0", "result": result, "id": request_id})

    def do_OPTIONS(self):
        """Handle OPTIONS for CORS."""
        self.log_request_details("OPTIONS", self.path)
        self.send_response(200)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
        self.send_header("Access-Control-Allow-Headers", "Content-Type")
        self.end_headers()

    def send_json(self, status, data):
        """Send JSON response with permissive CORS headers."""
        if VERBOSE:
            logger.info(f"MCP Response: Status {status}")
            logger.debug(f"Response body: {json.dumps(data, indent=2)}")

        self.send_response(status)
        self.send_header("Content-Type", "application/json")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()
        self.wfile.write(json.dumps(data).encode("utf-8"))

    def log_message(self, format, *args):
        """Override to control standard HTTP logging."""
        # In verbose mode, skip the built-in HTTP logging since we have custom logging
        if VERBOSE:
            # We already log detailed info in log_request_details and send_json
            pass
        elif "/health" not in args[0]:
            # In non-verbose mode, skip health checks but log everything else
            # NOTE(review): assumes args[0] exists and is the request line —
            # true for BaseHTTPRequestHandler's own calls; verify if called
            # with other formats.
            super().log_message(format, *args)
459
-
460
-
461
- def start_server(host="localhost", port=8765, base_url=None, verbose=False):
462
- """Start the HTTP MCP server."""
463
- import io
464
-
465
- # Fix Windows Unicode
466
- if sys.platform == "win32":
467
- sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
468
-
469
- # Fix Linux IPv6 issue: When host is "localhost", Python's socket might bind
470
- # to ::1 (IPv6) which curl can't connect to by default. Use 0.0.0.0 on Linux
471
- # to bind to all IPv4 interfaces. Keep localhost on Windows where it works.
472
- bind_host = host
473
- if host == "localhost" and sys.platform != "win32":
474
- bind_host = "0.0.0.0"
475
-
476
- logger.info(f"Creating MCP bridge for {host}:{port}")
477
-
478
- # Create bridge with verbose flag
479
- bridge = GAIAMCPBridge(host, port, base_url, verbose=verbose)
480
-
481
- # Create handler with bridge
482
- def handler(*args, **kwargs):
483
- return MCPHTTPHandler(*args, bridge=bridge, **kwargs)
484
-
485
- # Start server - use bind_host for actual socket binding
486
- logger.info(f"Creating HTTP server on {bind_host}:{port}")
487
- try:
488
- server = HTTPServer((bind_host, port), handler)
489
- logger.info(
490
- f"HTTP server created successfully, listening on {bind_host}:{port}"
491
- )
492
- except Exception as e:
493
- logger.error(f"Failed to create HTTP server: {e}")
494
- raise
495
-
496
- print("=" * 60, flush=True)
497
- print("🚀 GAIA MCP Bridge - HTTP Native")
498
- print("=" * 60)
499
- print(f"Server: http://{host}:{port}")
500
- print(f"LLM Backend: {bridge.base_url}")
501
- print(f"Agents: {list(bridge.agents.keys())}")
502
- print(f"Tools: {list(bridge.tools.keys())}")
503
- if verbose:
504
- print(f"\n🔍 Verbose Mode: ENABLED")
505
- print(f" All requests will be logged to console and gaia.log")
506
- logger.info("MCP Bridge started in VERBOSE mode - all requests will be logged")
507
- print("\n📍 Endpoints:")
508
- print(f" GET http://{host}:{port}/health - Health check")
509
- print(
510
- f" GET http://{host}:{port}/status - Detailed status with agents & tools"
511
- )
512
- print(f" GET http://{host}:{port}/tools - List tools")
513
- print(f" POST http://{host}:{port}/ - JSON-RPC")
514
- print(f" POST http://{host}:{port}/chat - Chat (with context)")
515
- print(f" POST http://{host}:{port}/jira - Direct Jira")
516
- print(f" POST http://{host}:{port}/llm - Direct LLM (no context)")
517
- print("\n🔧 Usage Examples:")
518
- print(
519
- ' Chat: curl -X POST http://localhost:8765/chat -d \'{"query":"Hello GAIA!"}\''
520
- )
521
- print(
522
- ' Jira: curl -X POST http://localhost:8765/jira -d \'{"query":"show my issues"}\''
523
- )
524
- print(' n8n: HTTP Request → POST /chat → {"query": "..."}')
525
- print(" MCP: JSON-RPC to / with method: tools/call")
526
- print("=" * 60)
527
- print("\nPress Ctrl+C to stop\n", flush=True)
528
-
529
- logger.info(f"Starting serve_forever() on {bind_host}:{port}")
530
- try:
531
- server.serve_forever()
532
- except KeyboardInterrupt:
533
- print("\n✅ Server stopped")
534
-
535
-
536
- if __name__ == "__main__":
537
- import argparse
538
-
539
- parser = argparse.ArgumentParser(description="GAIA MCP Bridge - HTTP Native")
540
- parser.add_argument("--host", default="localhost", help="Host to bind to")
541
- parser.add_argument("--port", type=int, default=8765, help="Port to listen on")
542
- parser.add_argument(
543
- "--base-url", default="http://localhost:8000/api/v1", help="LLM server URL"
544
- )
545
- parser.add_argument(
546
- "--verbose", action="store_true", help="Enable verbose logging for all requests"
547
- )
548
-
549
- args = parser.parse_args()
550
- start_server(args.host, args.port, args.base_url, args.verbose)
1
+ #!/usr/bin/env python
2
+ #
3
+ # Copyright(C) 2025-2026 Advanced Micro Devices, Inc. All rights reserved.
4
+ # SPDX-License-Identifier: MIT
5
+
6
+ """
7
+ GAIA MCP Bridge - HTTP Native Implementation
8
+ No WebSockets, just clean HTTP + JSON-RPC for maximum compatibility
9
+ """
10
+
11
+ import io
12
+ import json
13
+ import os
14
+ import shutil
15
+ import sys
16
+ import tempfile
17
+ from http.server import BaseHTTPRequestHandler, HTTPServer
18
+ from pathlib import Path
19
+ from typing import Any, Dict
20
+ from urllib.parse import urlparse
21
+
22
+ from python_multipart.multipart import MultipartParser, parse_options_header
23
+
24
+ # Add GAIA to path
25
+ sys.path.insert(
26
+ 0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
27
+ )
28
+
29
+ from gaia.agents.blender.agent import BlenderAgent
30
+ from gaia.llm import create_client
31
+ from gaia.logger import get_logger
32
+
33
+ logger = get_logger(__name__)
34
+
35
+ # Global verbose flag for request logging
36
+ VERBOSE = False
37
+
38
+
39
class MultipartCollector:
    """Collects multipart/form-data parts from MultipartParser callbacks.

    Non-file parts accumulate in ``fields`` (part name -> raw bytes);
    file parts accumulate in ``files`` (part name -> dict with
    ``file_name`` and an in-memory ``file_object``). Wire an instance to
    the parser via :meth:`callbacks`.
    """

    def __init__(self):
        self.fields = {}
        self.files = {}
        self._pending_headers = []
        self._field_name = None
        self._file_name = None
        self._body = None

    def _parse_cd(self, value: str):
        """Extract ``name``/``filename`` from a Content-Disposition value."""
        name = None
        filename = None
        try:
            for segment in (piece.strip() for piece in value.split(";")):
                lowered = segment.lower()
                if lowered.startswith("name="):
                    name = segment.split("=", 1)[1].strip().strip('"')
                elif lowered.startswith("filename="):
                    filename = segment.split("=", 1)[1].strip().strip('"')
        except Exception:
            # Malformed header: fall through with whatever was recovered.
            pass
        return name, filename

    def on_part_begin(self):
        # Reset per-part state and open a fresh in-memory buffer.
        self._pending_headers = []
        self._field_name = None
        self._file_name = None
        self._body = io.BytesIO()

    def on_header_field(self, data: bytes, start: int, end: int):
        # A new header name starts; pair it with an (initially empty) value.
        self._pending_headers.append([data[start:end].decode("latin-1"), ""])

    def on_header_value(self, data: bytes, start: int, end: int):
        # Header values may arrive in several chunks; append to the latest.
        if self._pending_headers:
            self._pending_headers[-1][1] += data[start:end].decode("latin-1")

    def on_headers_finished(self):
        # Only Content-Disposition matters for naming the part.
        for header_name, header_value in self._pending_headers:
            if header_name.lower() == "content-disposition":
                parsed_name, parsed_filename = self._parse_cd(header_value)
                self._field_name = parsed_name
                self._file_name = parsed_filename

    def on_part_data(self, data: bytes, start: int, end: int):
        if self._body is not None:
            self._body.write(data[start:end])

    def on_part_end(self):
        # A part without a name (no Content-Disposition) is discarded.
        if self._field_name is None:
            self._body = None
            return
        if self._file_name:
            self.files[self._field_name] = {
                "file_name": self._file_name,
                "file_object": self._body,
            }
        else:
            self.fields[self._field_name] = self._body.getvalue()
        self._body = None

    def callbacks(self):
        """Return the callback mapping expected by MultipartParser."""
        hook_names = (
            "on_part_begin",
            "on_header_field",
            "on_header_value",
            "on_headers_finished",
            "on_part_data",
            "on_part_end",
        )
        return {hook: getattr(self, hook) for hook in hook_names}
110
+
111
+
112
+ class GAIAMCPBridge:
113
+ """HTTP-native MCP Bridge for GAIA - no WebSockets needed!"""
114
+
115
    def __init__(
        self,
        host: str = "localhost",
        port: int = 8765,
        base_url: str = None,
        verbose: bool = False,
    ):
        """Create the bridge and eagerly register agents and tools.

        Args:
            host: Host name the server advertises (display/status only).
            port: Port the server advertises (display/status only).
            base_url: LLM backend URL; falls back to the local default.
            verbose: When True, request/response logging is enabled globally.
        """
        self.host = host
        self.port = port
        self.base_url = base_url or "http://localhost:8000/api/v1"
        self.agents = {}  # agent name -> registration metadata (plus lazy "instance")
        self.tools = {}  # tool name -> MCP tool descriptor
        self.llm_client = None  # created lazily on first direct LLM query
        self.verbose = verbose
        # Mirror the flag into the module-level global so the HTTP handler
        # class can honor it without holding a bridge reference.
        global VERBOSE
        VERBOSE = verbose

        # Initialize on creation
        self._initialize_agents()
        self._register_tools()
135
+
136
+ def _initialize_agents(self):
137
+ """Initialize all GAIA agents."""
138
+ try:
139
+ # LLM agent
140
+ self.agents["llm"] = {
141
+ "module": "gaia.apps.llm.app",
142
+ "function": "main",
143
+ "description": "Direct LLM interaction",
144
+ "capabilities": ["query", "stream", "model_selection"],
145
+ }
146
+
147
+ # Chat agent
148
+ self.agents["chat"] = {
149
+ "module": "gaia.chat.app",
150
+ "function": "main",
151
+ "description": "Interactive chat",
152
+ "capabilities": ["conversation", "history", "context_management"],
153
+ }
154
+
155
+ # Blender agent
156
+ try:
157
+ self.agents["blender"] = {
158
+ "class": BlenderAgent,
159
+ "description": "3D content creation",
160
+ "capabilities": ["3d_modeling", "scene_manipulation", "rendering"],
161
+ }
162
+ except ImportError:
163
+ logger.warning("Blender agent not available")
164
+ # Summarize agent
165
+ try:
166
+ from gaia.agents.summarize.agent import SummarizerAgent
167
+
168
+ self.agents["summarize"] = {
169
+ "class": SummarizerAgent,
170
+ "description": "Text/document summarization",
171
+ "capabilities": ["summarize", "pdf", "email", "transcript"],
172
+ "init_params": {},
173
+ }
174
+ logger.info("✅ Summarize agent registered")
175
+ except ImportError as e:
176
+ logger.warning(f"Summarize agent not available: {e}")
177
+ # Jira agent - THE KEY ADDITION
178
+ try:
179
+ from gaia.agents.jira.agent import JiraAgent
180
+
181
+ self.agents["jira"] = {
182
+ "class": JiraAgent,
183
+ "description": "Natural language Jira orchestration",
184
+ "capabilities": ["search", "create", "update", "bulk_operations"],
185
+ "init_params": {
186
+ "model_id": "Qwen3-Coder-30B-A3B-Instruct-GGUF",
187
+ "silent_mode": True,
188
+ "debug": False,
189
+ },
190
+ }
191
+ logger.info("✅ Jira agent registered")
192
+ except ImportError as e:
193
+ logger.warning(f"Jira agent not available: {e}")
194
+
195
+ logger.info(f"Initialized {len(self.agents)} agents")
196
+
197
+ except Exception as e:
198
+ logger.error(f"Agent initialization error: {e}")
199
+
200
+ def _register_tools(self):
201
+ """Register available tools."""
202
+ # Load from mcp.json if available
203
+ try:
204
+ mcp_config_path = os.path.join(os.path.dirname(__file__), "mcp.json")
205
+ if os.path.exists(mcp_config_path):
206
+ with open(mcp_config_path, "r") as f:
207
+ config = json.load(f)
208
+ tools_config = config.get("tools", {})
209
+ # Convert tool config to proper MCP format with name field
210
+ self.tools = {}
211
+ for tool_name, tool_data in tools_config.items():
212
+ self.tools[tool_name] = {
213
+ "name": tool_name,
214
+ "description": tool_data.get("description", ""),
215
+ "servers": tool_data.get("servers", []),
216
+ "parameters": tool_data.get("parameters", {}),
217
+ }
218
+ logger.info(f"Loaded {len(self.tools)} tools from mcp.json")
219
+ except Exception as e:
220
+ logger.warning(f"Could not load mcp.json: {e}")
221
+
222
+ # Ensure core tools are registered
223
+ if "gaia.jira" not in self.tools:
224
+ self.tools["gaia.jira"] = {
225
+ "name": "gaia.jira",
226
+ "description": "Natural language Jira operations",
227
+ "inputSchema": {
228
+ "type": "object",
229
+ "properties": {
230
+ "query": {"type": "string"},
231
+ "operation": {
232
+ "type": "string",
233
+ "enum": ["query", "create", "update"],
234
+ },
235
+ },
236
+ },
237
+ }
238
+
239
+ if "gaia.chat" not in self.tools:
240
+ self.tools["gaia.chat"] = {
241
+ "name": "gaia.chat",
242
+ "description": "Conversational chat with context",
243
+ "inputSchema": {
244
+ "type": "object",
245
+ "properties": {"query": {"type": "string"}},
246
+ },
247
+ }
248
+
249
+ if "gaia.query" not in self.tools:
250
+ self.tools["gaia.query"] = {
251
+ "name": "gaia.query",
252
+ "description": "Direct LLM queries (no conversation context)",
253
+ "inputSchema": {
254
+ "type": "object",
255
+ "properties": {"query": {"type": "string"}},
256
+ },
257
+ }
258
+
259
+ def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
260
+ """Execute a tool and return results."""
261
+ try:
262
+ if tool_name == "gaia.jira":
263
+ return self._execute_jira(arguments)
264
+ elif tool_name == "gaia.query":
265
+ return self._execute_query(arguments)
266
+ elif tool_name == "gaia.chat":
267
+ return self._execute_chat(arguments)
268
+ elif tool_name == "gaia.blender.create":
269
+ return self._execute_blender(arguments)
270
+ elif tool_name == "gaia.summarize":
271
+ return self._execute_summarize(arguments)
272
+ else:
273
+ return {"error": f"Tool not implemented: {tool_name}"}
274
+ except Exception as e:
275
+ logger.error(f"Tool execution error: {e}")
276
+ return {"error": str(e)}
277
+
278
+ def _execute_jira(self, args: Dict[str, Any]) -> Dict[str, Any]:
279
+ """Execute Jira operations."""
280
+ query = args.get("query", "")
281
+
282
+ # Get or create agent
283
+ agent_config = self.agents.get("jira")
284
+ if not agent_config:
285
+ return {"error": "Jira agent not available"}
286
+
287
+ # Lazy initialization
288
+ if "instance" not in agent_config:
289
+ agent_class = agent_config["class"]
290
+ init_params = agent_config.get("init_params", {})
291
+ agent_config["instance"] = agent_class(**init_params)
292
+
293
+ # Initialize Jira config discovery
294
+ try:
295
+ config = agent_config["instance"].initialize()
296
+ logger.info(
297
+ f"Jira initialized: {len(config.get('projects', []))} projects found"
298
+ )
299
+ except Exception as e:
300
+ logger.warning(f"Jira config discovery failed: {e}")
301
+
302
+ agent = agent_config["instance"]
303
+
304
+ # Execute query
305
+ result = agent.process_query(query, trace=False)
306
+
307
+ return {
308
+ "success": True,
309
+ "result": result.get("final_answer", ""),
310
+ "steps_taken": result.get("steps_taken", 0),
311
+ "conversation": result.get("conversation", []),
312
+ }
313
+
314
+ def _execute_query(self, args: Dict[str, Any]) -> Dict[str, Any]:
315
+ """Execute LLM query."""
316
+ if not self.llm_client:
317
+ self.llm_client = create_client("lemonade", base_url=self.base_url)
318
+
319
+ response = self.llm_client.generate(
320
+ prompt=args.get("query", ""),
321
+ model=args.get("model"),
322
+ max_tokens=args.get("max_tokens", 500),
323
+ )
324
+
325
+ return {"success": True, "result": response}
326
+
327
+ def _execute_chat(self, args: Dict[str, Any]) -> Dict[str, Any]:
328
+ """Execute chat interaction with conversation context."""
329
+ try:
330
+ from gaia.chat.sdk import ChatConfig, ChatSDK
331
+
332
+ # Initialize chat SDK if not already done
333
+ if not hasattr(self, "chat_sdk"):
334
+ # ChatSDK uses the global LLM configuration, not a base_url
335
+ config = ChatConfig()
336
+ self.chat_sdk = ChatSDK(config=config)
337
+
338
+ # Get the query
339
+ query = args.get("query", "")
340
+
341
+ # Send message and get response
342
+ chat_response = self.chat_sdk.send(query)
343
+
344
+ # Extract the text response
345
+ if hasattr(chat_response, "text"):
346
+ response = chat_response.text
347
+ elif hasattr(chat_response, "content"):
348
+ response = chat_response.content
349
+ else:
350
+ response = str(chat_response)
351
+
352
+ return {"success": True, "result": response}
353
+ except Exception as e:
354
+ logger.error(f"Chat execution error: {e}")
355
+ return {"success": False, "error": str(e)}
356
+
357
+ def _execute_blender(self, args: Dict[str, Any]) -> Dict[str, Any]:
358
+ """Execute Blender operations."""
359
+ # Implementation would go here
360
+ return {"success": True, "result": "Blender operation completed"}
361
+
362
+ def _execute_summarize(self, args: Dict[str, Any]) -> Dict[str, Any]:
363
+ """Execute summarize operations.
364
+ Returns either a non-streaming result or streaming iterator metadata.
365
+ """
366
+ collector = args.get("multipart_collector")
367
+ if not collector:
368
+ return {"success": False, "error": "Missing multipart_collector"}
369
+
370
+ file_rec = collector.files.get("file")
371
+ style_bytes = collector.fields.get("style") or b"brief"
372
+ stream_val = collector.fields.get("stream")
373
+ accept_sse = bool(args.get("accept_sse"))
374
+
375
+ # Normalize flags
376
+ try:
377
+ style = (
378
+ style_bytes.decode("utf-8", errors="ignore")
379
+ if isinstance(style_bytes, (bytes, bytearray))
380
+ else str(style_bytes)
381
+ )
382
+ except Exception:
383
+ style = "brief"
384
+ try:
385
+ stream = str(
386
+ (
387
+ stream_val.decode("utf-8")
388
+ if isinstance(stream_val, (bytes, bytearray))
389
+ else stream_val
390
+ )
391
+ or ""
392
+ ).lower() in ["1", "true", "yes"]
393
+ except Exception:
394
+ stream = False
395
+ # Honor Accept: text/event-stream if not explicitly set by field
396
+ if not stream and accept_sse:
397
+ stream = True
398
+
399
+ if not file_rec:
400
+ return {"success": False, "error": "No file uploaded"}
401
+
402
+ # Save file to temp
403
+ filename = file_rec.get("file_name")
404
+ ext = os.path.splitext(filename)[1] if filename else ".pdf"
405
+ tmpfile_path = None
406
+ try:
407
+ with tempfile.NamedTemporaryFile(
408
+ delete=False, suffix=ext or ".pdf"
409
+ ) as tmpfile:
410
+ buf = file_rec.get("file_object")
411
+ buf.seek(0)
412
+ shutil.copyfileobj(buf, tmpfile)
413
+ tmpfile_path = tmpfile.name
414
+
415
+ # Initialize agent
416
+ agent_config = self.agents.get("summarize")
417
+ if not agent_config:
418
+ return {"success": False, "error": "Summarize agent not available"}
419
+ if "instance" not in agent_config:
420
+ agent_class = agent_config["class"]
421
+ init_params = agent_config.get("init_params", {})
422
+ agent_config["instance"] = agent_class(**init_params)
423
+ agent = agent_config["instance"]
424
+
425
+ # Validate style early to provide clear error message
426
+ try:
427
+ agent._validate_styles(style) # pylint: disable=protected-access
428
+ except ValueError as e:
429
+ return {"success": False, "error": str(e)}
430
+
431
+ if stream:
432
+ content = agent.get_summary_content_from_file(Path(tmpfile_path))
433
+ if not content:
434
+ return {
435
+ "success": False,
436
+ "error": "No extractable text found in uploaded file",
437
+ }
438
+ iterator = agent.summarize_stream(
439
+ content, input_type="pdf", style=style
440
+ )
441
+ # Return tmpfile_path for cleanup after streaming completes
442
+ return {
443
+ "success": True,
444
+ "stream": True,
445
+ "style": style,
446
+ "tmpfile_path": tmpfile_path,
447
+ "iterator": iterator,
448
+ }
449
+ else:
450
+ result = agent.summarize_file(tmpfile_path, styles=[style])
451
+ return {
452
+ "success": True,
453
+ "stream": False,
454
+ "style": style,
455
+ "result": result,
456
+ }
457
+ finally:
458
+ # Clean up temp file for non-streaming responses or on error
459
+ # For streaming responses, cleanup happens in the HTTP handler after streaming completes
460
+ if tmpfile_path and not stream and os.path.exists(tmpfile_path):
461
+ try:
462
+ os.unlink(tmpfile_path)
463
+ except Exception as e:
464
+ logger.warning(f"Failed to cleanup temp file {tmpfile_path}: {e}")
465
+
466
+
467
class MCPHTTPHandler(BaseHTTPRequestHandler):
    """HTTP handler for MCP protocol.

    Serves REST convenience endpoints (/health, /status, /tools, /chat,
    /jira, /llm, /summarize) plus a JSON-RPC 2.0 endpoint at "/".
    """

    def __init__(self, *args, bridge: GAIAMCPBridge = None, **kwargs):
        # Assign before super().__init__: the base class handles the request
        # (calling do_GET/do_POST) inside its constructor.
        self.bridge = bridge or GAIAMCPBridge()
        super().__init__(*args, **kwargs)

    def log_request_details(self, method, path, body=None):
        """Log incoming request details if verbose mode is enabled."""
        if VERBOSE:
            client_addr = self.client_address[0] if self.client_address else "unknown"
            logger.info(f"MCP Request: {method} {path} from {client_addr}")
            if body:
                logger.debug(f"Request body: {json.dumps(body, indent=2)}")

    def do_GET(self):
        """Handle GET requests (health, tools listing, detailed status)."""
        self.log_request_details("GET", self.path)
        parsed = urlparse(self.path)

        if parsed.path == "/health":
            self.send_json(
                200,
                {
                    "status": "healthy",
                    "service": "GAIA MCP Bridge (HTTP)",
                    "agents": len(self.bridge.agents),
                    "tools": len(self.bridge.tools),
                },
            )
        elif parsed.path == "/tools" or parsed.path == "/v1/tools":
            self.send_json(200, {"tools": list(self.bridge.tools.values())})
        elif parsed.path == "/status":
            # Comprehensive status endpoint with all details
            agents_info = {}
            for name, agent in self.bridge.agents.items():
                agents_info[name] = {
                    "description": agent.get("description", ""),
                    "capabilities": agent.get("capabilities", []),
                    "type": "class" if "class" in agent else "module",
                }

            tools_info = {}
            for name, tool in self.bridge.tools.items():
                tools_info[name] = {
                    "description": tool.get("description", ""),
                    "inputSchema": tool.get("inputSchema", {}),
                }

            self.send_json(
                200,
                {
                    "status": "healthy",
                    "service": "GAIA MCP Bridge (HTTP)",
                    "version": "2.0.0",
                    "host": self.bridge.host,
                    "port": self.bridge.port,
                    "llm_backend": self.bridge.base_url,
                    "agents": agents_info,
                    "tools": tools_info,
                    "endpoints": {
                        "health": "GET /health - Health check",
                        "status": "GET /status - Detailed status (this endpoint)",
                        "tools": "GET /tools - List available tools",
                        "chat": "POST /chat - Interactive chat",
                        "jira": "POST /jira - Jira operations",
                        "llm": "POST /llm - Direct LLM queries",
                        "jsonrpc": "POST / - JSON-RPC endpoint",
                    },
                },
            )
        else:
            self.send_json(404, {"error": "Not found"})

    def do_POST(self):
        """Handle POST requests - main MCP endpoint.

        Accepts application/json bodies (JSON-RPC and REST endpoints) and
        multipart/form-data uploads (used by /summarize).
        """
        content_length = int(self.headers.get("Content-Length", 0))

        parsed = urlparse(self.path)
        ctype = self.headers.get("content-type", "")

        if ctype.startswith("application/json") and content_length > 0:
            body = self.rfile.read(content_length)
            try:
                data = json.loads(body.decode("utf-8"))
                self.log_request_details("POST", self.path, data)
            except json.JSONDecodeError:
                self.log_request_details("POST", self.path)
                logger.error("Invalid JSON in request body")
                self.send_json(400, {"error": "Invalid JSON"})
                return
        elif ctype.startswith("multipart/form-data"):
            raw_data = self.rfile.read(content_length)

            # Extract boundary using python-multipart helper and ensure bytes
            _, opts = parse_options_header(ctype)
            boundary = opts.get(b"boundary")
            if not boundary:
                # Raising here (as before) killed the handler without ever
                # answering the client; report a client error instead.
                logger.error("Missing multipart boundary in Content-Type")
                self.send_json(400, {"error": "Missing multipart boundary"})
                return

            # parse_options_header yields bytes; strip optional quotes and
            # re-encode for the parser.
            boundary_bytes = boundary.decode("latin-1").strip('"').encode("utf-8")

            collector = MultipartCollector()
            mp = MultipartParser(boundary_bytes, callbacks=collector.callbacks())
            try:
                mp.write(raw_data)
                mp.finalize()
            except Exception as e:
                # A malformed body is a client error, not a server crash.
                logger.error(f"Malformed multipart body: {e}")
                self.send_json(400, {"error": "Malformed multipart body"})
                return
            data = {}
            data["multipart_collector"] = collector
        else:
            data = {}
            self.log_request_details("POST", self.path)

        # Handle different endpoints
        if parsed.path in ["/", "/v1/messages", "/rpc"]:
            # JSON-RPC endpoint
            self.handle_jsonrpc(data)
        elif parsed.path == "/chat":
            # Direct chat endpoint for conversations
            result = self.bridge.execute_tool("gaia.chat", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/jira":
            # Direct Jira endpoint for convenience
            result = self.bridge.execute_tool("gaia.jira", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/llm":
            # Direct LLM endpoint (no conversation context)
            result = self.bridge.execute_tool("gaia.query", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/summarize":
            # Direct Summarize endpoint accept multipart/form-data (file upload) for browser clients
            accept_header = self.headers.get("Accept", "")
            if isinstance(data, dict):
                data["accept_sse"] = "text/event-stream" in accept_header
            result = self.bridge.execute_tool("gaia.summarize", data)
            if result.get("success") and result.get("stream"):
                self.send_sse_headers()
                try:
                    self.stream_sse(result.get("iterator", []))
                finally:
                    # Streaming responses hand off temp-file cleanup to us.
                    tmp = result.get("tmpfile_path")
                    if tmp and os.path.exists(tmp):
                        os.unlink(tmp)
                return
            else:
                self.send_json(200 if result.get("success") else 500, result)
                return
        else:
            self.send_json(404, {"error": "Not found"})

    def handle_jsonrpc(self, data):
        """Handle JSON-RPC 2.0 requests (initialize, tools/list, tools/call)."""
        # Validate JSON-RPC
        if "jsonrpc" not in data or data["jsonrpc"] != "2.0":
            self.send_json(
                400,
                {
                    "jsonrpc": "2.0",
                    "error": {"code": -32600, "message": "Invalid Request"},
                    "id": data.get("id"),
                },
            )
            return

        method = data.get("method")
        params = data.get("params", {})
        request_id = data.get("id")

        # Route methods
        if method == "initialize":
            result = {
                "protocolVersion": "1.0.0",
                "serverInfo": {"name": "GAIA MCP Bridge", "version": "2.0.0"},
                "capabilities": {"tools": True, "resources": True, "prompts": True},
            }
        elif method == "tools/list":
            result = {"tools": list(self.bridge.tools.values())}
        elif method == "tools/call":
            tool_name = params.get("name")
            arguments = params.get("arguments", {})
            tool_result = self.bridge.execute_tool(tool_name, arguments)
            result = {"content": [{"type": "text", "text": json.dumps(tool_result)}]}
        else:
            self.send_json(
                400,
                {
                    "jsonrpc": "2.0",
                    "error": {"code": -32601, "message": f"Method not found: {method}"},
                    "id": request_id,
                },
            )
            return

        # Send response
        self.send_json(200, {"jsonrpc": "2.0", "result": result, "id": request_id})

    def do_OPTIONS(self):
        """Handle OPTIONS for CORS preflight."""
        self.log_request_details("OPTIONS", self.path)
        self.send_response(200)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
        self.send_header("Access-Control-Allow-Headers", "Content-Type")
        self.end_headers()

    def send_sse_headers(self):
        """Send standard headers for Server-Sent Events."""
        self.send_response(200)
        self.send_header("Content-Type", "text/event-stream")
        self.send_header("Cache-Control", "no-cache")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Connection", "keep-alive")
        # Disable proxy buffering (nginx) so chunks reach the client promptly.
        self.send_header("X-Accel-Buffering", "no")
        self.end_headers()

    def stream_sse(self, iterator):
        """Stream SSE data from an iterator of chunk dicts.

        Chunks with ``is_complete`` set terminate with a ``complete`` event
        carrying performance metadata; all others carry a text delta.
        """
        for chunk in iterator:
            if chunk.get("is_complete"):
                data_out = json.dumps(
                    {"event": "complete", "performance": chunk.get("performance", {})}
                )
            else:
                data_out = json.dumps({"text": chunk.get("text", "")})
            self.wfile.write(f"data: {data_out}\n\n".encode("utf-8"))
            self.wfile.flush()

    def send_json(self, status, data):
        """Send a JSON response with permissive CORS headers."""
        if VERBOSE:
            logger.info(f"MCP Response: Status {status}")
            logger.debug(f"Response body: {json.dumps(data, indent=2)}")

        self.send_response(status)
        self.send_header("Content-Type", "application/json")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()
        self.wfile.write(json.dumps(data).encode("utf-8"))

    def log_message(self, format, *args):
        """Override to control standard HTTP logging."""
        # In verbose mode, skip the built-in HTTP logging since we have custom logging
        if VERBOSE:
            # We already log detailed info in log_request_details and send_json
            pass
        elif "/health" not in args[0]:
            # In non-verbose mode, skip health checks but log everything else
            super().log_message(format, *args)
720
+
721
+
722
def start_server(host="localhost", port=8765, base_url=None, verbose=False):
    """Start the HTTP MCP server and block until interrupted.

    Args:
        host: Hostname used in printed URLs and passed to the bridge.
        port: TCP port to listen on.
        base_url: LLM backend URL forwarded to GAIAMCPBridge.
        verbose: Enable verbose request/response logging.
    """
    import io

    # Fix Windows Unicode
    if sys.platform == "win32":
        sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")

    # Fix Linux IPv6 issue: When host is "localhost", Python's socket might bind
    # to ::1 (IPv6) which curl can't connect to by default. Use 0.0.0.0 on Linux
    # to bind to all IPv4 interfaces. Keep localhost on Windows where it works.
    bind_host = host
    if host == "localhost" and sys.platform != "win32":
        bind_host = "0.0.0.0"

    logger.info(f"Creating MCP bridge for {host}:{port}")

    # Create bridge with verbose flag
    bridge = GAIAMCPBridge(host, port, base_url, verbose=verbose)

    # Create handler with bridge
    def handler(*args, **kwargs):
        return MCPHTTPHandler(*args, bridge=bridge, **kwargs)

    # Start server - use bind_host for actual socket binding
    logger.info(f"Creating HTTP server on {bind_host}:{port}")
    try:
        server = HTTPServer((bind_host, port), handler)
        logger.info(
            f"HTTP server created successfully, listening on {bind_host}:{port}"
        )
    except Exception as e:
        logger.error(f"Failed to create HTTP server: {e}")
        raise

    print("=" * 60, flush=True)
    print("🚀 GAIA MCP Bridge - HTTP Native")
    print("=" * 60)
    print(f"Server: http://{host}:{port}")
    print(f"LLM Backend: {bridge.base_url}")
    print(f"Agents: {list(bridge.agents.keys())}")
    print(f"Tools: {list(bridge.tools.keys())}")
    if verbose:
        # Plain strings: these lines have no placeholders (were f-strings).
        print("\n🔍 Verbose Mode: ENABLED")
        print("   All requests will be logged to console and gaia.log")
        logger.info("MCP Bridge started in VERBOSE mode - all requests will be logged")
    print("\n📍 Endpoints:")
    print(f"  GET  http://{host}:{port}/health  - Health check")
    print(
        f"  GET  http://{host}:{port}/status  - Detailed status with agents & tools"
    )
    print(f"  GET  http://{host}:{port}/tools   - List tools")
    print(f"  POST http://{host}:{port}/        - JSON-RPC")
    print(f"  POST http://{host}:{port}/chat    - Chat (with context)")
    print(f"  POST http://{host}:{port}/jira    - Direct Jira")
    print(f"  POST http://{host}:{port}/llm     - Direct LLM (no context)")
    print("\n🔧 Usage Examples:")
    print(
        '  Chat: curl -X POST http://localhost:8765/chat -d \'{"query":"Hello GAIA!"}\''
    )
    print(
        '  Jira: curl -X POST http://localhost:8765/jira -d \'{"query":"show my issues"}\''
    )
    print('  n8n:  HTTP Request → POST /chat → {"query": "..."}')
    print("  MCP:  JSON-RPC to / with method: tools/call")
    print("=" * 60)
    print("\nPress Ctrl+C to stop\n", flush=True)

    logger.info(f"Starting serve_forever() on {bind_host}:{port}")
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print("\n✅ Server stopped")
795
+
796
+
797
if __name__ == "__main__":
    import argparse

    cli = argparse.ArgumentParser(description="GAIA MCP Bridge - HTTP Native")
    cli.add_argument("--host", default="localhost", help="Host to bind to")
    cli.add_argument("--port", type=int, default=8765, help="Port to listen on")
    cli.add_argument(
        "--base-url", default="http://localhost:8000/api/v1", help="LLM server URL"
    )
    cli.add_argument(
        "--verbose", action="store_true", help="Enable verbose logging for all requests"
    )

    opts = cli.parse_args()
    start_server(opts.host, opts.port, opts.base_url, opts.verbose)