amd-gaia 0.15.0__py3-none-any.whl → 0.15.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/METADATA +222 -223
- amd_gaia-0.15.2.dist-info/RECORD +182 -0
- {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/WHEEL +1 -1
- {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/entry_points.txt +1 -0
- {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/licenses/LICENSE.md +20 -20
- gaia/__init__.py +29 -29
- gaia/agents/__init__.py +19 -19
- gaia/agents/base/__init__.py +9 -9
- gaia/agents/base/agent.py +2132 -2177
- gaia/agents/base/api_agent.py +119 -120
- gaia/agents/base/console.py +1967 -1841
- gaia/agents/base/errors.py +237 -237
- gaia/agents/base/mcp_agent.py +86 -86
- gaia/agents/base/tools.py +88 -83
- gaia/agents/blender/__init__.py +7 -0
- gaia/agents/blender/agent.py +553 -556
- gaia/agents/blender/agent_simple.py +133 -135
- gaia/agents/blender/app.py +211 -211
- gaia/agents/blender/app_simple.py +41 -41
- gaia/agents/blender/core/__init__.py +16 -16
- gaia/agents/blender/core/materials.py +506 -506
- gaia/agents/blender/core/objects.py +316 -316
- gaia/agents/blender/core/rendering.py +225 -225
- gaia/agents/blender/core/scene.py +220 -220
- gaia/agents/blender/core/view.py +146 -146
- gaia/agents/chat/__init__.py +9 -9
- gaia/agents/chat/agent.py +809 -835
- gaia/agents/chat/app.py +1065 -1058
- gaia/agents/chat/session.py +508 -508
- gaia/agents/chat/tools/__init__.py +15 -15
- gaia/agents/chat/tools/file_tools.py +96 -96
- gaia/agents/chat/tools/rag_tools.py +1744 -1729
- gaia/agents/chat/tools/shell_tools.py +437 -436
- gaia/agents/code/__init__.py +7 -7
- gaia/agents/code/agent.py +549 -549
- gaia/agents/code/cli.py +377 -0
- gaia/agents/code/models.py +135 -135
- gaia/agents/code/orchestration/__init__.py +24 -24
- gaia/agents/code/orchestration/checklist_executor.py +1763 -1763
- gaia/agents/code/orchestration/checklist_generator.py +713 -713
- gaia/agents/code/orchestration/factories/__init__.py +9 -9
- gaia/agents/code/orchestration/factories/base.py +63 -63
- gaia/agents/code/orchestration/factories/nextjs_factory.py +118 -118
- gaia/agents/code/orchestration/factories/python_factory.py +106 -106
- gaia/agents/code/orchestration/orchestrator.py +841 -841
- gaia/agents/code/orchestration/project_analyzer.py +391 -391
- gaia/agents/code/orchestration/steps/__init__.py +67 -67
- gaia/agents/code/orchestration/steps/base.py +188 -188
- gaia/agents/code/orchestration/steps/error_handler.py +314 -314
- gaia/agents/code/orchestration/steps/nextjs.py +828 -828
- gaia/agents/code/orchestration/steps/python.py +307 -307
- gaia/agents/code/orchestration/template_catalog.py +469 -469
- gaia/agents/code/orchestration/workflows/__init__.py +14 -14
- gaia/agents/code/orchestration/workflows/base.py +80 -80
- gaia/agents/code/orchestration/workflows/nextjs.py +186 -186
- gaia/agents/code/orchestration/workflows/python.py +94 -94
- gaia/agents/code/prompts/__init__.py +11 -11
- gaia/agents/code/prompts/base_prompt.py +77 -77
- gaia/agents/code/prompts/code_patterns.py +2034 -2036
- gaia/agents/code/prompts/nextjs_prompt.py +40 -40
- gaia/agents/code/prompts/python_prompt.py +109 -109
- gaia/agents/code/schema_inference.py +365 -365
- gaia/agents/code/system_prompt.py +41 -41
- gaia/agents/code/tools/__init__.py +42 -42
- gaia/agents/code/tools/cli_tools.py +1138 -1138
- gaia/agents/code/tools/code_formatting.py +319 -319
- gaia/agents/code/tools/code_tools.py +769 -769
- gaia/agents/code/tools/error_fixing.py +1347 -1347
- gaia/agents/code/tools/external_tools.py +180 -180
- gaia/agents/code/tools/file_io.py +845 -845
- gaia/agents/code/tools/prisma_tools.py +190 -190
- gaia/agents/code/tools/project_management.py +1016 -1016
- gaia/agents/code/tools/testing.py +321 -321
- gaia/agents/code/tools/typescript_tools.py +122 -122
- gaia/agents/code/tools/validation_parsing.py +461 -461
- gaia/agents/code/tools/validation_tools.py +806 -806
- gaia/agents/code/tools/web_dev_tools.py +1758 -1758
- gaia/agents/code/validators/__init__.py +16 -16
- gaia/agents/code/validators/antipattern_checker.py +241 -241
- gaia/agents/code/validators/ast_analyzer.py +197 -197
- gaia/agents/code/validators/requirements_validator.py +145 -145
- gaia/agents/code/validators/syntax_validator.py +171 -171
- gaia/agents/docker/__init__.py +7 -7
- gaia/agents/docker/agent.py +643 -642
- gaia/agents/emr/__init__.py +8 -8
- gaia/agents/emr/agent.py +1504 -1506
- gaia/agents/emr/cli.py +1322 -1322
- gaia/agents/emr/constants.py +475 -475
- gaia/agents/emr/dashboard/__init__.py +4 -4
- gaia/agents/emr/dashboard/server.py +1972 -1974
- gaia/agents/jira/__init__.py +11 -11
- gaia/agents/jira/agent.py +894 -894
- gaia/agents/jira/jql_templates.py +299 -299
- gaia/agents/routing/__init__.py +7 -7
- gaia/agents/routing/agent.py +567 -570
- gaia/agents/routing/system_prompt.py +75 -75
- gaia/agents/summarize/__init__.py +11 -0
- gaia/agents/summarize/agent.py +885 -0
- gaia/agents/summarize/prompts.py +129 -0
- gaia/api/__init__.py +23 -23
- gaia/api/agent_registry.py +238 -238
- gaia/api/app.py +305 -305
- gaia/api/openai_server.py +575 -575
- gaia/api/schemas.py +186 -186
- gaia/api/sse_handler.py +373 -373
- gaia/apps/__init__.py +4 -4
- gaia/apps/llm/__init__.py +6 -6
- gaia/apps/llm/app.py +184 -169
- gaia/apps/summarize/app.py +116 -633
- gaia/apps/summarize/html_viewer.py +133 -133
- gaia/apps/summarize/pdf_formatter.py +284 -284
- gaia/audio/__init__.py +2 -2
- gaia/audio/audio_client.py +439 -439
- gaia/audio/audio_recorder.py +269 -269
- gaia/audio/kokoro_tts.py +599 -599
- gaia/audio/whisper_asr.py +432 -432
- gaia/chat/__init__.py +16 -16
- gaia/chat/app.py +428 -430
- gaia/chat/prompts.py +522 -522
- gaia/chat/sdk.py +1228 -1225
- gaia/cli.py +5659 -5632
- gaia/database/__init__.py +10 -10
- gaia/database/agent.py +176 -176
- gaia/database/mixin.py +290 -290
- gaia/database/testing.py +64 -64
- gaia/eval/batch_experiment.py +2332 -2332
- gaia/eval/claude.py +542 -542
- gaia/eval/config.py +37 -37
- gaia/eval/email_generator.py +512 -512
- gaia/eval/eval.py +3179 -3179
- gaia/eval/groundtruth.py +1130 -1130
- gaia/eval/transcript_generator.py +582 -582
- gaia/eval/webapp/README.md +167 -167
- gaia/eval/webapp/package-lock.json +875 -875
- gaia/eval/webapp/package.json +20 -20
- gaia/eval/webapp/public/app.js +3402 -3402
- gaia/eval/webapp/public/index.html +87 -87
- gaia/eval/webapp/public/styles.css +3661 -3661
- gaia/eval/webapp/server.js +415 -415
- gaia/eval/webapp/test-setup.js +72 -72
- gaia/installer/__init__.py +23 -0
- gaia/installer/init_command.py +1275 -0
- gaia/installer/lemonade_installer.py +619 -0
- gaia/llm/__init__.py +10 -2
- gaia/llm/base_client.py +60 -0
- gaia/llm/exceptions.py +12 -0
- gaia/llm/factory.py +70 -0
- gaia/llm/lemonade_client.py +3421 -3221
- gaia/llm/lemonade_manager.py +294 -294
- gaia/llm/providers/__init__.py +9 -0
- gaia/llm/providers/claude.py +108 -0
- gaia/llm/providers/lemonade.py +118 -0
- gaia/llm/providers/openai_provider.py +79 -0
- gaia/llm/vlm_client.py +382 -382
- gaia/logger.py +189 -189
- gaia/mcp/agent_mcp_server.py +245 -245
- gaia/mcp/blender_mcp_client.py +138 -138
- gaia/mcp/blender_mcp_server.py +648 -648
- gaia/mcp/context7_cache.py +332 -332
- gaia/mcp/external_services.py +518 -518
- gaia/mcp/mcp_bridge.py +811 -550
- gaia/mcp/servers/__init__.py +6 -6
- gaia/mcp/servers/docker_mcp.py +83 -83
- gaia/perf_analysis.py +361 -0
- gaia/rag/__init__.py +10 -10
- gaia/rag/app.py +293 -293
- gaia/rag/demo.py +304 -304
- gaia/rag/pdf_utils.py +235 -235
- gaia/rag/sdk.py +2194 -2194
- gaia/security.py +183 -163
- gaia/talk/app.py +287 -289
- gaia/talk/sdk.py +538 -538
- gaia/testing/__init__.py +87 -87
- gaia/testing/assertions.py +330 -330
- gaia/testing/fixtures.py +333 -333
- gaia/testing/mocks.py +493 -493
- gaia/util.py +46 -46
- gaia/utils/__init__.py +33 -33
- gaia/utils/file_watcher.py +675 -675
- gaia/utils/parsing.py +223 -223
- gaia/version.py +100 -100
- amd_gaia-0.15.0.dist-info/RECORD +0 -168
- gaia/agents/code/app.py +0 -266
- gaia/llm/llm_client.py +0 -723
- {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/top_level.txt +0 -0
gaia/mcp/mcp_bridge.py
CHANGED
@@ -1,550 +1,811 @@
[Previous 550-line implementation removed in full; the diff source truncates the removed lines, so only the rewritten 811-line file is shown below.]
#!/usr/bin/env python
#
# Copyright(C) 2025-2026 Advanced Micro Devices, Inc. All rights reserved.
# SPDX-License-Identifier: MIT

"""
GAIA MCP Bridge - HTTP Native Implementation
No WebSockets, just clean HTTP + JSON-RPC for maximum compatibility
"""

import io
import json
import os
import shutil
import sys
import tempfile
from http.server import BaseHTTPRequestHandler, HTTPServer
from pathlib import Path
from typing import Any, Dict
from urllib.parse import urlparse

from python_multipart.multipart import MultipartParser, parse_options_header

# Add GAIA to path
sys.path.insert(
    0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
)

from gaia.agents.blender.agent import BlenderAgent
from gaia.llm import create_client
from gaia.logger import get_logger

logger = get_logger(__name__)

# Global verbose flag for request logging
VERBOSE = False


class MultipartCollector:
    def __init__(self):
        self.fields = {}
        self.files = {}
        self._headers = []
        self._name = None
        self._filename = None
        self._buffer = None

    def _parse_cd(self, value: str):
        name = None
        filename = None
        try:
            parts = [p.strip() for p in value.split(";")]
            for p in parts:
                pl = p.lower()
                if pl.startswith("name="):
                    name = p.split("=", 1)[1].strip().strip('"')
                elif pl.startswith("filename="):
                    filename = p.split("=", 1)[1].strip().strip('"')
        except Exception:
            pass
        return name, filename

    def on_part_begin(self):
        self._headers = []
        self._name = None
        self._filename = None
        self._buffer = io.BytesIO()

    def on_header_field(self, data: bytes, start: int, end: int):
        field = data[start:end].decode("latin-1")
        self._headers.append([field, ""])

    def on_header_value(self, data: bytes, start: int, end: int):
        if self._headers:
            self._headers[-1][1] += data[start:end].decode("latin-1")

    def on_headers_finished(self):
        for k, v in self._headers:
            if k.lower() == "content-disposition":
                name, filename = self._parse_cd(v)
                self._name = name
                self._filename = filename

    def on_part_data(self, data: bytes, start: int, end: int):
        if self._buffer is not None:
            self._buffer.write(data[start:end])

    def on_part_end(self):
        if self._name is None:
            self._buffer = None
            return
        if self._filename:
            self.files[self._name] = {
                "file_name": self._filename,
                "file_object": self._buffer,
            }
        else:
            self.fields[self._name] = self._buffer.getvalue()
        self._buffer = None

    def callbacks(self):
        return {
            "on_part_begin": self.on_part_begin,
            "on_header_field": self.on_header_field,
            "on_header_value": self.on_header_value,
            "on_headers_finished": self.on_headers_finished,
            "on_part_data": self.on_part_data,
            "on_part_end": self.on_part_end,
        }


class GAIAMCPBridge:
    """HTTP-native MCP Bridge for GAIA - no WebSockets needed!"""

    def __init__(
        self,
        host: str = "localhost",
        port: int = 8765,
        base_url: str = None,
        verbose: bool = False,
    ):
        self.host = host
        self.port = port
        self.base_url = base_url or "http://localhost:8000/api/v1"
        self.agents = {}
        self.tools = {}
        self.llm_client = None
        self.verbose = verbose
        global VERBOSE
        VERBOSE = verbose

        # Initialize on creation
        self._initialize_agents()
        self._register_tools()

    def _initialize_agents(self):
        """Initialize all GAIA agents."""
        try:
            # LLM agent
            self.agents["llm"] = {
                "module": "gaia.apps.llm.app",
                "function": "main",
                "description": "Direct LLM interaction",
                "capabilities": ["query", "stream", "model_selection"],
            }

            # Chat agent
            self.agents["chat"] = {
                "module": "gaia.chat.app",
                "function": "main",
                "description": "Interactive chat",
                "capabilities": ["conversation", "history", "context_management"],
            }

            # Blender agent
            try:
                self.agents["blender"] = {
                    "class": BlenderAgent,
                    "description": "3D content creation",
                    "capabilities": ["3d_modeling", "scene_manipulation", "rendering"],
                }
            except ImportError:
                logger.warning("Blender agent not available")
            # Summarize agent
            try:
                from gaia.agents.summarize.agent import SummarizerAgent

                self.agents["summarize"] = {
                    "class": SummarizerAgent,
                    "description": "Text/document summarization",
                    "capabilities": ["summarize", "pdf", "email", "transcript"],
                    "init_params": {},
                }
                logger.info("✅ Summarize agent registered")
            except ImportError as e:
                logger.warning(f"Summarize agent not available: {e}")
            # Jira agent - THE KEY ADDITION
            try:
                from gaia.agents.jira.agent import JiraAgent

                self.agents["jira"] = {
                    "class": JiraAgent,
                    "description": "Natural language Jira orchestration",
                    "capabilities": ["search", "create", "update", "bulk_operations"],
                    "init_params": {
                        "model_id": "Qwen3-Coder-30B-A3B-Instruct-GGUF",
                        "silent_mode": True,
                        "debug": False,
                    },
                }
                logger.info("✅ Jira agent registered")
            except ImportError as e:
                logger.warning(f"Jira agent not available: {e}")

            logger.info(f"Initialized {len(self.agents)} agents")

        except Exception as e:
            logger.error(f"Agent initialization error: {e}")

    def _register_tools(self):
        """Register available tools."""
        # Load from mcp.json if available
        try:
            mcp_config_path = os.path.join(os.path.dirname(__file__), "mcp.json")
            if os.path.exists(mcp_config_path):
                with open(mcp_config_path, "r") as f:
                    config = json.load(f)
                tools_config = config.get("tools", {})
                # Convert tool config to proper MCP format with name field
                self.tools = {}
                for tool_name, tool_data in tools_config.items():
                    self.tools[tool_name] = {
                        "name": tool_name,
                        "description": tool_data.get("description", ""),
                        "servers": tool_data.get("servers", []),
                        "parameters": tool_data.get("parameters", {}),
                    }
                logger.info(f"Loaded {len(self.tools)} tools from mcp.json")
        except Exception as e:
            logger.warning(f"Could not load mcp.json: {e}")

        # Ensure core tools are registered
        if "gaia.jira" not in self.tools:
            self.tools["gaia.jira"] = {
                "name": "gaia.jira",
                "description": "Natural language Jira operations",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "query": {"type": "string"},
                        "operation": {
                            "type": "string",
                            "enum": ["query", "create", "update"],
                        },
                    },
                },
            }

        if "gaia.chat" not in self.tools:
            self.tools["gaia.chat"] = {
                "name": "gaia.chat",
                "description": "Conversational chat with context",
                "inputSchema": {
                    "type": "object",
                    "properties": {"query": {"type": "string"}},
                },
            }

        if "gaia.query" not in self.tools:
            self.tools["gaia.query"] = {
                "name": "gaia.query",
                "description": "Direct LLM queries (no conversation context)",
                "inputSchema": {
                    "type": "object",
                    "properties": {"query": {"type": "string"}},
                },
            }

    def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
        """Execute a tool and return results."""
        try:
            if tool_name == "gaia.jira":
                return self._execute_jira(arguments)
            elif tool_name == "gaia.query":
                return self._execute_query(arguments)
            elif tool_name == "gaia.chat":
                return self._execute_chat(arguments)
            elif tool_name == "gaia.blender.create":
                return self._execute_blender(arguments)
            elif tool_name == "gaia.summarize":
                return self._execute_summarize(arguments)
            else:
                return {"error": f"Tool not implemented: {tool_name}"}
        except Exception as e:
            logger.error(f"Tool execution error: {e}")
            return {"error": str(e)}

    def _execute_jira(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute Jira operations."""
        query = args.get("query", "")

        # Get or create agent
        agent_config = self.agents.get("jira")
        if not agent_config:
            return {"error": "Jira agent not available"}

        # Lazy initialization
        if "instance" not in agent_config:
            agent_class = agent_config["class"]
            init_params = agent_config.get("init_params", {})
            agent_config["instance"] = agent_class(**init_params)

            # Initialize Jira config discovery
            try:
                config = agent_config["instance"].initialize()
                logger.info(
                    f"Jira initialized: {len(config.get('projects', []))} projects found"
                )
            except Exception as e:
                logger.warning(f"Jira config discovery failed: {e}")

        agent = agent_config["instance"]

        # Execute query
        result = agent.process_query(query, trace=False)

        return {
            "success": True,
            "result": result.get("final_answer", ""),
            "steps_taken": result.get("steps_taken", 0),
            "conversation": result.get("conversation", []),
        }

    def _execute_query(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute LLM query."""
        if not self.llm_client:
            self.llm_client = create_client("lemonade", base_url=self.base_url)

        response = self.llm_client.generate(
            prompt=args.get("query", ""),
            model=args.get("model"),
            max_tokens=args.get("max_tokens", 500),
        )

        return {"success": True, "result": response}

    def _execute_chat(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute chat interaction with conversation context."""
        try:
            from gaia.chat.sdk import ChatConfig, ChatSDK

            # Initialize chat SDK if not already done
            if not hasattr(self, "chat_sdk"):
                # ChatSDK uses the global LLM configuration, not a base_url
                config = ChatConfig()
                self.chat_sdk = ChatSDK(config=config)

            # Get the query
            query = args.get("query", "")

            # Send message and get response
            chat_response = self.chat_sdk.send(query)

            # Extract the text response
            if hasattr(chat_response, "text"):
                response = chat_response.text
            elif hasattr(chat_response, "content"):
                response = chat_response.content
            else:
                response = str(chat_response)

            return {"success": True, "result": response}
        except Exception as e:
            logger.error(f"Chat execution error: {e}")
            return {"success": False, "error": str(e)}

    def _execute_blender(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute Blender operations."""
        # Implementation would go here
        return {"success": True, "result": "Blender operation completed"}

    def _execute_summarize(self, args: Dict[str, Any]) -> Dict[str, Any]:
        """Execute summarize operations.
        Returns either a non-streaming result or streaming iterator metadata.
        """
        collector = args.get("multipart_collector")
        if not collector:
            return {"success": False, "error": "Missing multipart_collector"}

        file_rec = collector.files.get("file")
        style_bytes = collector.fields.get("style") or b"brief"
        stream_val = collector.fields.get("stream")
        accept_sse = bool(args.get("accept_sse"))

        # Normalize flags
        try:
            style = (
                style_bytes.decode("utf-8", errors="ignore")
                if isinstance(style_bytes, (bytes, bytearray))
                else str(style_bytes)
            )
        except Exception:
            style = "brief"
        try:
            stream = str(
                (
                    stream_val.decode("utf-8")
                    if isinstance(stream_val, (bytes, bytearray))
                    else stream_val
                )
                or ""
            ).lower() in ["1", "true", "yes"]
        except Exception:
            stream = False
        # Honor Accept: text/event-stream if not explicitly set by field
        if not stream and accept_sse:
            stream = True

        if not file_rec:
            return {"success": False, "error": "No file uploaded"}

        # Save file to temp
        filename = file_rec.get("file_name")
        ext = os.path.splitext(filename)[1] if filename else ".pdf"
        tmpfile_path = None
        try:
            with tempfile.NamedTemporaryFile(
                delete=False, suffix=ext or ".pdf"
            ) as tmpfile:
                buf = file_rec.get("file_object")
                buf.seek(0)
                shutil.copyfileobj(buf, tmpfile)
                tmpfile_path = tmpfile.name

            # Initialize agent
            agent_config = self.agents.get("summarize")
            if not agent_config:
                return {"success": False, "error": "Summarize agent not available"}
            if "instance" not in agent_config:
                agent_class = agent_config["class"]
                init_params = agent_config.get("init_params", {})
                agent_config["instance"] = agent_class(**init_params)
            agent = agent_config["instance"]

            # Validate style early to provide clear error message
            try:
                agent._validate_styles(style)  # pylint: disable=protected-access
            except ValueError as e:
                return {"success": False, "error": str(e)}

            if stream:
                content = agent.get_summary_content_from_file(Path(tmpfile_path))
                if not content:
                    return {
                        "success": False,
                        "error": "No extractable text found in uploaded file",
                    }
                iterator = agent.summarize_stream(
                    content, input_type="pdf", style=style
                )
                # Return tmpfile_path for cleanup after streaming completes
                return {
                    "success": True,
                    "stream": True,
                    "style": style,
                    "tmpfile_path": tmpfile_path,
                    "iterator": iterator,
                }
            else:
                result = agent.summarize_file(tmpfile_path, styles=[style])
                return {
                    "success": True,
                    "stream": False,
                    "style": style,
                    "result": result,
                }
        finally:
            # Clean up temp file for non-streaming responses or on error
            # For streaming responses, cleanup happens in the HTTP handler after streaming completes
            if tmpfile_path and not stream and os.path.exists(tmpfile_path):
                try:
                    os.unlink(tmpfile_path)
                except Exception as e:
                    logger.warning(f"Failed to cleanup temp file {tmpfile_path}: {e}")


class MCPHTTPHandler(BaseHTTPRequestHandler):
    """HTTP handler for MCP protocol."""

    def __init__(self, *args, bridge: GAIAMCPBridge = None, **kwargs):
        self.bridge = bridge or GAIAMCPBridge()
        super().__init__(*args, **kwargs)

    def log_request_details(self, method, path, body=None):
        """Log incoming request details if verbose mode is enabled."""
        if VERBOSE:
            client_addr = self.client_address[0] if self.client_address else "unknown"
            logger.info(f"MCP Request: {method} {path} from {client_addr}")
            if body:
                logger.debug(f"Request body: {json.dumps(body, indent=2)}")

    def do_GET(self):
        """Handle GET requests."""
        self.log_request_details("GET", self.path)
        parsed = urlparse(self.path)

        if parsed.path == "/health":
            self.send_json(
                200,
                {
                    "status": "healthy",
                    "service": "GAIA MCP Bridge (HTTP)",
                    "agents": len(self.bridge.agents),
                    "tools": len(self.bridge.tools),
                },
            )
        elif parsed.path == "/tools" or parsed.path == "/v1/tools":
            self.send_json(200, {"tools": list(self.bridge.tools.values())})
        elif parsed.path == "/status":
            # Comprehensive status endpoint with all details
            agents_info = {}
            for name, agent in self.bridge.agents.items():
                agents_info[name] = {
                    "description": agent.get("description", ""),
                    "capabilities": agent.get("capabilities", []),
                    "type": "class" if "class" in agent else "module",
                }

            tools_info = {}
            for name, tool in self.bridge.tools.items():
                tools_info[name] = {
                    "description": tool.get("description", ""),
                    "inputSchema": tool.get("inputSchema", {}),
                }

            self.send_json(
                200,
                {
                    "status": "healthy",
                    "service": "GAIA MCP Bridge (HTTP)",
                    "version": "2.0.0",
                    "host": self.bridge.host,
                    "port": self.bridge.port,
                    "llm_backend": self.bridge.base_url,
                    "agents": agents_info,
                    "tools": tools_info,
                    "endpoints": {
                        "health": "GET /health - Health check",
                        "status": "GET /status - Detailed status (this endpoint)",
                        "tools": "GET /tools - List available tools",
                        "chat": "POST /chat - Interactive chat",
                        "jira": "POST /jira - Jira operations",
                        "llm": "POST /llm - Direct LLM queries",
                        "jsonrpc": "POST / - JSON-RPC endpoint",
                    },
                },
            )
        else:
            self.send_json(404, {"error": "Not found"})

    def do_POST(self):
        """Handle POST requests - main MCP endpoint."""
        content_length = int(self.headers.get("Content-Length", 0))

        parsed = urlparse(self.path)
        ctype = self.headers.get("content-type", "")

        if ctype.startswith("application/json") and content_length > 0:
            body = self.rfile.read(content_length)
            try:
                data = json.loads(body.decode("utf-8"))
                self.log_request_details("POST", self.path, data)
            except json.JSONDecodeError:
                self.log_request_details("POST", self.path)
                logger.error("Invalid JSON in request body")
                self.send_json(400, {"error": "Invalid JSON"})
                return
        elif ctype.startswith("multipart/form-data"):
            raw_data = self.rfile.read(content_length)

            # Extract boundary using python-multipart helper and ensure bytes
            _, opts = parse_options_header(ctype)
            boundary = opts.get(b"boundary")
            if not boundary:
                raise ValueError("Missing multipart boundary")

            # boundary is bytes, decode for parser if needed
            boundary = boundary.decode("latin-1").strip('"')
            boundary_bytes = (
                boundary
                if isinstance(boundary, (bytes, bytearray))
                else str(boundary).encode("utf-8")
            )

            collector = MultipartCollector()
            mp = MultipartParser(boundary_bytes, callbacks=collector.callbacks())
            mp.write(raw_data)
            mp.finalize()
            data = {}
            data["multipart_collector"] = collector
        else:
            data = {}
            self.log_request_details("POST", self.path)

        # Handle different endpoints
        if parsed.path in ["/", "/v1/messages", "/rpc"]:
            # JSON-RPC endpoint
            self.handle_jsonrpc(data)
        elif parsed.path == "/chat":
            # Direct chat endpoint for conversations
            result = self.bridge.execute_tool("gaia.chat", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/jira":
            # Direct Jira endpoint for convenience
            result = self.bridge.execute_tool("gaia.jira", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/llm":
            # Direct LLM endpoint (no conversation context)
            result = self.bridge.execute_tool("gaia.query", data)
            self.send_json(200 if result.get("success") else 500, result)
        elif parsed.path == "/summarize":
            # Direct Summarize endpoint accept multipart/form-data (file upload) for browser clients
            accept_header = self.headers.get("Accept", "")
            if isinstance(data, dict):
                data["accept_sse"] = "text/event-stream" in accept_header
            result = self.bridge.execute_tool("gaia.summarize", data)
            if result.get("success") and result.get("stream"):
                self.send_sse_headers()
                try:
                    self.stream_sse(result.get("iterator", []))
                finally:
                    tmp = result.get("tmpfile_path")
                    if tmp and os.path.exists(tmp):
                        os.unlink(tmp)
                return
            else:
                self.send_json(200 if result.get("success") else 500, result)
                return
        else:
            self.send_json(404, {"error": "Not found"})

    def handle_jsonrpc(self, data):
        """Handle JSON-RPC requests."""
        # Validate JSON-RPC
        if "jsonrpc" not in data or data["jsonrpc"] != "2.0":
            self.send_json(
                400,
                {
                    "jsonrpc": "2.0",
                    "error": {"code": -32600, "message": "Invalid Request"},
                    "id": data.get("id"),
                },
            )
            return

        method = data.get("method")
        params = data.get("params", {})
        request_id = data.get("id")

        # Route methods
        if method == "initialize":
            result = {
                "protocolVersion": "1.0.0",
                "serverInfo": {"name": "GAIA MCP Bridge", "version": "2.0.0"},
                "capabilities": {"tools": True, "resources": True, "prompts": True},
            }
        elif method == "tools/list":
            result = {"tools": list(self.bridge.tools.values())}
        elif method == "tools/call":
            tool_name = params.get("name")
            arguments = params.get("arguments", {})
            tool_result = self.bridge.execute_tool(tool_name, arguments)
            result = {"content": [{"type": "text", "text": json.dumps(tool_result)}]}
        else:
            self.send_json(
                400,
                {
                    "jsonrpc": "2.0",
                    "error": {"code": -32601, "message": f"Method not found: {method}"},
                    "id": request_id,
                },
            )
            return

        # Send response
        self.send_json(200, {"jsonrpc": "2.0", "result": result, "id": request_id})

    def do_OPTIONS(self):
        """Handle OPTIONS for CORS."""
        self.log_request_details("OPTIONS", self.path)
        self.send_response(200)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
        self.send_header("Access-Control-Allow-Headers", "Content-Type")
        self.end_headers()

    def send_sse_headers(self):
        """Send standard headers for Server-Sent Events."""
        self.send_response(200)
        self.send_header("Content-Type", "text/event-stream")
        self.send_header("Cache-Control", "no-cache")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Connection", "keep-alive")
        self.send_header("X-Accel-Buffering", "no")
        self.end_headers()

    def stream_sse(self, iterator):
        """Stream SSE data from an iterator of chunk dicts."""
        for chunk in iterator:
            if chunk.get("is_complete"):
                data_out = json.dumps(
                    {"event": "complete", "performance": chunk.get("performance", {})}
                )
            else:
                data_out = json.dumps({"text": chunk.get("text", "")})
            self.wfile.write(f"data: {data_out}\n\n".encode("utf-8"))
            self.wfile.flush()

    def send_json(self, status, data):
        """Send JSON response."""
        if VERBOSE:
            logger.info(f"MCP Response: Status {status}")
            logger.debug(f"Response body: {json.dumps(data, indent=2)}")

        self.send_response(status)
        self.send_header("Content-Type", "application/json")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()
        self.wfile.write(json.dumps(data).encode("utf-8"))

    def log_message(self, format, *args):
        """Override to control standard HTTP logging."""
        # In verbose mode, skip the built-in HTTP logging since we have custom logging
        if VERBOSE:
            # We already log detailed info in log_request_details and send_json
            pass
        elif "/health" not in args[0]:
            # In non-verbose mode, skip health checks but log everything else
            super().log_message(format, *args)


def start_server(host="localhost", port=8765, base_url=None, verbose=False):
    """Start the HTTP MCP server."""
    import io

    # Fix Windows Unicode
    if sys.platform == "win32":
        sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")

    # Fix Linux IPv6 issue: When host is "localhost", Python's socket might bind
    # to ::1 (IPv6) which curl can't connect to by default. Use 0.0.0.0 on Linux
    # to bind to all IPv4 interfaces. Keep localhost on Windows where it works.
    bind_host = host
    if host == "localhost" and sys.platform != "win32":
        bind_host = "0.0.0.0"

    logger.info(f"Creating MCP bridge for {host}:{port}")

    # Create bridge with verbose flag
    bridge = GAIAMCPBridge(host, port, base_url, verbose=verbose)

    # Create handler with bridge
    def handler(*args, **kwargs):
        return MCPHTTPHandler(*args, bridge=bridge, **kwargs)

    # Start server - use bind_host for actual socket binding
    logger.info(f"Creating HTTP server on {bind_host}:{port}")
    try:
        server = HTTPServer((bind_host, port), handler)
        logger.info(
            f"HTTP server created successfully, listening on {bind_host}:{port}"
        )
    except Exception as e:
        logger.error(f"Failed to create HTTP server: {e}")
        raise

    print("=" * 60, flush=True)
    print("🚀 GAIA MCP Bridge - HTTP Native")
    print("=" * 60)
    print(f"Server: http://{host}:{port}")
    print(f"LLM Backend: {bridge.base_url}")
    print(f"Agents: {list(bridge.agents.keys())}")
    print(f"Tools: {list(bridge.tools.keys())}")
    if verbose:
        print(f"\n🔍 Verbose Mode: ENABLED")
        print(f" All requests will be logged to console and gaia.log")
        logger.info("MCP Bridge started in VERBOSE mode - all requests will be logged")
    print("\n📍 Endpoints:")
    print(f" GET http://{host}:{port}/health - Health check")
    print(
        f" GET http://{host}:{port}/status - Detailed status with agents & tools"
    )
    print(f" GET http://{host}:{port}/tools - List tools")
    print(f" POST http://{host}:{port}/ - JSON-RPC")
    print(f" POST http://{host}:{port}/chat - Chat (with context)")
    print(f" POST http://{host}:{port}/jira - Direct Jira")
    print(f" POST http://{host}:{port}/llm - Direct LLM (no context)")
    print("\n🔧 Usage Examples:")
    print(
        ' Chat: curl -X POST http://localhost:8765/chat -d \'{"query":"Hello GAIA!"}\''
    )
    print(
        ' Jira: curl -X POST http://localhost:8765/jira -d \'{"query":"show my issues"}\''
    )
    print(' n8n: HTTP Request → POST /chat → {"query": "..."}')
    print(" MCP: JSON-RPC to / with method: tools/call")
    print("=" * 60)
    print("\nPress Ctrl+C to stop\n", flush=True)

    logger.info(f"Starting serve_forever() on {bind_host}:{port}")
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print("\n✅ Server stopped")


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="GAIA MCP Bridge - HTTP Native")
    parser.add_argument("--host", default="localhost", help="Host to bind to")
    parser.add_argument("--port", type=int, default=8765, help="Port to listen on")
    parser.add_argument(
        "--base-url", default="http://localhost:8000/api/v1", help="LLM server URL"
    )
    parser.add_argument(
        "--verbose", action="store_true", help="Enable verbose logging for all requests"
    )

    args = parser.parse_args()
    start_server(args.host, args.port, args.base_url, args.verbose)