amd-gaia 0.15.0__py3-none-any.whl → 0.15.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (185)
  1. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/METADATA +222 -223
  2. amd_gaia-0.15.2.dist-info/RECORD +182 -0
  3. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/WHEEL +1 -1
  4. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/entry_points.txt +1 -0
  5. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/licenses/LICENSE.md +20 -20
  6. gaia/__init__.py +29 -29
  7. gaia/agents/__init__.py +19 -19
  8. gaia/agents/base/__init__.py +9 -9
  9. gaia/agents/base/agent.py +2132 -2177
  10. gaia/agents/base/api_agent.py +119 -120
  11. gaia/agents/base/console.py +1967 -1841
  12. gaia/agents/base/errors.py +237 -237
  13. gaia/agents/base/mcp_agent.py +86 -86
  14. gaia/agents/base/tools.py +88 -83
  15. gaia/agents/blender/__init__.py +7 -0
  16. gaia/agents/blender/agent.py +553 -556
  17. gaia/agents/blender/agent_simple.py +133 -135
  18. gaia/agents/blender/app.py +211 -211
  19. gaia/agents/blender/app_simple.py +41 -41
  20. gaia/agents/blender/core/__init__.py +16 -16
  21. gaia/agents/blender/core/materials.py +506 -506
  22. gaia/agents/blender/core/objects.py +316 -316
  23. gaia/agents/blender/core/rendering.py +225 -225
  24. gaia/agents/blender/core/scene.py +220 -220
  25. gaia/agents/blender/core/view.py +146 -146
  26. gaia/agents/chat/__init__.py +9 -9
  27. gaia/agents/chat/agent.py +809 -835
  28. gaia/agents/chat/app.py +1065 -1058
  29. gaia/agents/chat/session.py +508 -508
  30. gaia/agents/chat/tools/__init__.py +15 -15
  31. gaia/agents/chat/tools/file_tools.py +96 -96
  32. gaia/agents/chat/tools/rag_tools.py +1744 -1729
  33. gaia/agents/chat/tools/shell_tools.py +437 -436
  34. gaia/agents/code/__init__.py +7 -7
  35. gaia/agents/code/agent.py +549 -549
  36. gaia/agents/code/cli.py +377 -0
  37. gaia/agents/code/models.py +135 -135
  38. gaia/agents/code/orchestration/__init__.py +24 -24
  39. gaia/agents/code/orchestration/checklist_executor.py +1763 -1763
  40. gaia/agents/code/orchestration/checklist_generator.py +713 -713
  41. gaia/agents/code/orchestration/factories/__init__.py +9 -9
  42. gaia/agents/code/orchestration/factories/base.py +63 -63
  43. gaia/agents/code/orchestration/factories/nextjs_factory.py +118 -118
  44. gaia/agents/code/orchestration/factories/python_factory.py +106 -106
  45. gaia/agents/code/orchestration/orchestrator.py +841 -841
  46. gaia/agents/code/orchestration/project_analyzer.py +391 -391
  47. gaia/agents/code/orchestration/steps/__init__.py +67 -67
  48. gaia/agents/code/orchestration/steps/base.py +188 -188
  49. gaia/agents/code/orchestration/steps/error_handler.py +314 -314
  50. gaia/agents/code/orchestration/steps/nextjs.py +828 -828
  51. gaia/agents/code/orchestration/steps/python.py +307 -307
  52. gaia/agents/code/orchestration/template_catalog.py +469 -469
  53. gaia/agents/code/orchestration/workflows/__init__.py +14 -14
  54. gaia/agents/code/orchestration/workflows/base.py +80 -80
  55. gaia/agents/code/orchestration/workflows/nextjs.py +186 -186
  56. gaia/agents/code/orchestration/workflows/python.py +94 -94
  57. gaia/agents/code/prompts/__init__.py +11 -11
  58. gaia/agents/code/prompts/base_prompt.py +77 -77
  59. gaia/agents/code/prompts/code_patterns.py +2034 -2036
  60. gaia/agents/code/prompts/nextjs_prompt.py +40 -40
  61. gaia/agents/code/prompts/python_prompt.py +109 -109
  62. gaia/agents/code/schema_inference.py +365 -365
  63. gaia/agents/code/system_prompt.py +41 -41
  64. gaia/agents/code/tools/__init__.py +42 -42
  65. gaia/agents/code/tools/cli_tools.py +1138 -1138
  66. gaia/agents/code/tools/code_formatting.py +319 -319
  67. gaia/agents/code/tools/code_tools.py +769 -769
  68. gaia/agents/code/tools/error_fixing.py +1347 -1347
  69. gaia/agents/code/tools/external_tools.py +180 -180
  70. gaia/agents/code/tools/file_io.py +845 -845
  71. gaia/agents/code/tools/prisma_tools.py +190 -190
  72. gaia/agents/code/tools/project_management.py +1016 -1016
  73. gaia/agents/code/tools/testing.py +321 -321
  74. gaia/agents/code/tools/typescript_tools.py +122 -122
  75. gaia/agents/code/tools/validation_parsing.py +461 -461
  76. gaia/agents/code/tools/validation_tools.py +806 -806
  77. gaia/agents/code/tools/web_dev_tools.py +1758 -1758
  78. gaia/agents/code/validators/__init__.py +16 -16
  79. gaia/agents/code/validators/antipattern_checker.py +241 -241
  80. gaia/agents/code/validators/ast_analyzer.py +197 -197
  81. gaia/agents/code/validators/requirements_validator.py +145 -145
  82. gaia/agents/code/validators/syntax_validator.py +171 -171
  83. gaia/agents/docker/__init__.py +7 -7
  84. gaia/agents/docker/agent.py +643 -642
  85. gaia/agents/emr/__init__.py +8 -8
  86. gaia/agents/emr/agent.py +1504 -1506
  87. gaia/agents/emr/cli.py +1322 -1322
  88. gaia/agents/emr/constants.py +475 -475
  89. gaia/agents/emr/dashboard/__init__.py +4 -4
  90. gaia/agents/emr/dashboard/server.py +1972 -1974
  91. gaia/agents/jira/__init__.py +11 -11
  92. gaia/agents/jira/agent.py +894 -894
  93. gaia/agents/jira/jql_templates.py +299 -299
  94. gaia/agents/routing/__init__.py +7 -7
  95. gaia/agents/routing/agent.py +567 -570
  96. gaia/agents/routing/system_prompt.py +75 -75
  97. gaia/agents/summarize/__init__.py +11 -0
  98. gaia/agents/summarize/agent.py +885 -0
  99. gaia/agents/summarize/prompts.py +129 -0
  100. gaia/api/__init__.py +23 -23
  101. gaia/api/agent_registry.py +238 -238
  102. gaia/api/app.py +305 -305
  103. gaia/api/openai_server.py +575 -575
  104. gaia/api/schemas.py +186 -186
  105. gaia/api/sse_handler.py +373 -373
  106. gaia/apps/__init__.py +4 -4
  107. gaia/apps/llm/__init__.py +6 -6
  108. gaia/apps/llm/app.py +184 -169
  109. gaia/apps/summarize/app.py +116 -633
  110. gaia/apps/summarize/html_viewer.py +133 -133
  111. gaia/apps/summarize/pdf_formatter.py +284 -284
  112. gaia/audio/__init__.py +2 -2
  113. gaia/audio/audio_client.py +439 -439
  114. gaia/audio/audio_recorder.py +269 -269
  115. gaia/audio/kokoro_tts.py +599 -599
  116. gaia/audio/whisper_asr.py +432 -432
  117. gaia/chat/__init__.py +16 -16
  118. gaia/chat/app.py +428 -430
  119. gaia/chat/prompts.py +522 -522
  120. gaia/chat/sdk.py +1228 -1225
  121. gaia/cli.py +5659 -5632
  122. gaia/database/__init__.py +10 -10
  123. gaia/database/agent.py +176 -176
  124. gaia/database/mixin.py +290 -290
  125. gaia/database/testing.py +64 -64
  126. gaia/eval/batch_experiment.py +2332 -2332
  127. gaia/eval/claude.py +542 -542
  128. gaia/eval/config.py +37 -37
  129. gaia/eval/email_generator.py +512 -512
  130. gaia/eval/eval.py +3179 -3179
  131. gaia/eval/groundtruth.py +1130 -1130
  132. gaia/eval/transcript_generator.py +582 -582
  133. gaia/eval/webapp/README.md +167 -167
  134. gaia/eval/webapp/package-lock.json +875 -875
  135. gaia/eval/webapp/package.json +20 -20
  136. gaia/eval/webapp/public/app.js +3402 -3402
  137. gaia/eval/webapp/public/index.html +87 -87
  138. gaia/eval/webapp/public/styles.css +3661 -3661
  139. gaia/eval/webapp/server.js +415 -415
  140. gaia/eval/webapp/test-setup.js +72 -72
  141. gaia/installer/__init__.py +23 -0
  142. gaia/installer/init_command.py +1275 -0
  143. gaia/installer/lemonade_installer.py +619 -0
  144. gaia/llm/__init__.py +10 -2
  145. gaia/llm/base_client.py +60 -0
  146. gaia/llm/exceptions.py +12 -0
  147. gaia/llm/factory.py +70 -0
  148. gaia/llm/lemonade_client.py +3421 -3221
  149. gaia/llm/lemonade_manager.py +294 -294
  150. gaia/llm/providers/__init__.py +9 -0
  151. gaia/llm/providers/claude.py +108 -0
  152. gaia/llm/providers/lemonade.py +118 -0
  153. gaia/llm/providers/openai_provider.py +79 -0
  154. gaia/llm/vlm_client.py +382 -382
  155. gaia/logger.py +189 -189
  156. gaia/mcp/agent_mcp_server.py +245 -245
  157. gaia/mcp/blender_mcp_client.py +138 -138
  158. gaia/mcp/blender_mcp_server.py +648 -648
  159. gaia/mcp/context7_cache.py +332 -332
  160. gaia/mcp/external_services.py +518 -518
  161. gaia/mcp/mcp_bridge.py +811 -550
  162. gaia/mcp/servers/__init__.py +6 -6
  163. gaia/mcp/servers/docker_mcp.py +83 -83
  164. gaia/perf_analysis.py +361 -0
  165. gaia/rag/__init__.py +10 -10
  166. gaia/rag/app.py +293 -293
  167. gaia/rag/demo.py +304 -304
  168. gaia/rag/pdf_utils.py +235 -235
  169. gaia/rag/sdk.py +2194 -2194
  170. gaia/security.py +183 -163
  171. gaia/talk/app.py +287 -289
  172. gaia/talk/sdk.py +538 -538
  173. gaia/testing/__init__.py +87 -87
  174. gaia/testing/assertions.py +330 -330
  175. gaia/testing/fixtures.py +333 -333
  176. gaia/testing/mocks.py +493 -493
  177. gaia/util.py +46 -46
  178. gaia/utils/__init__.py +33 -33
  179. gaia/utils/file_watcher.py +675 -675
  180. gaia/utils/parsing.py +223 -223
  181. gaia/version.py +100 -100
  182. amd_gaia-0.15.0.dist-info/RECORD +0 -168
  183. gaia/agents/code/app.py +0 -266
  184. gaia/llm/llm_client.py +0 -723
  185. {amd_gaia-0.15.0.dist-info → amd_gaia-0.15.2.dist-info}/top_level.txt +0 -0
@@ -1,642 +1,643 @@
1
- #!/usr/bin/env python
2
- # Copyright(C) 2024-2025 Advanced Micro Devices, Inc. All rights reserved.
3
- # SPDX-License-Identifier: MIT
4
- """
5
- Docker Agent for GAIA.
6
-
7
- This agent provides an intelligent interface for containerizing applications,
8
- generating Dockerfiles, and managing Docker containers through natural language commands.
9
- """
10
- import json
11
- import logging
12
- import subprocess
13
- from pathlib import Path
14
- from typing import Any, Dict
15
-
16
- from gaia.agents.base.console import AgentConsole, SilentConsole
17
- from gaia.agents.base.mcp_agent import MCPAgent
18
- from gaia.agents.base.tools import tool
19
- from gaia.security import PathValidator
20
-
21
- logger = logging.getLogger(__name__)
22
-
23
- DEFAULT_MODEL = "Qwen3-Coder-30B-A3B-Instruct-GGUF"
24
- DEFAULT_MAX_STEPS = 10
25
- DEFAULT_PORT = 8080
26
-
27
-
28
class DockerAgent(MCPAgent):
    """
    Intelligent Docker agent for containerization assistance.

    This agent helps developers containerize their applications by:
    - Analyzing application structure and dependencies
    - Generating appropriate Dockerfiles using LLM intelligence
    - Building Docker images
    - Running containers with proper configuration

    The agent uses Lemonade/LLM to understand the application context
    and generate optimal Dockerfiles based on best practices.
    """

    def __init__(self, **kwargs):
        """Initialize the Docker agent.

        Args:
            **kwargs: Agent initialization parameters:
                - max_steps: Maximum conversation steps (default: 10)
                - model_id: LLM model to use (default: Qwen3-Coder-30B-A3B-Instruct-GGUF)
                - silent_mode: Suppress console output (default: False)
                - debug: Enable debug logging (default: False)
                - show_prompts: Display prompts sent to LLM (default: False)
                - allowed_paths: optional allow-list for file operations
        """
        # Default to the larger coding model for reliable Dockerfile generation.
        kwargs.setdefault("model_id", DEFAULT_MODEL)
        kwargs.setdefault("max_steps", DEFAULT_MAX_STEPS)

        # Security: restrict file operations to a set of allowed paths.
        # None means the current directory and its subdirectories.
        self.allowed_paths = kwargs.pop("allowed_paths", None)
        self.path_validator = PathValidator(self.allowed_paths)

        super().__init__(**kwargs)
66
-
67
    def _get_system_prompt(self) -> str:
        """Generate the system prompt for Docker containerization.

        Returns:
            str: System prompt that teaches the LLM about Dockerfile best practices
        """
        # NOTE: the returned text is runtime data sent verbatim to the LLM.
        # The JSON-only contract, the example tool calls and the escaped \n
        # sequences in the examples are all load-bearing — do not reformat.
        return """You are a Docker containerization expert that responds ONLY in JSON format.

**CRITICAL RULES:**
1. Output ONLY valid JSON - nothing else
2. Do NOT add any text before the opening {
3. Do NOT add any text after the closing }
4. Your ENTIRE response must be parseable JSON

You help developers containerize their applications by:
- Analyzing application structure and dependencies
- Generating optimized Dockerfiles
- Building and running Docker containers

**Dockerfile Best Practices:**
- Use appropriate base images (python:3.9-slim for Python, node:18-alpine for Node.js)
- Minimize layers by combining RUN commands
- Copy dependency files first for better caching
- Use non-root users when possible
- Expose appropriate ports
- Set proper working directories

**Example Dockerfiles (use as inspiration - adapt to specific needs):**

Python/Flask application:
```
FROM python:3.9-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 5000
CMD ["python", "app.py"]
```

Node.js/Express application:
```
FROM node:18-alpine
WORKDIR /app
COPY package*.json ./
RUN npm ci --only=production
COPY . .
EXPOSE 3000
CMD ["npm", "start"]
```

**RESPONSE FORMAT - Use EXACTLY this structure:**

For ANALYZE (understand the app structure):
{"thought": "User wants to containerize X application", "goal": "Analyze application", "plan": [{"tool": "analyze_directory", "tool_args": {"path": "PATH_HERE"}}]}

For SAVE_DOCKERFILE (save generated Dockerfile):
{"thought": "Analyzed app, now generating Dockerfile content", "goal": "Save Dockerfile", "plan": [{"tool": "save_dockerfile", "tool_args": {"dockerfile_content": "FROM python:3.9-slim\\nWORKDIR /app\\n...", "path": ".", "port": 5000}}]}

For BUILD (build Docker image):
{"thought": "Building Docker image", "goal": "Build image", "plan": [{"tool": "build_image", "tool_args": {"path": "PATH", "tag": "TAG"}}]}

For RUN (run container):
{"thought": "Running container", "goal": "Run container", "plan": [{"tool": "run_container", "tool_args": {"image": "IMAGE", "port": "PORT_MAP"}}]}

For FINAL ANSWER:
{"thought": "Task completed", "goal": "Report results", "answer": "Successfully containerized the application. [Details about what was done]"}

**EXAMPLES:**

User: "create a Dockerfile for my Flask app"
Step 1: {"thought": "Need to analyze the app first", "goal": "Analyze application", "plan": [{"tool": "analyze_directory", "tool_args": {"path": "."}}]}
Step 2 (after seeing app_type=flask, entry_point=app.py): {"thought": "Flask app detected, I'll generate an appropriate Dockerfile", "goal": "Save Dockerfile", "plan": [{"tool": "save_dockerfile", "tool_args": {"dockerfile_content": "FROM python:3.9-slim\nWORKDIR /app\nCOPY requirements.txt .\nRUN pip install --no-cache-dir -r requirements.txt\nCOPY . .\nEXPOSE 5000\nCMD [\"python\", \"app.py\"]", "path": ".", "port": 5000}}]}

User: "containerize and build my Node.js app"
Step 1: Analyze with analyze_directory
Step 2: Generate and save Node.js Dockerfile with save_dockerfile
Step 3: Build with build_image

**IMPORTANT:**
- First analyze the app to understand its structure
- Then generate appropriate Dockerfile content based on the analysis
- Include proper copyright header in Dockerfile
- Use save_dockerfile to write the Dockerfile you generated"""
151
-
152
- def _create_console(self):
153
- """Create console for Docker agent output."""
154
- if self.silent_mode:
155
- return SilentConsole()
156
- return AgentConsole()
157
-
158
    def _register_tools(self):
        """Register Docker-specific tools."""
        # NOTE(review): each @tool docstring below is presumably surfaced to
        # the LLM as the tool's description (verify in gaia.agents.base.tools),
        # so the docstrings are runtime-relevant and kept exactly as authored.

        @tool
        def analyze_directory(path: str = ".") -> Dict[str, Any]:
            """Analyze application directory to determine type and dependencies.

            Args:
                path: Directory path to analyze (default: current directory)

            Returns:
                Dictionary containing application information:
                - app_type: Type of application (flask, node, python, etc.)
                - entry_point: Main application file
                - dependencies: Dependencies file (requirements.txt, package.json)
                - port: Suggested port to expose
                - additional_files: Other relevant files found
            """
            # Thin wrapper: delegates to the bound instance method.
            return self._analyze_directory(path)

        @tool
        def save_dockerfile(
            dockerfile_content: str, path: str = ".", port: int = 5000
        ) -> Dict[str, Any]:
            """Save a Dockerfile that you've generated.

            You should generate the Dockerfile content based on the application analysis
            and your knowledge of Docker best practices. Use the example Dockerfiles in
            the system prompt as guidance.

            Args:
                dockerfile_content: The complete Dockerfile content you've generated
                path: Directory where to save the Dockerfile (default: current directory)
                port: Port exposed by the application (default: 5000)

            Returns:
                Dictionary containing:
                - status: "success" or "error"
                - path: Path where Dockerfile was saved
                - next_steps: Instructions for building and running
            """
            # Thin wrapper: delegates to the bound instance method.
            return self._save_dockerfile(dockerfile_content, path, port)

        @tool
        def build_image(path: str = ".", tag: str = "app:latest") -> Dict[str, Any]:
            """Build Docker image from Dockerfile.

            Args:
                path: Directory containing Dockerfile
                tag: Image tag (default: app:latest)

            Returns:
                Dictionary containing:
                - success: Whether build succeeded
                - image: Image tag if successful
                - output: Build output
                - error: Error message if failed
            """
            # Thin wrapper: delegates to the bound instance method.
            return self._build_image(path, tag)

        @tool
        def run_container(
            image: str, port: str = None, name: str = None
        ) -> Dict[str, Any]:
            """Run Docker container from image.

            Args:
                image: Docker image to run
                port: Port mapping (e.g., "5000:5000")
                name: Container name (optional)

            Returns:
                Dictionary containing:
                - success: Whether container started
                - container_id: Container ID if successful
                - url: Access URL if port mapped
                - output: Run output
                - error: Error message if failed
            """
            # Thin wrapper: delegates to the bound instance method.
            return self._run_container(image, port, name)
238
-
239
- def _analyze_directory(self, path: str) -> Dict[str, Any]:
240
- """Analyze directory to determine application type and structure."""
241
- logger.debug(f"Analyzing directory: {path}")
242
-
243
- # Security check
244
- if not self.path_validator.is_path_allowed(path):
245
- return {
246
- "status": "error",
247
- "error": f"Access denied: {path} is not in allowed paths",
248
- }
249
-
250
- path_obj = Path(path).resolve()
251
- if not path_obj.exists():
252
- return {"status": "error", "error": f"Directory {path} does not exist"}
253
-
254
- result = {
255
- "path": str(path_obj),
256
- "app_type": "unknown",
257
- "entry_point": None,
258
- "dependencies": None,
259
- "port": DEFAULT_PORT,
260
- "additional_files": [],
261
- }
262
-
263
- # Check for Python/Flask application
264
- requirements_file = path_obj / "requirements.txt"
265
- if requirements_file.exists():
266
- result["app_type"] = "python"
267
- result["dependencies"] = "requirements.txt"
268
-
269
- # Read requirements to detect framework
270
- with open(requirements_file, "r", encoding="utf-8") as f:
271
- requirements = f.read().lower()
272
- if "flask" in requirements:
273
- result["app_type"] = "flask"
274
- result["port"] = DEFAULT_PORT
275
- elif "django" in requirements:
276
- result["app_type"] = "django"
277
- result["port"] = DEFAULT_PORT
278
- elif "fastapi" in requirements:
279
- result["app_type"] = "fastapi"
280
- result["port"] = DEFAULT_PORT
281
-
282
- # Find entry point
283
- for potential_entry in [
284
- "app.py",
285
- "main.py",
286
- "run.py",
287
- "server.py",
288
- "application.py",
289
- ]:
290
- if (path_obj / potential_entry).exists():
291
- result["entry_point"] = potential_entry
292
- break
293
-
294
- # Check for Node.js application
295
- package_json = path_obj / "package.json"
296
- if package_json.exists():
297
- result["app_type"] = "node"
298
- result["dependencies"] = "package.json"
299
- result["port"] = DEFAULT_PORT
300
-
301
- # Read package.json to understand the app better
302
- try:
303
- with open(package_json, "r", encoding="utf-8") as f:
304
- pkg_data = json.load(f)
305
-
306
- # Check for start script
307
- if "scripts" in pkg_data and "start" in pkg_data["scripts"]:
308
- result["start_command"] = pkg_data["scripts"]["start"]
309
-
310
- # Detect framework from dependencies
311
- deps = pkg_data.get("dependencies", {})
312
- if "express" in deps:
313
- result["app_type"] = "express"
314
- elif "next" in deps:
315
- result["app_type"] = "nextjs"
316
- elif "react" in deps:
317
- result["app_type"] = "react"
318
-
319
- # Find entry point
320
- if "main" in pkg_data:
321
- result["entry_point"] = pkg_data["main"]
322
- else:
323
- for potential_entry in ["index.js", "server.js", "app.js"]:
324
- if (path_obj / potential_entry).exists():
325
- result["entry_point"] = potential_entry
326
- break
327
- except Exception as e:
328
- logger.warning(f"Could not parse package.json: {e}")
329
-
330
- # Check for other important files
331
- for file_name in [
332
- ".env.example",
333
- "docker-compose.yml",
334
- "Dockerfile",
335
- ".dockerignore",
336
- ]:
337
- if (path_obj / file_name).exists():
338
- result["additional_files"].append(file_name)
339
-
340
- logger.debug(f"Analysis result: {result}")
341
- return result
342
-
343
- def _save_dockerfile(
344
- self, dockerfile_content: str, path: str, port: int
345
- ) -> Dict[str, Any]:
346
- """Save Dockerfile content generated by the LLM.
347
-
348
- Args:
349
- dockerfile_content: Dockerfile content generated by LLM
350
- path: Directory where to save the Dockerfile
351
- port: Port exposed by the application
352
-
353
- Returns:
354
- Dictionary with status, path, and next steps
355
- """
356
- logger.debug(f"Saving Dockerfile to: {path}")
357
-
358
- # Security check
359
- if not self.path_validator.is_path_allowed(path):
360
- return {
361
- "status": "error",
362
- "error": f"Access denied: {path} is not in allowed paths",
363
- }
364
-
365
- path_obj = Path(path).resolve()
366
- if not path_obj.exists():
367
- return {"status": "error", "error": f"Directory {path} does not exist"}
368
-
369
- dockerfile_path = path_obj / "Dockerfile"
370
-
371
- try:
372
- # Save the LLM-generated Dockerfile
373
- with open(dockerfile_path, "w", encoding="utf-8") as f:
374
- f.write(dockerfile_content)
375
-
376
- # Generate image name from directory
377
- image_name = path_obj.name.lower().replace("_", "-").replace(" ", "-")
378
-
379
- return {
380
- "status": "success",
381
- "path": str(dockerfile_path),
382
- "next_steps": [
383
- "1. Build the Docker image:",
384
- f" cd {path_obj}",
385
- f" docker build -t {image_name} .",
386
- "",
387
- "2. Run the container (keeps running in background):",
388
- f" docker run -d -p {port}:{port} --name {image_name}-container {image_name}",
389
- "",
390
- "3. Access your application at:",
391
- f" http://localhost:{port}",
392
- "",
393
- "4. View container logs:",
394
- f" docker logs -f {image_name}-container",
395
- "",
396
- "5. Stop the container when done:",
397
- f" docker stop {image_name}-container",
398
- ],
399
- }
400
-
401
- except Exception as e:
402
- return {"status": "error", "error": f"Failed to save Dockerfile: {str(e)}"}
403
-
404
- def _build_image(self, path: str, tag: str) -> Dict[str, Any]:
405
- """Build Docker image from Dockerfile."""
406
- logger.debug(f"Building Docker image: {tag} from {path}")
407
-
408
- # Security check
409
- if not self.path_validator.is_path_allowed(path):
410
- return {
411
- "status": "error",
412
- "error": f"Access denied: {path} is not in allowed paths",
413
- }
414
-
415
- # Check if Docker is available
416
- try:
417
- result = subprocess.run(
418
- ["docker", "--version"],
419
- capture_output=True,
420
- text=True,
421
- timeout=5,
422
- check=False,
423
- )
424
- if result.returncode != 0:
425
- return {
426
- "status": "error",
427
- "error": "Docker is not installed or not accessible. Please install Docker first.",
428
- }
429
- except Exception as e:
430
- return {"status": "error", "error": f"Cannot access Docker: {str(e)}"}
431
-
432
- # Build the image
433
- try:
434
- result = subprocess.run(
435
- ["docker", "build", "-t", tag, path],
436
- capture_output=True,
437
- text=True,
438
- timeout=300, # 5 minute timeout for build
439
- check=False,
440
- )
441
-
442
- if result.returncode == 0:
443
- return {
444
- "status": "success",
445
- "success": True,
446
- "image": tag,
447
- "output": result.stdout,
448
- "message": f"Successfully built Docker image: {tag}",
449
- }
450
- else:
451
- return {
452
- "status": "error",
453
- "success": False,
454
- "error": f"Docker build failed: {result.stderr}",
455
- "output": result.stdout,
456
- }
457
-
458
- except subprocess.TimeoutExpired:
459
- return {
460
- "status": "error",
461
- "error": "Docker build timed out after 5 minutes",
462
- }
463
- except Exception as e:
464
- return {"status": "error", "error": f"Failed to build image: {str(e)}"}
465
-
466
- def _run_container(
467
- self, image: str, port: str = None, name: str = None
468
- ) -> Dict[str, Any]:
469
- """Run Docker container from image."""
470
- logger.debug(f"Running container from image: {image}")
471
-
472
- # Build docker run command
473
- cmd = ["docker", "run", "-d"] # Run in detached mode
474
-
475
- if port:
476
- cmd.extend(["-p", port])
477
-
478
- if name:
479
- cmd.extend(["--name", name])
480
-
481
- cmd.append(image)
482
-
483
- try:
484
- result = subprocess.run(
485
- cmd, capture_output=True, text=True, timeout=30, check=False
486
- )
487
-
488
- if result.returncode == 0:
489
- container_id = result.stdout.strip()[:12]
490
-
491
- response = {
492
- "status": "success",
493
- "success": True,
494
- "container_id": container_id,
495
- "image": image,
496
- "message": f"Container {container_id} is running",
497
- }
498
-
499
- if port:
500
- host_port = port.split(":")[0]
501
- response["url"] = f"http://localhost:{host_port}"
502
- response[
503
- "message"
504
- ] += f"\nAccess your application at: http://localhost:{host_port}"
505
-
506
- return response
507
- else:
508
- return {
509
- "status": "error",
510
- "success": False,
511
- "error": f"Failed to run container: {result.stderr}",
512
- "output": result.stdout,
513
- }
514
-
515
- except subprocess.TimeoutExpired:
516
- return {"status": "error", "error": "Docker run command timed out"}
517
- except Exception as e:
518
- return {"status": "error", "error": f"Failed to run container: {str(e)}"}
519
-
520
- # MCP Interface Implementation
521
- def get_mcp_tool_definitions(self) -> list[Dict[str, Any]]:
522
- """Return MCP tool definitions for Docker agent."""
523
- return [
524
- {
525
- "name": "dockerize",
526
- "description": "Containerize an application by analyzing its structure, generating an optimized Dockerfile, building the Docker image, and running the container. Use this when the user wants to dockerize, containerize, or run their application in Docker. This performs the complete workflow: analyze → create Dockerfile → build image → run container.",
527
- "inputSchema": {
528
- "type": "object",
529
- "properties": {
530
- "appPath": {
531
- "type": "string",
532
- "description": "Absolute path to the application's root directory. Must be a complete filesystem path (e.g., C:/Users/name/myapp on Windows or /home/user/myapp on Linux). This is where the Dockerfile will be created and where dependency files (requirements.txt, package.json) should exist.",
533
- },
534
- "port": {
535
- "type": "integer",
536
- "description": "The port that the application listens on inside the container. This will be exposed in the Dockerfile and mapped to the same host port when running the container. Common values: 5000 (Flask), 3000 (Node.js/Express), 8000 (Django), 8080 (general web apps). Default: 5000",
537
- "default": 5000,
538
- },
539
- },
540
- "required": ["appPath"],
541
- },
542
- }
543
- ]
544
-
545
- def execute_mcp_tool(
546
- self, tool_name: str, arguments: Dict[str, Any]
547
- ) -> Dict[str, Any]:
548
- """Execute MCP tool call by delegating to LLM via process_query."""
549
- if tool_name == "dockerize":
550
- # Validate appPath is provided
551
- if "appPath" not in arguments:
552
- return {
553
- "success": False,
554
- "error": "appPath is required - must be an absolute path to the application directory",
555
- }
556
-
557
- app_path = arguments["appPath"]
558
-
559
- # Validate it's an absolute path
560
- path_obj = Path(app_path)
561
- if not path_obj.is_absolute():
562
- return {
563
- "success": False,
564
- "error": f"appPath must be an absolute path, got: {app_path}",
565
- }
566
-
567
- # Validate directory exists
568
- if not path_obj.exists():
569
- return {
570
- "success": False,
571
- "error": f"Directory does not exist: {app_path}",
572
- }
573
-
574
- # Validate it's a directory
575
- if not path_obj.is_dir():
576
- return {
577
- "success": False,
578
- "error": f"Path is not a directory: {app_path}",
579
- }
580
-
581
- # Security check
582
- if not self.path_validator.is_path_allowed(app_path):
583
- return {
584
- "success": False,
585
- "error": f"Access denied: {app_path} is not in allowed paths",
586
- }
587
-
588
- # Get parameters
589
- port = arguments.get("port", DEFAULT_PORT)
590
-
591
- # Construct natural language query for the LLM
592
- # Always do full workflow: dockerize build → run
593
- query_parts = [f"Dockerize the application at {app_path}"]
594
-
595
- if port != DEFAULT_PORT:
596
- query_parts.append(f"using port {port}")
597
-
598
- query_parts.append("then build and run the container")
599
-
600
- query = " ".join(query_parts) + "."
601
-
602
- # Let the LLM orchestrate the workflow
603
- result = self.process_query(user_input=query, trace=False)
604
-
605
- # Extract the final result
606
- # process_query returns: {status: "success"/"failed"/"incomplete", result: ..., error_history: [...]}
607
- status = result.get("status", "incomplete")
608
- final_result = result.get("result", "")
609
-
610
- # Only report failure if status is explicitly "failed"
611
- # Intermediate errors/warnings are acceptable as long as the overall task succeeded
612
- if status == "success":
613
- return {
614
- "success": True,
615
- "status": "completed",
616
- "result": final_result,
617
- "steps_taken": result.get("steps_taken", 0),
618
- "duration": result.get("duration", 0),
619
- }
620
- elif status == "failed":
621
- # Include error details only when actually failed
622
- error_history = result.get("error_history", [])
623
- error_msg = (
624
- error_history[-1] if error_history else "Unknown error occurred"
625
- )
626
- return {
627
- "success": False,
628
- "error": error_msg,
629
- "result": final_result,
630
- "error_count": result.get("error_count", 0),
631
- }
632
- else:
633
- # Incomplete status
634
- return {
635
- "success": False,
636
- "error": "Task did not complete within step limit",
637
- "result": final_result,
638
- "steps_taken": result.get("steps_taken", 0),
639
- }
640
-
641
- else:
642
- raise ValueError(f"Unknown tool: {tool_name}")
1
+ #!/usr/bin/env python
2
+ # Copyright(C) 2024-2025 Advanced Micro Devices, Inc. All rights reserved.
3
+ # SPDX-License-Identifier: MIT
4
+ """
5
+ Docker Agent for GAIA.
6
+
7
+ This agent provides an intelligent interface for containerizing applications,
8
+ generating Dockerfiles, and managing Docker containers through natural language commands.
9
+ """
10
+
11
+ import json
12
+ import logging
13
+ import subprocess
14
+ from pathlib import Path
15
+ from typing import Any, Dict
16
+
17
+ from gaia.agents.base.console import AgentConsole, SilentConsole
18
+ from gaia.agents.base.mcp_agent import MCPAgent
19
+ from gaia.agents.base.tools import tool
20
+ from gaia.security import PathValidator
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+ DEFAULT_MODEL = "Qwen3-Coder-30B-A3B-Instruct-GGUF"
25
+ DEFAULT_MAX_STEPS = 10
26
+ DEFAULT_PORT = 8080
27
+
28
+
29
+ class DockerAgent(MCPAgent):
30
+ """
31
+ Intelligent Docker agent for containerization assistance.
32
+
33
+ This agent helps developers containerize their applications by:
34
+ - Analyzing application structure and dependencies
35
+ - Generating appropriate Dockerfiles using LLM intelligence
36
+ - Building Docker images
37
+ - Running containers with proper configuration
38
+
39
+ The agent uses Lemonade/LLM to understand the application context
40
+ and generate optimal Dockerfiles based on best practices.
41
+ """
42
+
43
+ def __init__(self, **kwargs):
44
+ """Initialize the Docker agent.
45
+
46
+ Args:
47
+ **kwargs: Agent initialization parameters:
48
+ - max_steps: Maximum conversation steps (default: 10)
49
+ - model_id: LLM model to use (default: Qwen3-Coder-30B-A3B-Instruct-GGUF)
50
+ - silent_mode: Suppress console output (default: False)
51
+ - debug: Enable debug logging (default: False)
52
+ - show_prompts: Display prompts sent to LLM (default: False)
53
+ """
54
+ # Use larger coding model for reliable Dockerfile generation
55
+ if "model_id" not in kwargs:
56
+ kwargs["model_id"] = DEFAULT_MODEL
57
+
58
+ if "max_steps" not in kwargs:
59
+ kwargs["max_steps"] = DEFAULT_MAX_STEPS
60
+
61
+ # Security: Configure allowed paths for file operations
62
+ # If None, allow current directory and subdirectories
63
+ self.allowed_paths = kwargs.pop("allowed_paths", None)
64
+ self.path_validator = PathValidator(self.allowed_paths)
65
+
66
+ super().__init__(**kwargs)
67
+
68
    def _get_system_prompt(self) -> str:
        """Generate the system prompt for Docker containerization.

        The returned text instructs the LLM to respond in strict JSON, lists
        Dockerfile best practices with example Dockerfiles, and spells out the
        exact JSON response shapes for each tool call. The tool names embedded
        in the prompt (analyze_directory, save_dockerfile, build_image,
        run_container) must match those registered in _register_tools.

        Returns:
            str: System prompt that teaches the LLM about Dockerfile best practices
        """
        # NOTE: this literal is runtime behavior (it is sent to the model);
        # keep it byte-for-byte stable unless deliberately changing prompting.
        return """You are a Docker containerization expert that responds ONLY in JSON format.

**CRITICAL RULES:**
1. Output ONLY valid JSON - nothing else
2. Do NOT add any text before the opening {
3. Do NOT add any text after the closing }
4. Your ENTIRE response must be parseable JSON

You help developers containerize their applications by:
- Analyzing application structure and dependencies
- Generating optimized Dockerfiles
- Building and running Docker containers

**Dockerfile Best Practices:**
- Use appropriate base images (python:3.9-slim for Python, node:18-alpine for Node.js)
- Minimize layers by combining RUN commands
- Copy dependency files first for better caching
- Use non-root users when possible
- Expose appropriate ports
- Set proper working directories

**Example Dockerfiles (use as inspiration - adapt to specific needs):**

Python/Flask application:
```
FROM python:3.9-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 5000
CMD ["python", "app.py"]
```

Node.js/Express application:
```
FROM node:18-alpine
WORKDIR /app
COPY package*.json ./
RUN npm ci --only=production
COPY . .
EXPOSE 3000
CMD ["npm", "start"]
```

**RESPONSE FORMAT - Use EXACTLY this structure:**

For ANALYZE (understand the app structure):
{"thought": "User wants to containerize X application", "goal": "Analyze application", "plan": [{"tool": "analyze_directory", "tool_args": {"path": "PATH_HERE"}}]}

For SAVE_DOCKERFILE (save generated Dockerfile):
{"thought": "Analyzed app, now generating Dockerfile content", "goal": "Save Dockerfile", "plan": [{"tool": "save_dockerfile", "tool_args": {"dockerfile_content": "FROM python:3.9-slim\\nWORKDIR /app\\n...", "path": ".", "port": 5000}}]}

For BUILD (build Docker image):
{"thought": "Building Docker image", "goal": "Build image", "plan": [{"tool": "build_image", "tool_args": {"path": "PATH", "tag": "TAG"}}]}

For RUN (run container):
{"thought": "Running container", "goal": "Run container", "plan": [{"tool": "run_container", "tool_args": {"image": "IMAGE", "port": "PORT_MAP"}}]}

For FINAL ANSWER:
{"thought": "Task completed", "goal": "Report results", "answer": "Successfully containerized the application. [Details about what was done]"}

**EXAMPLES:**

User: "create a Dockerfile for my Flask app"
Step 1: {"thought": "Need to analyze the app first", "goal": "Analyze application", "plan": [{"tool": "analyze_directory", "tool_args": {"path": "."}}]}
Step 2 (after seeing app_type=flask, entry_point=app.py): {"thought": "Flask app detected, I'll generate an appropriate Dockerfile", "goal": "Save Dockerfile", "plan": [{"tool": "save_dockerfile", "tool_args": {"dockerfile_content": "FROM python:3.9-slim\nWORKDIR /app\nCOPY requirements.txt .\nRUN pip install --no-cache-dir -r requirements.txt\nCOPY . .\nEXPOSE 5000\nCMD [\"python\", \"app.py\"]", "path": ".", "port": 5000}}]}

User: "containerize and build my Node.js app"
Step 1: Analyze with analyze_directory
Step 2: Generate and save Node.js Dockerfile with save_dockerfile
Step 3: Build with build_image

**IMPORTANT:**
- First analyze the app to understand its structure
- Then generate appropriate Dockerfile content based on the analysis
- Include proper copyright header in Dockerfile
- Use save_dockerfile to write the Dockerfile you generated"""
152
+
153
+ def _create_console(self):
154
+ """Create console for Docker agent output."""
155
+ if self.silent_mode:
156
+ return SilentConsole()
157
+ return AgentConsole()
158
+
159
    def _register_tools(self):
        """Register Docker-specific tools.

        Each nested function is decorated with @tool and closes over ``self``
        to delegate to the corresponding private implementation. NOTE(review):
        the docstrings below are presumably surfaced to the LLM as tool
        descriptions by the @tool machinery — confirm before rewording them.
        """

        @tool
        def analyze_directory(path: str = ".") -> Dict[str, Any]:
            """Analyze application directory to determine type and dependencies.

            Args:
                path: Directory path to analyze (default: current directory)

            Returns:
                Dictionary containing application information:
                - app_type: Type of application (flask, node, python, etc.)
                - entry_point: Main application file
                - dependencies: Dependencies file (requirements.txt, package.json)
                - port: Suggested port to expose
                - additional_files: Other relevant files found
            """
            # Delegates to the instance method; path validation happens there.
            return self._analyze_directory(path)

        @tool
        def save_dockerfile(
            dockerfile_content: str, path: str = ".", port: int = 5000
        ) -> Dict[str, Any]:
            """Save a Dockerfile that you've generated.

            You should generate the Dockerfile content based on the application analysis
            and your knowledge of Docker best practices. Use the example Dockerfiles in
            the system prompt as guidance.

            Args:
                dockerfile_content: The complete Dockerfile content you've generated
                path: Directory where to save the Dockerfile (default: current directory)
                port: Port exposed by the application (default: 5000)

            Returns:
                Dictionary containing:
                - status: "success" or "error"
                - path: Path where Dockerfile was saved
                - next_steps: Instructions for building and running
            """
            # The LLM supplies dockerfile_content; this only persists it.
            return self._save_dockerfile(dockerfile_content, path, port)

        @tool
        def build_image(path: str = ".", tag: str = "app:latest") -> Dict[str, Any]:
            """Build Docker image from Dockerfile.

            Args:
                path: Directory containing Dockerfile
                tag: Image tag (default: app:latest)

            Returns:
                Dictionary containing:
                - success: Whether build succeeded
                - image: Image tag if successful
                - output: Build output
                - error: Error message if failed
            """
            # Shells out to `docker build`; requires a local Docker install.
            return self._build_image(path, tag)

        @tool
        def run_container(
            image: str, port: str = None, name: str = None
        ) -> Dict[str, Any]:
            """Run Docker container from image.

            Args:
                image: Docker image to run
                port: Port mapping (e.g., "5000:5000")
                name: Container name (optional)

            Returns:
                Dictionary containing:
                - success: Whether container started
                - container_id: Container ID if successful
                - url: Access URL if port mapped
                - output: Run output
                - error: Error message if failed
            """
            # Runs the container detached via `docker run -d`.
            return self._run_container(image, port, name)
239
+
240
+ def _analyze_directory(self, path: str) -> Dict[str, Any]:
241
+ """Analyze directory to determine application type and structure."""
242
+ logger.debug(f"Analyzing directory: {path}")
243
+
244
+ # Security check
245
+ if not self.path_validator.is_path_allowed(path):
246
+ return {
247
+ "status": "error",
248
+ "error": f"Access denied: {path} is not in allowed paths",
249
+ }
250
+
251
+ path_obj = Path(path).resolve()
252
+ if not path_obj.exists():
253
+ return {"status": "error", "error": f"Directory {path} does not exist"}
254
+
255
+ result = {
256
+ "path": str(path_obj),
257
+ "app_type": "unknown",
258
+ "entry_point": None,
259
+ "dependencies": None,
260
+ "port": DEFAULT_PORT,
261
+ "additional_files": [],
262
+ }
263
+
264
+ # Check for Python/Flask application
265
+ requirements_file = path_obj / "requirements.txt"
266
+ if requirements_file.exists():
267
+ result["app_type"] = "python"
268
+ result["dependencies"] = "requirements.txt"
269
+
270
+ # Read requirements to detect framework
271
+ with open(requirements_file, "r", encoding="utf-8") as f:
272
+ requirements = f.read().lower()
273
+ if "flask" in requirements:
274
+ result["app_type"] = "flask"
275
+ result["port"] = DEFAULT_PORT
276
+ elif "django" in requirements:
277
+ result["app_type"] = "django"
278
+ result["port"] = DEFAULT_PORT
279
+ elif "fastapi" in requirements:
280
+ result["app_type"] = "fastapi"
281
+ result["port"] = DEFAULT_PORT
282
+
283
+ # Find entry point
284
+ for potential_entry in [
285
+ "app.py",
286
+ "main.py",
287
+ "run.py",
288
+ "server.py",
289
+ "application.py",
290
+ ]:
291
+ if (path_obj / potential_entry).exists():
292
+ result["entry_point"] = potential_entry
293
+ break
294
+
295
+ # Check for Node.js application
296
+ package_json = path_obj / "package.json"
297
+ if package_json.exists():
298
+ result["app_type"] = "node"
299
+ result["dependencies"] = "package.json"
300
+ result["port"] = DEFAULT_PORT
301
+
302
+ # Read package.json to understand the app better
303
+ try:
304
+ with open(package_json, "r", encoding="utf-8") as f:
305
+ pkg_data = json.load(f)
306
+
307
+ # Check for start script
308
+ if "scripts" in pkg_data and "start" in pkg_data["scripts"]:
309
+ result["start_command"] = pkg_data["scripts"]["start"]
310
+
311
+ # Detect framework from dependencies
312
+ deps = pkg_data.get("dependencies", {})
313
+ if "express" in deps:
314
+ result["app_type"] = "express"
315
+ elif "next" in deps:
316
+ result["app_type"] = "nextjs"
317
+ elif "react" in deps:
318
+ result["app_type"] = "react"
319
+
320
+ # Find entry point
321
+ if "main" in pkg_data:
322
+ result["entry_point"] = pkg_data["main"]
323
+ else:
324
+ for potential_entry in ["index.js", "server.js", "app.js"]:
325
+ if (path_obj / potential_entry).exists():
326
+ result["entry_point"] = potential_entry
327
+ break
328
+ except Exception as e:
329
+ logger.warning(f"Could not parse package.json: {e}")
330
+
331
+ # Check for other important files
332
+ for file_name in [
333
+ ".env.example",
334
+ "docker-compose.yml",
335
+ "Dockerfile",
336
+ ".dockerignore",
337
+ ]:
338
+ if (path_obj / file_name).exists():
339
+ result["additional_files"].append(file_name)
340
+
341
+ logger.debug(f"Analysis result: {result}")
342
+ return result
343
+
344
+ def _save_dockerfile(
345
+ self, dockerfile_content: str, path: str, port: int
346
+ ) -> Dict[str, Any]:
347
+ """Save Dockerfile content generated by the LLM.
348
+
349
+ Args:
350
+ dockerfile_content: Dockerfile content generated by LLM
351
+ path: Directory where to save the Dockerfile
352
+ port: Port exposed by the application
353
+
354
+ Returns:
355
+ Dictionary with status, path, and next steps
356
+ """
357
+ logger.debug(f"Saving Dockerfile to: {path}")
358
+
359
+ # Security check
360
+ if not self.path_validator.is_path_allowed(path):
361
+ return {
362
+ "status": "error",
363
+ "error": f"Access denied: {path} is not in allowed paths",
364
+ }
365
+
366
+ path_obj = Path(path).resolve()
367
+ if not path_obj.exists():
368
+ return {"status": "error", "error": f"Directory {path} does not exist"}
369
+
370
+ dockerfile_path = path_obj / "Dockerfile"
371
+
372
+ try:
373
+ # Save the LLM-generated Dockerfile
374
+ with open(dockerfile_path, "w", encoding="utf-8") as f:
375
+ f.write(dockerfile_content)
376
+
377
+ # Generate image name from directory
378
+ image_name = path_obj.name.lower().replace("_", "-").replace(" ", "-")
379
+
380
+ return {
381
+ "status": "success",
382
+ "path": str(dockerfile_path),
383
+ "next_steps": [
384
+ "1. Build the Docker image:",
385
+ f" cd {path_obj}",
386
+ f" docker build -t {image_name} .",
387
+ "",
388
+ "2. Run the container (keeps running in background):",
389
+ f" docker run -d -p {port}:{port} --name {image_name}-container {image_name}",
390
+ "",
391
+ "3. Access your application at:",
392
+ f" http://localhost:{port}",
393
+ "",
394
+ "4. View container logs:",
395
+ f" docker logs -f {image_name}-container",
396
+ "",
397
+ "5. Stop the container when done:",
398
+ f" docker stop {image_name}-container",
399
+ ],
400
+ }
401
+
402
+ except Exception as e:
403
+ return {"status": "error", "error": f"Failed to save Dockerfile: {str(e)}"}
404
+
405
+ def _build_image(self, path: str, tag: str) -> Dict[str, Any]:
406
+ """Build Docker image from Dockerfile."""
407
+ logger.debug(f"Building Docker image: {tag} from {path}")
408
+
409
+ # Security check
410
+ if not self.path_validator.is_path_allowed(path):
411
+ return {
412
+ "status": "error",
413
+ "error": f"Access denied: {path} is not in allowed paths",
414
+ }
415
+
416
+ # Check if Docker is available
417
+ try:
418
+ result = subprocess.run(
419
+ ["docker", "--version"],
420
+ capture_output=True,
421
+ text=True,
422
+ timeout=5,
423
+ check=False,
424
+ )
425
+ if result.returncode != 0:
426
+ return {
427
+ "status": "error",
428
+ "error": "Docker is not installed or not accessible. Please install Docker first.",
429
+ }
430
+ except Exception as e:
431
+ return {"status": "error", "error": f"Cannot access Docker: {str(e)}"}
432
+
433
+ # Build the image
434
+ try:
435
+ result = subprocess.run(
436
+ ["docker", "build", "-t", tag, path],
437
+ capture_output=True,
438
+ text=True,
439
+ timeout=300, # 5 minute timeout for build
440
+ check=False,
441
+ )
442
+
443
+ if result.returncode == 0:
444
+ return {
445
+ "status": "success",
446
+ "success": True,
447
+ "image": tag,
448
+ "output": result.stdout,
449
+ "message": f"Successfully built Docker image: {tag}",
450
+ }
451
+ else:
452
+ return {
453
+ "status": "error",
454
+ "success": False,
455
+ "error": f"Docker build failed: {result.stderr}",
456
+ "output": result.stdout,
457
+ }
458
+
459
+ except subprocess.TimeoutExpired:
460
+ return {
461
+ "status": "error",
462
+ "error": "Docker build timed out after 5 minutes",
463
+ }
464
+ except Exception as e:
465
+ return {"status": "error", "error": f"Failed to build image: {str(e)}"}
466
+
467
+ def _run_container(
468
+ self, image: str, port: str = None, name: str = None
469
+ ) -> Dict[str, Any]:
470
+ """Run Docker container from image."""
471
+ logger.debug(f"Running container from image: {image}")
472
+
473
+ # Build docker run command
474
+ cmd = ["docker", "run", "-d"] # Run in detached mode
475
+
476
+ if port:
477
+ cmd.extend(["-p", port])
478
+
479
+ if name:
480
+ cmd.extend(["--name", name])
481
+
482
+ cmd.append(image)
483
+
484
+ try:
485
+ result = subprocess.run(
486
+ cmd, capture_output=True, text=True, timeout=30, check=False
487
+ )
488
+
489
+ if result.returncode == 0:
490
+ container_id = result.stdout.strip()[:12]
491
+
492
+ response = {
493
+ "status": "success",
494
+ "success": True,
495
+ "container_id": container_id,
496
+ "image": image,
497
+ "message": f"Container {container_id} is running",
498
+ }
499
+
500
+ if port:
501
+ host_port = port.split(":")[0]
502
+ response["url"] = f"http://localhost:{host_port}"
503
+ response[
504
+ "message"
505
+ ] += f"\nAccess your application at: http://localhost:{host_port}"
506
+
507
+ return response
508
+ else:
509
+ return {
510
+ "status": "error",
511
+ "success": False,
512
+ "error": f"Failed to run container: {result.stderr}",
513
+ "output": result.stdout,
514
+ }
515
+
516
+ except subprocess.TimeoutExpired:
517
+ return {"status": "error", "error": "Docker run command timed out"}
518
+ except Exception as e:
519
+ return {"status": "error", "error": f"Failed to run container: {str(e)}"}
520
+
521
+ # MCP Interface Implementation
522
+ def get_mcp_tool_definitions(self) -> list[Dict[str, Any]]:
523
+ """Return MCP tool definitions for Docker agent."""
524
+ return [
525
+ {
526
+ "name": "dockerize",
527
+ "description": "Containerize an application by analyzing its structure, generating an optimized Dockerfile, building the Docker image, and running the container. Use this when the user wants to dockerize, containerize, or run their application in Docker. This performs the complete workflow: analyze → create Dockerfile → build image → run container.",
528
+ "inputSchema": {
529
+ "type": "object",
530
+ "properties": {
531
+ "appPath": {
532
+ "type": "string",
533
+ "description": "Absolute path to the application's root directory. Must be a complete filesystem path (e.g., C:/Users/name/myapp on Windows or /home/user/myapp on Linux). This is where the Dockerfile will be created and where dependency files (requirements.txt, package.json) should exist.",
534
+ },
535
+ "port": {
536
+ "type": "integer",
537
+ "description": "The port that the application listens on inside the container. This will be exposed in the Dockerfile and mapped to the same host port when running the container. Common values: 5000 (Flask), 3000 (Node.js/Express), 8000 (Django), 8080 (general web apps). Default: 5000",
538
+ "default": 5000,
539
+ },
540
+ },
541
+ "required": ["appPath"],
542
+ },
543
+ }
544
+ ]
545
+
546
    def execute_mcp_tool(
        self, tool_name: str, arguments: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Execute MCP tool call by delegating to LLM via process_query.

        Args:
            tool_name: Name of the MCP tool to run; only "dockerize" is known.
            arguments: Tool arguments; "appPath" (absolute directory path) is
                required, "port" is optional.

        Returns:
            Dict with a "success" flag plus result/error details mapped from
            the process_query outcome.

        Raises:
            ValueError: If tool_name is not a registered tool.
        """
        if tool_name == "dockerize":
            # Validate appPath is provided
            if "appPath" not in arguments:
                return {
                    "success": False,
                    "error": "appPath is required - must be an absolute path to the application directory",
                }

            app_path = arguments["appPath"]

            # Validate it's an absolute path (relative paths would resolve
            # against the server's cwd, not the caller's).
            path_obj = Path(app_path)
            if not path_obj.is_absolute():
                return {
                    "success": False,
                    "error": f"appPath must be an absolute path, got: {app_path}",
                }

            # Validate directory exists
            if not path_obj.exists():
                return {
                    "success": False,
                    "error": f"Directory does not exist: {app_path}",
                }

            # Validate it's a directory
            if not path_obj.is_dir():
                return {
                    "success": False,
                    "error": f"Path is not a directory: {app_path}",
                }

            # Security check
            if not self.path_validator.is_path_allowed(app_path):
                return {
                    "success": False,
                    "error": f"Access denied: {app_path} is not in allowed paths",
                }

            # Get parameters
            port = arguments.get("port", DEFAULT_PORT)

            # Construct natural language query for the LLM
            # Always do full workflow: dockerize → build → run
            query_parts = [f"Dockerize the application at {app_path}"]

            # Only mention the port when it differs from the default, to keep
            # the query minimal.
            if port != DEFAULT_PORT:
                query_parts.append(f"using port {port}")

            query_parts.append("then build and run the container")

            query = " ".join(query_parts) + "."

            # Let the LLM orchestrate the workflow
            result = self.process_query(user_input=query, trace=False)

            # Extract the final result
            # process_query returns: {status: "success"/"failed"/"incomplete", result: ..., error_history: [...]}
            status = result.get("status", "incomplete")
            final_result = result.get("result", "")

            # Only report failure if status is explicitly "failed"
            # Intermediate errors/warnings are acceptable as long as the overall task succeeded
            if status == "success":
                return {
                    "success": True,
                    "status": "completed",
                    "result": final_result,
                    "steps_taken": result.get("steps_taken", 0),
                    "duration": result.get("duration", 0),
                }
            elif status == "failed":
                # Include error details only when actually failed; surface the
                # most recent error as the headline message.
                error_history = result.get("error_history", [])
                error_msg = (
                    error_history[-1] if error_history else "Unknown error occurred"
                )
                return {
                    "success": False,
                    "error": error_msg,
                    "result": final_result,
                    "error_count": result.get("error_count", 0),
                }
            else:
                # Incomplete status: ran out of steps before finishing.
                return {
                    "success": False,
                    "error": "Task did not complete within step limit",
                    "result": final_result,
                    "steps_taken": result.get("steps_taken", 0),
                }

        else:
            raise ValueError(f"Unknown tool: {tool_name}")