dockerbrain 1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- core/__init__.py +1 -0
- core/__main__.py +4 -0
- core/ai_advisor.py +345 -0
- core/cli.py +369 -0
- core/dockerizer.py +310 -0
- core/fixer/__init__.py +21 -0
- core/fixer/container.py +171 -0
- core/fixer/dockerfile.py +225 -0
- core/llm.py +212 -0
- core/monitor/__init__.py +33 -0
- core/monitor/collector.py +197 -0
- core/monitor/display.py +279 -0
- core/monitor/snapshot.py +57 -0
- core/optimizer/__init__.py +23 -0
- core/optimizer/engine.py +84 -0
- core/optimizer/rules.py +221 -0
- core/storage.py +161 -0
- core/templates.py +559 -0
- core/utils.py +38 -0
- dockerbrain-1.0.dist-info/METADATA +156 -0
- dockerbrain-1.0.dist-info/RECORD +25 -0
- dockerbrain-1.0.dist-info/WHEEL +5 -0
- dockerbrain-1.0.dist-info/entry_points.txt +2 -0
- dockerbrain-1.0.dist-info/licenses/LICENSE +201 -0
- dockerbrain-1.0.dist-info/top_level.txt +1 -0
core/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "1.0"
|
core/__main__.py
ADDED
core/ai_advisor.py
ADDED
|
@@ -0,0 +1,345 @@
|
|
|
1
|
+
from __future__ import annotations

import json
import time
from datetime import datetime, timedelta, timezone
from pathlib import Path

import docker
from rich.align import Align
from rich.console import Console
from rich.live import Live
from rich.panel import Panel
from rich.spinner import Spinner
from rich.table import Table
from rich.text import Text

from core.llm import load_llm_config, generate_stream
from core.optimizer import RuleBasedOptimizer
from core.storage import get_metrics_since, get_all_container_names, store_ai_suggestion


# Shared Rich console for all terminal output in this module.
console = Console()

# System instructions
# Sent as the LLM system instruction for container-metrics analysis.
# Enforces a strict machine-parseable contract: a raw JSON array of
# {container, issue, recommendation} objects, which the renderer below parses.
_CONTAINER_SYSTEM_INSTRUCTION = (
    "You are a Docker container optimization expert. Analyze the provided metrics and return "
    "ONLY a valid JSON array — no markdown, no explanation, no code fences.\n\n"
    "Each element must be an object with exactly these three keys:\n"
    ' - "container": the container name (string)\n'
    ' - "issue": a short one-line description of the problem (max 10 words)\n'
    ' - "recommendation": a short, actionable fix (max 15 words)\n\n'
    "Rules:\n"
    "- DO NOT suggest lowering memory limits just because usage is low — only flag if near/at the limit.\n"
    "- Only include real, actionable findings. If everything is healthy, return: []\n"
    "- Keep every field SHORT. No long paragraphs. No code blocks inside the JSON strings.\n"
    "- Return ONLY the raw JSON array. Nothing else."
)

# Sent as the LLM system instruction for Dockerfile analysis.
# Same JSON-array contract, but findings are keyed by "line" (nullable)
# instead of "container".
_DOCKERFILE_SYSTEM_INSTRUCTION = (
    "You are a Dockerfile optimization expert. Analyze the provided Dockerfile and return "
    "ONLY a valid JSON array — no markdown, no explanation, no code fences.\n\n"
    "Each element must be an object with exactly these three keys:\n"
    ' - "line": the line number in the Dockerfile (integer), or null if general\n'
    ' - "issue": a short one-line description of the problem (max 10 words)\n'
    ' - "recommendation": a short, actionable fix (max 15 words)\n\n'
    "Detect issues like: large base images, hardcoded secrets, unchained RUN commands, "
    "missing non-root USER, no HEALTHCHECK, COPY . . before deps, missing .dockerignore, "
    "apt-get without --no-install-recommends, uncleaned apt cache, shell-form CMD.\n\n"
    "Keep every field SHORT. Return ONLY the raw JSON array."
)
|
|
51
|
+
|
|
52
|
+
def _render_compact_table(raw: str) -> None:
    """Parse a JSON array of findings from the AI and render a 3-column table.

    The payload shape is auto-detected: Dockerfile findings carry a "line"
    key, container findings carry a "container" key.  Invalid JSON gets a
    red error panel; an empty array prints a green all-clear message.

    Args:
        raw: Raw LLM response text (may arrive wrapped in markdown fences).
    """
    text = raw.strip()
    # Models sometimes ignore the "no code fences" rule — strip a leading
    # and trailing markdown fence before parsing.
    if text.startswith("```"):
        text = text[text.index("\n") + 1:] if "\n" in text else text
        if text.endswith("```"):
            text = text[:-3].rstrip()

    try:
        findings = json.loads(text)
    except json.JSONDecodeError:
        console.print(
            Panel(
                f"[red]Could not parse AI response.[/]\n\n[dim]{raw[:500]}[/]",
                title="[bold red]Parse Error[/]",
                border_style="red",
                expand=False,
            )
        )
        return

    if not findings:
        console.print("[green bold]✓ No issues found![/]")
        return

    # Guard the shape: valid JSON that is not a list of objects (e.g. a dict
    # or a list of strings) would otherwise raise a confusing KeyError below
    # or silently run a substring check instead of a key lookup.
    if not isinstance(findings, list) or not all(isinstance(f, dict) for f in findings):
        console.print(
            Panel(
                f"[red]AI returned an unexpected JSON shape.[/]\n\n[dim]{raw[:500]}[/]",
                title="[bold red]Parse Error[/]",
                border_style="red",
                expand=False,
            )
        )
        return

    # Auto-detect mode: container data has "container" key, Dockerfile has "line"
    is_dockerfile = "line" in findings[0] and "container" not in findings[0]

    table = Table(
        show_header=True,
        header_style="bold cyan",
        border_style="bright_blue",
        show_lines=True,
        expand=True,
    )

    if is_dockerfile:
        table.add_column("Line", style="bold", justify="right", width=6)
    else:
        table.add_column("Container", style="bold", no_wrap=True, min_width=16)
    table.add_column("Issue", style="yellow")
    table.add_column("Recommendation", style="green")

    for f in findings:
        if is_dockerfile:
            # "line" may legitimately be JSON null for general findings; the
            # old str(f.get("line", "—")) rendered that as the string "None".
            line_no = f.get("line")
            first_col = "—" if line_no is None else str(line_no)
        else:
            first_col = str(f.get("container", "?"))
        table.add_row(
            first_col,
            str(f.get("issue", "?")),
            str(f.get("recommendation", "?")),
        )

    console.print(table)
    console.print(f"\n[dim]{len(findings)} finding(s)[/]")
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
class AIAdvisor:
    """Multi-provider AI advisor for container & Dockerfile analysis.

    Builds structured prompts from stored metrics or a Dockerfile, streams a
    response from the configured LLM provider, and renders the findings as a
    compact table.
    """

    def __init__(self) -> None:
        # Provider / model / credential settings resolved from user config.
        self._config = load_llm_config()

    def _build_container_prompt(
        self,
        container_name: str | None,
        window_minutes: int,
        no_rules: bool = False,
    ) -> str:
        """Build a structured prompt from SQLite history + rule-based suggestions.

        Args:
            container_name: Limit the report to one container, or None for all.
            window_minutes: Metrics look-back window in minutes.
            no_rules: When True, skip the rule-based optimizer pre-pass.

        Returns:
            A markdown-formatted prompt combining per-container metric
            summaries, live Docker state, and rule-based findings.
        """
        since = (datetime.now(timezone.utc) - timedelta(minutes=window_minutes)).isoformat()
        rows = get_metrics_since(since, container=container_name)

        # Pick which containers to report on: explicit target, else every
        # container seen in the metrics window, else everything ever recorded.
        if container_name:
            containers = [container_name]
        elif rows:
            containers = sorted({r["container"] for r in rows})
        else:
            containers = get_all_container_names()

        rule_suggestions: list[str] = []
        if not no_rules:
            try:
                optimizer = RuleBasedOptimizer()
                suggestions = optimizer.analyze(container_name=container_name)
                for s in suggestions:
                    rule_suggestions.append(
                        f" [{s.severity.value}] {s.container_name}: {s.message}"
                    )
            except Exception:
                # Rule findings are a best-effort enrichment; never fail the
                # whole prompt build over them.
                rule_suggestions.append(" (Could not collect suggestions)")

        # FIX: create the Docker client once, outside the per-container loop.
        # The original called docker.from_env() on every iteration, opening a
        # fresh (never-closed) client connection per container.
        try:
            client = docker.from_env()
        except Exception:
            client = None  # daemon unreachable — fall back to metrics only

        container_sections: list[str] = []

        for cname in containers:
            c_rows = [r for r in rows if r["container"] == cname]

            if c_rows:
                cpus = [r["cpu_percent"] for r in c_rows if r["cpu_percent"] is not None]
                mems = [r["mem_usage_mb"] for r in c_rows if r["mem_usage_mb"] is not None]
                idle_counts = [r["idle_polls"] for r in c_rows if r.get("idle_polls")]

                avg_cpu = sum(cpus) / len(cpus) if cpus else 0.0
                peak_mem = max(mems) if mems else 0.0
                mem_limit = c_rows[-1].get("mem_limit_mb", 0) or 0
                max_idle = max(idle_counts) if idle_counts else 0

                section = (
                    f"### {cname}\n"
                    f"- Avg CPU: {avg_cpu:.2f}%\n"
                    f"- Peak Memory: {peak_mem:.1f} MB / {mem_limit:.1f} MB limit\n"
                    f"- Data points: {len(c_rows)} (last {window_minutes} min)\n"
                    f"- Max consecutive idle polls: {max_idle}\n"
                )
            else:
                section = (
                    f"### {cname}\n"
                    f"- No historical metrics available (run `dockerbrain monitor` first)\n"
                )

            # Enrich with live state when the container is currently running;
            # any failure (stopped container, no daemon) degrades gracefully.
            try:
                if client is None:
                    raise RuntimeError("Docker daemon unavailable")
                ctr = client.containers.get(cname)
                ctr.reload()
                restart_count = ctr.attrs.get("RestartCount", 0)
                image_tag = ctr.image.tags[0] if ctr.image.tags else ctr.image.short_id
                section += (
                    f"- Status: {ctr.status}\n"
                    f"- Restart count: {restart_count}\n"
                    f"- Image: {image_tag}\n"
                )
            except Exception:
                section += "- (Container not currently running)\n"

            container_sections.append(section)

        prompt = (
            "## Container Metrics Report\n\n"
            + "\n".join(container_sections)
            + "\n## Current Rule-Based Findings\n\n"
            + ("\n".join(rule_suggestions) if rule_suggestions else " None")
            + "\n\nPlease analyze the above and provide your optimization recommendations."
        )
        return prompt

    @staticmethod
    def _build_dockerfile_prompt(dockerfile_path: str) -> str:
        """Read a Dockerfile and build an optimization prompt for the LLM.

        Raises:
            SystemExit: When the file does not exist (after printing an error).
        """
        path = Path(dockerfile_path)
        if not path.is_file():
            console.print(f"[red bold]Error:[/] File not found: {path}")
            raise SystemExit(1)

        content = path.read_text(encoding="utf-8")

        # Tell the model up-front whether .dockerignore exists so it does not
        # emit a false "missing .dockerignore" finding.
        dockerignore_exists = (path.parent / ".dockerignore").exists()
        dockerignore_note = (
            "NOTE: A .dockerignore file already exists in this project — do NOT flag it as missing."
            if dockerignore_exists
            else "NOTE: No .dockerignore file was found in this project."
        )

        return (
            "## Dockerfile to Optimize\n\n"
            f"```dockerfile\n{content}\n```\n\n"
            f"{dockerignore_note}\n\n"
            "Please analyze this Dockerfile and suggest concrete optimizations covering:\n"
            "1. Multi-stage builds to reduce image size\n"
            "2. Optimal layer ordering for cache efficiency\n"
            "3. Removal of unnecessary packages\n"
            "4. Using smaller base images (Alpine, distroless, slim)\n"
            "5. Security best practices (non-root user, minimal permissions)\n"
            "6. Any other improvements\n\n"
            "For each suggestion, show a BEFORE → AFTER code diff."
        )

    def suggest_for_containers(
        self,
        container_name: str | None = None,
        window_minutes: int = 30,
        no_rules: bool = False,
    ) -> None:
        """Query the configured LLM with container metrics and render findings."""
        prompt = self._build_container_prompt(container_name, window_minutes, no_rules=no_rules)

        self._stream(
            prompt,
            system_instruction=_CONTAINER_SYSTEM_INSTRUCTION,
        )

    def suggest_for_dockerfile(self, dockerfile_path: str) -> None:
        """Query the configured LLM with a Dockerfile and render findings."""
        prompt = self._build_dockerfile_prompt(dockerfile_path)

        self._stream(
            prompt,
            system_instruction=_DOCKERFILE_SYSTEM_INSTRUCTION,
        )

    def _stream(self, prompt: str, system_instruction: str) -> None:
        """Collect the AI response behind a spinner, then render and persist it.

        Retries the LLM call up to two extra times with exponential backoff
        (2s, then 4s).  On success, the response is rendered via
        _render_compact_table and a truncated summary is stored.
        """
        console.print()

        header_text = Text()
        header_text.append("DockerBrain", style="bold cyan")
        header_text.append(" Suggest", style="dim")
        console.print(
            Panel(
                Align.center(header_text),
                border_style="bright_blue",
                style="on #1a1a2e",
            )
        )
        console.print()

        max_retries = 2
        full_text = ""
        last_error: Exception | None = None
        success = False

        for attempt in range(max_retries + 1):
            try:
                spinner_msg = (
                    "Please wait…"
                    if attempt == 0
                    else f"Retrying ({attempt}/{max_retries})…"
                )

                # Reset the buffer so a partial response from a failed
                # attempt never leaks into a retry.
                full_text = ""
                with Live(
                    Spinner("dots", text=f" {spinner_msg}", style="bold green"),
                    console=console,
                    refresh_per_second=10,
                ) as live:
                    for chunk in generate_stream(prompt, system_instruction, self._config):
                        full_text += chunk
                        live.update(
                            Spinner(
                                "dots",
                                text=" Issues Found...",
                                style="bold green",
                            )
                        )

                success = True
                break
            except Exception as exc:
                last_error = exc
                if attempt < max_retries:
                    wait = 2 ** (attempt + 1)  # 2s after attempt 0, 4s after attempt 1
                    console.print(
                        f"[yellow] Request failed: {exc}. "
                        f"Retrying in {wait}s…[/]"
                    )
                    time.sleep(wait)

        if not success:
            console.print(
                Panel(
                    f"[red bold]LLM API call failed after {max_retries + 1} attempts.[/]\n\n"
                    f"[dim]{last_error}[/]\n\n"
                    "Check your network connection and API key, then try again.",
                    title="[bold red]Request Failed[/]",
                    border_style="red",
                    expand=False,
                )
            )
            return

        console.print()
        _render_compact_table(full_text)

        # Persist a short one-line summary plus the full response for later review.
        summary = full_text[:300].replace("\n", " ").strip()
        if len(full_text) > 300:
            summary += "…"
        store_ai_suggestion(summary=summary, full_response=full_text)
|
|
327
|
+
|
|
328
|
+
def run_ai_suggest(
    container_name: str | None = None,
    window_minutes: int = 30,
    dockerfile_path: str | None = None,
    no_rules: bool = False,
) -> None:
    """Entry point: build an AIAdvisor and dispatch to the right analysis mode.

    A non-empty ``dockerfile_path`` selects Dockerfile analysis; otherwise
    container-metrics analysis runs over the given look-back window.
    """
    advisor = AIAdvisor()

    # Dockerfile mode takes precedence when a path was supplied.
    if dockerfile_path:
        advisor.suggest_for_dockerfile(dockerfile_path)
        return

    advisor.suggest_for_containers(
        container_name=container_name,
        window_minutes=window_minutes,
        no_rules=no_rules,
    )
|
|
345
|
+
|