@rog0x/mcp-docker-tools 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,126 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.containerLogsTool = void 0;
4
+ exports.handleContainerLogs = handleContainerLogs;
5
+ const node_child_process_1 = require("node:child_process");
6
+ exports.containerLogsTool = {
7
+ name: "docker_container_logs",
8
+ description: "Get logs from a Docker container. Supports retrieving the last N lines, " +
9
+ "filtering by keyword, and including timestamps. Can target a container by name or ID.",
10
+ inputSchema: {
11
+ type: "object",
12
+ properties: {
13
+ container: {
14
+ type: "string",
15
+ description: "Container name or ID to get logs from.",
16
+ },
17
+ tail: {
18
+ type: "number",
19
+ description: "Number of lines to retrieve from the end of logs. Defaults to 100.",
20
+ default: 100,
21
+ },
22
+ since: {
23
+ type: "string",
24
+ description: "Show logs since a timestamp (e.g., '2024-01-01T00:00:00') or relative duration (e.g., '1h', '30m').",
25
+ },
26
+ until: {
27
+ type: "string",
28
+ description: "Show logs until a timestamp or relative duration.",
29
+ },
30
+ filter: {
31
+ type: "string",
32
+ description: "Filter log lines to only include those containing this keyword (case-insensitive).",
33
+ },
34
+ timestamps: {
35
+ type: "boolean",
36
+ description: "Include timestamps in log output. Defaults to true.",
37
+ default: true,
38
+ },
39
+ },
40
+ required: ["container"],
41
+ },
42
+ };
43
/**
 * Handler for docker_container_logs: fetch (and optionally filter) the tail of
 * a container's log output.
 *
 * Fixes over the original: arguments are passed to docker as an argv array
 * (no shell), closing a command-injection hole via 'container'/'since'/'until';
 * and `docker logs` is invoked exactly once (the original ran it twice, once
 * discarding the output, doubling latency and the 30s timeout exposure).
 *
 * @param args.container  Container name or ID (required).
 * @param args.tail       Number of trailing lines (default 100).
 * @param args.since      Optional --since timestamp/duration.
 * @param args.until      Optional --until timestamp/duration.
 * @param args.filter     Optional case-insensitive keyword filter.
 * @param args.timestamps Include timestamps (default true).
 * @returns Formatted log text, or an "Error: ..." string.
 */
async function handleContainerLogs(args) {
    const container = args.container;
    const tail = typeof args.tail === "number" ? args.tail : 100;
    const since = args.since;
    const until = args.until;
    const filter = args.filter;
    const timestamps = args.timestamps !== false;
    if (!container || !container.trim()) {
        return "Error: 'container' parameter is required. Provide a container name or ID.";
    }
    const name = container.trim();
    // Docker names/IDs match [a-zA-Z0-9][a-zA-Z0-9_.-]*; rejecting anything
    // else also blocks shell-metacharacter injection attempts up front.
    if (!/^[A-Za-z0-9][A-Za-z0-9_.-]*$/.test(name)) {
        return `Error: Container '${container}' not found. Use docker_container_list to see available containers.`;
    }
    try {
        // Verify container exists. execFileSync takes an argv array, so user
        // input is never interpreted by a shell.
        try {
            (0, node_child_process_1.execFileSync)("docker", ["inspect", "--type=container", name], {
                encoding: "utf-8",
                timeout: 10000,
                stdio: ["pipe", "pipe", "pipe"],
            });
        }
        catch (inspectErr) {
            const m = inspectErr instanceof Error ? inspectErr.message : String(inspectErr);
            // A missing docker binary should surface the install hint from the
            // outer handler, not a misleading "container not found".
            if (m.includes("ENOENT"))
                throw inspectErr;
            return `Error: Container '${container}' not found. Use docker_container_list to see available containers.`;
        }
        // Build the logs argv.
        const logArgs = ["logs"];
        if (timestamps)
            logArgs.push("--timestamps");
        logArgs.push(`--tail=${tail}`);
        if (since)
            logArgs.push(`--since=${since}`);
        if (until)
            logArgs.push(`--until=${until}`);
        logArgs.push(name);
        // Run docker logs exactly once; containers may write to stderr, so
        // merge both pipes ourselves instead of re-running with '2>&1'.
        const res = (0, node_child_process_1.spawnSync)("docker", logArgs, {
            encoding: "utf-8",
            timeout: 30000,
            maxBuffer: 10 * 1024 * 1024,
        });
        if (res.error)
            throw res.error;
        if (res.status !== 0)
            throw new Error(res.stderr || `docker logs exited with status ${res.status}`);
        const combined = `${res.stdout ?? ""}${res.stderr ?? ""}`;
        if (!combined.trim()) {
            return `No logs found for container '${container}' with the specified parameters.`;
        }
        let lines = combined.split("\n");
        // Apply keyword filter
        if (filter) {
            const lowerFilter = filter.toLowerCase();
            lines = lines.filter((line) => line.toLowerCase().includes(lowerFilter));
            if (lines.length === 0) {
                return `No log lines matching '${filter}' found in the last ${tail} lines of container '${container}'.`;
            }
        }
        const header = `Logs for container '${container}' (${lines.length} lines)`;
        const separator = "=".repeat(Math.min(header.length, 60));
        const result = [header, separator, ...lines].join("\n");
        // Truncate if extremely long
        if (result.length > 50000) {
            return result.substring(0, 50000) + "\n\n... [output truncated at 50KB]";
        }
        return result;
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        if (message.includes("not found") || message.includes("not recognized") || message.includes("ENOENT")) {
            return "Error: Docker is not installed or not available in PATH. Please install Docker and ensure the 'docker' command is accessible.";
        }
        if (message.includes("Cannot connect") || message.includes("Is the docker daemon running")) {
            return "Error: Cannot connect to the Docker daemon. Is Docker running?";
        }
        return `Error fetching logs for '${container}': ${message}`;
    }
}
126
+ //# sourceMappingURL=container-logs.js.map
@@ -0,0 +1,21 @@
1
// Generated declaration file (dockerfile-analyzer.d.ts) for the
// docker_dockerfile_analyze tool; shapes mirror the runtime exports in
// dockerfile-analyzer.js.
export declare const dockerfileAnalyzerTool: {
    name: string;
    description: string;
    // JSON-schema-style input description. 'type' is narrowed to the literal
    // "object" while nested field types are widened to plain string/boolean.
    inputSchema: {
        type: "object";
        properties: {
            content: {
                type: string;
                description: string;
            };
            checkDockerignore: {
                type: string;
                description: string;
                default: boolean;
            };
        };
        required: string[];
    };
};
// Async handler; resolves to a formatted analysis report (or an error string).
export declare function handleDockerfileAnalyzer(args: Record<string, unknown>): Promise<string>;
//# sourceMappingURL=dockerfile-analyzer.d.ts.map
@@ -0,0 +1,278 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.dockerfileAnalyzerTool = void 0;
4
+ exports.handleDockerfileAnalyzer = handleDockerfileAnalyzer;
5
+ exports.dockerfileAnalyzerTool = {
6
+ name: "docker_dockerfile_analyze",
7
+ description: "Analyze a Dockerfile for best practices including multi-stage builds, non-root user, " +
8
+ ".dockerignore usage, layer caching order, image size optimization, and security. " +
9
+ "Provide the Dockerfile content as input and receive a detailed report with suggestions.",
10
+ inputSchema: {
11
+ type: "object",
12
+ properties: {
13
+ content: {
14
+ type: "string",
15
+ description: "The full content of the Dockerfile to analyze.",
16
+ },
17
+ checkDockerignore: {
18
+ type: "boolean",
19
+ description: "If true, also checks for common .dockerignore recommendations. Defaults to true.",
20
+ default: true,
21
+ },
22
+ },
23
+ required: ["content"],
24
+ },
25
+ };
26
/**
 * Statically analyze Dockerfile text for best-practice findings.
 *
 * Fixes over the original: removed dead state tracking (hasExpose, hasAdd,
 * hasAptGetCleanup, hasEnvForVersions, copiesBeforeRun, and the empty
 * lastCopyLine branch), and stopped flagging `FROM <stage-alias>` lines in
 * multi-stage files as ":latest"/non-minimal images — a stage reference is
 * not a registry image.
 *
 * @param content           Raw Dockerfile text.
 * @param checkDockerignore When true, append a reminder finding about .dockerignore.
 * @returns {{findings: Array, summary: Object}} findings plus an aggregate
 *          summary (stage count, multi-stage flag, non-root user, healthcheck,
 *          RUN count, :latest usage, and a score string from calculateScore).
 */
function analyzeDockerfile(content, checkDockerignore) {
    const lines = content.split("\n");
    const findings = [];
    const fromStatements = [];
    let hasUser = false;
    let hasCopy = false;
    let hasHealthcheck = false;
    let hasWorkdir = false;
    let runCount = 0;
    let usesLatestTag = false;
    for (let i = 0; i < lines.length; i++) {
        const trimmed = lines[i].trim();
        const lineNum = i + 1;
        // Skip comments and blank lines.
        if (!trimmed || trimmed.startsWith("#"))
            continue;
        const instruction = trimmed.split(/\s+/)[0].toUpperCase();
        if (instruction === "FROM") {
            const rest = trimmed.substring(4).trim();
            const parts = rest.split(/\s+/);
            const image = parts[0];
            // Stage alias is the token following an "AS" keyword, if any.
            const alias = parts.find((_, idx) => idx > 0 && parts[idx - 1]?.toUpperCase() === "AS");
            // 'FROM <alias>' referencing an earlier build stage is not a
            // registry image, so tag/size advice does not apply to it.
            const isStageReference = fromStatements.some((s) => s.alias === image);
            fromStatements.push({ line: lineNum, image, alias });
            if (!isStageReference) {
                if (image.endsWith(":latest") || (!image.includes(":") && !image.includes("@"))) {
                    usesLatestTag = true;
                    findings.push({
                        category: "Versioning",
                        severity: "warning",
                        message: `Line ${lineNum}: Base image '${image}' uses implicit or explicit :latest tag.`,
                        suggestion: "Pin to a specific version tag (e.g., node:20-alpine) for reproducible builds.",
                        line: lineNum,
                    });
                }
                if (!image.includes("alpine") && !image.includes("slim") && !image.includes("distroless") && !image.includes("scratch")) {
                    findings.push({
                        category: "Image Size",
                        severity: "info",
                        message: `Line ${lineNum}: Base image '${image}' is not a minimal variant.`,
                        suggestion: "Consider using an alpine, slim, or distroless variant to reduce image size.",
                        line: lineNum,
                    });
                }
            }
        }
        else if (instruction === "RUN") {
            runCount++;
            const installsPackages = trimmed.includes("apt-get") || trimmed.includes("apk add");
            const cleansUp = trimmed.includes("rm -rf /var/lib/apt/lists") || trimmed.includes("--no-cache");
            if (installsPackages && !cleansUp) {
                findings.push({
                    category: "Layer Caching",
                    severity: "warning",
                    message: `Line ${lineNum}: Package install without cache cleanup.`,
                    suggestion: "Add 'rm -rf /var/lib/apt/lists/*' in the same RUN layer, or use 'apk add --no-cache'.",
                    line: lineNum,
                });
            }
            if (trimmed.includes("curl") && !trimmed.includes("--fail")) {
                findings.push({
                    category: "Reliability",
                    severity: "info",
                    message: `Line ${lineNum}: curl used without --fail flag.`,
                    suggestion: "Use 'curl --fail' so the build fails on HTTP errors.",
                    line: lineNum,
                });
            }
        }
        else if (instruction === "COPY") {
            hasCopy = true;
        }
        else if (instruction === "ADD") {
            // ADD is legitimate only for tar auto-extraction and remote URLs.
            if (!trimmed.includes(".tar") && !trimmed.includes("http")) {
                findings.push({
                    category: "Best Practice",
                    severity: "warning",
                    message: `Line ${lineNum}: ADD instruction used instead of COPY.`,
                    suggestion: "Use COPY unless you specifically need ADD's tar extraction or URL features.",
                    line: lineNum,
                });
            }
        }
        else if (instruction === "USER") {
            hasUser = true;
        }
        else if (instruction === "HEALTHCHECK") {
            hasHealthcheck = true;
        }
        else if (instruction === "WORKDIR") {
            hasWorkdir = true;
        }
    }
    // Multi-stage build check
    const isMultiStage = fromStatements.length > 1;
    if (!isMultiStage) {
        findings.push({
            category: "Multi-stage Build",
            severity: "info",
            message: "Dockerfile does not use multi-stage builds.",
            suggestion: "Multi-stage builds reduce final image size by separating build dependencies from runtime. " +
                "Consider adding a build stage and copying only needed artifacts to the final stage.",
        });
    }
    else {
        findings.push({
            category: "Multi-stage Build",
            severity: "info",
            message: `Good: Uses multi-stage build with ${fromStatements.length} stages.`,
        });
    }
    // Non-root user
    if (!hasUser) {
        findings.push({
            category: "Security",
            severity: "warning",
            message: "No USER instruction found. Container will run as root by default.",
            suggestion: "Add a USER instruction to run as a non-root user (e.g., USER 1001 or USER appuser).",
        });
    }
    // HEALTHCHECK
    if (!hasHealthcheck) {
        findings.push({
            category: "Reliability",
            severity: "info",
            message: "No HEALTHCHECK instruction found.",
            suggestion: "Add a HEALTHCHECK to enable Docker to monitor container health.",
        });
    }
    // RUN consolidation
    if (runCount > 5) {
        findings.push({
            category: "Layer Caching",
            severity: "warning",
            message: `Found ${runCount} separate RUN instructions.`,
            suggestion: "Consolidate related RUN commands using && to reduce the number of image layers.",
        });
    }
    // WORKDIR
    if (!hasWorkdir && hasCopy) {
        findings.push({
            category: "Best Practice",
            severity: "info",
            message: "No WORKDIR instruction found.",
            suggestion: "Use WORKDIR to set a working directory instead of relying on the default or using 'cd' in RUN.",
        });
    }
    // Layer caching order hint
    if (hasCopy && runCount > 0) {
        findings.push({
            category: "Layer Caching",
            severity: "info",
            message: "Tip: Copy dependency manifests (package.json, requirements.txt) before source code.",
            suggestion: "COPY package*.json ./ then RUN npm install, then COPY the rest. " +
                "This leverages Docker layer caching so dependencies are only reinstalled when manifests change.",
        });
    }
    // .dockerignore
    if (checkDockerignore) {
        findings.push({
            category: ".dockerignore",
            severity: "info",
            message: "Ensure a .dockerignore file exists alongside the Dockerfile.",
            suggestion: "Common entries: node_modules, .git, .env, dist, *.log, .DS_Store, __pycache__, .venv",
        });
    }
    const score = calculateScore(findings);
    return {
        findings,
        summary: {
            totalFindings: findings.length,
            errors: findings.filter((f) => f.severity === "error").length,
            warnings: findings.filter((f) => f.severity === "warning").length,
            info: findings.filter((f) => f.severity === "info").length,
            stages: fromStatements.length,
            isMultiStage,
            hasNonRootUser: hasUser,
            hasHealthcheck,
            runInstructions: runCount,
            usesLatestTag,
            score,
        },
    };
}
/**
 * Convert findings into a 0-100 score string: errors cost 15 points and
 * warnings 7, floored at zero; info findings are free.
 */
function calculateScore(findings) {
    let score = 100;
    for (const f of findings) {
        if (f.severity === "error")
            score -= 15;
        if (f.severity === "warning")
            score -= 7;
    }
    score = Math.max(0, score);
    if (score >= 80)
        return `${score}/100 (Good)`;
    if (score >= 50)
        return `${score}/100 (Needs improvement)`;
    return `${score}/100 (Poor)`;
}
245
/**
 * Handler for docker_dockerfile_analyze: validate input, run the static
 * analysis, and render findings as a report grouped by category.
 */
async function handleDockerfileAnalyzer(args) {
    const content = args.content;
    const checkDockerignore = args.checkDockerignore !== false;
    if (!content || !content.trim()) {
        return "Error: Dockerfile content is empty. Please provide the Dockerfile content to analyze.";
    }
    const { findings, summary } = analyzeDockerfile(content, checkDockerignore);
    // Report header with the aggregate summary.
    const report = [
        "# Dockerfile Analysis Report\n",
        `Score: ${summary.score}`,
        `Stages: ${summary.stages} | Multi-stage: ${summary.isMultiStage ? "Yes" : "No"}`,
        `Non-root user: ${summary.hasNonRootUser ? "Yes" : "No"} | Healthcheck: ${summary.hasHealthcheck ? "Yes" : "No"}`,
        `Findings: ${summary.errors} errors, ${summary.warnings} warnings, ${summary.info} info\n`,
    ];
    // Group findings by category, preserving first-seen order.
    const byCategory = new Map();
    for (const finding of findings) {
        const bucket = byCategory.get(finding.category);
        if (bucket) {
            bucket.push(finding);
        }
        else {
            byCategory.set(finding.category, [finding]);
        }
    }
    for (const [category, items] of byCategory) {
        report.push(`## ${category}`);
        for (const item of items) {
            let icon = "[INFO]";
            if (item.severity === "error") {
                icon = "[ERROR]";
            }
            else if (item.severity === "warning") {
                icon = "[WARN]";
            }
            const lineRef = item.line ? ` (line ${item.line})` : "";
            report.push(`${icon}${lineRef} ${item.message}`);
            if (item.suggestion) {
                report.push(`  -> ${item.suggestion}`);
            }
        }
        report.push("");
    }
    return report.join("\n");
}
278
+ //# sourceMappingURL=dockerfile-analyzer.js.map
@@ -0,0 +1,27 @@
1
// Generated declaration file (image-list.d.ts) for the docker_image_list tool;
// shapes mirror the runtime exports in image-list.js.
export declare const imageListTool: {
    name: string;
    description: string;
    inputSchema: {
        type: "object";
        properties: {
            filter: {
                type: string;
                description: string;
            };
            showDangling: {
                type: string;
                description: string;
                default: boolean;
            };
            format: {
                type: string;
                enum: string[];
                description: string;
                default: string;
            };
        };
        // No required parameters; never[] is the inferred type of the empty
        // array literal in the source.
        required: never[];
    };
};
// Async handler; resolves to a JSON or table listing (or an error string).
export declare function handleImageList(args: Record<string, unknown>): Promise<string>;
//# sourceMappingURL=image-list.d.ts.map
@@ -0,0 +1,117 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.imageListTool = void 0;
4
+ exports.handleImageList = handleImageList;
5
+ const node_child_process_1 = require("node:child_process");
6
+ exports.imageListTool = {
7
+ name: "docker_image_list",
8
+ description: "List Docker images with size, tags, created date, and layer count. " +
9
+ "Optionally filter by repository name.",
10
+ inputSchema: {
11
+ type: "object",
12
+ properties: {
13
+ filter: {
14
+ type: "string",
15
+ description: "Filter images by repository name (partial match). Leave empty to list all.",
16
+ },
17
+ showDangling: {
18
+ type: "boolean",
19
+ description: "Include dangling (untagged) images. Defaults to false.",
20
+ default: false,
21
+ },
22
+ format: {
23
+ type: "string",
24
+ enum: ["table", "json"],
25
+ description: "Output format: 'table' or 'json'. Defaults to 'json'.",
26
+ default: "json",
27
+ },
28
+ },
29
+ required: [],
30
+ },
31
+ };
32
/**
 * Count filesystem layers for an image via `docker inspect`.
 * Returns 0 when the image cannot be inspected or the output is not numeric.
 */
function getLayerCount(imageId) {
    try {
        const output = (0, node_child_process_1.execSync)(`docker inspect --format="{{len .RootFS.Layers}}" ${imageId}`, {
            encoding: "utf-8",
            timeout: 10000,
        });
        const count = parseInt(output.trim(), 10);
        // NaN (unparseable output) collapses to 0, matching the fallback below.
        return count || 0;
    }
    catch {
        // Any inspect failure (missing image, docker unavailable) yields 0.
        return 0;
    }
}
44
/**
 * Handler for docker_image_list: shells out to `docker images`, enriches each
 * entry with a layer count, and renders JSON or a plain-text table.
 *
 * Fix over the original: the user-supplied 'filter' was interpolated into a
 * shell command line unvalidated (command injection); it is now restricted to
 * characters valid in an image reference before use.
 *
 * @param args.filter       Optional repository filter passed to `docker images`.
 * @param args.showDangling Include dangling images when true (default false).
 * @param args.format       "json" (default) or "table".
 * @returns Listing text, or an "Error: ..." string.
 */
async function handleImageList(args) {
    const filter = args.filter || "";
    const showDangling = args.showDangling === true;
    const format = args.format || "json";
    // 'filter' is interpolated into a shell command below; restrict it to the
    // character set of an image reference to prevent shell injection.
    if (filter && !/^[A-Za-z0-9][A-Za-z0-9._:\/-]*$/.test(filter)) {
        return `Error: invalid filter '${filter}'. Only image repository characters are allowed.`;
    }
    try {
        let cmd = `docker images --format "{{.ID}}|||{{.Repository}}|||{{.Tag}}|||{{.Size}}|||{{.CreatedAt}}"`;
        if (!showDangling) {
            cmd += ` --filter "dangling=false"`;
        }
        if (filter) {
            cmd += ` ${filter}`;
        }
        const raw = (0, node_child_process_1.execSync)(cmd, { encoding: "utf-8", timeout: 15000 }).trim();
        if (!raw) {
            return filter
                ? `No images found matching '${filter}'.`
                : "No Docker images found on this system.";
        }
        const images = [];
        for (const line of raw.split("\n")) {
            if (!line.trim())
                continue;
            const parts = line.split("|||");
            // Skip malformed rows rather than emitting partial records.
            if (parts.length < 5)
                continue;
            const id = parts[0];
            images.push({
                id,
                repository: parts[1],
                tag: parts[2],
                size: parts[3],
                created: parts[4],
                // NOTE: one `docker inspect` per image; acceptable for typical counts.
                layers: getLayerCount(id),
            });
        }
        // Sum sizes in MB; docker prints human-readable units (kB/MB/GB).
        const totalSize = images
            .map((img) => {
                const s = img.size.toUpperCase();
                if (s.includes("GB"))
                    return parseFloat(s) * 1024;
                if (s.includes("MB"))
                    return parseFloat(s);
                if (s.includes("KB"))
                    return parseFloat(s) / 1024;
                return 0;
            })
            .reduce((a, b) => a + b, 0);
        const totalSizeStr = totalSize >= 1024
            ? `${(totalSize / 1024).toFixed(2)} GB`
            : `${totalSize.toFixed(1)} MB`;
        if (format === "json") {
            return JSON.stringify({ count: images.length, totalSize: totalSizeStr, images }, null, 2);
        }
        let table = `Found ${images.length} image(s) | Total size: ${totalSizeStr}\n\n`;
        for (const img of images) {
            table += `${img.repository}:${img.tag}\n`;
            table += `  ID: ${img.id}\n`;
            table += `  Size: ${img.size}\n`;
            table += `  Layers: ${img.layers}\n`;
            table += `  Created: ${img.created}\n\n`;
        }
        return table.trim();
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        if (message.includes("not found") || message.includes("not recognized") || message.includes("ENOENT")) {
            return "Error: Docker is not installed or not available in PATH. Please install Docker and ensure the 'docker' command is accessible.";
        }
        if (message.includes("Cannot connect") || message.includes("Is the docker daemon running")) {
            return "Error: Cannot connect to the Docker daemon. Is Docker running?";
        }
        return `Error listing images: ${message}`;
    }
}
117
+ //# sourceMappingURL=image-list.js.map
package/package.json ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ "name": "@rog0x/mcp-docker-tools",
3
+ "version": "1.0.0",
4
+ "description": "Docker management tools for AI agents via MCP",
5
+ "main": "dist/index.js",
6
+ "types": "dist/index.d.ts",
7
+ "bin": {
8
+ "mcp-docker-tools": "dist/index.js"
9
+ },
10
+ "scripts": {
11
+ "build": "tsc",
12
+ "start": "node dist/index.js"
13
+ },
14
+ "keywords": [
15
+ "mcp",
16
+ "docker",
17
+ "containers",
18
+ "ai-tools",
19
+ "model-context-protocol"
20
+ ],
21
+ "license": "MIT",
22
+ "dependencies": {
23
+ "@modelcontextprotocol/sdk": "^1.12.1"
24
+ },
25
+ "devDependencies": {
26
+ "@types/node": "^22.15.3",
27
+ "typescript": "^5.8.3"
28
+ }
29
+ }