unix-disk-mcp 0.1.0

@@ -0,0 +1,311 @@
+ import { z } from "zod";
+ import { readdirSync, statSync } from "fs";
+ import { join } from "path";
+ import { execSync } from "child_process";
+ import { homedir } from "os";
+ import { expandPath } from "../config/index.js";
+ export function registerExplorationTools(server, config) {
+     // list_directory
+     server.tool("list_directory", "List contents of a directory with file sizes and dates", {
+         path: z.string().describe("Absolute path to the directory"),
+         show_hidden: z
+             .boolean()
+             .optional()
+             .default(false)
+             .describe("Include hidden files (starting with .)"),
+     }, async ({ path, show_hidden }) => {
+         try {
+             const expandedPath = expandPath(path);
+             const entries = readdirSync(expandedPath, { withFileTypes: true });
+             const items = entries
+                 .filter((entry) => show_hidden || !entry.name.startsWith("."))
+                 .map((entry) => {
+                     const fullPath = join(expandedPath, entry.name);
+                     try {
+                         const stats = statSync(fullPath);
+                         return {
+                             name: entry.name,
+                             type: entry.isDirectory() ? "directory" : "file",
+                             size: entry.isFile() ? stats.size : null,
+                             modified: stats.mtime.toISOString(),
+                             accessed: stats.atime.toISOString(),
+                         };
+                     }
+                     catch {
+                         return {
+                             name: entry.name,
+                             type: entry.isDirectory() ? "directory" : "file",
+                             size: null,
+                             modified: null,
+                             accessed: null,
+                             error: "Could not read stats",
+                         };
+                     }
+                 });
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({ success: true, data: items }, null, 2),
+                     },
+                 ],
+             };
+         }
+         catch (error) {
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: false,
+                             error: error instanceof Error ? error.message : "Unknown error",
+                             code: "LIST_DIRECTORY_FAILED",
+                         }),
+                     },
+                 ],
+             };
+         }
+     });
+     // get_disk_usage
+     server.tool("get_disk_usage", "Get overview of disk space usage", {}, async () => {
+         try {
+             let disk;
+             if (process.platform === 'darwin') {
+                 // macOS: Use diskutil for accurate APFS container usage
+                 const diskutilOutput = execSync("diskutil info / | grep -E 'Volume Name|Container Total Space|Container Free Space'", {
+                     encoding: "utf-8"
+                 });
+                 const lines = diskutilOutput.trim().split("\n");
+                 const volumeName = lines[0]?.split(":")[1]?.trim() || "Unknown";
+                 // Parse container space - format: "494.4 GB (494384795648 Bytes) (exactly...)"
+                 const totalLine = lines[1]?.split(":")[1]?.trim() || "";
+                 const freeLine = lines[2]?.split(":")[1]?.trim() || "";
+                 // Extract human-readable values before the first parenthesis (e.g., "494.4 GB")
+                 const totalGB = totalLine.split("(")[0]?.trim() || "Unknown";
+                 const freeGB = freeLine.split("(")[0]?.trim() || "Unknown";
+                 // Extract bytes for calculations - inside first parenthesis
+                 const totalBytesMatch = totalLine.match(/\((\d+) Bytes\)/);
+                 const freeBytesMatch = freeLine.match(/\((\d+) Bytes\)/);
+                 const totalBytes = totalBytesMatch ? parseInt(totalBytesMatch[1]) : 0;
+                 const freeBytes = freeBytesMatch ? parseInt(freeBytesMatch[1]) : 0;
+                 const usedBytes = totalBytes - freeBytes;
+                 const percentUsed = totalBytes > 0 ? Math.round((usedBytes / totalBytes) * 100) : 0;
+                 // Format used space
+                 const usedGB = (usedBytes / 1e9).toFixed(1) + " GB";
+                 disk = {
+                     volume: volumeName,
+                     total: totalGB,
+                     used: usedGB,
+                     available: freeGB,
+                     percent_used: `${percentUsed}%`,
+                     note: "APFS container usage (accurate)",
+                 };
+             }
+             else {
+                 // Linux: Use df
+                 const dfOutput = execSync("df -h / | tail -1", { encoding: "utf-8" });
+                 const parts = dfOutput.trim().split(/\s+/);
+                 disk = {
+                     filesystem: parts[0],
+                     total: parts[1],
+                     used: parts[2],
+                     available: parts[3],
+                     percent_used: parts[4],
+                     mounted: parts[5],
+                 };
+             }
+             // Get home directory breakdown
+             const home = homedir();
+             const entries = readdirSync(home, { withFileTypes: true });
+             const breakdown = entries
+                 .filter((entry) => entry.isDirectory() && !entry.name.startsWith("."))
+                 .slice(0, 15) // Limit to avoid too many du calls
+                 .map((entry) => {
+                     const fullPath = join(home, entry.name);
+                     try {
+                         const duOutput = execSync(`du -sk "${fullPath}" 2>/dev/null || echo "0 ${fullPath}"`, {
+                             encoding: "utf-8",
+                         });
+                         const size = parseInt(duOutput.split("\t")[0]) * 1024;
+                         return { path: fullPath, size };
+                     }
+                     catch {
+                         return { path: fullPath, size: 0, error: "Could not calculate size" };
+                     }
+                 })
+                 .sort((a, b) => b.size - a.size);
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: true,
+                             data: {
+                                 disk,
+                                 home_breakdown: breakdown,
+                             },
+                         }, null, 2),
+                     },
+                 ],
+             };
+         }
+         catch (error) {
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: false,
+                             error: error instanceof Error ? error.message : "Unknown error",
+                             code: "DISK_USAGE_FAILED",
+                         }),
+                     },
+                 ],
+             };
+         }
+     });
+     // find_large_items
+     server.tool("find_large_items", "Find files and directories above a size threshold. WORKFLOW: (1) Start with max_depth=3 on home dir for overview, (2) Identify large branches (e.g., Library/Application Support/Steam), (3) Drill down into those specific paths with max_depth=5-8, (4) For final details, search specific subdirs with no depth limit. This progressive approach is fast and gives clear actionable results.", {
+         path: z.string().describe("Path to search within"),
+         min_size_mb: z.number().describe("Minimum size in megabytes"),
+         max_depth: z
+             .number()
+             .optional()
+             .describe("Maximum depth from this path. Start with 3 for overview, use 5-8 for branch exploration, omit for deep dive. REQUIRED for initial scans to avoid slow searches."),
+         max_results: z
+             .number()
+             .optional()
+             .default(20)
+             .describe("Maximum number of results to return"),
+     }, async ({ path, min_size_mb, max_depth, max_results }) => {
+         try {
+             const expandedPath = expandPath(path);
+             // Find large files (no depth limit, but use timeout)
+             const fileCmd = `timeout 30 find "${expandedPath}" -type f -size +${min_size_mb}M 2>/dev/null || true`;
+             const fileOutput = execSync(fileCmd, { encoding: "utf-8", maxBuffer: 10 * 1024 * 1024 });
+             const files = fileOutput
+                 .trim()
+                 .split("\n")
+                 .filter((line) => line.length > 0)
+                 .slice(0, max_results * 2) // Limit how many we stat
+                 .map((filePath) => {
+                     try {
+                         const stats = statSync(filePath);
+                         return { path: filePath, size: stats.size, type: "file" };
+                     }
+                     catch {
+                         return null;
+                     }
+                 })
+                 .filter((item) => item !== null);
+             // Find large directories using du
+             const minSizeKb = min_size_mb * 1024;
+             const depthFlag = max_depth !== undefined ? `-d ${max_depth}` : `-a`;
+             const dirCmd = `du ${depthFlag} -k "${expandedPath}" 2>/dev/null | awk '$1 > ${minSizeKb}' | sort -rn | head -${max_results * 2}`;
+             const dirOutput = execSync(dirCmd, { encoding: "utf-8", maxBuffer: 10 * 1024 * 1024, timeout: 60000 });
+             const dirs = dirOutput
+                 .trim()
+                 .split("\n")
+                 .filter((line) => line.length > 0)
+                 .map((line) => {
+                     const parts = line.split("\t");
+                     if (parts.length === 2) {
+                         const size = parseInt(parts[0]) * 1024;
+                         const itemPath = parts[1];
+                         // Check if it's a directory
+                         try {
+                             const stats = statSync(itemPath);
+                             if (stats.isDirectory()) {
+                                 return { path: itemPath, size, type: "directory" };
+                             }
+                         }
+                         catch { }
+                     }
+                     return null;
+                 })
+                 .filter((item) => item !== null);
+             // Filter out parent directories - only keep leaves or items without children in results
+             const filteredDirs = dirs.filter((dir) => {
+                 // Check if any other directory in the list is a child of this one
+                 const hasChildInList = dirs.some((other) => other !== dir && other.path.startsWith(dir.path + "/"));
+                 return !hasChildInList;
+             });
+             // Combine and sort by size
+             const items = [...files, ...filteredDirs]
+                 .sort((a, b) => b.size - a.size)
+                 .slice(0, max_results);
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({ success: true, data: items }, null, 2),
+                     },
+                 ],
+             };
+         }
+         catch (error) {
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: false,
+                             error: error instanceof Error ? error.message : "Unknown error",
+                             code: "FIND_LARGE_ITEMS_FAILED",
+                         }),
+                     },
+                 ],
+             };
+         }
+     });
+     // get_item_info
+     server.tool("get_item_info", "Get detailed information about a file or directory", {
+         path: z.string().describe("Path to the file or directory"),
+     }, async ({ path }) => {
+         try {
+             const expandedPath = expandPath(path);
+             const stats = statSync(expandedPath);
+             const isDirectory = stats.isDirectory();
+             let size = stats.size;
+             if (isDirectory) {
+                 // Calculate directory size
+                 const duOutput = execSync(`du -sk "${expandedPath}" 2>/dev/null`, {
+                     encoding: "utf-8",
+                 });
+                 size = parseInt(duOutput.split("\t")[0]) * 1024;
+             }
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: true,
+                             data: {
+                                 path: expandedPath,
+                                 type: isDirectory ? "directory" : "file",
+                                 size,
+                                 modified: stats.mtime.toISOString(),
+                                 accessed: stats.atime.toISOString(),
+                                 created: stats.birthtime.toISOString(),
+                             },
+                         }, null, 2),
+                     },
+                 ],
+             };
+         }
+         catch (error) {
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: false,
+                             error: error instanceof Error ? error.message : "Unknown error",
+                             code: "GET_ITEM_INFO_FAILED",
+                         }),
+                     },
+                 ],
+             };
+         }
+     });
+ }
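
The tools above call expandPath from ../config/index.js, a module that is not included in this diff. As a point of reference, here is a minimal TypeScript sketch of the behavior the call sites imply (tilde expansion to an absolute path); this is an illustrative assumption, not the package's actual implementation:

import { homedir } from "os";
import { resolve } from "path";

// Hypothetical sketch of expandPath from ../config/index.js (not in this diff):
// expand a leading "~" to the user's home directory, then make the path absolute.
export function expandPath(path: string): string {
    if (path === "~" || path.startsWith("~/")) {
        return resolve(homedir(), path.slice(2));
    }
    return resolve(path);
}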
@@ -0,0 +1,6 @@
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+ import { Config } from "../config/index.js";
+ export declare function registerStagingTools(server: McpServer, config: Config): void;
+ export declare function getStagedFilePath(): string;
+ export declare function getHistoryFilePath(): string;
+ export declare function getDataDir(): string;
@@ -0,0 +1,236 @@
+ import { z } from "zod";
+ import { existsSync, readFileSync, writeFileSync, mkdirSync, statSync } from "fs";
+ import { join } from "path";
+ import { homedir } from "os";
+ import { execSync } from "child_process";
+ import { expandPath, isProtectedPath } from "../config/index.js";
+ // XDG Base Directory paths
+ const XDG_DATA_HOME = process.env.XDG_DATA_HOME || join(homedir(), ".local", "share");
+ const DATA_DIR = join(XDG_DATA_HOME, "unix-disk-mcp");
+ const STAGED_FILE = join(DATA_DIR, "staged.json");
+ const HISTORY_FILE = join(DATA_DIR, "history.json");
+ function ensureDataDir() {
+     if (!existsSync(DATA_DIR)) {
+         mkdirSync(DATA_DIR, { recursive: true });
+     }
+ }
+ function loadStaged() {
+     ensureDataDir();
+     if (!existsSync(STAGED_FILE)) {
+         return { items: [] };
+     }
+     const raw = readFileSync(STAGED_FILE, "utf-8");
+     return JSON.parse(raw);
+ }
+ function saveStaged(data) {
+     ensureDataDir();
+     writeFileSync(STAGED_FILE, JSON.stringify(data, null, 2));
+ }
+ function getItemSize(path) {
+     try {
+         const stats = statSync(path);
+         if (stats.isDirectory()) {
+             const duOutput = execSync(`du -sk "${path}" 2>/dev/null`, { encoding: "utf-8" });
+             return parseInt(duOutput.split("\t")[0]) * 1024;
+         }
+         return stats.size;
+     }
+     catch {
+         return 0;
+     }
+ }
+ export function registerStagingTools(server, config) {
+     // stage_for_deletion
+     server.tool("stage_for_deletion", "Add a path to the staged deletion list", {
+         path: z.string().describe("Path to stage for deletion"),
+         reason: z.string().optional().describe("Reason for staging this item"),
+     }, async ({ path, reason }) => {
+         try {
+             const expandedPath = expandPath(path);
+             // Check if path exists
+             if (!existsSync(expandedPath)) {
+                 return {
+                     content: [
+                         {
+                             type: "text",
+                             text: JSON.stringify({
+                                 success: false,
+                                 error: `Path does not exist: ${expandedPath}`,
+                                 code: "PATH_NOT_FOUND",
+                             }),
+                         },
+                     ],
+                 };
+             }
+             // Check if path is protected
+             if (isProtectedPath(expandedPath, config)) {
+                 return {
+                     content: [
+                         {
+                             type: "text",
+                             text: JSON.stringify({
+                                 success: false,
+                                 error: `Path is protected and cannot be staged: ${expandedPath}`,
+                                 code: "PATH_PROTECTED",
+                             }),
+                         },
+                     ],
+                 };
+             }
+             const staged = loadStaged();
+             // Check if already staged
+             if (staged.items.some((item) => item.path === expandedPath)) {
+                 return {
+                     content: [
+                         {
+                             type: "text",
+                             text: JSON.stringify({
+                                 success: false,
+                                 error: `Path is already staged: ${expandedPath}`,
+                                 code: "ALREADY_STAGED",
+                             }),
+                         },
+                     ],
+                 };
+             }
+             // Add to staged
+             const size = getItemSize(expandedPath);
+             staged.items.push({
+                 path: expandedPath,
+                 size,
+                 reason,
+                 staged_at: new Date().toISOString(),
+             });
+             saveStaged(staged);
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: true,
+                             data: {
+                                 message: `Staged for deletion: ${expandedPath}`,
+                                 staged_count: staged.items.length,
+                             },
+                             reminder: "Tell the user to run 'unix-disk-mcp delete' in their terminal when ready. DO NOT attempt to execute it yourself.",
+                         }),
+                     },
+                 ],
+             };
+         }
+         catch (error) {
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: false,
+                             error: error instanceof Error ? error.message : "Unknown error",
+                             code: "STAGE_FAILED",
+                         }),
+                     },
+                 ],
+             };
+         }
+     });
+     // unstage
+     server.tool("unstage", "Remove a path from the staged deletion list", {
+         path: z.string().describe("Path to remove from staging"),
+     }, async ({ path }) => {
+         try {
+             const expandedPath = expandPath(path);
+             const staged = loadStaged();
+             const index = staged.items.findIndex((item) => item.path === expandedPath);
+             if (index === -1) {
+                 return {
+                     content: [
+                         {
+                             type: "text",
+                             text: JSON.stringify({
+                                 success: false,
+                                 error: `Path is not staged: ${expandedPath}`,
+                                 code: "NOT_STAGED",
+                             }),
+                         },
+                     ],
+                 };
+             }
+             staged.items.splice(index, 1);
+             saveStaged(staged);
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: true,
+                             data: {
+                                 message: `Removed from staging: ${expandedPath}`,
+                                 staged_count: staged.items.length,
+                             },
+                         }),
+                     },
+                 ],
+             };
+         }
+         catch (error) {
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: false,
+                             error: error instanceof Error ? error.message : "Unknown error",
+                             code: "UNSTAGE_FAILED",
+                         }),
+                     },
+                 ],
+             };
+         }
+     });
+     // get_staged
+     server.tool("get_staged", "View all currently staged items. CRITICAL: You MUST NOT attempt to run 'unix-disk-mcp delete' or any deletion command. Tell the user to run it manually in their own terminal.", {}, async () => {
+         try {
+             const staged = loadStaged();
+             const totalSize = staged.items.reduce((sum, item) => sum + item.size, 0);
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: true,
+                             data: {
+                                 items: staged.items,
+                                 total_count: staged.items.length,
+                                 total_size: totalSize,
+                             },
+                             warning: "⚠️ CRITICAL: AI agents MUST NOT attempt to run 'unix-disk-mcp delete'. Instruct the user to run this command manually in their terminal. DO NOT use run_in_terminal or execute any deletion commands.",
+                         }, null, 2),
+                     },
+                 ],
+             };
+         }
+         catch (error) {
+             return {
+                 content: [
+                     {
+                         type: "text",
+                         text: JSON.stringify({
+                             success: false,
+                             error: error instanceof Error ? error.message : "Unknown error",
+                             code: "GET_STAGED_FAILED",
+                         }),
+                     },
+                 ],
+             };
+         }
+     });
+ }
+ export function getStagedFilePath() {
+     return STAGED_FILE;
+ }
+ export function getHistoryFilePath() {
+     return HISTORY_FILE;
+ }
+ export function getDataDir() {
+     return DATA_DIR;
+ }
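
For reference, the shape of the staged.json file that loadStaged(), saveStaged(), and stage_for_deletion read and write can be inferred directly from the code above. A TypeScript sketch of that inferred shape (the package ships no such declaration in this diff, so treat it as reconstructed, not authoritative):

// Inferred from the staging code above; field names come from the source.
interface StagedItem {
    path: string;      // absolute, tilde-expanded path
    size: number;      // bytes: statSync for files, `du -sk` output * 1024 for directories
    reason?: string;   // optional note supplied when staging
    staged_at: string; // ISO 8601 timestamp
}

interface StagedFile {
    items: StagedItem[];
}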
package/package.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "name": "unix-disk-mcp",
+   "version": "0.1.0",
+   "description": "MCP server for AI-assisted disk cleanup on Unix systems (macOS and Linux)",
+   "type": "module",
+   "main": "dist/index.js",
+   "bin": {
+     "unix-disk-mcp": "dist/cli.js"
+   },
+   "scripts": {
+     "dev": "npx tsx src/index.ts",
+     "build": "tsc && chmod +x dist/cli.js",
+     "start": "node dist/index.js",
+     "delete": "node dist/commands/delete.js",
+     "setup": "node dist/commands/setup.js",
+     "prepublishOnly": "npm run build"
+   },
+   "files": [
+     "dist/**/*",
+     "config.sample.json",
+     "README.md",
+     "LICENSE"
+   ],
+   "keywords": [
+     "mcp",
+     "unix",
+     "macos",
+     "linux",
+     "disk-cleanup",
+     "ai",
+     "storage",
+     "model-context-protocol"
+   ],
+   "author": "juljus",
+   "license": "GPL-3.0-or-later",
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/juliusdehner/unix-disk-mcp.git"
+   },
+   "engines": {
+     "node": ">=20.0.0"
+   },
+   "os": [
+     "darwin",
+     "linux"
+   ],
+   "dependencies": {
+     "@modelcontextprotocol/sdk": "^1.0.0"
+   },
+   "devDependencies": {
+     "@types/node": "^20.0.0",
+     "tsx": "^4.0.0",
+     "typescript": "^5.0.0"
+   }
+ }
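
Given the bin entry above, the server is typically wired into an MCP client as a stdio command. A sketch of such a client configuration, assuming the common mcpServers layout used by clients like Claude Desktop and assuming the unix-disk-mcp CLI starts the stdio server when invoked without arguments (the exact file name and keys depend on the client; this diff does not show dist/cli.js):

{
  "mcpServers": {
    "unix-disk-mcp": {
      "command": "npx",
      "args": ["-y", "unix-disk-mcp"]
    }
  }
}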