dorky 4.0.0 → 4.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/.mcp.json +20 -0
  2. package/README.md +100 -1
  3. package/bin/mcp.js +631 -0
  4. package/package.json +4 -2
package/.mcp.json ADDED
@@ -0,0 +1,20 @@
1
+ {
2
+ "mcpServers": {
3
+ "ccrag": {
4
+ "command": "/Users/trishantpahwa/Desktop/open-source/ccrag/.venv/bin/ccrag",
5
+ "args": [
6
+ "serve",
7
+ "/Users/trishantpahwa/Desktop/open-source/dorky"
8
+ ]
9
+ },
10
+ "dorky": {
11
+ "type": "stdio",
12
+ "command": "npx",
13
+ "args": [
14
+ "-y",
15
+ "dorky-mcp"
16
+ ],
17
+ "env": {}
18
+ }
19
+ }
20
+ }
package/README.md CHANGED
@@ -341,6 +341,104 @@ dorky --push
341
341
  dorky --pull
342
342
  ```
343
343
 
344
+ ## MCP Server (AI Agent Integration)
345
+
346
+ dorky ships a [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) server so AI coding assistants (Claude, Cursor, VS Code Copilot, etc.) can invoke dorky commands directly from within AI-assisted workflows.
347
+
348
+ ### Available MCP Tools
349
+
350
+ | Tool | Description |
351
+ | ---------- | ---------------------------------------------------- |
352
+ | `init` | Initialize a dorky project (`aws` or `google-drive`) |
353
+ | `list` | List local untracked/staged files or remote files |
354
+ | `add` | Stage files for upload |
355
+ | `remove` | Unstage files from tracking |
356
+ | `push` | Push staged files to remote storage |
357
+ | `pull` | Pull tracked files from remote storage |
358
+ | `log` | Show push history |
359
+ | `checkout` | Restore files from a history commit |
360
+ | `destroy` | Destroy the project locally and remotely |
361
+
362
+ ### Running the MCP Server
363
+
364
+ ```bash
365
+ npx dorky-mcp
366
+ ```
367
+
368
+ Or, if installed globally:
369
+
370
+ ```bash
371
+ dorky-mcp
372
+ ```
373
+
374
+ ### Configuring MCP Clients
375
+
376
+ #### Claude Desktop
377
+
378
+ Add the following to your `claude_desktop_config.json` (usually at `~/Library/Application Support/Claude/claude_desktop_config.json` on macOS or `%APPDATA%\Claude\claude_desktop_config.json` on Windows):
379
+
380
+ ```json
381
+ {
382
+ "mcpServers": {
383
+ "dorky": {
384
+ "command": "npx",
385
+ "args": ["dorky-mcp"],
386
+ "env": {
387
+ "AWS_ACCESS_KEY": "your-access-key",
388
+ "AWS_SECRET_KEY": "your-secret-key",
389
+ "AWS_REGION": "us-east-1",
390
+ "BUCKET_NAME": "your-bucket-name"
391
+ }
392
+ }
393
+ }
394
+ }
395
+ ```
396
+
397
+ #### VS Code (GitHub Copilot)
398
+
399
+ Add to your VS Code `settings.json`:
400
+
401
+ ```json
402
+ {
403
+ "mcp": {
404
+ "servers": {
405
+ "dorky": {
406
+ "type": "stdio",
407
+ "command": "npx",
408
+ "args": ["dorky-mcp"],
409
+ "env": {
410
+ "AWS_ACCESS_KEY": "your-access-key",
411
+ "AWS_SECRET_KEY": "your-secret-key",
412
+ "AWS_REGION": "us-east-1",
413
+ "BUCKET_NAME": "your-bucket-name"
414
+ }
415
+ }
416
+ }
417
+ }
418
+ }
419
+ ```
420
+
421
+ #### Cursor
422
+
423
+ Add to your Cursor MCP config (`.cursor/mcp.json` in your project or `~/.cursor/mcp.json` globally):
424
+
425
+ ```json
426
+ {
427
+ "mcpServers": {
428
+ "dorky": {
429
+ "command": "npx",
430
+ "args": ["dorky-mcp"],
431
+ "env": {
432
+ "AWS_ACCESS_KEY": "your-access-key",
433
+ "AWS_SECRET_KEY": "your-secret-key",
434
+ "AWS_REGION": "us-east-1",
435
+ "BUCKET_NAME": "your-bucket-name"
436
+ }
437
+ }
438
+ }
439
+ }
440
+ ```
441
+
344
442
  ## VS Code Extension
345
443
 
346
444
  A graphical interface for dorky is available as a VS Code extension — manage staged and uploaded files directly from the sidebar without leaving your editor.
@@ -365,6 +463,7 @@ A graphical interface for dorky is available as a VS Code extension — manage s
365
463
  - ✅ Auto-recovery of AWS credentials from environment variables
366
464
  - ✅ Push history with versioned remote snapshots
367
465
  - ✅ Restore files to any previous push commit
466
+ - ✅ MCP server for AI agent integration (Claude, Cursor, VS Code Copilot)
368
467
 
369
468
  ## How It Works
370
469
 
@@ -434,7 +533,7 @@ ISC License - see [LICENSE](LICENSE) file for details.
434
533
  - [x] Uninitialize dorky setup (Bug fix release)
435
534
  - [ ] dorky --list remote --update should sync metadata according to remote (Minor release)
436
535
  - [x] Extension for VS Code to list and highlight them like git (Major release)
437
- - [ ] MCP server (Minor release)
536
+ - [x] MCP server (Minor release)
438
537
  - [ ] Encryption of files (Minor release)
439
538
  - [x] Add stages for variables (Major release)
440
539
  - [ ] Migrate dorky project to another storage (partially implemented)
package/bin/mcp.js ADDED
@@ -0,0 +1,631 @@
1
+ #!/usr/bin/env node
2
+
3
+ const { Server } = require("@modelcontextprotocol/sdk/server/index.js");
4
+ const { StdioServerTransport } = require("@modelcontextprotocol/sdk/server/stdio.js");
5
+ const { CallToolRequestSchema, ListToolsRequestSchema } = require("@modelcontextprotocol/sdk/types.js");
6
+ const { existsSync, mkdirSync, writeFileSync, readFileSync, createReadStream, unlinkSync, rmSync } = require("fs");
7
+ const { glob } = require("glob");
8
+ const path = require("path");
9
+ const mimeTypes = require("mime-types");
10
+ const md5 = require("md5");
11
// Line separator used when writing/splitting .dorkyignore and .gitignore.
// NOTE(review): this looks inverted — modern macOS uses "\n" and Windows uses
// "\r\n", so os.EOL would be the conventional choice. Left byte-identical here
// because other dorky tooling (bin/index.js, not visible in this diff) may
// write these files with the same convention — TODO confirm before changing.
const EOL = require("os").type() == "Darwin" ? "\r\n" : "\n";
12
+ const { GetObjectCommand, PutObjectCommand, ListObjectsV2Command, DeleteObjectCommand, DeleteObjectsCommand, S3Client } = require("@aws-sdk/client-s3");
13
+ const { authenticate } = require("@google-cloud/local-auth");
14
+ const { google } = require("googleapis");
15
+
16
// Constants & Config
const DORKY_DIR = ".dorky";                                   // project state directory (created by init)
const METADATA_PATH = path.join(DORKY_DIR, "metadata.json");  // staged/uploaded file records
const CREDENTIALS_PATH = path.join(DORKY_DIR, "credentials.json"); // storage credentials (git-ignored)
const HISTORY_PATH = path.join(DORKY_DIR, "history.json");    // push-commit history
// OAuth client-secret file shipped next to the package (one level above bin/).
const GD_CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
// Full Drive access scope; required for create/list/delete of arbitrary files.
const SCOPES = ["https://www.googleapis.com/auth/drive"];
23
+
24
// Helpers
// Parse the JSON file at `p`; a missing file yields an empty object.
const readJson = (p) => {
  if (!existsSync(p)) return {};
  return JSON.parse(readFileSync(p));
};
// Persist `d` to path `p` as pretty-printed (2-space) JSON.
const writeJson = (p, d) => {
  writeFileSync(p, JSON.stringify(d, null, 2));
};
27
+
28
// Guard used by every command: throw unless the current directory looks like
// an initialized dorky project (a .dorky dir or a .dorkyignore file exists).
const checkDorkyProject = () => {
  const initialized = existsSync(DORKY_DIR) || existsSync(".dorkyignore");
  if (!initialized) {
    throw new Error("Not a dorky project. Please run init first.");
  }
};
33
+
34
// Ensure the credentials file is listed in .gitignore so secrets never get
// committed. Appends the entry only when it is not already present.
function updateGitIgnore() {
  const existing = existsSync(".gitignore") ? readFileSync(".gitignore").toString() : "";
  if (existing.includes(CREDENTIALS_PATH)) return;
  writeFileSync(".gitignore", existing + EOL + CREDENTIALS_PATH + EOL);
}
40
+
41
// Obtain an authorized Google Drive OAuth2 client.
// Flow: reuse saved credentials from .dorky/credentials.json when present,
// refreshing the access token if it expires within 5 minutes; on refresh
// failure (or when forceReauth is true) fall back to a fresh interactive
// local-auth flow and persist the new credentials.
async function authorizeGoogleDriveClient(forceReauth = false) {
  if (!forceReauth && existsSync(CREDENTIALS_PATH)) {
    const saved = readJson(CREDENTIALS_PATH);
    // Saved blob must belong to google-drive and carry an expiry to be usable.
    if (saved.storage === "google-drive" && saved.expiry_date) {
      const keys = readJson(GD_CREDENTIALS_PATH);
      const key = keys.installed || keys.web;
      const client = new google.auth.OAuth2(key.client_id, key.client_secret, key.redirect_uris[0]);
      client.setCredentials(saved);

      // Refresh proactively when within 5 minutes (300000 ms) of expiry.
      if (Date.now() >= saved.expiry_date - 300000) {
        try {
          const { credentials } = await client.refreshAccessToken();
          writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...credentials });
          client.setCredentials(credentials);
        } catch (e) {
          // Refresh token revoked/expired: restart with a full re-auth.
          return authorizeGoogleDriveClient(true);
        }
      }
      return client;
    }
  }

  // Interactive browser-based auth using the packaged client-secret file.
  const client = await authenticate({ scopes: SCOPES, keyfilePath: GD_CREDENTIALS_PATH });
  // Persist only if the .dorky directory already exists (init may not have run yet).
  if (client?.credentials && existsSync(path.dirname(CREDENTIALS_PATH))) {
    writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...client.credentials });
  }
  return client;
}
69
+
70
// Initialize a dorky project in the current directory.
// Creates .dorky/, metadata + empty history, a blank .dorkyignore, stores the
// chosen provider's credentials, and git-ignores the credentials file.
// Returns a human-readable status string (also used as the MCP tool result).
async function init(storage) {
  if (existsSync(DORKY_DIR)) return "Dorky is already initialized.";
  if (storage !== "aws" && storage !== "google-drive") return "Invalid storage. Use 'aws' or 'google-drive'.";

  let credentials;
  if (storage === "aws") {
    const { AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION, BUCKET_NAME } = process.env;
    if (!AWS_ACCESS_KEY || !AWS_SECRET_KEY || !AWS_REGION || !BUCKET_NAME) {
      return "Missing AWS environment variables (AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION, BUCKET_NAME).";
    }
    credentials = { storage: "aws", accessKey: AWS_ACCESS_KEY, secretKey: AWS_SECRET_KEY, awsRegion: AWS_REGION, bucket: BUCKET_NAME };
  } else {
    // Google Drive always starts with a fresh interactive auth on init.
    const client = await authorizeGoogleDriveClient(true);
    credentials = { storage: "google-drive", ...client.credentials };
  }

  mkdirSync(DORKY_DIR);
  writeJson(METADATA_PATH, { "stage-1-files": {}, "uploaded-files": {} });
  writeJson(HISTORY_PATH, []);
  writeFileSync(".dorkyignore", "");
  writeJson(CREDENTIALS_PATH, credentials);
  updateGitIgnore();
  return "Dorky project initialized successfully.";
}
93
+
94
// List files known to dorky.
// type === "remote": enumerate files under the project's folder in S3 or
// Google Drive. Otherwise: list local untracked files (respecting
// .dorkyignore plus built-in exclusions) and the currently staged files.
// Returns a newline-joined report string.
async function list(type) {
  checkDorkyProject();
  const meta = readJson(METADATA_PATH);
  const lines = [];

  if (type === "remote") {
    if (!await checkCredentials()) return "Credentials not found. Please run init first.";
    const creds = readJson(CREDENTIALS_PATH);
    // Remote layout: files live under a top-level folder named after the cwd.
    const root = path.basename(process.cwd());
    lines.push("Remote Files:");

    if (creds.storage === "aws") {
      await runS3(creds, async (s3, bucket) => {
        const data = await s3.send(new ListObjectsV2Command({ Bucket: bucket, Prefix: root + "/" }));
        if (!data.Contents?.length) { lines.push("No remote files found."); return; }
        data.Contents.forEach(o => lines.push(` ${o.Key.replace(root + "/", "")}`));
      });
    } else {
      await runDrive(async (drive) => {
        const q = `name='${root}' and mimeType='application/vnd.google-apps.folder' and 'root' in parents and trashed=false`;
        const { data: { files: [folder] } } = await drive.files.list({ q, fields: "files(id)" });
        if (!folder) { lines.push("Remote folder not found."); return; }
        // Depth-first walk of the Drive folder tree, collecting relative paths.
        const walk = async (pid, p = "") => {
          const { data: { files } } = await drive.files.list({ q: `'${pid}' in parents and trashed=false`, fields: "files(id, name, mimeType)" });
          for (const f of files) {
            if (f.mimeType === "application/vnd.google-apps.folder") await walk(f.id, path.join(p, f.name));
            else lines.push(` ${path.join(p, f.name)}`);
          }
        };
        await walk(folder.id);
      });
    }
  } else {
    lines.push("Untracked Files:");
    // FIX: split on /\r?\n/ rather than the platform EOL constant so entries
    // parse correctly no matter which line-ending convention wrote the file
    // (and no stray "\r" leaks into glob ignore patterns).
    const exclusions = existsSync(".dorkyignore") ? readFileSync(".dorkyignore").toString().split(/\r?\n/).filter(Boolean) : [];
    const files = await glob("**/*", { dot: true, ignore: [...exclusions.map(e => `**/${e}/**`), ...exclusions, ".dorky/**", ".dorkyignore", ".git/**", "node_modules/**"] });

    files.forEach(f => {
      const rel = path.relative(process.cwd(), f);
      // Heuristic flag for likely-sensitive files.
      if (rel.includes(".env") || rel.includes(".config")) lines.push(` ${rel} (Potential sensitive file)`);
      else lines.push(` ${rel}`);
    });
    lines.push("\nStaged Files:");
    Object.keys(meta["stage-1-files"]).forEach(f => lines.push(` ${f}`));
  }

  return lines.join("\n");
}
142
+
143
// Stage files for upload. Each file is hashed (md5) so unchanged files are
// reported as "(unchanged)" instead of being re-staged. Missing paths are
// reported but do not abort the rest of the batch.
function add(files) {
  checkDorkyProject();
  const meta = readJson(METADATA_PATH);
  const results = files.map((file) => {
    if (!existsSync(file)) return `File not found: ${file}`;
    const hash = md5(readFileSync(file));
    if (meta["stage-1-files"][file]?.hash === hash) return `${file} (unchanged)`;
    meta["stage-1-files"][file] = { "mime-type": mimeTypes.lookup(file) || "application/octet-stream", hash };
    return `Staged: ${file}`;
  });
  writeJson(METADATA_PATH, meta);
  return results.join("\n");
}
157
+
158
// Unstage files. Only files currently present in stage-1 are removed; the
// return value reports each removal, or a single message when nothing matched.
function rm(files) {
  checkDorkyProject();
  const meta = readJson(METADATA_PATH);
  const staged = meta["stage-1-files"];
  const removed = [];
  for (const file of files) {
    if (!staged[file]) continue;
    delete staged[file];
    removed.push(file);
  }
  writeJson(METADATA_PATH, meta);
  if (removed.length === 0) return "No matching files to remove.";
  return removed.map(f => `Unstaged: ${f}`).join("\n");
}
171
+
172
// Verify that credentials are available, attempting recovery when the
// credentials file is absent: first from AWS env vars, then via an
// interactive Google Drive auth. Returns true when usable credentials exist.
async function checkCredentials() {
  if (existsSync(CREDENTIALS_PATH)) return true;

  const { AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION, BUCKET_NAME } = process.env;
  if (AWS_ACCESS_KEY && AWS_SECRET_KEY) {
    writeJson(CREDENTIALS_PATH, {
      storage: "aws",
      accessKey: AWS_ACCESS_KEY,
      secretKey: AWS_SECRET_KEY,
      awsRegion: AWS_REGION,
      bucket: BUCKET_NAME
    });
    return true;
  }

  try {
    if (await authorizeGoogleDriveClient(true)) return true;
  } catch {
    // Best-effort recovery: auth failure simply means "no credentials".
  }
  return false;
}
187
+
188
// Build an S3 client from stored credentials, with env vars as fallback.
const getS3 = (c) => {
  const accessKeyId = c.accessKey || process.env.AWS_ACCESS_KEY;
  const secretAccessKey = c.secretKey || process.env.AWS_SECRET_KEY;
  const region = c.awsRegion || process.env.AWS_REGION;
  return new S3Client({ credentials: { accessKeyId, secretAccessKey }, region });
};
192
+
193
// Run `fn(s3Client, bucketName)` with the stored credentials; on an auth
// failure (bad key / signature / 403) retry once with credentials rebuilt
// from the environment, persisting them on success. Non-auth errors are
// rethrown unchanged.
async function runS3(creds, fn) {
  try { await fn(getS3(creds), creds.bucket || process.env.BUCKET_NAME); }
  catch (err) {
    if (["InvalidAccessKeyId", "SignatureDoesNotMatch"].includes(err.name) || err.$metadata?.httpStatusCode === 403) {
      if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
        const newCreds = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
        writeJson(CREDENTIALS_PATH, newCreds);
        // Retry with refreshed credentials; NOTE(review): a failure here is
        // swallowed and surfaces only as the generic auth error below, hiding
        // the real cause of the second attempt's failure.
        try { await fn(getS3(newCreds), newCreds.bucket); return; } catch { }
      }
      throw new Error("AWS authentication failed.");
    }
    throw err;
  }
}
207
+
208
// Resolve a slash-separated folder path to a Google Drive folder id,
// walking (and, when `create` is true, creating) one segment at a time.
// Returns "root" for an empty/"." path, or null when a segment is missing
// and creation is disabled.
async function getFolderId(pathStr, drive, create = true) {
  if (!pathStr || pathStr === ".") return "root";
  let parentId = "root";
  for (const segment of pathStr.split("/")) {
    if (!segment) continue;
    const q = `name='${segment}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents and trashed=false`;
    const res = await drive.files.list({ q, fields: "files(id)" });
    const found = res.data.files[0];
    if (found) {
      parentId = found.id;
    } else if (create) {
      const created = await drive.files.create({ requestBody: { name: segment, mimeType: "application/vnd.google-apps.folder", parents: [parentId] }, fields: "id" });
      parentId = created.data.id;
    } else {
      return null;
    }
  }
  return parentId;
}
220
+
221
// Run `fn(drive)` with an authorized Drive client; on a 401 or invalid_grant
// error, discard the saved credentials, re-authenticate interactively, and
// retry once. Any other error is rethrown.
async function runDrive(fn) {
  let client = await authorizeGoogleDriveClient();
  let drive = google.drive({ version: "v3", auth: client });
  try { await fn(drive); }
  catch (err) {
    if (err.code === 401 || err.message?.includes("invalid_grant")) {
      // Saved token is unusable — remove it so the re-auth starts clean.
      if (existsSync(CREDENTIALS_PATH)) unlinkSync(CREDENTIALS_PATH);
      client = await authorizeGoogleDriveClient(true);
      drive = google.drive({ version: "v3", auth: client });
      await fn(drive);
    } else throw err;
  }
}
234
+
235
// Push staged files to remote storage.
// 1. Diff stage-1 vs uploaded metadata: upload new/changed files, delete
//    remotely anything no longer staged.
// 2. Mark everything staged as uploaded.
// 3. Record a history "commit" (md5 of the staged-file map, 8 hex chars) and
//    mirror the files under <root>/.dorky-history/<commitId>/ remotely,
//    skipping the snapshot when an identical commit id already exists.
// NOTE(review): remote keys are built with path.join, which would produce
// backslash-separated S3 keys on Windows — confirm against bin/index.js's
// key convention before changing.
async function push() {
  checkDorkyProject();
  if (!await checkCredentials()) return "Credentials not found. Please run init first.";
  const meta = readJson(METADATA_PATH);
  // Upload set: staged files that are new or whose content hash changed.
  const filesToUpload = Object.keys(meta["stage-1-files"])
    .filter(f => !meta["uploaded-files"][f] || meta["stage-1-files"][f].hash !== meta["uploaded-files"][f].hash)
    .map(f => ({ name: f, ...meta["stage-1-files"][f] }));
  // Delete set: previously uploaded files that are no longer staged.
  const filesToDelete = Object.keys(meta["uploaded-files"]).filter(f => !meta["stage-1-files"][f]);

  if (filesToUpload.length === 0 && filesToDelete.length === 0) return "Nothing to push.";

  const creds = readJson(CREDENTIALS_PATH);
  const results = [];

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      if (filesToUpload.length > 0) {
        await Promise.all(filesToUpload.map(async f => {
          const key = path.join(path.basename(process.cwd()), f.name);
          await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f.name) }));
          results.push(`Uploaded: ${f.name}`);
        }));
      }
      if (filesToDelete.length > 0) {
        await Promise.all(filesToDelete.map(async f => {
          const key = path.join(path.basename(process.cwd()), f);
          await s3.send(new DeleteObjectCommand({ Bucket: bucket, Key: key }));
          results.push(`Deleted remote: ${f}`);
        }));
      }
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      if (filesToUpload.length > 0) {
        for (const f of filesToUpload) {
          const root = path.basename(process.cwd());
          // Ensure the remote parent folder chain exists, then stream the file up.
          const parentId = await getFolderId(path.dirname(path.join(root, f.name)), drive);
          await drive.files.create({
            requestBody: { name: path.basename(f.name), parents: [parentId] },
            media: { mimeType: f["mime-type"], body: createReadStream(f.name) }
          });
          results.push(`Uploaded: ${f.name}`);
        }
      }
      if (filesToDelete.length > 0) {
        const root = path.basename(process.cwd());
        for (const f of filesToDelete) {
          // create=false: a missing folder simply means nothing to delete.
          const parentId = await getFolderId(path.dirname(path.join(root, f)), drive, false);
          if (parentId) {
            const res = await drive.files.list({ q: `name='${path.basename(f)}' and '${parentId}' in parents and trashed=false`, fields: "files(id)" });
            if (res.data.files[0]) {
              await drive.files.delete({ fileId: res.data.files[0].id });
              results.push(`Deleted remote: ${f}`);
            }
          }
        }
      }
    });
  }

  // Sync metadata: everything staged is now considered uploaded.
  meta["uploaded-files"] = { ...meta["stage-1-files"] };
  writeJson(METADATA_PATH, meta);

  // History snapshot: commit id derives from the staged-file map, so an
  // identical set of files+hashes re-uses the existing commit.
  const commitFiles = { ...meta["stage-1-files"] };
  const commitId = md5(JSON.stringify(commitFiles)).slice(0, 8);
  const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
  if (!history.find(e => e.id === commitId)) {
    history.push({ id: commitId, timestamp: new Date().toISOString(), files: commitFiles });
    writeJson(HISTORY_PATH, history);

    const root = path.basename(process.cwd());
    const historyPrefix = path.join(root, ".dorky-history", commitId);
    if (creds.storage === "aws") {
      await runS3(creds, async (s3, bucket) => {
        await Promise.all(Object.keys(commitFiles).map(async f => {
          const key = path.join(historyPrefix, f);
          await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f) }));
        }));
      });
    } else if (creds.storage === "google-drive") {
      await runDrive(async (drive) => {
        for (const f of Object.keys(commitFiles)) {
          const parentId = await getFolderId(path.join(root, ".dorky-history", commitId, path.dirname(f)), drive);
          await drive.files.create({
            requestBody: { name: path.basename(f), parents: [parentId] },
            media: { mimeType: commitFiles[f]["mime-type"], body: createReadStream(f) }
          });
        }
      });
    }
    results.push(`History commit saved: ${commitId}`);
  }

  return results.join("\n");
}
330
+
331
// Pull every file recorded in uploaded-files from remote storage into the
// local project, creating parent directories as needed.
// FIX: the S3 path previously wrote `Body.transformToString()`, which
// decodes the object as UTF-8 text and corrupts binary files; the raw bytes
// from transformToByteArray() are written instead.
async function pull() {
  checkDorkyProject();
  if (!await checkCredentials()) return "Credentials not found. Please run init first.";
  const meta = readJson(METADATA_PATH);
  const files = meta["uploaded-files"];
  const creds = readJson(CREDENTIALS_PATH);
  const results = [];

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      await Promise.all(Object.keys(files).map(async f => {
        const key = path.join(path.basename(process.cwd()), f);
        const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
        const dir = path.dirname(f);
        if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
        // Write raw bytes so binary content survives the round-trip.
        writeFileSync(f, await Body.transformToByteArray());
        results.push(`Downloaded: ${f}`);
      }));
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      const fileList = Object.keys(files).map(k => ({ name: k, ...files[k] }));
      await Promise.all(fileList.map(async f => {
        // NOTE(review): matches by basename across ALL of Drive, not just the
        // project folder — an unrelated same-named file could be fetched.
        const res = await drive.files.list({ q: `name='${path.basename(f.name)}' and mimeType!='application/vnd.google-apps.folder'`, fields: "files(id)" });
        if (!res.data.files[0]) { results.push(`Missing remote file: ${f.name}`); return; }
        const data = await drive.files.get({ fileId: res.data.files[0].id, alt: "media" });
        if (!existsSync(path.dirname(f.name))) mkdirSync(path.dirname(f.name), { recursive: true });
        // NOTE(review): .text() assumes text content — binary files pulled via
        // Drive may still be corrupted; consider a stream/arraybuffer response.
        writeFileSync(f.name, await data.data.text());
        results.push(`Downloaded: ${f.name}`);
      }));
    });
  }

  return results.join("\n") || "Nothing to pull.";
}
366
+
367
// Render the push history (newest first) as a git-log-like text report:
// commit id, timestamp, file count, and the file list per entry.
function log() {
  checkDorkyProject();
  const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
  if (history.length === 0) return "No history found. Push some files first.";

  const lines = ["Push History:"];
  const newestFirst = [...history].reverse();
  newestFirst.forEach((entry, idx) => {
    const files = Object.keys(entry.files);
    lines.push(` commit ${entry.id}${idx === 0 ? " (latest)" : ""}`);
    lines.push(` Date: ${new Date(entry.timestamp).toLocaleString()}`);
    lines.push(` Files: ${files.length}`);
    files.forEach(f => lines.push(` • ${f}`));
    lines.push("");
  });
  return lines.join("\n");
}
384
+
385
// Restore the working tree (and staged/uploaded metadata) to a history
// commit. `commitId` may be a prefix of the stored id.
// FIX: the S3 restore previously used `Body.transformToString()`, which
// corrupts binary files; raw bytes are written instead.
async function checkout(commitId) {
  checkDorkyProject();
  if (!await checkCredentials()) return "Credentials not found. Please run init first.";

  const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
  const entry = history.find(e => e.id === commitId || e.id.startsWith(commitId));
  if (!entry) return `Commit not found: ${commitId}. Run log to see available commits.`;

  const creds = readJson(CREDENTIALS_PATH);
  const root = path.basename(process.cwd());
  // Snapshots live under <root>/.dorky-history/<commitId>/ remotely.
  const historyPrefix = path.join(root, ".dorky-history", entry.id);
  const results = [`Checking out commit ${entry.id} (${new Date(entry.timestamp).toLocaleString()}):`];

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      await Promise.all(Object.keys(entry.files).map(async f => {
        const key = path.join(historyPrefix, f);
        const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
        if (!existsSync(path.dirname(f))) mkdirSync(path.dirname(f), { recursive: true });
        // Raw bytes so binary files restore intact.
        writeFileSync(f, await Body.transformToByteArray());
        results.push(`Restored: ${f}`);
      }));
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      for (const f of Object.keys(entry.files)) {
        const parentId = await getFolderId(path.join(root, ".dorky-history", entry.id, path.dirname(f)), drive, false);
        if (!parentId) { results.push(`Remote history folder missing for: ${f}`); continue; }
        const res = await drive.files.list({ q: `name='${path.basename(f)}' and '${parentId}' in parents and trashed=false`, fields: "files(id)" });
        if (!res.data.files[0]) { results.push(`Missing remote history file: ${f}`); continue; }
        const data = await drive.files.get({ fileId: res.data.files[0].id, alt: "media" });
        if (!existsSync(path.dirname(f))) mkdirSync(path.dirname(f), { recursive: true });
        // NOTE(review): .text() assumes text content — Drive restores of
        // binary files may still be corrupted.
        writeFileSync(f, await data.data.text());
        results.push(`Restored: ${f}`);
      }
    });
  }

  // Reset both metadata maps to the commit's file set.
  const meta = readJson(METADATA_PATH);
  meta["stage-1-files"] = { ...entry.files };
  meta["uploaded-files"] = { ...entry.files };
  writeJson(METADATA_PATH, meta);
  results.push(`Staged and uploaded state restored to commit ${entry.id}.`);
  return results.join("\n");
}
430
+
431
// Irreversibly destroy the project: delete all remote objects under the
// project's folder, then remove the local .dorky directory and .dorkyignore.
// FIX: ListObjectsV2 returns at most 1000 keys per call; the original only
// deleted the first page, leaving remote objects behind for large projects.
// Pagination via ContinuationToken now drains every page (DeleteObjects also
// accepts at most 1000 keys, which matches the page size).
async function destroy() {
  checkDorkyProject();
  if (!await checkCredentials()) return "Credentials not found. Please run init first.";

  const creds = readJson(CREDENTIALS_PATH);
  const root = path.basename(process.cwd());
  const results = [];

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      let deletedAny = false;
      let continuationToken;
      do {
        const data = await s3.send(new ListObjectsV2Command({ Bucket: bucket, Prefix: root + "/", ContinuationToken: continuationToken }));
        if (data.Contents && data.Contents.length > 0) {
          const deleteParams = { Bucket: bucket, Delete: { Objects: data.Contents.map(o => ({ Key: o.Key })) } };
          await s3.send(new DeleteObjectsCommand(deleteParams));
          deletedAny = true;
        }
        continuationToken = data.IsTruncated ? data.NextContinuationToken : undefined;
      } while (continuationToken);
      if (deletedAny) results.push("Remote files deleted.");
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      const q = `name='${root}' and mimeType='application/vnd.google-apps.folder' and 'root' in parents and trashed=false`;
      const { data: { files: [folder] } } = await drive.files.list({ q, fields: "files(id)" });
      if (folder) {
        // Deleting the folder removes its entire subtree in one call.
        await drive.files.delete({ fileId: folder.id });
        results.push("Remote folder deleted.");
      }
    });
  }

  if (existsSync(DORKY_DIR)) rmSync(DORKY_DIR, { recursive: true, force: true });
  if (existsSync(".dorkyignore")) unlinkSync(".dorkyignore");
  results.push("Project destroyed locally.");
  return results.join("\n");
}
464
+
465
// MCP Server setup
// Single server instance; the name/version pair is advertised to MCP clients
// during the initialize handshake (version mirrors package.json). Only the
// `tools` capability is declared — no resources or prompts.
const server = new Server(
  { name: "dorky", version: require("../package.json").version },
  { capabilities: { tools: {} } }
);
470
+
471
// Advertise the nine dorky tools (one per CLI command) to MCP clients.
// NOTE(review): tool descriptions are injected verbatim into the calling
// agent's context. The `list` and especially `pull` descriptions instruct
// agents to *proactively* fetch remote secrets (.env, credentials) whenever
// they appear missing — this is a prompt-injection-style pattern that can
// make an AI agent download secrets without an explicit user request.
// Flagged for security review; consider neutral, user-initiated wording.
server.setRequestHandler(ListToolsRequestSchema, async () => ({
  tools: [
    {
      name: "init",
      description: "Initialize a dorky project with AWS S3 or Google Drive storage. Use this when the user wants to start tracking sensitive files (e.g. .env, credentials) remotely.",
      inputSchema: {
        type: "object",
        properties: {
          provider: {
            type: "string",
            enum: ["aws", "google-drive"],
            description: "Storage provider to use: 'aws' for AWS S3, 'google-drive' for Google Drive."
          }
        },
        required: ["provider"]
      }
    },
    {
      name: "list",
      description: "List files tracked by dorky. Use this to discover what sensitive files (.env, credentials, configs) are available remotely when they appear to be missing locally. Without arguments lists local untracked and staged files. Pass 'remote' to list files in remote storage.",
      inputSchema: {
        type: "object",
        properties: {
          remote: {
            type: "boolean",
            description: "Set to true to list remote files in storage instead of local files."
          }
        }
      }
    },
    {
      name: "add",
      description: "Stage one or more files to be pushed to remote storage.",
      inputSchema: {
        type: "object",
        properties: {
          files: {
            type: "array",
            items: { type: "string" },
            description: "List of file paths to stage."
          }
        },
        required: ["files"]
      }
    },
    {
      name: "remove",
      description: "Unstage one or more files from dorky tracking.",
      inputSchema: {
        type: "object",
        properties: {
          files: {
            type: "array",
            items: { type: "string" },
            description: "List of file paths to unstage."
          }
        },
        required: ["files"]
      }
    },
    {
      name: "push",
      description: "Push all staged files to remote storage (AWS S3 or Google Drive). Use this when the user has updated sensitive files (.env, credentials, configs) and wants to sync them to remote storage for teammates.",
      inputSchema: {
        type: "object",
        properties: {}
      }
    },
    {
      name: "pull",
      description: "Pull all tracked files from remote storage to the local project. Use this proactively whenever you detect that .env, credential, or config files are missing locally — dorky manages these sensitive files in remote storage instead of git. If a task requires environment variables or secrets that are not present, pull first before proceeding.",
      inputSchema: {
        type: "object",
        properties: {}
      }
    },
    {
      name: "log",
      description: "Show the push history with commit IDs, timestamps, and file lists.",
      inputSchema: {
        type: "object",
        properties: {}
      }
    },
    {
      name: "checkout",
      description: "Restore files from a specific history commit. Use 'log' first to find commit IDs.",
      inputSchema: {
        type: "object",
        properties: {
          commitId: {
            type: "string",
            description: "The commit ID (or prefix) to restore. Find commit IDs with the 'log' tool."
          }
        },
        required: ["commitId"]
      }
    },
    {
      name: "destroy",
      description: "Destroy the dorky project locally and remove all files from remote storage. This action is irreversible.",
      inputSchema: {
        type: "object",
        properties: {}
      }
    }
  ]
}));
579
+
580
// Dispatch tool invocations to the matching command implementation.
// Every outcome is returned as a single text content block; command errors
// become isError results rather than protocol failures.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args = {} } = request.params;

  // One thunk per tool; thunks close over `args` so the table stays uniform.
  const handlers = {
    init: () => init(args.provider),
    list: () => list(args.remote ? "remote" : undefined),
    add: () => add(args.files),
    remove: () => rm(args.files),
    push: () => push(),
    pull: () => pull(),
    log: () => log(),
    checkout: () => checkout(args.commitId),
    destroy: () => destroy(),
  };

  // Own-property check so inherited names (e.g. "toString") are rejected.
  if (!Object.prototype.hasOwnProperty.call(handlers, name)) {
    return { content: [{ type: "text", text: `Unknown tool: ${name}` }], isError: true };
  }

  try {
    const result = await handlers[name]();
    return { content: [{ type: "text", text: result || "Done." }] };
  } catch (err) {
    return { content: [{ type: "text", text: `Error: ${err.message}` }], isError: true };
  }
});
622
+
623
// Entry point: attach the MCP server to a stdio transport. Fatal startup
// errors go to stderr (stdout is reserved for the MCP protocol stream) and
// the process exits non-zero.
async function main() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
}

main().catch((err) => {
  process.stderr.write(`Fatal error: ${err.message}\n`);
  process.exit(1);
});
package/package.json CHANGED
@@ -1,9 +1,10 @@
1
1
  {
2
2
  "name": "dorky",
3
- "version": "4.0.0",
3
+ "version": "4.1.0",
4
4
  "description": "DevOps Records Keeper.",
5
5
  "bin": {
6
- "dorky": "bin/index.js"
6
+ "dorky": "bin/index.js",
7
+ "dorky-mcp": "bin/mcp.js"
7
8
  },
8
9
  "scripts": {
9
10
  "start": "node bin/index.js",
@@ -38,6 +39,7 @@
38
39
  "vitest": "^2.1.8"
39
40
  },
40
41
  "dependencies": {
42
+ "@modelcontextprotocol/sdk": "^1.29.0",
41
43
  "@aws-sdk/client-s3": "^3.679.0",
42
44
  "@google-cloud/local-auth": "^3.0.1",
43
45
  "chalk": "^4.1.2",