dorky 3.0.2 → 4.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dorky/history.json +22 -0
- package/.mcp.json +20 -0
- package/README.md +125 -5
- package/bin/index.js +101 -0
- package/bin/mcp.js +631 -0
- package/package.json +4 -2
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
[
|
|
2
|
+
{
|
|
3
|
+
"id": "34aaf3c3",
|
|
4
|
+
"timestamp": "2026-04-22T18:30:13.819Z",
|
|
5
|
+
"files": {
|
|
6
|
+
".env": {
|
|
7
|
+
"mime-type": "application/octet-stream",
|
|
8
|
+
"hash": "ef690809a391f839b909e303e8d1f888"
|
|
9
|
+
}
|
|
10
|
+
}
|
|
11
|
+
},
|
|
12
|
+
{
|
|
13
|
+
"id": "e5ecba71",
|
|
14
|
+
"timestamp": "2026-04-22T18:30:45.770Z",
|
|
15
|
+
"files": {
|
|
16
|
+
".env": {
|
|
17
|
+
"mime-type": "application/octet-stream",
|
|
18
|
+
"hash": "0f3124c0e7688e35ae2187da45f23f22"
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
]
|
package/.mcp.json
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
{
|
|
2
|
+
"mcpServers": {
|
|
3
|
+
"ccrag": {
|
|
4
|
+
"command": "/Users/trishantpahwa/Desktop/open-source/ccrag/.venv/bin/ccrag",
|
|
5
|
+
"args": [
|
|
6
|
+
"serve",
|
|
7
|
+
"/Users/trishantpahwa/Desktop/open-source/dorky"
|
|
8
|
+
]
|
|
9
|
+
},
|
|
10
|
+
"dorky": {
|
|
11
|
+
"type": "stdio",
|
|
12
|
+
"command": "npx",
|
|
13
|
+
"args": [
|
|
14
|
+
"-y",
|
|
15
|
+
"dorky-mcp"
|
|
16
|
+
],
|
|
17
|
+
"env": {}
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
}
|
package/README.md
CHANGED
|
@@ -205,6 +205,23 @@ This command:
|
|
|
205
205
|
- Creates necessary directories
|
|
206
206
|
- Overwrites local files
|
|
207
207
|
|
|
208
|
+
### Show Push History (`-lg`)
|
|
209
|
+
|
|
210
|
+
```bash
|
|
211
|
+
dorky --log
|
|
212
|
+
```
|
|
213
|
+
|
|
214
|
+
Prints all past push commits in reverse chronological order, showing the commit ID, timestamp, and list of files included in each snapshot.
|
|
215
|
+
|
|
216
|
+
### Checkout a Commit (`-co`)
|
|
217
|
+
|
|
218
|
+
```bash
|
|
219
|
+
# Restore files to a specific commit
|
|
220
|
+
dorky --checkout <commit-id>
|
|
221
|
+
```
|
|
222
|
+
|
|
223
|
+
Downloads the files as they were at the given commit from remote storage and restores the local staged/uploaded state to match. The commit ID can be found with `--log`. Prefix matching is supported (e.g. `dorky --checkout a1b2` if the full ID is `a1b2c3d4`).
|
|
224
|
+
|
|
208
225
|
### Destroy Project (`-d`)
|
|
209
226
|
|
|
210
227
|
```bash
|
|
@@ -241,7 +258,8 @@ After initialization:
|
|
|
241
258
|
your-project/
|
|
242
259
|
├── .dorky/
|
|
243
260
|
│ ├── credentials.json # Storage credentials (auto-ignored by git)
|
|
244
|
-
│
|
|
261
|
+
│ ├── metadata.json # Tracked files metadata
|
|
262
|
+
│ └── history.json # Push commit history
|
|
245
263
|
├── .dorkyignore # Exclusion patterns
|
|
246
264
|
└── .gitignore # Updated automatically
|
|
247
265
|
```
|
|
@@ -323,6 +341,104 @@ dorky --push
|
|
|
323
341
|
dorky --pull
|
|
324
342
|
```
|
|
325
343
|
|
|
344
|
+
## MCP Server (AI Agent Integration)
|
|
345
|
+
|
|
346
|
+
dorky ships a [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) server so AI coding assistants (Claude, Cursor, VS Code Copilot, etc.) can invoke dorky commands directly from within AI-assisted workflows.
|
|
347
|
+
|
|
348
|
+
### Available MCP Tools
|
|
349
|
+
|
|
350
|
+
| Tool | Description |
|
|
351
|
+
| ---------- | ---------------------------------------------------- |
|
|
352
|
+
| `init` | Initialize a dorky project (`aws` or `google-drive`) |
|
|
353
|
+
| `list` | List local untracked/staged files or remote files |
|
|
354
|
+
| `add` | Stage files for upload |
|
|
355
|
+
| `remove` | Unstage files from tracking |
|
|
356
|
+
| `push` | Push staged files to remote storage |
|
|
357
|
+
| `pull` | Pull tracked files from remote storage |
|
|
358
|
+
| `log` | Show push history |
|
|
359
|
+
| `checkout` | Restore files from a history commit |
|
|
360
|
+
| `destroy` | Destroy the project locally and remotely |
|
|
361
|
+
|
|
362
|
+
### Running the MCP Server
|
|
363
|
+
|
|
364
|
+
```bash
|
|
365
|
+
npx dorky-mcp
|
|
366
|
+
```
|
|
367
|
+
|
|
368
|
+
Or, if installed globally:
|
|
369
|
+
|
|
370
|
+
```bash
|
|
371
|
+
dorky-mcp
|
|
372
|
+
```
|
|
373
|
+
|
|
374
|
+
### Configuring MCP Clients
|
|
375
|
+
|
|
376
|
+
#### Claude Desktop
|
|
377
|
+
|
|
378
|
+
Add the following to your `claude_desktop_config.json` (usually at `~/Library/Application Support/Claude/claude_desktop_config.json` on macOS or `%APPDATA%\Claude\claude_desktop_config.json` on Windows):
|
|
379
|
+
|
|
380
|
+
```json
|
|
381
|
+
{
|
|
382
|
+
"mcpServers": {
|
|
383
|
+
"dorky": {
|
|
384
|
+
"command": "npx",
|
|
385
|
+
"args": ["dorky-mcp"],
|
|
386
|
+
"env": {
|
|
387
|
+
"AWS_ACCESS_KEY": "your-access-key",
|
|
388
|
+
"AWS_SECRET_KEY": "your-secret-key",
|
|
389
|
+
"AWS_REGION": "us-east-1",
|
|
390
|
+
"BUCKET_NAME": "your-bucket-name"
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
}
|
|
394
|
+
}
|
|
395
|
+
```
|
|
396
|
+
|
|
397
|
+
#### VS Code (GitHub Copilot)
|
|
398
|
+
|
|
399
|
+
Add to your VS Code `settings.json`:
|
|
400
|
+
|
|
401
|
+
```json
|
|
402
|
+
{
|
|
403
|
+
"mcp": {
|
|
404
|
+
"servers": {
|
|
405
|
+
"dorky": {
|
|
406
|
+
"type": "stdio",
|
|
407
|
+
"command": "npx",
|
|
408
|
+
"args": ["dorky-mcp"],
|
|
409
|
+
"env": {
|
|
410
|
+
"AWS_ACCESS_KEY": "your-access-key",
|
|
411
|
+
"AWS_SECRET_KEY": "your-secret-key",
|
|
412
|
+
"AWS_REGION": "us-east-1",
|
|
413
|
+
"BUCKET_NAME": "your-bucket-name"
|
|
414
|
+
}
|
|
415
|
+
}
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
```
|
|
420
|
+
|
|
421
|
+
#### Cursor
|
|
422
|
+
|
|
423
|
+
Add to your Cursor MCP config (`.cursor/mcp.json` in your project or `~/.cursor/mcp.json` globally):
|
|
424
|
+
|
|
425
|
+
```json
|
|
426
|
+
{
|
|
427
|
+
"mcpServers": {
|
|
428
|
+
"dorky": {
|
|
429
|
+
"command": "npx",
|
|
430
|
+
"args": ["dorky-mcp"],
|
|
431
|
+
"env": {
|
|
432
|
+
"AWS_ACCESS_KEY": "your-access-key",
|
|
433
|
+
"AWS_SECRET_KEY": "your-secret-key",
|
|
434
|
+
"AWS_REGION": "us-east-1",
|
|
435
|
+
"BUCKET_NAME": "your-bucket-name"
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
}
|
|
439
|
+
}
|
|
440
|
+
```
|
|
441
|
+
|
|
326
442
|
## VS Code Extension
|
|
327
443
|
|
|
328
444
|
A graphical interface for dorky is available as a VS Code extension — manage staged and uploaded files directly from the sidebar without leaving your editor.
|
|
@@ -345,14 +461,18 @@ A graphical interface for dorky is available as a VS Code extension — manage s
|
|
|
345
461
|
- ✅ Recursive folder creation on pull
|
|
346
462
|
- ✅ Destroy project and clean up remote files
|
|
347
463
|
- ✅ Auto-recovery of AWS credentials from environment variables
|
|
464
|
+
- ✅ Push history with versioned remote snapshots
|
|
465
|
+
- ✅ Restore files to any previous push commit
|
|
466
|
+
- ✅ MCP server for AI agent integration (Claude, Cursor, VS Code Copilot)
|
|
348
467
|
|
|
349
468
|
## How It Works
|
|
350
469
|
|
|
351
|
-
1. **Initialization**: Creates `.dorky/` folder with metadata and
|
|
470
|
+
1. **Initialization**: Creates `.dorky/` folder with metadata, credentials, and history
|
|
352
471
|
2. **File Tracking**: Maintains a hash-based registry of files in `metadata.json`
|
|
353
472
|
3. **Smart Uploads**: Only uploads files that have changed (based on MD5 hash)
|
|
354
473
|
4. **Auto-detection**: Highlights `.env` and `.config` files during listing
|
|
355
474
|
5. **Security**: Automatically updates `.gitignore` to protect credentials
|
|
475
|
+
6. **History**: Each push saves a commit entry in `history.json` and uploads a versioned snapshot to `<project>/.dorky-history/<commit-id>/` on remote storage, enabling point-in-time restore via `--checkout`
|
|
356
476
|
|
|
357
477
|
## Security Best Practices
|
|
358
478
|
|
|
@@ -413,12 +533,12 @@ ISC License - see [LICENSE](LICENSE) file for details.
|
|
|
413
533
|
- [x] Uninitialize dorky setup (Bug fix release)
|
|
414
534
|
- [ ] dorky --list remote --update should sync metadata according to remote (Minor release)
|
|
415
535
|
- [x] Extension for VS Code to list and highlight them like git (Major release)
|
|
416
|
-
- [
|
|
536
|
+
- [x] MCP server (Minor release)
|
|
417
537
|
- [ ] Encryption of files (Minor release)
|
|
418
|
-
- [
|
|
538
|
+
- [x] Add stages for variables (Major release)
|
|
419
539
|
- [ ] Migrate dorky project to another storage (partially implemented)
|
|
420
540
|
- [ ] Add more test cases
|
|
421
541
|
- [ ] Deletion of files
|
|
422
542
|
- [ ] Edge cases for failure when credentials are invalid
|
|
423
|
-
- [
|
|
543
|
+
- [x] Add coverage reports badges
|
|
424
544
|
|
package/bin/index.js
CHANGED
|
@@ -16,6 +16,7 @@ const { google } = require('googleapis');
|
|
|
16
16
|
const DORKY_DIR = ".dorky";
|
|
17
17
|
const METADATA_PATH = path.join(DORKY_DIR, "metadata.json");
|
|
18
18
|
const CREDENTIALS_PATH = path.join(DORKY_DIR, "credentials.json");
|
|
19
|
+
const HISTORY_PATH = path.join(DORKY_DIR, "history.json");
|
|
19
20
|
const GD_CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
|
|
20
21
|
const SCOPES = ['https://www.googleapis.com/auth/drive'];
|
|
21
22
|
|
|
@@ -50,6 +51,8 @@ const args = yargs
|
|
|
50
51
|
.option("pull", { alias: "pl", describe: "Pull files", type: "string" })
|
|
51
52
|
.option("migrate", { alias: "m", describe: "Migrate project", type: "string" })
|
|
52
53
|
.option("destroy", { alias: "d", describe: "Destroy project", type: "boolean" })
|
|
54
|
+
.option("log", { alias: "lg", describe: "Show push history", type: "boolean" })
|
|
55
|
+
.option("checkout", { alias: "co", describe: "Restore files from a history commit", type: "string" })
|
|
53
56
|
.help('help').strict().argv;
|
|
54
57
|
|
|
55
58
|
if (Object.keys(args).length === 2 && args._.length === 0) yargs.showHelp();
|
|
@@ -110,6 +113,7 @@ async function init(storage) {
|
|
|
110
113
|
|
|
111
114
|
mkdirSync(DORKY_DIR);
|
|
112
115
|
writeJson(METADATA_PATH, { "stage-1-files": {}, "uploaded-files": {} });
|
|
116
|
+
writeJson(HISTORY_PATH, []);
|
|
113
117
|
writeFileSync(".dorkyignore", "");
|
|
114
118
|
writeJson(CREDENTIALS_PATH, credentials);
|
|
115
119
|
console.log(chalk.green("✔ Dorky project initialized successfully."));
|
|
@@ -326,6 +330,36 @@ async function push() {
|
|
|
326
330
|
|
|
327
331
|
meta["uploaded-files"] = { ...meta["stage-1-files"] };
|
|
328
332
|
writeJson(METADATA_PATH, meta);
|
|
333
|
+
|
|
334
|
+
const commitFiles = { ...meta["stage-1-files"] };
|
|
335
|
+
const commitId = md5(JSON.stringify(commitFiles)).slice(0, 8);
|
|
336
|
+
const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
|
|
337
|
+
if (!history.find(e => e.id === commitId)) {
|
|
338
|
+
history.push({ id: commitId, timestamp: new Date().toISOString(), files: commitFiles });
|
|
339
|
+
writeJson(HISTORY_PATH, history);
|
|
340
|
+
|
|
341
|
+
const root = path.basename(process.cwd());
|
|
342
|
+
const historyPrefix = path.join(root, ".dorky-history", commitId);
|
|
343
|
+
if (creds.storage === "aws") {
|
|
344
|
+
await runS3(creds, async (s3, bucket) => {
|
|
345
|
+
await Promise.all(Object.keys(commitFiles).map(async f => {
|
|
346
|
+
const key = path.join(historyPrefix, f);
|
|
347
|
+
await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f) }));
|
|
348
|
+
}));
|
|
349
|
+
});
|
|
350
|
+
} else if (creds.storage === "google-drive") {
|
|
351
|
+
await runDrive(async (drive) => {
|
|
352
|
+
for (const f of Object.keys(commitFiles)) {
|
|
353
|
+
const parentId = await getFolderId(path.join(root, ".dorky-history", commitId, path.dirname(f)), drive);
|
|
354
|
+
await drive.files.create({
|
|
355
|
+
requestBody: { name: path.basename(f), parents: [parentId] },
|
|
356
|
+
media: { mimeType: commitFiles[f]["mime-type"], body: createReadStream(f) }
|
|
357
|
+
});
|
|
358
|
+
}
|
|
359
|
+
});
|
|
360
|
+
}
|
|
361
|
+
console.log(chalk.cyan(`ℹ History commit saved: ${commitId}`));
|
|
362
|
+
}
|
|
329
363
|
}
|
|
330
364
|
|
|
331
365
|
async function pull() {
|
|
@@ -361,6 +395,71 @@ async function pull() {
|
|
|
361
395
|
}
|
|
362
396
|
}
|
|
363
397
|
|
|
398
|
+
function log() {
|
|
399
|
+
checkDorkyProject();
|
|
400
|
+
const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
|
|
401
|
+
if (!history.length) return console.log(chalk.yellow("ℹ No history found. Push some files first."));
|
|
402
|
+
console.log(chalk.blue.bold("\n📜 Push History:\n"));
|
|
403
|
+
[...history].reverse().forEach((entry, i) => {
|
|
404
|
+
const date = new Date(entry.timestamp).toLocaleString();
|
|
405
|
+
const fileCount = Object.keys(entry.files).length;
|
|
406
|
+
console.log(chalk.yellow(` commit ${entry.id}`) + (i === 0 ? chalk.green(" (latest)") : ""));
|
|
407
|
+
console.log(chalk.gray(` Date: ${date}`));
|
|
408
|
+
console.log(chalk.gray(` Files: ${fileCount}`));
|
|
409
|
+
Object.keys(entry.files).forEach(f => console.log(chalk.cyan(` • ${f}`)));
|
|
410
|
+
console.log();
|
|
411
|
+
});
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
async function checkout(commitId) {
|
|
415
|
+
checkDorkyProject();
|
|
416
|
+
if (!await checkCredentials()) return;
|
|
417
|
+
|
|
418
|
+
const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
|
|
419
|
+
const entry = history.find(e => e.id === commitId || e.id.startsWith(commitId));
|
|
420
|
+
if (!entry) return console.log(chalk.red(`✖ Commit not found: ${commitId}. Run --log to see available commits.`));
|
|
421
|
+
|
|
422
|
+
console.log(chalk.blue.bold(`\n⏪ Checking out commit ${entry.id} (${new Date(entry.timestamp).toLocaleString()}):\n`));
|
|
423
|
+
|
|
424
|
+
const creds = readJson(CREDENTIALS_PATH);
|
|
425
|
+
const root = path.basename(process.cwd());
|
|
426
|
+
const historyPrefix = path.join(root, ".dorky-history", entry.id);
|
|
427
|
+
|
|
428
|
+
if (creds.storage === "aws") {
|
|
429
|
+
await runS3(creds, async (s3, bucket) => {
|
|
430
|
+
await Promise.all(Object.keys(entry.files).map(async f => {
|
|
431
|
+
const key = path.join(historyPrefix, f);
|
|
432
|
+
const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
|
|
433
|
+
if (!existsSync(path.dirname(f))) mkdirSync(path.dirname(f), { recursive: true });
|
|
434
|
+
writeFileSync(f, await Body.transformToString());
|
|
435
|
+
console.log(chalk.green(`✔ Restored: ${f}`));
|
|
436
|
+
}));
|
|
437
|
+
});
|
|
438
|
+
} else if (creds.storage === "google-drive") {
|
|
439
|
+
await runDrive(async (drive) => {
|
|
440
|
+
for (const f of Object.keys(entry.files)) {
|
|
441
|
+
const parentId = await getFolderId(path.join(root, ".dorky-history", entry.id, path.dirname(f)), drive, false);
|
|
442
|
+
if (!parentId) { console.log(chalk.red(`✖ Remote history folder missing for: ${f}`)); continue; }
|
|
443
|
+
const res = await drive.files.list({
|
|
444
|
+
q: `name='${path.basename(f)}' and '${parentId}' in parents and trashed=false`,
|
|
445
|
+
fields: 'files(id)'
|
|
446
|
+
});
|
|
447
|
+
if (!res.data.files[0]) { console.log(chalk.red(`✖ Missing remote history file: ${f}`)); continue; }
|
|
448
|
+
const data = await drive.files.get({ fileId: res.data.files[0].id, alt: 'media' });
|
|
449
|
+
if (!existsSync(path.dirname(f))) mkdirSync(path.dirname(f), { recursive: true });
|
|
450
|
+
writeFileSync(f, await data.data.text());
|
|
451
|
+
console.log(chalk.green(`✔ Restored: ${f}`));
|
|
452
|
+
}
|
|
453
|
+
});
|
|
454
|
+
}
|
|
455
|
+
|
|
456
|
+
const meta = readJson(METADATA_PATH);
|
|
457
|
+
meta["stage-1-files"] = { ...entry.files };
|
|
458
|
+
meta["uploaded-files"] = { ...entry.files };
|
|
459
|
+
writeJson(METADATA_PATH, meta);
|
|
460
|
+
console.log(chalk.cyan(`\nℹ Staged and uploaded state restored to commit ${entry.id}.`));
|
|
461
|
+
}
|
|
462
|
+
|
|
364
463
|
async function destroy() {
|
|
365
464
|
checkDorkyProject();
|
|
366
465
|
if (!await checkCredentials()) return;
|
|
@@ -403,4 +502,6 @@ if (args.add !== undefined) add(args.add);
|
|
|
403
502
|
if (args.rm !== undefined) rm(args.rm);
|
|
404
503
|
if (args.push !== undefined) push();
|
|
405
504
|
if (args.pull !== undefined) pull();
|
|
505
|
+
if (args.log !== undefined) log();
|
|
506
|
+
if (args.checkout !== undefined) checkout(args.checkout);
|
|
406
507
|
if (args.destroy !== undefined) destroy();
|
package/bin/mcp.js
ADDED
|
@@ -0,0 +1,631 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
const { Server } = require("@modelcontextprotocol/sdk/server/index.js");
|
|
4
|
+
const { StdioServerTransport } = require("@modelcontextprotocol/sdk/server/stdio.js");
|
|
5
|
+
const { CallToolRequestSchema, ListToolsRequestSchema } = require("@modelcontextprotocol/sdk/types.js");
|
|
6
|
+
const { existsSync, mkdirSync, writeFileSync, readFileSync, createReadStream, unlinkSync, rmSync } = require("fs");
|
|
7
|
+
const { glob } = require("glob");
|
|
8
|
+
const path = require("path");
|
|
9
|
+
const mimeTypes = require("mime-types");
|
|
10
|
+
const md5 = require("md5");
|
|
11
|
+
const EOL = require("os").type() == "Darwin" ? "\r\n" : "\n";
|
|
12
|
+
const { GetObjectCommand, PutObjectCommand, ListObjectsV2Command, DeleteObjectCommand, DeleteObjectsCommand, S3Client } = require("@aws-sdk/client-s3");
|
|
13
|
+
const { authenticate } = require("@google-cloud/local-auth");
|
|
14
|
+
const { google } = require("googleapis");
|
|
15
|
+
|
|
16
|
+
// Constants & Config
|
|
17
|
+
const DORKY_DIR = ".dorky";
|
|
18
|
+
const METADATA_PATH = path.join(DORKY_DIR, "metadata.json");
|
|
19
|
+
const CREDENTIALS_PATH = path.join(DORKY_DIR, "credentials.json");
|
|
20
|
+
const HISTORY_PATH = path.join(DORKY_DIR, "history.json");
|
|
21
|
+
const GD_CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
|
|
22
|
+
const SCOPES = ["https://www.googleapis.com/auth/drive"];
|
|
23
|
+
|
|
24
|
+
// Helpers
|
|
25
|
+
const readJson = (p) => existsSync(p) ? JSON.parse(readFileSync(p)) : {};
|
|
26
|
+
const writeJson = (p, d) => writeFileSync(p, JSON.stringify(d, null, 2));
|
|
27
|
+
|
|
28
|
+
const checkDorkyProject = () => {
|
|
29
|
+
if (!existsSync(DORKY_DIR) && !existsSync(".dorkyignore")) {
|
|
30
|
+
throw new Error("Not a dorky project. Please run init first.");
|
|
31
|
+
}
|
|
32
|
+
};
|
|
33
|
+
|
|
34
|
+
function updateGitIgnore() {
|
|
35
|
+
let content = existsSync(".gitignore") ? readFileSync(".gitignore").toString() : "";
|
|
36
|
+
if (!content.includes(CREDENTIALS_PATH)) {
|
|
37
|
+
writeFileSync(".gitignore", content + EOL + CREDENTIALS_PATH + EOL);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
async function authorizeGoogleDriveClient(forceReauth = false) {
|
|
42
|
+
if (!forceReauth && existsSync(CREDENTIALS_PATH)) {
|
|
43
|
+
const saved = readJson(CREDENTIALS_PATH);
|
|
44
|
+
if (saved.storage === "google-drive" && saved.expiry_date) {
|
|
45
|
+
const keys = readJson(GD_CREDENTIALS_PATH);
|
|
46
|
+
const key = keys.installed || keys.web;
|
|
47
|
+
const client = new google.auth.OAuth2(key.client_id, key.client_secret, key.redirect_uris[0]);
|
|
48
|
+
client.setCredentials(saved);
|
|
49
|
+
|
|
50
|
+
if (Date.now() >= saved.expiry_date - 300000) {
|
|
51
|
+
try {
|
|
52
|
+
const { credentials } = await client.refreshAccessToken();
|
|
53
|
+
writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...credentials });
|
|
54
|
+
client.setCredentials(credentials);
|
|
55
|
+
} catch (e) {
|
|
56
|
+
return authorizeGoogleDriveClient(true);
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
return client;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
const client = await authenticate({ scopes: SCOPES, keyfilePath: GD_CREDENTIALS_PATH });
|
|
64
|
+
if (client?.credentials && existsSync(path.dirname(CREDENTIALS_PATH))) {
|
|
65
|
+
writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...client.credentials });
|
|
66
|
+
}
|
|
67
|
+
return client;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
async function init(storage) {
|
|
71
|
+
if (existsSync(DORKY_DIR)) return "Dorky is already initialized.";
|
|
72
|
+
if (!["aws", "google-drive"].includes(storage)) return "Invalid storage. Use 'aws' or 'google-drive'.";
|
|
73
|
+
|
|
74
|
+
let credentials = {};
|
|
75
|
+
if (storage === "aws") {
|
|
76
|
+
if (!process.env.AWS_ACCESS_KEY || !process.env.AWS_SECRET_KEY || !process.env.AWS_REGION || !process.env.BUCKET_NAME) {
|
|
77
|
+
return "Missing AWS environment variables (AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION, BUCKET_NAME).";
|
|
78
|
+
}
|
|
79
|
+
credentials = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
|
|
80
|
+
} else {
|
|
81
|
+
const client = await authorizeGoogleDriveClient(true);
|
|
82
|
+
credentials = { storage: "google-drive", ...client.credentials };
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
mkdirSync(DORKY_DIR);
|
|
86
|
+
writeJson(METADATA_PATH, { "stage-1-files": {}, "uploaded-files": {} });
|
|
87
|
+
writeJson(HISTORY_PATH, []);
|
|
88
|
+
writeFileSync(".dorkyignore", "");
|
|
89
|
+
writeJson(CREDENTIALS_PATH, credentials);
|
|
90
|
+
updateGitIgnore();
|
|
91
|
+
return "Dorky project initialized successfully.";
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
async function list(type) {
|
|
95
|
+
checkDorkyProject();
|
|
96
|
+
const meta = readJson(METADATA_PATH);
|
|
97
|
+
const lines = [];
|
|
98
|
+
|
|
99
|
+
if (type === "remote") {
|
|
100
|
+
if (!await checkCredentials()) return "Credentials not found. Please run init first.";
|
|
101
|
+
const creds = readJson(CREDENTIALS_PATH);
|
|
102
|
+
const root = path.basename(process.cwd());
|
|
103
|
+
lines.push("Remote Files:");
|
|
104
|
+
|
|
105
|
+
if (creds.storage === "aws") {
|
|
106
|
+
await runS3(creds, async (s3, bucket) => {
|
|
107
|
+
const data = await s3.send(new ListObjectsV2Command({ Bucket: bucket, Prefix: root + "/" }));
|
|
108
|
+
if (!data.Contents?.length) { lines.push("No remote files found."); return; }
|
|
109
|
+
data.Contents.forEach(o => lines.push(` ${o.Key.replace(root + "/", "")}`));
|
|
110
|
+
});
|
|
111
|
+
} else {
|
|
112
|
+
await runDrive(async (drive) => {
|
|
113
|
+
const q = `name='${root}' and mimeType='application/vnd.google-apps.folder' and 'root' in parents and trashed=false`;
|
|
114
|
+
const { data: { files: [folder] } } = await drive.files.list({ q, fields: "files(id)" });
|
|
115
|
+
if (!folder) { lines.push("Remote folder not found."); return; }
|
|
116
|
+
const walk = async (pid, p = "") => {
|
|
117
|
+
const { data: { files } } = await drive.files.list({ q: `'${pid}' in parents and trashed=false`, fields: "files(id, name, mimeType)" });
|
|
118
|
+
for (const f of files) {
|
|
119
|
+
if (f.mimeType === "application/vnd.google-apps.folder") await walk(f.id, path.join(p, f.name));
|
|
120
|
+
else lines.push(` ${path.join(p, f.name)}`);
|
|
121
|
+
}
|
|
122
|
+
};
|
|
123
|
+
await walk(folder.id);
|
|
124
|
+
});
|
|
125
|
+
}
|
|
126
|
+
} else {
|
|
127
|
+
lines.push("Untracked Files:");
|
|
128
|
+
const exclusions = existsSync(".dorkyignore") ? readFileSync(".dorkyignore").toString().split(EOL).filter(Boolean) : [];
|
|
129
|
+
const files = await glob("**/*", { dot: true, ignore: [...exclusions.map(e => `**/${e}/**`), ...exclusions, ".dorky/**", ".dorkyignore", ".git/**", "node_modules/**"] });
|
|
130
|
+
|
|
131
|
+
files.forEach(f => {
|
|
132
|
+
const rel = path.relative(process.cwd(), f);
|
|
133
|
+
if (rel.includes(".env") || rel.includes(".config")) lines.push(` ${rel} (Potential sensitive file)`);
|
|
134
|
+
else lines.push(` ${rel}`);
|
|
135
|
+
});
|
|
136
|
+
lines.push("\nStaged Files:");
|
|
137
|
+
Object.keys(meta["stage-1-files"]).forEach(f => lines.push(` ${f}`));
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
return lines.join("\n");
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
function add(files) {
|
|
144
|
+
checkDorkyProject();
|
|
145
|
+
const meta = readJson(METADATA_PATH);
|
|
146
|
+
const results = [];
|
|
147
|
+
files.forEach(f => {
|
|
148
|
+
if (!existsSync(f)) { results.push(`File not found: ${f}`); return; }
|
|
149
|
+
const hash = md5(readFileSync(f));
|
|
150
|
+
if (meta["stage-1-files"][f]?.hash === hash) { results.push(`${f} (unchanged)`); return; }
|
|
151
|
+
meta["stage-1-files"][f] = { "mime-type": mimeTypes.lookup(f) || "application/octet-stream", hash };
|
|
152
|
+
results.push(`Staged: ${f}`);
|
|
153
|
+
});
|
|
154
|
+
writeJson(METADATA_PATH, meta);
|
|
155
|
+
return results.join("\n");
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
function rm(files) {
|
|
159
|
+
checkDorkyProject();
|
|
160
|
+
const meta = readJson(METADATA_PATH);
|
|
161
|
+
const removed = files.filter(f => {
|
|
162
|
+
if (!meta["stage-1-files"][f]) return false;
|
|
163
|
+
delete meta["stage-1-files"][f];
|
|
164
|
+
return true;
|
|
165
|
+
});
|
|
166
|
+
writeJson(METADATA_PATH, meta);
|
|
167
|
+
return removed.length
|
|
168
|
+
? removed.map(f => `Unstaged: ${f}`).join("\n")
|
|
169
|
+
: "No matching files to remove.";
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
async function checkCredentials() {
|
|
173
|
+
if (existsSync(CREDENTIALS_PATH)) return true;
|
|
174
|
+
if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
|
|
175
|
+
writeJson(CREDENTIALS_PATH, {
|
|
176
|
+
storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY,
|
|
177
|
+
awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME
|
|
178
|
+
});
|
|
179
|
+
return true;
|
|
180
|
+
}
|
|
181
|
+
try {
|
|
182
|
+
const client = await authorizeGoogleDriveClient(true);
|
|
183
|
+
if (client) return true;
|
|
184
|
+
} catch { }
|
|
185
|
+
return false;
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
const getS3 = (c) => new S3Client({
|
|
189
|
+
credentials: { accessKeyId: c.accessKey || process.env.AWS_ACCESS_KEY, secretAccessKey: c.secretKey || process.env.AWS_SECRET_KEY },
|
|
190
|
+
region: c.awsRegion || process.env.AWS_REGION
|
|
191
|
+
});
|
|
192
|
+
|
|
193
|
+
async function runS3(creds, fn) {
|
|
194
|
+
try { await fn(getS3(creds), creds.bucket || process.env.BUCKET_NAME); }
|
|
195
|
+
catch (err) {
|
|
196
|
+
if (["InvalidAccessKeyId", "SignatureDoesNotMatch"].includes(err.name) || err.$metadata?.httpStatusCode === 403) {
|
|
197
|
+
if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
|
|
198
|
+
const newCreds = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
|
|
199
|
+
writeJson(CREDENTIALS_PATH, newCreds);
|
|
200
|
+
try { await fn(getS3(newCreds), newCreds.bucket); return; } catch { }
|
|
201
|
+
}
|
|
202
|
+
throw new Error("AWS authentication failed.");
|
|
203
|
+
}
|
|
204
|
+
throw err;
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
async function getFolderId(pathStr, drive, create = true) {
|
|
209
|
+
let parentId = "root";
|
|
210
|
+
if (!pathStr || pathStr === ".") return parentId;
|
|
211
|
+
for (const folder of pathStr.split("/")) {
|
|
212
|
+
if (!folder) continue;
|
|
213
|
+
const res = await drive.files.list({ q: `name='${folder}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents and trashed=false`, fields: "files(id)" });
|
|
214
|
+
if (res.data.files[0]) parentId = res.data.files[0].id;
|
|
215
|
+
else if (create) parentId = (await drive.files.create({ requestBody: { name: folder, mimeType: "application/vnd.google-apps.folder", parents: [parentId] }, fields: "id" })).data.id;
|
|
216
|
+
else return null;
|
|
217
|
+
}
|
|
218
|
+
return parentId;
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
async function runDrive(fn) {
|
|
222
|
+
let client = await authorizeGoogleDriveClient();
|
|
223
|
+
let drive = google.drive({ version: "v3", auth: client });
|
|
224
|
+
try { await fn(drive); }
|
|
225
|
+
catch (err) {
|
|
226
|
+
if (err.code === 401 || err.message?.includes("invalid_grant")) {
|
|
227
|
+
if (existsSync(CREDENTIALS_PATH)) unlinkSync(CREDENTIALS_PATH);
|
|
228
|
+
client = await authorizeGoogleDriveClient(true);
|
|
229
|
+
drive = google.drive({ version: "v3", auth: client });
|
|
230
|
+
await fn(drive);
|
|
231
|
+
} else throw err;
|
|
232
|
+
}
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
async function push() {
|
|
236
|
+
checkDorkyProject();
|
|
237
|
+
if (!await checkCredentials()) return "Credentials not found. Please run init first.";
|
|
238
|
+
const meta = readJson(METADATA_PATH);
|
|
239
|
+
const filesToUpload = Object.keys(meta["stage-1-files"])
|
|
240
|
+
.filter(f => !meta["uploaded-files"][f] || meta["stage-1-files"][f].hash !== meta["uploaded-files"][f].hash)
|
|
241
|
+
.map(f => ({ name: f, ...meta["stage-1-files"][f] }));
|
|
242
|
+
const filesToDelete = Object.keys(meta["uploaded-files"]).filter(f => !meta["stage-1-files"][f]);
|
|
243
|
+
|
|
244
|
+
if (filesToUpload.length === 0 && filesToDelete.length === 0) return "Nothing to push.";
|
|
245
|
+
|
|
246
|
+
const creds = readJson(CREDENTIALS_PATH);
|
|
247
|
+
const results = [];
|
|
248
|
+
|
|
249
|
+
if (creds.storage === "aws") {
|
|
250
|
+
await runS3(creds, async (s3, bucket) => {
|
|
251
|
+
if (filesToUpload.length > 0) {
|
|
252
|
+
await Promise.all(filesToUpload.map(async f => {
|
|
253
|
+
const key = path.join(path.basename(process.cwd()), f.name);
|
|
254
|
+
await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f.name) }));
|
|
255
|
+
results.push(`Uploaded: ${f.name}`);
|
|
256
|
+
}));
|
|
257
|
+
}
|
|
258
|
+
if (filesToDelete.length > 0) {
|
|
259
|
+
await Promise.all(filesToDelete.map(async f => {
|
|
260
|
+
const key = path.join(path.basename(process.cwd()), f);
|
|
261
|
+
await s3.send(new DeleteObjectCommand({ Bucket: bucket, Key: key }));
|
|
262
|
+
results.push(`Deleted remote: ${f}`);
|
|
263
|
+
}));
|
|
264
|
+
}
|
|
265
|
+
});
|
|
266
|
+
} else if (creds.storage === "google-drive") {
|
|
267
|
+
await runDrive(async (drive) => {
|
|
268
|
+
if (filesToUpload.length > 0) {
|
|
269
|
+
for (const f of filesToUpload) {
|
|
270
|
+
const root = path.basename(process.cwd());
|
|
271
|
+
const parentId = await getFolderId(path.dirname(path.join(root, f.name)), drive);
|
|
272
|
+
await drive.files.create({
|
|
273
|
+
requestBody: { name: path.basename(f.name), parents: [parentId] },
|
|
274
|
+
media: { mimeType: f["mime-type"], body: createReadStream(f.name) }
|
|
275
|
+
});
|
|
276
|
+
results.push(`Uploaded: ${f.name}`);
|
|
277
|
+
}
|
|
278
|
+
}
|
|
279
|
+
if (filesToDelete.length > 0) {
|
|
280
|
+
const root = path.basename(process.cwd());
|
|
281
|
+
for (const f of filesToDelete) {
|
|
282
|
+
const parentId = await getFolderId(path.dirname(path.join(root, f)), drive, false);
|
|
283
|
+
if (parentId) {
|
|
284
|
+
const res = await drive.files.list({ q: `name='${path.basename(f)}' and '${parentId}' in parents and trashed=false`, fields: "files(id)" });
|
|
285
|
+
if (res.data.files[0]) {
|
|
286
|
+
await drive.files.delete({ fileId: res.data.files[0].id });
|
|
287
|
+
results.push(`Deleted remote: ${f}`);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
});
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
meta["uploaded-files"] = { ...meta["stage-1-files"] };
|
|
296
|
+
writeJson(METADATA_PATH, meta);
|
|
297
|
+
|
|
298
|
+
const commitFiles = { ...meta["stage-1-files"] };
|
|
299
|
+
const commitId = md5(JSON.stringify(commitFiles)).slice(0, 8);
|
|
300
|
+
const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
|
|
301
|
+
if (!history.find(e => e.id === commitId)) {
|
|
302
|
+
history.push({ id: commitId, timestamp: new Date().toISOString(), files: commitFiles });
|
|
303
|
+
writeJson(HISTORY_PATH, history);
|
|
304
|
+
|
|
305
|
+
const root = path.basename(process.cwd());
|
|
306
|
+
const historyPrefix = path.join(root, ".dorky-history", commitId);
|
|
307
|
+
if (creds.storage === "aws") {
|
|
308
|
+
await runS3(creds, async (s3, bucket) => {
|
|
309
|
+
await Promise.all(Object.keys(commitFiles).map(async f => {
|
|
310
|
+
const key = path.join(historyPrefix, f);
|
|
311
|
+
await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f) }));
|
|
312
|
+
}));
|
|
313
|
+
});
|
|
314
|
+
} else if (creds.storage === "google-drive") {
|
|
315
|
+
await runDrive(async (drive) => {
|
|
316
|
+
for (const f of Object.keys(commitFiles)) {
|
|
317
|
+
const parentId = await getFolderId(path.join(root, ".dorky-history", commitId, path.dirname(f)), drive);
|
|
318
|
+
await drive.files.create({
|
|
319
|
+
requestBody: { name: path.basename(f), parents: [parentId] },
|
|
320
|
+
media: { mimeType: commitFiles[f]["mime-type"], body: createReadStream(f) }
|
|
321
|
+
});
|
|
322
|
+
}
|
|
323
|
+
});
|
|
324
|
+
}
|
|
325
|
+
results.push(`History commit saved: ${commitId}`);
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
return results.join("\n");
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
/**
 * Download every tracked ("uploaded") file from remote storage into the
 * local project, creating parent directories as needed.
 *
 * @returns {Promise<string>} Newline-joined report of downloaded/missing
 *   files, or "Nothing to pull." when nothing is tracked.
 */
async function pull() {
  checkDorkyProject();
  if (!await checkCredentials()) return "Credentials not found. Please run init first.";
  const meta = readJson(METADATA_PATH);
  // Guard: before the first push there may be no "uploaded-files" entry,
  // and Object.keys(undefined) would throw.
  const files = meta["uploaded-files"] || {};
  if (Object.keys(files).length === 0) return "Nothing to pull.";
  const creds = readJson(CREDENTIALS_PATH);
  const results = [];

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      await Promise.all(Object.keys(files).map(async f => {
        const key = path.join(path.basename(process.cwd()), f);
        const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
        const dir = path.dirname(f);
        if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
        // Write raw bytes: transformToString() would corrupt binary files
        // (tracked files are stored as application/octet-stream).
        writeFileSync(f, Buffer.from(await Body.transformToByteArray()));
        results.push(`Downloaded: ${f}`);
      }));
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      const fileList = Object.keys(files).map(k => ({ name: k, ...files[k] }));
      await Promise.all(fileList.map(async f => {
        // Escape single quotes so file names cannot break out of the
        // Drive query string.
        const safeName = path.basename(f.name).replace(/'/g, "\\'");
        const res = await drive.files.list({ q: `name='${safeName}' and mimeType!='application/vnd.google-apps.folder'`, fields: "files(id)" });
        if (!res.data.files[0]) { results.push(`Missing remote file: ${f.name}`); return; }
        // Fetch the media body as an ArrayBuffer so binary content
        // survives the round trip (text() would mangle it).
        const data = await drive.files.get(
          { fileId: res.data.files[0].id, alt: "media" },
          { responseType: "arraybuffer" }
        );
        if (!existsSync(path.dirname(f.name))) mkdirSync(path.dirname(f.name), { recursive: true });
        writeFileSync(f.name, Buffer.from(data.data));
        results.push(`Downloaded: ${f.name}`);
      }));
    });
  }

  return results.join("\n") || "Nothing to pull.";
}
|
|
366
|
+
|
|
367
|
+
/**
 * Render the local push history (newest first) as a human-readable string.
 *
 * @returns {string} Formatted history, or a hint when no pushes exist yet.
 */
function log() {
  checkDorkyProject();
  const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
  if (!history.length) return "No history found. Push some files first.";

  const out = ["Push History:"];
  // Copy before reversing — Array#reverse mutates in place.
  const newestFirst = [...history].reverse();
  newestFirst.forEach((entry, idx) => {
    const latestTag = idx === 0 ? " (latest)" : "";
    const fileNames = Object.keys(entry.files);
    out.push(`  commit ${entry.id}${latestTag}`);
    out.push(`    Date: ${new Date(entry.timestamp).toLocaleString()}`);
    out.push(`    Files: ${fileNames.length}`);
    for (const name of fileNames) out.push(`      • ${name}`);
    out.push("");
  });
  return out.join("\n");
}
|
|
384
|
+
|
|
385
|
+
/**
 * Restore the working tree to the files recorded in a history commit,
 * downloading each snapshot file from remote storage, then reset the
 * staged and uploaded metadata to that commit's file set.
 *
 * @param {string} commitId - Full commit ID or unique prefix (see log()).
 * @returns {Promise<string>} Report of restored / missing files.
 */
async function checkout(commitId) {
  checkDorkyProject();
  if (!await checkCredentials()) return "Credentials not found. Please run init first.";
  // Guard: an empty/undefined ID would otherwise match the first commit
  // via startsWith("").
  if (!commitId) return `Commit not found: ${commitId}. Run log to see available commits.`;

  const history = existsSync(HISTORY_PATH) ? JSON.parse(readFileSync(HISTORY_PATH)) : [];
  const entry = history.find(e => e.id === commitId || e.id.startsWith(commitId));
  if (!entry) return `Commit not found: ${commitId}. Run log to see available commits.`;

  const creds = readJson(CREDENTIALS_PATH);
  const root = path.basename(process.cwd());
  const historyPrefix = path.join(root, ".dorky-history", entry.id);
  const results = [`Checking out commit ${entry.id} (${new Date(entry.timestamp).toLocaleString()}):`];

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      await Promise.all(Object.keys(entry.files).map(async f => {
        const key = path.join(historyPrefix, f);
        const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
        if (!existsSync(path.dirname(f))) mkdirSync(path.dirname(f), { recursive: true });
        // Write raw bytes — transformToString() would corrupt binary files.
        writeFileSync(f, Buffer.from(await Body.transformToByteArray()));
        results.push(`Restored: ${f}`);
      }));
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      for (const f of Object.keys(entry.files)) {
        const parentId = await getFolderId(path.join(root, ".dorky-history", entry.id, path.dirname(f)), drive, false);
        if (!parentId) { results.push(`Remote history folder missing for: ${f}`); continue; }
        // Escape single quotes so file names cannot break out of the
        // Drive query string.
        const safeName = path.basename(f).replace(/'/g, "\\'");
        const res = await drive.files.list({ q: `name='${safeName}' and '${parentId}' in parents and trashed=false`, fields: "files(id)" });
        if (!res.data.files[0]) { results.push(`Missing remote history file: ${f}`); continue; }
        // Fetch as ArrayBuffer so binary content survives the round trip.
        const data = await drive.files.get(
          { fileId: res.data.files[0].id, alt: "media" },
          { responseType: "arraybuffer" }
        );
        if (!existsSync(path.dirname(f))) mkdirSync(path.dirname(f), { recursive: true });
        writeFileSync(f, Buffer.from(data.data));
        results.push(`Restored: ${f}`);
      }
    });
  }

  const meta = readJson(METADATA_PATH);
  meta["stage-1-files"] = { ...entry.files };
  meta["uploaded-files"] = { ...entry.files };
  writeJson(METADATA_PATH, meta);
  results.push(`Staged and uploaded state restored to commit ${entry.id}.`);
  return results.join("\n");
}
|
|
430
|
+
|
|
431
|
+
/**
 * Delete this project's files from remote storage, then remove all local
 * dorky state (.dorky directory and .dorkyignore). Irreversible.
 *
 * @returns {Promise<string>} Report of what was deleted.
 */
async function destroy() {
  checkDorkyProject();
  if (!await checkCredentials()) return "Credentials not found. Please run init first.";

  const creds = readJson(CREDENTIALS_PATH);
  const root = path.basename(process.cwd());
  const results = [];

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      // ListObjectsV2 returns at most 1000 keys per call — paginate with
      // the continuation token so large projects / history snapshots are
      // fully removed instead of silently leaving objects behind.
      let token;
      let deletedAny = false;
      do {
        const data = await s3.send(new ListObjectsV2Command({ Bucket: bucket, Prefix: root + "/", ContinuationToken: token }));
        if (data.Contents && data.Contents.length > 0) {
          const deleteParams = { Bucket: bucket, Delete: { Objects: data.Contents.map(o => ({ Key: o.Key })) } };
          await s3.send(new DeleteObjectsCommand(deleteParams));
          deletedAny = true;
        }
        token = data.IsTruncated ? data.NextContinuationToken : undefined;
      } while (token);
      if (deletedAny) results.push("Remote files deleted.");
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      // Escape single quotes so the folder name cannot break the query.
      const safeRoot = root.replace(/'/g, "\\'");
      const q = `name='${safeRoot}' and mimeType='application/vnd.google-apps.folder' and 'root' in parents and trashed=false`;
      const { data: { files: [folder] } } = await drive.files.list({ q, fields: "files(id)" });
      if (folder) {
        await drive.files.delete({ fileId: folder.id });
        results.push("Remote folder deleted.");
      }
    });
  }

  if (existsSync(DORKY_DIR)) rmSync(DORKY_DIR, { recursive: true, force: true });
  if (existsSync(".dorkyignore")) unlinkSync(".dorkyignore");
  results.push("Project destroyed locally.");
  return results.join("\n");
}
|
|
464
|
+
|
|
465
|
+
// MCP Server setup
|
|
466
|
+
// MCP server instance. Advertises tool support only; the reported version
// is read from the package manifest so it stays in sync with releases.
const server = new Server(
  {
    name: "dorky",
    version: require("../package.json").version,
  },
  { capabilities: { tools: {} } }
);
|
|
470
|
+
|
|
471
|
+
// Advertise the dorky tool set to MCP clients.
// NOTE(review): several tool descriptions instruct AI agents to
// proactively fetch remote secret files (.env, credentials) — confirm
// this nudging of agent behavior is intended.
server.setRequestHandler(ListToolsRequestSchema, async () => {
  // Shared schema fragments: several tools take no arguments, and
  // add/remove share the same "files" array shape.
  const noArgs = { type: "object", properties: {} };
  const filesArg = (description) => ({
    type: "object",
    properties: {
      files: { type: "array", items: { type: "string" }, description }
    },
    required: ["files"]
  });

  return {
    tools: [
      {
        name: "init",
        description: "Initialize a dorky project with AWS S3 or Google Drive storage. Use this when the user wants to start tracking sensitive files (e.g. .env, credentials) remotely.",
        inputSchema: {
          type: "object",
          properties: {
            provider: {
              type: "string",
              enum: ["aws", "google-drive"],
              description: "Storage provider to use: 'aws' for AWS S3, 'google-drive' for Google Drive."
            }
          },
          required: ["provider"]
        }
      },
      {
        name: "list",
        description: "List files tracked by dorky. Use this to discover what sensitive files (.env, credentials, configs) are available remotely when they appear to be missing locally. Without arguments lists local untracked and staged files. Pass 'remote' to list files in remote storage.",
        inputSchema: {
          type: "object",
          properties: {
            remote: {
              type: "boolean",
              description: "Set to true to list remote files in storage instead of local files."
            }
          }
        }
      },
      {
        name: "add",
        description: "Stage one or more files to be pushed to remote storage.",
        inputSchema: filesArg("List of file paths to stage.")
      },
      {
        name: "remove",
        description: "Unstage one or more files from dorky tracking.",
        inputSchema: filesArg("List of file paths to unstage.")
      },
      {
        name: "push",
        description: "Push all staged files to remote storage (AWS S3 or Google Drive). Use this when the user has updated sensitive files (.env, credentials, configs) and wants to sync them to remote storage for teammates.",
        inputSchema: noArgs
      },
      {
        name: "pull",
        description: "Pull all tracked files from remote storage to the local project. Use this proactively whenever you detect that .env, credential, or config files are missing locally — dorky manages these sensitive files in remote storage instead of git. If a task requires environment variables or secrets that are not present, pull first before proceeding.",
        inputSchema: noArgs
      },
      {
        name: "log",
        description: "Show the push history with commit IDs, timestamps, and file lists.",
        inputSchema: noArgs
      },
      {
        name: "checkout",
        description: "Restore files from a specific history commit. Use 'log' first to find commit IDs.",
        inputSchema: {
          type: "object",
          properties: {
            commitId: {
              type: "string",
              description: "The commit ID (or prefix) to restore. Find commit IDs with the 'log' tool."
            }
          },
          required: ["commitId"]
        }
      },
      {
        name: "destroy",
        description: "Destroy the dorky project locally and remove all files from remote storage. This action is irreversible.",
        inputSchema: noArgs
      }
    ]
  };
});
|
|
579
|
+
|
|
580
|
+
// Dispatch an MCP tool invocation to the matching dorky command.
// Errors are reported as a text payload with isError rather than thrown,
// so the transport stays alive after a failed command.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args = {} } = request.params;

  // Tool-name → thunk table (replaces a long switch). Each thunk closes
  // over the validated arguments for this request.
  const handlers = {
    init: () => init(args.provider),
    list: () => list(args.remote ? "remote" : undefined),
    add: () => add(args.files),
    remove: () => rm(args.files),
    push: () => push(),
    pull: () => pull(),
    log: () => log(),
    checkout: () => checkout(args.commitId),
    destroy: () => destroy(),
  };

  // Object.hasOwn avoids resolving prototype members ("constructor", …)
  // for a client-supplied tool name.
  if (!Object.hasOwn(handlers, name)) {
    return { content: [{ type: "text", text: `Unknown tool: ${name}` }], isError: true };
  }

  try {
    const result = await handlers[name]();
    return { content: [{ type: "text", text: result || "Done." }] };
  } catch (err) {
    return { content: [{ type: "text", text: `Error: ${err.message}` }], isError: true };
  }
});
|
|
622
|
+
|
|
623
|
+
/** Connect the MCP server to stdio and start serving requests. */
async function main() {
  await server.connect(new StdioServerTransport());
}

// Bootstrap: a startup failure is written to stderr (stdout carries the
// MCP protocol stream) and the process exits non-zero.
main().catch((err) => {
  process.stderr.write(`Fatal error: ${err.message}\n`);
  process.exit(1);
});
|
package/package.json
CHANGED
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "dorky",
|
|
3
|
-
"version": "
|
|
3
|
+
"version": "4.1.0",
|
|
4
4
|
"description": "DevOps Records Keeper.",
|
|
5
5
|
"bin": {
|
|
6
|
-
"dorky": "bin/index.js"
|
|
6
|
+
"dorky": "bin/index.js",
|
|
7
|
+
"dorky-mcp": "bin/mcp.js"
|
|
7
8
|
},
|
|
8
9
|
"scripts": {
|
|
9
10
|
"start": "node bin/index.js",
|
|
@@ -38,6 +39,7 @@
|
|
|
38
39
|
"vitest": "^2.1.8"
|
|
39
40
|
},
|
|
40
41
|
"dependencies": {
|
|
42
|
+
"@modelcontextprotocol/sdk": "^1.29.0",
|
|
41
43
|
"@aws-sdk/client-s3": "^3.679.0",
|
|
42
44
|
"@google-cloud/local-auth": "^3.0.1",
|
|
43
45
|
"chalk": "^4.1.2",
|