dorky 2.3.8 → 2.3.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -2
- package/bin/index.js +289 -479
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -381,11 +381,17 @@ ISC License - see [LICENSE](LICENSE) file for details.
|
|
|
381
381
|
## Roadmap
|
|
382
382
|
|
|
383
383
|
- [x] Update README with AWS IAM policy (bug fix release)
|
|
384
|
-
- [
|
|
385
|
-
- [
|
|
384
|
+
- [x] Handle invalid access token for Google Drive and AWS (edge cases)
|
|
385
|
+
- [x] rm + push should delete file from remote storage (minor release)
|
|
386
386
|
- [ ] Uninitialize dorky setup (Bug fix release)
|
|
387
|
+
- [ ] dorky --list remote --update should sync metadata according to remote (Minor release)
|
|
388
|
+
- [ ] Extension for VS Code to list and highlight them like git (Major release)
|
|
387
389
|
- [ ] MCP server (Minor release)
|
|
388
390
|
- [ ] Encryption of files (Minor release)
|
|
389
391
|
- [ ] Add stages for variables (Major release)
|
|
390
392
|
- [ ] Migrate dorky project to another storage (partially implemented)
|
|
393
|
+
- [ ] Add more test cases
|
|
394
|
+
- [ ] Deletion of files
|
|
395
|
+
- [ ] Edge cases for failure when credentials are invalid
|
|
396
|
+
- [ ] Add coverage reports badges
|
|
391
397
|
|
package/bin/index.js
CHANGED
|
@@ -1,23 +1,34 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
|
|
3
3
|
const yargs = require("yargs");
|
|
4
|
-
const { existsSync, mkdirSync, writeFileSync } = require("fs");
|
|
4
|
+
const { existsSync, mkdirSync, writeFileSync, readFileSync, createReadStream, unlinkSync } = require("fs");
|
|
5
5
|
const chalk = require("chalk");
|
|
6
6
|
const { glob } = require("glob");
|
|
7
7
|
const path = require("path");
|
|
8
|
-
const fs = require("fs");
|
|
9
8
|
const mimeTypes = require("mime-types");
|
|
10
9
|
const md5 = require('md5');
|
|
11
10
|
const EOL = require("os").type() == "Darwin" ? "\r\n" : "\n";
|
|
12
|
-
const { GetObjectCommand, PutObjectCommand, S3Client } = require("@aws-sdk/client-s3");
|
|
11
|
+
const { GetObjectCommand, PutObjectCommand, ListObjectsV2Command, DeleteObjectCommand, S3Client } = require("@aws-sdk/client-s3");
|
|
13
12
|
const { authenticate } = require('@google-cloud/local-auth');
|
|
14
13
|
const { google } = require('googleapis');
|
|
15
14
|
|
|
16
|
-
//
|
|
15
|
+
// Constants & Config
|
|
16
|
+
const DORKY_DIR = ".dorky";
|
|
17
|
+
const METADATA_PATH = path.join(DORKY_DIR, "metadata.json");
|
|
18
|
+
const CREDENTIALS_PATH = path.join(DORKY_DIR, "credentials.json");
|
|
19
|
+
const GD_CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
|
|
17
20
|
const SCOPES = ['https://www.googleapis.com/auth/drive'];
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
+
|
|
22
|
+
// Helpers
|
|
23
|
+
const readJson = (p) => existsSync(p) ? JSON.parse(readFileSync(p)) : {};
|
|
24
|
+
const writeJson = (p, d) => writeFileSync(p, JSON.stringify(d, null, 2));
|
|
25
|
+
|
|
26
|
+
const checkDorkyProject = () => {
|
|
27
|
+
if (!existsSync(DORKY_DIR) && !existsSync(".dorkyignore")) {
|
|
28
|
+
console.log(chalk.red("✖ Not a dorky project. Please run ") + chalk.cyan("dorky --init [aws|google-drive]"));
|
|
29
|
+
process.exit(1);
|
|
30
|
+
}
|
|
31
|
+
};
|
|
21
32
|
|
|
22
33
|
const figlet = `
|
|
23
34
|
__ __ \t
|
|
@@ -26,533 +37,332 @@ const figlet = `
|
|
|
26
37
|
|_____|_____|__| |__|__|___ |\t
|
|
27
38
|
|_____|\t
|
|
28
39
|
`;
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
.option("add", { alias: "a", describe: "Add files to push or pull", type: "array", demandOption: false })
|
|
45
|
-
.option("rm", { alias: "r", describe: "Remove files from push or pull", type: "array", demandOption: false })
|
|
46
|
-
.option("push", { alias: "ph", describe: "Push files to storage", type: "string", demandOption: false })
|
|
47
|
-
.option("pull", { alias: "pl", describe: "Pull files from storage", type: "string", demandOption: false })
|
|
48
|
-
.option("migrate", { alias: "m", describe: "Migrate dorky project to another storage", type: "string", demandOption: false })
|
|
49
|
-
.example('$0 --init aws', 'Initialize a dorky project with AWS storage')
|
|
50
|
-
.example('$0 --init google-drive', 'Initialize a dorky project with Google Drive storage')
|
|
51
|
-
.example('$0 --list', 'List local files that can be added and already added files')
|
|
52
|
-
.example('$0 --list remote', 'List files in remote storage')
|
|
53
|
-
.example('$0 --add file1.txt file2.js', 'Add specific files to stage-1')
|
|
54
|
-
.example('$0 --rm file1.txt', 'Remove a file from stage-1')
|
|
55
|
-
.example('$0 --push', 'Push staged files to storage')
|
|
56
|
-
.example('$0 --pull', 'Pull files from storage')
|
|
57
|
-
.example('$0 --migrate aws', 'Migrate the project to AWS storage')
|
|
58
|
-
.help('help')
|
|
59
|
-
.strict()
|
|
60
|
-
.argv
|
|
61
|
-
|
|
62
|
-
if (Object.keys(args).length == 2) {
|
|
63
|
-
yargs.showHelp()
|
|
64
|
-
}
|
|
65
|
-
|
|
66
|
-
function checkIfDorkyProject() {
|
|
67
|
-
if (!existsSync(".dorky") && !existsSync(".dorkyignore")) {
|
|
68
|
-
console.log(chalk.red("This is not a dorky project. Please run `dorky --init [aws|google-drive]` to initialize a dorky project."));
|
|
69
|
-
process.exit(1);
|
|
70
|
-
}
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
function setupFilesAndFolders(metaData, credentials) {
|
|
74
|
-
console.log("Initializing dorky project");
|
|
75
|
-
if (existsSync(".dorky")) {
|
|
76
|
-
console.log("Dorky is already initialised in this project.");
|
|
77
|
-
} else {
|
|
78
|
-
mkdirSync(".dorky");
|
|
79
|
-
console.log(chalk.bgGreen("Created .dorky folder."));
|
|
80
|
-
writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
|
|
81
|
-
console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
|
|
82
|
-
writeFileSync(".dorkyignore", "");
|
|
83
|
-
console.log(chalk.bgGreen("Created .dorkyignore file."));
|
|
84
|
-
writeFileSync(".dorky/credentials.json", JSON.stringify(credentials, null, 2));
|
|
85
|
-
console.log(chalk.bgGreen("Created .dorky/credentials.json file."));
|
|
86
|
-
}
|
|
87
|
-
}
|
|
40
|
+
let randomColor = `#${Math.floor(Math.random() * 16777215).toString(16)}`;
|
|
41
|
+
while (randomColor[2] === "f" || randomColor[3] === "f") randomColor = `#${Math.floor(Math.random() * 16777215).toString(16)}`;
|
|
42
|
+
console.log(chalk.bgHex(randomColor)(figlet));
|
|
43
|
+
|
|
44
|
+
const args = yargs
|
|
45
|
+
.option("init", { alias: "i", describe: "Initialize dorky", type: "string" })
|
|
46
|
+
.option("list", { alias: "l", describe: "List files", type: "string" })
|
|
47
|
+
.option("add", { alias: "a", describe: "Add files", type: "array" })
|
|
48
|
+
.option("rm", { alias: "r", describe: "Remove files", type: "array" })
|
|
49
|
+
.option("push", { alias: "ph", describe: "Push files", type: "string" })
|
|
50
|
+
.option("pull", { alias: "pl", describe: "Pull files", type: "string" })
|
|
51
|
+
.option("migrate", { alias: "m", describe: "Migrate project", type: "string" })
|
|
52
|
+
.help('help').strict().argv;
|
|
53
|
+
|
|
54
|
+
if (Object.keys(args).length === 2 && args._.length === 0) yargs.showHelp();
|
|
88
55
|
|
|
89
56
|
function updateGitIgnore() {
|
|
90
|
-
let
|
|
91
|
-
if (
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
const dorkyIgnoreEntry = ".dorky/credentials.json";
|
|
95
|
-
if (!gitignoreContent.includes(dorkyIgnoreEntry)) {
|
|
96
|
-
gitignoreContent += EOL + dorkyIgnoreEntry + EOL;
|
|
97
|
-
fs.writeFileSync(".gitignore", gitignoreContent);
|
|
98
|
-
console.log(`${chalk.bgGreen("Updated .gitignore to ignore .dorky/credentials.json.")} ${chalk.red("⚠️ This is done to protect your credentials.")}`);
|
|
57
|
+
let content = existsSync(".gitignore") ? readFileSync(".gitignore").toString() : "";
|
|
58
|
+
if (!content.includes(CREDENTIALS_PATH)) {
|
|
59
|
+
writeFileSync(".gitignore", content + EOL + CREDENTIALS_PATH + EOL);
|
|
60
|
+
console.log(chalk.cyan("ℹ Updated .gitignore to secure credentials."));
|
|
99
61
|
}
|
|
100
62
|
}
|
|
101
63
|
|
|
102
64
|
async function authorizeGoogleDriveClient(forceReauth = false) {
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
const
|
|
107
|
-
|
|
108
|
-
if (!savedCredentials.access_token && !savedCredentials.refresh_token) {
|
|
109
|
-
return null;
|
|
110
|
-
}
|
|
111
|
-
|
|
112
|
-
const keys = JSON.parse(fs.readFileSync(CREDENTIALS_PATH));
|
|
65
|
+
if (!forceReauth && existsSync(CREDENTIALS_PATH)) {
|
|
66
|
+
const saved = readJson(CREDENTIALS_PATH);
|
|
67
|
+
if (saved.storage === 'google-drive' && saved.expiry_date) {
|
|
68
|
+
const keys = readJson(GD_CREDENTIALS_PATH);
|
|
113
69
|
const key = keys.installed || keys.web;
|
|
114
|
-
const
|
|
115
|
-
|
|
116
|
-
key.client_secret,
|
|
117
|
-
key.redirect_uris[0]
|
|
118
|
-
);
|
|
119
|
-
|
|
120
|
-
const { storage, ...authCredentials } = savedCredentials;
|
|
121
|
-
oAuth2Client.setCredentials(authCredentials);
|
|
122
|
-
|
|
123
|
-
return oAuth2Client;
|
|
124
|
-
} catch (err) {
|
|
125
|
-
return null;
|
|
126
|
-
}
|
|
127
|
-
}
|
|
70
|
+
const client = new google.auth.OAuth2(key.client_id, key.client_secret, key.redirect_uris[0]);
|
|
71
|
+
client.setCredentials(saved);
|
|
128
72
|
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
return currentTimeUTC >= (expiryTimeUTC - expiryBuffer);
|
|
138
|
-
}
|
|
139
|
-
|
|
140
|
-
async function refreshAndSaveToken(client) {
|
|
141
|
-
try {
|
|
142
|
-
await client.getAccessToken();
|
|
143
|
-
const newCredentials = client.credentials;
|
|
144
|
-
const credentialsToSave = {
|
|
145
|
-
storage: "google-drive",
|
|
146
|
-
...newCredentials
|
|
147
|
-
};
|
|
148
|
-
fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
|
|
149
|
-
return client;
|
|
150
|
-
} catch (err) {
|
|
151
|
-
return null;
|
|
152
|
-
}
|
|
153
|
-
}
|
|
154
|
-
|
|
155
|
-
if (!forceReauth) {
|
|
156
|
-
let client = await loadSavedCredentialsIfExist();
|
|
157
|
-
if (client) {
|
|
158
|
-
const credentials = JSON.parse(fs.readFileSync(TOKEN_PATH));
|
|
159
|
-
|
|
160
|
-
const clientCredentials = client.credentials || credentials;
|
|
161
|
-
|
|
162
|
-
if (await isTokenExpired(clientCredentials)) {
|
|
163
|
-
client = await refreshAndSaveToken(client);
|
|
164
|
-
if (client) {
|
|
165
|
-
return client;
|
|
73
|
+
if (Date.now() >= saved.expiry_date - 300000) {
|
|
74
|
+
try {
|
|
75
|
+
const { credentials } = await client.refreshAccessToken();
|
|
76
|
+
writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...credentials });
|
|
77
|
+
client.setCredentials(credentials);
|
|
78
|
+
} catch (e) {
|
|
79
|
+
console.log(chalk.yellow("Token refresh failed. Re-authenticating..."));
|
|
80
|
+
return authorizeGoogleDriveClient(true);
|
|
166
81
|
}
|
|
167
|
-
} else {
|
|
168
|
-
return client;
|
|
169
82
|
}
|
|
83
|
+
return client;
|
|
170
84
|
}
|
|
171
85
|
}
|
|
172
86
|
|
|
173
|
-
client = await authenticate({
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
});
|
|
177
|
-
|
|
178
|
-
if (client && client.credentials && existsSync(path.dirname(TOKEN_PATH))) {
|
|
179
|
-
const credentialsToSave = {
|
|
180
|
-
storage: "google-drive",
|
|
181
|
-
...client.credentials
|
|
182
|
-
};
|
|
183
|
-
fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
|
|
87
|
+
const client = await authenticate({ scopes: SCOPES, keyfilePath: GD_CREDENTIALS_PATH });
|
|
88
|
+
if (client?.credentials && existsSync(path.dirname(CREDENTIALS_PATH))) {
|
|
89
|
+
writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...client.credentials });
|
|
184
90
|
}
|
|
185
|
-
|
|
186
91
|
return client;
|
|
187
92
|
}
|
|
188
93
|
|
|
189
94
|
async function init(storage) {
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
console.log("Please provide a valid storage option <aws|google-drive>");
|
|
204
|
-
break;
|
|
95
|
+
if (existsSync(DORKY_DIR)) return console.log(chalk.yellow("⚠ Dorky is already initialized."));
|
|
96
|
+
if (!["aws", "google-drive"].includes(storage)) return console.log(chalk.red("✖ Invalid storage. Use 'aws' or 'google-drive'."));
|
|
97
|
+
|
|
98
|
+
let credentials = {};
|
|
99
|
+
if (storage === "aws") {
|
|
100
|
+
if (!process.env.AWS_ACCESS_KEY || !process.env.AWS_SECRET_KEY || !process.env.AWS_REGION || !process.env.BUCKET_NAME) {
|
|
101
|
+
console.log(chalk.red("✖ Missing AWS environment variables."));
|
|
102
|
+
return;
|
|
103
|
+
}
|
|
104
|
+
credentials = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
|
|
105
|
+
} else {
|
|
106
|
+
const client = await authorizeGoogleDriveClient(true);
|
|
107
|
+
credentials = { storage: "google-drive", ...client.credentials };
|
|
205
108
|
}
|
|
109
|
+
|
|
110
|
+
mkdirSync(DORKY_DIR);
|
|
111
|
+
writeJson(METADATA_PATH, { "stage-1-files": {}, "uploaded-files": {} });
|
|
112
|
+
writeFileSync(".dorkyignore", "");
|
|
113
|
+
writeJson(CREDENTIALS_PATH, credentials);
|
|
114
|
+
console.log(chalk.green("✔ Dorky project initialized successfully."));
|
|
206
115
|
updateGitIgnore();
|
|
207
116
|
}
|
|
208
117
|
|
|
209
118
|
async function list(type) {
|
|
210
|
-
|
|
211
|
-
const
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
console.log(chalk.red("Listing files that can be added:"));
|
|
224
|
-
var exclusions = fs.readFileSync(".dorkyignore").toString().split(EOL);
|
|
225
|
-
exclusions = exclusions.filter((exclusion) => exclusion !== "");
|
|
226
|
-
const src = process.cwd();
|
|
227
|
-
const files = await glob(path.join(src, "**/*"), { dot: true });
|
|
228
|
-
const filteredFiles = files.filter((file) => {
|
|
229
|
-
for (let i = 0; i < exclusions.length; i++) {
|
|
230
|
-
if (file.includes(exclusions[i])) return false;
|
|
231
|
-
}
|
|
232
|
-
if (file.includes(".dorky/")) return false;
|
|
233
|
-
if (file.endsWith(".dorky") && fs.lstatSync(file).isDirectory()) return false;
|
|
234
|
-
if (file.endsWith(".dorkyignore")) return false;
|
|
235
|
-
return true;
|
|
119
|
+
checkDorkyProject();
|
|
120
|
+
const meta = readJson(METADATA_PATH);
|
|
121
|
+
if (type === "remote") {
|
|
122
|
+
if (!await checkCredentials()) return;
|
|
123
|
+
const creds = readJson(CREDENTIALS_PATH);
|
|
124
|
+
const root = path.basename(process.cwd());
|
|
125
|
+
console.log(chalk.blue.bold("\n☁ Remote Files:"));
|
|
126
|
+
|
|
127
|
+
if (creds.storage === "aws") {
|
|
128
|
+
await runS3(creds, async (s3, bucket) => {
|
|
129
|
+
const data = await s3.send(new ListObjectsV2Command({ Bucket: bucket, Prefix: root + "/" }));
|
|
130
|
+
if (!data.Contents?.length) return console.log(chalk.yellow("ℹ No remote files found."));
|
|
131
|
+
data.Contents.forEach(o => console.log(chalk.cyan(` ${o.Key.replace(root + "/", "")}`)));
|
|
236
132
|
});
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
133
|
+
} else {
|
|
134
|
+
await runDrive(async (drive) => {
|
|
135
|
+
const q = `name='${root}' and mimeType='application/vnd.google-apps.folder' and 'root' in parents and trashed=false`;
|
|
136
|
+
const { data: { files: [folder] } } = await drive.files.list({ q, fields: 'files(id)' });
|
|
137
|
+
if (!folder) return console.log(chalk.yellow("ℹ Remote folder not found."));
|
|
138
|
+
const walk = async (pid, p = '') => {
|
|
139
|
+
const { data: { files } } = await drive.files.list({ q: `'${pid}' in parents and trashed=false`, fields: 'files(id, name, mimeType)' });
|
|
140
|
+
for (const f of files) {
|
|
141
|
+
if (f.mimeType === 'application/vnd.google-apps.folder') await walk(f.id, path.join(p, f.name));
|
|
142
|
+
else console.log(chalk.cyan(` ${path.join(p, f.name)}`));
|
|
143
|
+
}
|
|
144
|
+
};
|
|
145
|
+
await walk(folder.id);
|
|
244
146
|
});
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
147
|
+
}
|
|
148
|
+
} else {
|
|
149
|
+
console.log(chalk.blue.bold("\n📂 Untracked Files:"));
|
|
150
|
+
const exclusions = existsSync(".dorkyignore") ? readFileSync(".dorkyignore").toString().split(EOL).filter(Boolean) : [];
|
|
151
|
+
const files = await glob("**/*", { dot: true, ignore: [...exclusions.map(e => `**/${e}/**`), ...exclusions, ".dorky/**", ".dorkyignore", ".git/**", "node_modules/**"] });
|
|
152
|
+
|
|
153
|
+
files.forEach(f => {
|
|
154
|
+
const rel = path.relative(process.cwd(), f);
|
|
155
|
+
if (rel.includes('.env') || rel.includes('.config')) console.log(chalk.yellow(` ⚠ ${rel} (Potential sensitive file)`));
|
|
156
|
+
else console.log(chalk.gray(` ${rel}`));
|
|
157
|
+
});
|
|
158
|
+
console.log(chalk.blue.bold("\n📦 Staged Files:"));
|
|
159
|
+
Object.keys(meta["stage-1-files"]).forEach(f => console.log(chalk.green(` ✔ ${f}`)));
|
|
249
160
|
}
|
|
250
161
|
}
|
|
251
162
|
|
|
252
|
-
function add(
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
const
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
const fileContents = fs.readFileSync(file);
|
|
263
|
-
const fileType = mimeTypes.lookup(file);
|
|
264
|
-
const newHash = md5(fileContents);
|
|
265
|
-
const existingEntry = metaData["stage-1-files"][file];
|
|
266
|
-
if (existingEntry && existingEntry.hash === newHash) {
|
|
267
|
-
console.log(chalk.yellow(`File ${file} has no changes, skipping.`));
|
|
268
|
-
return;
|
|
269
|
-
}
|
|
270
|
-
metaData["stage-1-files"][file] = {
|
|
271
|
-
"mime-type": fileType ? fileType : "application/octet-stream",
|
|
272
|
-
"hash": newHash
|
|
273
|
-
};
|
|
274
|
-
addedFiles.push(file);
|
|
163
|
+
function add(files) {
|
|
164
|
+
checkDorkyProject();
|
|
165
|
+
const meta = readJson(METADATA_PATH);
|
|
166
|
+
const added = [];
|
|
167
|
+
files.forEach(f => {
|
|
168
|
+
if (!existsSync(f)) return console.log(chalk.red(`✖ File not found: ${f}`));
|
|
169
|
+
const hash = md5(readFileSync(f));
|
|
170
|
+
if (meta["stage-1-files"][f]?.hash === hash) return console.log(chalk.gray(`• ${f} (unchanged)`));
|
|
171
|
+
meta["stage-1-files"][f] = { "mime-type": mimeTypes.lookup(f) || "application/octet-stream", hash };
|
|
172
|
+
added.push(f);
|
|
275
173
|
});
|
|
276
|
-
|
|
277
|
-
|
|
174
|
+
writeJson(METADATA_PATH, meta);
|
|
175
|
+
added.forEach(f => console.log(chalk.green(`✔ Staged: ${f}`)));
|
|
278
176
|
}
|
|
279
177
|
|
|
280
|
-
function rm(
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
const
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
delete metaData["stage-1-files"][file];
|
|
178
|
+
function rm(files) {
|
|
179
|
+
checkDorkyProject();
|
|
180
|
+
const meta = readJson(METADATA_PATH);
|
|
181
|
+
const removed = files.filter(f => {
|
|
182
|
+
if (!meta["stage-1-files"][f]) return false;
|
|
183
|
+
delete meta["stage-1-files"][f];
|
|
287
184
|
return true;
|
|
288
185
|
});
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
else console.log(chalk.red("No files found that can be removed."));
|
|
186
|
+
writeJson(METADATA_PATH, meta);
|
|
187
|
+
removed.length ? removed.forEach(f => console.log(chalk.yellow(`✔ Unstaged: ${f}`))) : console.log(chalk.gray("ℹ No matching files to remove."));
|
|
292
188
|
}
|
|
293
189
|
|
|
294
190
|
async function checkCredentials() {
|
|
191
|
+
if (existsSync(CREDENTIALS_PATH)) return true;
|
|
192
|
+
if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
|
|
193
|
+
writeJson(CREDENTIALS_PATH, {
|
|
194
|
+
storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY,
|
|
195
|
+
awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME
|
|
196
|
+
});
|
|
197
|
+
return true;
|
|
198
|
+
}
|
|
295
199
|
try {
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
} else {
|
|
200
|
+
const client = await authorizeGoogleDriveClient(true);
|
|
201
|
+
if (client) return true;
|
|
202
|
+
} catch { }
|
|
203
|
+
console.log(chalk.red("✖ Credentials not found. Please run --init."));
|
|
204
|
+
return false;
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
const getS3 = (c) => new S3Client({
|
|
208
|
+
credentials: { accessKeyId: c.accessKey || process.env.AWS_ACCESS_KEY, secretAccessKey: c.secretKey || process.env.AWS_SECRET_KEY },
|
|
209
|
+
region: c.awsRegion || process.env.AWS_REGION
|
|
210
|
+
});
|
|
211
|
+
|
|
212
|
+
async function runS3(creds, fn) {
|
|
213
|
+
try { await fn(getS3(creds), creds.bucket || process.env.BUCKET_NAME); }
|
|
214
|
+
catch (err) {
|
|
215
|
+
if (["InvalidAccessKeyId", "SignatureDoesNotMatch"].includes(err.name) || err.$metadata?.httpStatusCode === 403) {
|
|
216
|
+
if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
|
|
217
|
+
console.log(chalk.yellow("AWS auth failed. Retrying with env vars..."));
|
|
218
|
+
const newCreds = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
|
|
219
|
+
writeJson(CREDENTIALS_PATH, newCreds);
|
|
317
220
|
try {
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
console.log(chalk.green("Credentials saved in .dorky/credentials.json"));
|
|
323
|
-
console.log(chalk.red("Please ignore the warning to set credentials below and run the command again."));
|
|
324
|
-
return false;
|
|
325
|
-
} catch (err) {
|
|
326
|
-
console.log(chalk.red("Failed to authorize Google Drive client: " + err.message));
|
|
327
|
-
console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
|
|
328
|
-
return false;
|
|
221
|
+
await fn(getS3(newCreds), newCreds.bucket);
|
|
222
|
+
return;
|
|
223
|
+
} catch (e) {
|
|
224
|
+
console.log(chalk.red("Retried with env vars but failed."));
|
|
329
225
|
}
|
|
330
226
|
}
|
|
227
|
+
console.log(chalk.red("AWS authentication failed."));
|
|
228
|
+
console.log(chalk.yellow("Please set correct AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION and BUCKET_NAME in environment or .dorky/credentials.json"));
|
|
229
|
+
process.exit(1);
|
|
331
230
|
}
|
|
332
|
-
|
|
333
|
-
console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
|
|
334
|
-
return false;
|
|
231
|
+
throw err;
|
|
335
232
|
}
|
|
336
233
|
}
|
|
337
234
|
|
|
338
|
-
async function
|
|
339
|
-
|
|
340
|
-
if (!
|
|
341
|
-
|
|
342
|
-
|
|
235
|
+
async function getFolderId(pathStr, drive, create = true) {
|
|
236
|
+
let parentId = 'root';
|
|
237
|
+
if (!pathStr || pathStr === '.') return parentId;
|
|
238
|
+
for (const folder of pathStr.split("/")) {
|
|
239
|
+
if (!folder) continue;
|
|
240
|
+
const res = await drive.files.list({ q: `name='${folder}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents and trashed=false`, fields: 'files(id)' });
|
|
241
|
+
if (res.data.files[0]) parentId = res.data.files[0].id;
|
|
242
|
+
else if (create) parentId = (await drive.files.create({ requestBody: { name: folder, mimeType: 'application/vnd.google-apps.folder', parents: [parentId] }, fields: 'id' })).data.id;
|
|
243
|
+
else return null;
|
|
343
244
|
}
|
|
344
|
-
|
|
345
|
-
const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
|
|
346
|
-
const stage1Files = metaData["stage-1-files"];
|
|
347
|
-
const pushedFiles = metaData["uploaded-files"];
|
|
348
|
-
var filesToPush = [];
|
|
349
|
-
Object.keys(stage1Files).map((file) => {
|
|
350
|
-
if (pushedFiles[file]) {
|
|
351
|
-
if (stage1Files[file]["hash"] != pushedFiles[file]["hash"]) filesToPush.push(file);
|
|
352
|
-
} else filesToPush.push(file);
|
|
353
|
-
});
|
|
354
|
-
filesToPush = filesToPush.map((file) => {
|
|
355
|
-
return {
|
|
356
|
-
"name": file,
|
|
357
|
-
"mime-type": stage1Files[file]["mime-type"],
|
|
358
|
-
"hash": stage1Files[file]["hash"]
|
|
359
|
-
}
|
|
360
|
-
});
|
|
361
|
-
const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
|
|
362
|
-
switch (credentials.storage) {
|
|
363
|
-
case "aws":
|
|
364
|
-
pushToS3(filesToPush, credentials);
|
|
365
|
-
break;
|
|
366
|
-
case "google-drive":
|
|
367
|
-
pushToGoogleDrive(filesToPush);
|
|
368
|
-
break;
|
|
369
|
-
default:
|
|
370
|
-
console.log("Please provide a valid storage option <aws|google-drive>");
|
|
371
|
-
break;
|
|
372
|
-
}
|
|
373
|
-
metaData["uploaded-files"] = metaData["stage-1-files"];
|
|
374
|
-
fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
|
|
375
|
-
console.log(chalk.green("Pushed the following files to storage:"));
|
|
376
|
-
}
|
|
377
|
-
|
|
378
|
-
function pushToS3(files, credentials) {
|
|
379
|
-
const s3 = new S3Client({
|
|
380
|
-
credentials: {
|
|
381
|
-
accessKeyId: credentials.accessKey ?? process.env.AWS_ACCESS_KEY,
|
|
382
|
-
secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
|
|
383
|
-
},
|
|
384
|
-
region: credentials.awsRegion ?? process.env.AWS_REGION
|
|
385
|
-
});
|
|
386
|
-
const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
|
|
387
|
-
Promise.all(files.map(async (file) => {
|
|
388
|
-
const rootFolder = path.basename(process.cwd());
|
|
389
|
-
const pathToFile = path.join(rootFolder, file.name);
|
|
390
|
-
await s3.send(
|
|
391
|
-
new PutObjectCommand({
|
|
392
|
-
Bucket: bucketName,
|
|
393
|
-
Key: pathToFile,
|
|
394
|
-
Body: fs.readFileSync(file.name).toString(),
|
|
395
|
-
})
|
|
396
|
-
);
|
|
397
|
-
console.log(chalk.green(`Pushed ${pathToFile} to storage.`));
|
|
398
|
-
}));
|
|
245
|
+
return parentId;
|
|
399
246
|
}
|
|
400
247
|
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
parentId = res.data.files[0].id;
|
|
414
|
-
} else {
|
|
415
|
-
const folderMetadata = {
|
|
416
|
-
name: folder,
|
|
417
|
-
mimeType: 'application/vnd.google-apps.folder',
|
|
418
|
-
parents: [parentId],
|
|
419
|
-
};
|
|
420
|
-
const folderRes = await drive.files.create({
|
|
421
|
-
requestBody: folderMetadata,
|
|
422
|
-
fields: 'id',
|
|
423
|
-
});
|
|
424
|
-
parentId = folderRes.data.id;
|
|
425
|
-
}
|
|
426
|
-
}
|
|
427
|
-
return parentId;
|
|
428
|
-
}
|
|
429
|
-
console.log("Uploading to google drive");
|
|
430
|
-
const client = await authorizeGoogleDriveClient(false);
|
|
431
|
-
|
|
432
|
-
const credentialsToSave = {
|
|
433
|
-
storage: "google-drive",
|
|
434
|
-
...client.credentials
|
|
435
|
-
};
|
|
436
|
-
fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
|
|
437
|
-
|
|
438
|
-
const drive = google.drive({ version: 'v3', auth: client });
|
|
439
|
-
for (const file of files) {
|
|
440
|
-
const rootFolder = path.basename(process.cwd());
|
|
441
|
-
const pathToFile = path.join(rootFolder, file.name);
|
|
442
|
-
const requestBody = {
|
|
443
|
-
name: path.basename(file.name),
|
|
444
|
-
parents: [await getOrCreateFolderId(pathToFile.split("/").slice(0, -1).join("/"), drive)],
|
|
445
|
-
fields: 'id',
|
|
446
|
-
};
|
|
447
|
-
const media = {
|
|
448
|
-
mimeType: file["mime-type"],
|
|
449
|
-
body: fs.createReadStream(path.join(process.cwd(), file.name)),
|
|
450
|
-
};
|
|
451
|
-
try {
|
|
452
|
-
await drive.files.create({
|
|
453
|
-
requestBody,
|
|
454
|
-
media: media,
|
|
455
|
-
});
|
|
456
|
-
console.log(chalk.green(`Pushed ${file.name} to storage.`));
|
|
457
|
-
} catch (err) {
|
|
458
|
-
console.log(err);
|
|
459
|
-
throw err;
|
|
460
|
-
}
|
|
248
|
+
async function runDrive(fn) {
|
|
249
|
+
let client = await authorizeGoogleDriveClient();
|
|
250
|
+
let drive = google.drive({ version: 'v3', auth: client });
|
|
251
|
+
try { await fn(drive); }
|
|
252
|
+
catch (err) {
|
|
253
|
+
if (err.code === 401 || err.message?.includes('invalid_grant')) {
|
|
254
|
+
console.log(chalk.yellow("Drive auth failed. Re-authenticating..."));
|
|
255
|
+
if (existsSync(CREDENTIALS_PATH)) unlinkSync(CREDENTIALS_PATH);
|
|
256
|
+
client = await authorizeGoogleDriveClient(true);
|
|
257
|
+
drive = google.drive({ version: 'v3', auth: client });
|
|
258
|
+
await fn(drive);
|
|
259
|
+
} else throw err;
|
|
461
260
|
}
|
|
462
261
|
}
|
|
463
262
|
|
|
464
|
-
async function
|
|
465
|
-
|
|
466
|
-
if (!
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
const
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
263
|
+
async function push() {
|
|
264
|
+
checkDorkyProject();
|
|
265
|
+
if (!await checkCredentials()) return;
|
|
266
|
+
const meta = readJson(METADATA_PATH);
|
|
267
|
+
const filesToUpload = Object.keys(meta["stage-1-files"])
|
|
268
|
+
.filter(f => !meta["uploaded-files"][f] || meta["stage-1-files"][f].hash !== meta["uploaded-files"][f].hash)
|
|
269
|
+
.map(f => ({ name: f, ...meta["stage-1-files"][f] }));
|
|
270
|
+
|
|
271
|
+
const filesToDelete = Object.keys(meta["uploaded-files"])
|
|
272
|
+
.filter(f => !meta["stage-1-files"][f]);
|
|
273
|
+
|
|
274
|
+
if (filesToUpload.length === 0 && filesToDelete.length === 0) return console.log(chalk.yellow("ℹ Nothing to push."));
|
|
275
|
+
|
|
276
|
+
const creds = readJson(CREDENTIALS_PATH);
|
|
277
|
+
if (creds.storage === "aws") {
|
|
278
|
+
await runS3(creds, async (s3, bucket) => {
|
|
279
|
+
if (filesToUpload.length > 0) {
|
|
280
|
+
await Promise.all(filesToUpload.map(async f => {
|
|
281
|
+
const key = path.join(path.basename(process.cwd()), f.name);
|
|
282
|
+
await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f.name) }));
|
|
283
|
+
console.log(chalk.green(`✔ Uploaded: ${f.name}`));
|
|
284
|
+
}));
|
|
285
|
+
}
|
|
286
|
+
if (filesToDelete.length > 0) {
|
|
287
|
+
await Promise.all(filesToDelete.map(async f => {
|
|
288
|
+
const key = path.join(path.basename(process.cwd()), f);
|
|
289
|
+
await s3.send(new DeleteObjectCommand({ Bucket: bucket, Key: key }));
|
|
290
|
+
console.log(chalk.yellow(`✔ Deleted remote: ${f}`));
|
|
291
|
+
}));
|
|
292
|
+
}
|
|
293
|
+
});
|
|
294
|
+
} else if (creds.storage === "google-drive") {
|
|
295
|
+
await runDrive(async (drive) => {
|
|
296
|
+
if (filesToUpload.length > 0) {
|
|
297
|
+
for (const f of filesToUpload) {
|
|
298
|
+
const root = path.basename(process.cwd());
|
|
299
|
+
const parentId = await getFolderId(path.dirname(path.join(root, f.name)), drive);
|
|
300
|
+
await drive.files.create({
|
|
301
|
+
requestBody: { name: path.basename(f.name), parents: [parentId] },
|
|
302
|
+
media: { mimeType: f["mime-type"], body: createReadStream(f.name) }
|
|
303
|
+
});
|
|
304
|
+
console.log(chalk.green(`✔ Uploaded: ${f.name}`));
|
|
305
|
+
}
|
|
306
|
+
}
|
|
307
|
+
if (filesToDelete.length > 0) {
|
|
308
|
+
const root = path.basename(process.cwd());
|
|
309
|
+
for (const f of filesToDelete) {
|
|
310
|
+
const parentId = await getFolderId(path.dirname(path.join(root, f)), drive, false);
|
|
311
|
+
if (parentId) {
|
|
312
|
+
const res = await drive.files.list({
|
|
313
|
+
q: `name='${path.basename(f)}' and '${parentId}' in parents and trashed=false`,
|
|
314
|
+
fields: 'files(id)'
|
|
315
|
+
});
|
|
316
|
+
if (res.data.files[0]) {
|
|
317
|
+
await drive.files.delete({ fileId: res.data.files[0].id });
|
|
318
|
+
console.log(chalk.yellow(`✔ Deleted remote: ${f}`));
|
|
319
|
+
}
|
|
320
|
+
}
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
});
|
|
484
324
|
}
|
|
485
|
-
}
|
|
486
325
|
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
credentials: {
|
|
490
|
-
accessKeyId: credentials.accessKey ?? process.env.AWS_ACCESS_KEY,
|
|
491
|
-
secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
|
|
492
|
-
},
|
|
493
|
-
region: credentials.awsRegion ?? process.env.AWS_REGION
|
|
494
|
-
});
|
|
495
|
-
const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
|
|
496
|
-
Promise.all(Object.keys(files).map(async (file) => {
|
|
497
|
-
const rootFolder = path.basename(process.cwd());
|
|
498
|
-
const pathToFile = path.join(rootFolder, file);
|
|
499
|
-
const { Body } = await s3.send(
|
|
500
|
-
new GetObjectCommand({
|
|
501
|
-
Bucket: bucketName,
|
|
502
|
-
Key: pathToFile,
|
|
503
|
-
})
|
|
504
|
-
);
|
|
505
|
-
const dir = path.dirname(file);
|
|
506
|
-
if (!fs.existsSync(dir)) {
|
|
507
|
-
fs.mkdirSync(dir, { recursive: true });
|
|
508
|
-
}
|
|
509
|
-
fs.writeFileSync(file, await Body.transformToString());
|
|
510
|
-
console.log(chalk.green(`Pulled ${file} from storage.`));
|
|
511
|
-
}));
|
|
326
|
+
meta["uploaded-files"] = { ...meta["stage-1-files"] };
|
|
327
|
+
writeJson(METADATA_PATH, meta);
|
|
512
328
|
}
|
|
513
329
|
|
|
514
|
-
async function
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
console.log(chalk.red(
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
fs.mkdirSync(dir, { recursive: true });
|
|
544
|
-
}
|
|
545
|
-
fs.writeFileSync(file.name, await _file.data.text(), "utf-8");
|
|
546
|
-
console.log(chalk.green(`Pulled ${file.name} from storage.`));
|
|
330
|
+
async function pull() {
  // Download every tracked ("uploaded") file from the configured remote
  // storage (AWS S3 or Google Drive) back into the working tree, creating
  // local directories as needed. No return value; progress is logged.
  checkDorkyProject();
  if (!await checkCredentials()) return;
  const meta = readJson(METADATA_PATH);
  // Guard: a freshly-initialized project (or one that never pushed) has no
  // "uploaded-files" key; Object.keys(undefined) would throw a TypeError.
  const files = meta["uploaded-files"] ?? {};
  if (Object.keys(files).length === 0) {
    return console.log(chalk.yellow("ℹ Nothing to pull."));
  }
  const creds = readJson(CREDENTIALS_PATH);

  if (creds.storage === "aws") {
    await runS3(creds, async (s3, bucket) => {
      await Promise.all(Object.keys(files).map(async f => {
        // Remote keys are namespaced under the project's folder name,
        // mirroring how push() builds its keys.
        const key = path.join(path.basename(process.cwd()), f);
        const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
        const dir = path.dirname(f);
        if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
        // NOTE(review): transformToString assumes text content — binary
        // files would be corrupted here; confirm dorky only tracks text.
        writeFileSync(f, await Body.transformToString());
        console.log(chalk.green(`✔ Downloaded: ${f}`));
      }));
    });
  } else if (creds.storage === "google-drive") {
    await runDrive(async (drive) => {
      const fileList = Object.keys(files).map(k => ({ name: k, ...files[k] }));
      await Promise.all(fileList.map(async f => {
        // Escape single quotes so a file name cannot break out of (or
        // inject into) the Drive search query string.
        const safeName = path.basename(f.name).replace(/'/g, "\\'");
        const res = await drive.files.list({
          q: `name='${safeName}' and mimeType!='application/vnd.google-apps.folder'`,
          fields: 'files(id)'
        });
        if (!res.data.files[0]) return console.log(chalk.red(`✖ Missing remote file: ${f.name}`));
        const data = await drive.files.get({ fileId: res.data.files[0].id, alt: 'media' });
        if (!existsSync(path.dirname(f.name))) mkdirSync(path.dirname(f.name), { recursive: true });
        writeFileSync(f.name, await data.data.text());
        console.log(chalk.green(`✔ Downloaded: ${f.name}`));
      }));
    });
  }
}
|
|
552
362
|
|
|
553
|
-
if (
|
|
554
|
-
if (
|
|
555
|
-
if (
|
|
556
|
-
if (
|
|
557
|
-
if (
|
|
558
|
-
if (
|
|
363
|
+
// Top-level CLI dispatch: run the handler for each flag present on argv.
// The table preserves the original check order and the exact
// `!== undefined` presence test (yargs sets flags even for falsy values).
const commandTable = [
  ["init", () => init(args.init)],
  ["list", () => list(args.list)],
  ["add", () => add(args.add)],
  ["rm", () => rm(args.rm)],
  ["push", () => push()],
  ["pull", () => pull()],
];
for (const [flag, run] of commandTable) {
  if (args[flag] !== undefined) run();
}
|