dorky 2.3.8 → 2.3.9

This diff shows the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (3)
  1. package/README.md +4 -2
  2. package/bin/index.js +256 -477
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -381,9 +381,11 @@ ISC License - see [LICENSE](LICENSE) file for details.
  ## Roadmap

  - [x] Update README with AWS IAM policy (bug fix release)
- - [ ] Handle invalid access token for Google Drive (edge cases)
- - [ ] Extension for VS Code to list and highlight them like git (Major release)
+ - [*] Handle invalid access token for Google Drive and AWS (edge cases)
+ - [ ] rm + push should delete file from storage (minor release)
  - [ ] Uninitialize dorky setup (Bug fix release)
+ - [ ] dorky --list remote --update should sync metadata according to remote (Minor release)
+ - [ ] Extension for VS Code to list and highlight them like git (Major release)
  - [ ] MCP server (Minor release)
  - [ ] Encryption of files (Minor release)
  - [ ] Add stages for variables (Major release)
package/bin/index.js CHANGED
@@ -1,23 +1,34 @@
  #!/usr/bin/env node

  const yargs = require("yargs");
- const { existsSync, mkdirSync, writeFileSync } = require("fs");
+ const { existsSync, mkdirSync, writeFileSync, readFileSync, createReadStream, unlinkSync } = require("fs");
  const chalk = require("chalk");
  const { glob } = require("glob");
  const path = require("path");
- const fs = require("fs");
  const mimeTypes = require("mime-types");
  const md5 = require('md5');
  const EOL = require("os").type() == "Darwin" ? "\r\n" : "\n";
- const { GetObjectCommand, PutObjectCommand, S3Client } = require("@aws-sdk/client-s3");
+ const { GetObjectCommand, PutObjectCommand, ListObjectsV2Command, S3Client } = require("@aws-sdk/client-s3");
  const { authenticate } = require('@google-cloud/local-auth');
  const { google } = require('googleapis');

- // Google Drive config ************************************************************
+ // Constants & Config
+ const DORKY_DIR = ".dorky";
+ const METADATA_PATH = path.join(DORKY_DIR, "metadata.json");
+ const CREDENTIALS_PATH = path.join(DORKY_DIR, "credentials.json");
+ const GD_CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
  const SCOPES = ['https://www.googleapis.com/auth/drive'];
- const CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
- const TOKEN_PATH = path.join(process.cwd(), '.dorky/credentials.json');
- // ********************************************************************************
+
+ // Helpers
+ const readJson = (p) => existsSync(p) ? JSON.parse(readFileSync(p)) : {};
+ const writeJson = (p, d) => writeFileSync(p, JSON.stringify(d, null, 2));
+
+ const checkDorkyProject = () => {
+ if (!existsSync(DORKY_DIR) && !existsSync(".dorkyignore")) {
+ console.log(chalk.red("✖ Not a dorky project. Please run ") + chalk.cyan("dorky --init [aws|google-drive]"));
+ process.exit(1);
+ }
+ };

  const figlet = `
  __ __ \t
@@ -26,533 +37,301 @@ const figlet = `
  |_____|_____|__| |__|__|___ |\t
  |_____|\t
  `;
- // Should display the process.env.AWS_ACCESS_KEY, process.env.AWS_SECRET_KEY, process.env.AWS_REGION, process.env.BUCKET_NAME to be set
- const usage = `${figlet}`;
- let randomColor = null;
- do {
- const randomHex = Math.floor(Math.random() * 16777215).toString(16);
- randomColor = `#${randomHex}`;
- } while (randomColor[2] === "f" || randomColor[3] === "f");
- console.log(chalk.bgHex(randomColor)(usage));
-
- if (process.argv.slice(2).length === 0) {
- process.argv.push("--help");
- }
- var args = yargs
- .option("init", { alias: "i", describe: "Initialize dorky project", type: "string", demandOption: false })
- .option("list", { alias: "l", describe: "List files in dorky", type: "string", demandOption: false })
- .option("add", { alias: "a", describe: "Add files to push or pull", type: "array", demandOption: false })
- .option("rm", { alias: "r", describe: "Remove files from push or pull", type: "array", demandOption: false })
- .option("push", { alias: "ph", describe: "Push files to storage", type: "string", demandOption: false })
- .option("pull", { alias: "pl", describe: "Pull files from storage", type: "string", demandOption: false })
- .option("migrate", { alias: "m", describe: "Migrate dorky project to another storage", type: "string", demandOption: false })
- .example('$0 --init aws', 'Initialize a dorky project with AWS storage')
- .example('$0 --init google-drive', 'Initialize a dorky project with Google Drive storage')
- .example('$0 --list', 'List local files that can be added and already added files')
- .example('$0 --list remote', 'List files in remote storage')
- .example('$0 --add file1.txt file2.js', 'Add specific files to stage-1')
- .example('$0 --rm file1.txt', 'Remove a file from stage-1')
- .example('$0 --push', 'Push staged files to storage')
- .example('$0 --pull', 'Pull files from storage')
- .example('$0 --migrate aws', 'Migrate the project to AWS storage')
- .help('help')
- .strict()
- .argv
-
- if (Object.keys(args).length == 2) {
- yargs.showHelp()
- }
-
- function checkIfDorkyProject() {
- if (!existsSync(".dorky") && !existsSync(".dorkyignore")) {
- console.log(chalk.red("This is not a dorky project. Please run `dorky --init [aws|google-drive]` to initialize a dorky project."));
- process.exit(1);
- }
- }
-
- function setupFilesAndFolders(metaData, credentials) {
- console.log("Initializing dorky project");
- if (existsSync(".dorky")) {
- console.log("Dorky is already initialised in this project.");
- } else {
- mkdirSync(".dorky");
- console.log(chalk.bgGreen("Created .dorky folder."));
- writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
- console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
- writeFileSync(".dorkyignore", "");
- console.log(chalk.bgGreen("Created .dorkyignore file."));
- writeFileSync(".dorky/credentials.json", JSON.stringify(credentials, null, 2));
- console.log(chalk.bgGreen("Created .dorky/credentials.json file."));
- }
- }
+ let randomColor = `#${Math.floor(Math.random() * 16777215).toString(16)}`;
+ while (randomColor[2] === "f" || randomColor[3] === "f") randomColor = `#${Math.floor(Math.random() * 16777215).toString(16)}`;
+ console.log(chalk.bgHex(randomColor)(figlet));
+
+ const args = yargs
+ .option("init", { alias: "i", describe: "Initialize dorky", type: "string" })
+ .option("list", { alias: "l", describe: "List files", type: "string" })
+ .option("add", { alias: "a", describe: "Add files", type: "array" })
+ .option("rm", { alias: "r", describe: "Remove files", type: "array" })
+ .option("push", { alias: "ph", describe: "Push files", type: "string" })
+ .option("pull", { alias: "pl", describe: "Pull files", type: "string" })
+ .option("migrate", { alias: "m", describe: "Migrate project", type: "string" })
+ .help('help').strict().argv;
+
+ if (Object.keys(args).length === 2 && args._.length === 0) yargs.showHelp();

  function updateGitIgnore() {
- let gitignoreContent = "";
- if (existsSync(".gitignore")) {
- gitignoreContent = fs.readFileSync(".gitignore").toString();
- }
- const dorkyIgnoreEntry = ".dorky/credentials.json";
- if (!gitignoreContent.includes(dorkyIgnoreEntry)) {
- gitignoreContent += EOL + dorkyIgnoreEntry + EOL;
- fs.writeFileSync(".gitignore", gitignoreContent);
- console.log(`${chalk.bgGreen("Updated .gitignore to ignore .dorky/credentials.json.")} ${chalk.red("⚠️ This is done to protect your credentials.")}`);
+ let content = existsSync(".gitignore") ? readFileSync(".gitignore").toString() : "";
+ if (!content.includes(CREDENTIALS_PATH)) {
+ writeFileSync(".gitignore", content + EOL + CREDENTIALS_PATH + EOL);
+ console.log(chalk.cyan("ℹ Updated .gitignore to secure credentials."));
  }
  }

  async function authorizeGoogleDriveClient(forceReauth = false) {
- async function loadSavedCredentialsIfExist() {
- try {
- const content = await fs.readFileSync(TOKEN_PATH);
- const savedCredentials = JSON.parse(content);
-
- if (!savedCredentials.access_token && !savedCredentials.refresh_token) {
- return null;
- }
-
- const keys = JSON.parse(fs.readFileSync(CREDENTIALS_PATH));
+ if (!forceReauth && existsSync(CREDENTIALS_PATH)) {
+ const saved = readJson(CREDENTIALS_PATH);
+ if (saved.storage === 'google-drive' && saved.expiry_date) {
+ const keys = readJson(GD_CREDENTIALS_PATH);
  const key = keys.installed || keys.web;
- const oAuth2Client = new google.auth.OAuth2(
- key.client_id,
- key.client_secret,
- key.redirect_uris[0]
- );
-
- const { storage, ...authCredentials } = savedCredentials;
- oAuth2Client.setCredentials(authCredentials);
-
- return oAuth2Client;
- } catch (err) {
- return null;
- }
- }
-
- async function isTokenExpired(credentials) {
- if (!credentials.expiry_date) {
- return true;
- }
- const expiryBuffer = 300000;
- const currentTimeUTC = Date.now();
- const expiryTimeUTC = credentials.expiry_date;
-
- return currentTimeUTC >= (expiryTimeUTC - expiryBuffer);
- }
-
- async function refreshAndSaveToken(client) {
- try {
- await client.getAccessToken();
- const newCredentials = client.credentials;
- const credentialsToSave = {
- storage: "google-drive",
- ...newCredentials
- };
- fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
- return client;
- } catch (err) {
- return null;
- }
- }
+ const client = new google.auth.OAuth2(key.client_id, key.client_secret, key.redirect_uris[0]);
+ client.setCredentials(saved);

- if (!forceReauth) {
- let client = await loadSavedCredentialsIfExist();
- if (client) {
- const credentials = JSON.parse(fs.readFileSync(TOKEN_PATH));
-
- const clientCredentials = client.credentials || credentials;
-
- if (await isTokenExpired(clientCredentials)) {
- client = await refreshAndSaveToken(client);
- if (client) {
- return client;
+ if (Date.now() >= saved.expiry_date - 300000) {
+ try {
+ const { credentials } = await client.refreshAccessToken();
+ writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...credentials });
+ client.setCredentials(credentials);
+ } catch (e) {
+ console.log(chalk.yellow("Token refresh failed. Re-authenticating..."));
+ return authorizeGoogleDriveClient(true);
  }
- } else {
- return client;
  }
+ return client;
  }
  }

- client = await authenticate({
- scopes: SCOPES,
- keyfilePath: CREDENTIALS_PATH,
- });
-
- if (client && client.credentials && existsSync(path.dirname(TOKEN_PATH))) {
- const credentialsToSave = {
- storage: "google-drive",
- ...client.credentials
- };
- fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
+ const client = await authenticate({ scopes: SCOPES, keyfilePath: GD_CREDENTIALS_PATH });
+ if (client?.credentials && existsSync(path.dirname(CREDENTIALS_PATH))) {
+ writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...client.credentials });
  }
-
  return client;
  }

  async function init(storage) {
- const metaData = { "stage-1-files": {}, "uploaded-files": {} };
- var credentials;
- switch (storage) {
- case "aws":
- credentials = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME }
- setupFilesAndFolders(metaData, credentials);
- break;
- case "google-drive":
- const client = await authorizeGoogleDriveClient(true);
- credentials = { storage: "google-drive", ...client.credentials };
- setupFilesAndFolders(metaData, credentials);
- break;
- default:
- console.log("Please provide a valid storage option <aws|google-drive>");
- break;
+ if (existsSync(DORKY_DIR)) return console.log(chalk.yellow(" Dorky is already initialized."));
+ if (!["aws", "google-drive"].includes(storage)) return console.log(chalk.red("✖ Invalid storage. Use 'aws' or 'google-drive'."));
+
+ let credentials = {};
+ if (storage === "aws") {
+ if (!process.env.AWS_ACCESS_KEY || !process.env.AWS_SECRET_KEY || !process.env.AWS_REGION || !process.env.BUCKET_NAME) {
+ console.log(chalk.red("✖ Missing AWS environment variables."));
+ return;
+ }
+ credentials = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
+ } else {
+ const client = await authorizeGoogleDriveClient(true);
+ credentials = { storage: "google-drive", ...client.credentials };
  }
+
+ mkdirSync(DORKY_DIR);
+ writeJson(METADATA_PATH, { "stage-1-files": {}, "uploaded-files": {} });
+ writeFileSync(".dorkyignore", "");
+ writeJson(CREDENTIALS_PATH, credentials);
+ console.log(chalk.green("✔ Dorky project initialized successfully."));
  updateGitIgnore();
  }

  async function list(type) {
- checkIfDorkyProject();
- const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
- switch (type) {
- case "remote":
- const uploadedFiles = Object.keys(metaData["uploaded-files"]);
- if (uploadedFiles.length === 0) {
- console.log(chalk.red("No files found in remote storage."));
- return;
- }
- console.log(chalk.green("Listing files in stage-1:"));
- uploadedFiles.forEach((file) => console.log(chalk.green(`- ${file}`)));
- break;
- default:
- console.log(chalk.red("Listing files that can be added:"));
- var exclusions = fs.readFileSync(".dorkyignore").toString().split(EOL);
- exclusions = exclusions.filter((exclusion) => exclusion !== "");
- const src = process.cwd();
- const files = await glob(path.join(src, "**/*"), { dot: true });
- const filteredFiles = files.filter((file) => {
- for (let i = 0; i < exclusions.length; i++) {
- if (file.includes(exclusions[i])) return false;
- }
- if (file.includes(".dorky/")) return false;
- if (file.endsWith(".dorky") && fs.lstatSync(file).isDirectory()) return false;
- if (file.endsWith(".dorkyignore")) return false;
- return true;
+ checkDorkyProject();
+ const meta = readJson(METADATA_PATH);
+ if (type === "remote") {
+ if (!await checkCredentials()) return;
+ const creds = readJson(CREDENTIALS_PATH);
+ const root = path.basename(process.cwd());
+ console.log(chalk.blue.bold("\n☁ Remote Files:"));
+
+ if (creds.storage === "aws") {
+ await runS3(creds, async (s3, bucket) => {
+ const data = await s3.send(new ListObjectsV2Command({ Bucket: bucket, Prefix: root + "/" }));
+ if (!data.Contents?.length) return console.log(chalk.yellow("ℹ No remote files found."));
+ data.Contents.forEach(o => console.log(chalk.cyan(` ${o.Key.replace(root + "/", "")}`)));
  });
- filteredFiles.forEach((file) => {
- const relativePath = path.relative(process.cwd(), file);
- if (relativePath.includes('.env') || relativePath.includes('.config')) {
- console.log(chalk.bold.bgYellowBright.red(`- ${relativePath} (This file might be sensitive, please add it to dorky if needed)`));
- } else {
- console.log(chalk.red(`- ${relativePath}`));
- }
+ } else {
+ await runDrive(async (drive) => {
+ const q = `name='${root}' and mimeType='application/vnd.google-apps.folder' and 'root' in parents and trashed=false`;
+ const { data: { files: [folder] } } = await drive.files.list({ q, fields: 'files(id)' });
+ if (!folder) return console.log(chalk.yellow("ℹ Remote folder not found."));
+ const walk = async (pid, p = '') => {
+ const { data: { files } } = await drive.files.list({ q: `'${pid}' in parents and trashed=false`, fields: 'files(id, name, mimeType)' });
+ for (const f of files) {
+ if (f.mimeType === 'application/vnd.google-apps.folder') await walk(f.id, path.join(p, f.name));
+ else console.log(chalk.cyan(` ${path.join(p, f.name)}`));
+ }
+ };
+ await walk(folder.id);
  });
- console.log(chalk.green("\nList of files that are already added:"));
- const addedFiles = Object.keys(metaData["stage-1-files"]);
- addedFiles.forEach((file) => console.log(chalk.green(`- ${file}`)));
- break;
+ }
+ } else {
+ console.log(chalk.blue.bold("\n📂 Untracked Files:"));
+ const exclusions = existsSync(".dorkyignore") ? readFileSync(".dorkyignore").toString().split(EOL).filter(Boolean) : [];
+ const files = await glob("**/*", { dot: true, ignore: [...exclusions.map(e => `**/${e}/**`), ...exclusions, ".dorky/**", ".dorkyignore", ".git/**", "node_modules/**"] });
+
+ files.forEach(f => {
+ const rel = path.relative(process.cwd(), f);
+ if (rel.includes('.env') || rel.includes('.config')) console.log(chalk.yellow(` ⚠ ${rel} (Potential sensitive file)`));
+ else console.log(chalk.gray(` ${rel}`));
+ });
+ console.log(chalk.blue.bold("\n📦 Staged Files:"));
+ Object.keys(meta["stage-1-files"]).forEach(f => console.log(chalk.green(` ✔ ${f}`)));
  }
  }

- function add(listOfFiles) {
- checkIfDorkyProject();
- console.log("Adding files to stage-1 to push to storage");
- const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
- const addedFiles = [];
- listOfFiles.forEach((file) => {
- if (!fs.existsSync(file)) {
- console.log(chalk.red(`File ${file} does not exist.`));
- return;
- }
- const fileContents = fs.readFileSync(file);
- const fileType = mimeTypes.lookup(file);
- const newHash = md5(fileContents);
- const existingEntry = metaData["stage-1-files"][file];
- if (existingEntry && existingEntry.hash === newHash) {
- console.log(chalk.yellow(`File ${file} has no changes, skipping.`));
- return;
- }
- metaData["stage-1-files"][file] = {
- "mime-type": fileType ? fileType : "application/octet-stream",
- "hash": newHash
- };
- addedFiles.push(file);
+ function add(files) {
+ checkDorkyProject();
+ const meta = readJson(METADATA_PATH);
+ const added = [];
+ files.forEach(f => {
+ if (!existsSync(f)) return console.log(chalk.red(`✖ File not found: ${f}`));
+ const hash = md5(readFileSync(f));
+ if (meta["stage-1-files"][f]?.hash === hash) return console.log(chalk.gray(`• ${f} (unchanged)`));
+ meta["stage-1-files"][f] = { "mime-type": mimeTypes.lookup(f) || "application/octet-stream", hash };
+ added.push(f);
  });
- fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
- addedFiles.forEach((file) => console.log(chalk.green(`Added ${file} to stage-1.`)));
+ writeJson(METADATA_PATH, meta);
+ added.forEach(f => console.log(chalk.green(`✔ Staged: ${f}`)));
  }

- function rm(listOfFiles) {
- checkIfDorkyProject();
- console.log(chalk.red("Removing files from stage-1"));
- const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
- listOfFiles = listOfFiles.filter((file) => {
- if (metaData["stage-1-files"][file] == undefined) return false;
- delete metaData["stage-1-files"][file];
+ function rm(files) {
+ checkDorkyProject();
+ const meta = readJson(METADATA_PATH);
+ const removed = files.filter(f => {
+ if (!meta["stage-1-files"][f]) return false;
+ delete meta["stage-1-files"][f];
  return true;
  });
- fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
- if (listOfFiles.length) listOfFiles.map((file) => console.log(chalk.red(`Removed ${file} from stage-1.`)));
- else console.log(chalk.red("No files found that can be removed."));
+ writeJson(METADATA_PATH, meta);
+ removed.length ? removed.forEach(f => console.log(chalk.yellow(`✔ Unstaged: ${f}`))) : console.log(chalk.gray("ℹ No matching files to remove."));
  }

  async function checkCredentials() {
+ if (existsSync(CREDENTIALS_PATH)) return true;
+ if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
+ writeJson(CREDENTIALS_PATH, {
+ storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY,
+ awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME
+ });
+ return true;
+ }
  try {
- if (fs.existsSync(".dorky/credentials.json")) {
- const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
- if (credentials.storage === "google-drive") {
- if (credentials.access_token && credentials.scope && credentials.token_type && credentials.expiry_date) return true;
- else return false;
- } else {
- if (credentials.accessKey && credentials.secretKey && credentials.awsRegion && credentials.bucket) return true;
- else return false;
- }
- } else {
- console.log("Setting the credentials again.")
- if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY && process.env.AWS_REGION && process.env.BUCKET_NAME) {
- fs.writeFileSync(".dorky/credentials.json", JSON.stringify({
- "storage": "aws",
- "accessKey": process.env.AWS_ACCESS_KEY,
- "secretKey": process.env.AWS_SECRET_KEY,
- "awsRegion": process.env.AWS_REGION,
- "bucket": process.env.BUCKET_NAME
- }, null, 2));
- return true;
- } else {
+ const client = await authorizeGoogleDriveClient(true);
+ if (client) return true;
+ } catch { }
+ console.log(chalk.red("✖ Credentials not found. Please run --init."));
+ return false;
+ }
+
+ const getS3 = (c) => new S3Client({
+ credentials: { accessKeyId: c.accessKey || process.env.AWS_ACCESS_KEY, secretAccessKey: c.secretKey || process.env.AWS_SECRET_KEY },
+ region: c.awsRegion || process.env.AWS_REGION
+ });
+
+ async function runS3(creds, fn) {
+ try { await fn(getS3(creds), creds.bucket || process.env.BUCKET_NAME); }
+ catch (err) {
+ if (["InvalidAccessKeyId", "SignatureDoesNotMatch"].includes(err.name) || err.$metadata?.httpStatusCode === 403) {
+ if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
+ console.log(chalk.yellow("AWS auth failed. Retrying with env vars..."));
+ const newCreds = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
+ writeJson(CREDENTIALS_PATH, newCreds);
  try {
- let credentials;
- const client = await authorizeGoogleDriveClient(true);
- credentials = { storage: "google-drive", ...client.credentials };
- fs.writeFileSync(".dorky/credentials.json", JSON.stringify(credentials, null, 2));
- console.log(chalk.green("Credentials saved in .dorky/credentials.json"));
- console.log(chalk.red("Please ignore the warning to set credentials below and run the command again."));
- return false;
- } catch (err) {
- console.log(chalk.red("Failed to authorize Google Drive client: " + err.message));
- console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
- return false;
+ await fn(getS3(newCreds), newCreds.bucket);
+ return;
+ } catch (e) {
+ console.log(chalk.red("Retried with env vars but failed."));
  }
  }
+ console.log(chalk.red("AWS authentication failed."));
+ console.log(chalk.yellow("Please set correct AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION and BUCKET_NAME in environment or .dorky/credentials.json"));
+ process.exit(1);
  }
- } catch (err) {
- console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
- return false;
+ throw err;
  }
  }

- async function push() {
- checkIfDorkyProject();
- if (!(await checkCredentials())) {
- console.log(chalk.red("Please setup credentials in environment variables or in .dorky/credentials.json"));
- return;
- }
- console.log("Pushing files to storage");
- const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
- const stage1Files = metaData["stage-1-files"];
- const pushedFiles = metaData["uploaded-files"];
- var filesToPush = [];
- Object.keys(stage1Files).map((file) => {
- if (pushedFiles[file]) {
- if (stage1Files[file]["hash"] != pushedFiles[file]["hash"]) filesToPush.push(file);
- } else filesToPush.push(file);
- });
- filesToPush = filesToPush.map((file) => {
- return {
- "name": file,
- "mime-type": stage1Files[file]["mime-type"],
- "hash": stage1Files[file]["hash"]
- }
- });
- const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
- switch (credentials.storage) {
- case "aws":
- pushToS3(filesToPush, credentials);
- break;
- case "google-drive":
- pushToGoogleDrive(filesToPush);
- break;
- default:
- console.log("Please provide a valid storage option <aws|google-drive>");
- break;
+ async function getFolderId(pathStr, drive) {
+ let parentId = 'root';
+ if (!pathStr || pathStr === '.') return parentId;
+ for (const folder of pathStr.split("/")) {
+ if (!folder) continue;
+ const res = await drive.files.list({ q: `name='${folder}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents`, fields: 'files(id)' });
+ if (res.data.files[0]) parentId = res.data.files[0].id;
+ else parentId = (await drive.files.create({ requestBody: { name: folder, mimeType: 'application/vnd.google-apps.folder', parents: [parentId] }, fields: 'id' })).data.id;
  }
- metaData["uploaded-files"] = metaData["stage-1-files"];
- fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
- console.log(chalk.green("Pushed the following files to storage:"));
+ return parentId;
  }

- function pushToS3(files, credentials) {
- const s3 = new S3Client({
- credentials: {
- accessKeyId: credentials.accessKey ?? process.env.AWS_ACCESS_KEY,
- secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
- },
- region: credentials.awsRegion ?? process.env.AWS_REGION
- });
- const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
- Promise.all(files.map(async (file) => {
- const rootFolder = path.basename(process.cwd());
- const pathToFile = path.join(rootFolder, file.name);
- await s3.send(
- new PutObjectCommand({
- Bucket: bucketName,
- Key: pathToFile,
- Body: fs.readFileSync(file.name).toString(),
- })
- );
- console.log(chalk.green(`Pushed ${pathToFile} to storage.`));
- }));
+ async function runDrive(fn) {
+ let client = await authorizeGoogleDriveClient();
+ let drive = google.drive({ version: 'v3', auth: client });
+ try { await fn(drive); }
+ catch (err) {
+ if (err.code === 401 || err.message?.includes('invalid_grant')) {
+ console.log(chalk.yellow("Drive auth failed. Re-authenticating..."));
+ if (existsSync(CREDENTIALS_PATH)) unlinkSync(CREDENTIALS_PATH);
+ client = await authorizeGoogleDriveClient(true);
+ drive = google.drive({ version: 'v3', auth: client });
+ await fn(drive);
+ } else throw err;
+ }
  }

-
- async function pushToGoogleDrive(files) {
- async function getOrCreateFolderId(folderPath, drive) {
- const folders = folderPath.split(path.sep);
- let parentId = 'root';
- for (const folder of folders) {
- const res = await drive.files.list({
- q: `name='${folder}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents`,
- fields: 'files(id, name)',
- spaces: 'drive'
- });
- if (res.data.files.length > 0) {
- parentId = res.data.files[0].id;
- } else {
- const folderMetadata = {
- name: folder,
- mimeType: 'application/vnd.google-apps.folder',
- parents: [parentId],
- };
- const folderRes = await drive.files.create({
- requestBody: folderMetadata,
- fields: 'id',
+ async function push() {
+ checkDorkyProject();
+ if (!await checkCredentials()) return;
+ const meta = readJson(METADATA_PATH);
+ const files = Object.keys(meta["stage-1-files"])
+ .filter(f => !meta["uploaded-files"][f] || meta["stage-1-files"][f].hash !== meta["uploaded-files"][f].hash)
+ .map(f => ({ name: f, ...meta["stage-1-files"][f] }));
+
+ if (files.length === 0) return console.log(chalk.yellow("ℹ Nothing to push."));
+
+ const creds = readJson(CREDENTIALS_PATH);
+ if (creds.storage === "aws") {
+ await runS3(creds, async (s3, bucket) => {
+ await Promise.all(files.map(async f => {
+ const key = path.join(path.basename(process.cwd()), f.name);
+ await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f.name) }));
+ console.log(chalk.green(`✔ Uploaded: ${f.name}`));
+ }));
+ });
+ } else if (creds.storage === "google-drive") {
+ await runDrive(async (drive) => {
+ for (const f of files) {
+ const root = path.basename(process.cwd());
+ const parentId = await getFolderId(path.dirname(path.join(root, f.name)), drive);
+ await drive.files.create({
+ requestBody: { name: path.basename(f.name), parents: [parentId] },
+ media: { mimeType: f["mime-type"], body: createReadStream(f.name) }
  });
- parentId = folderRes.data.id;
+ console.log(chalk.green(`✔ Uploaded: ${f.name}`));
  }
- }
- return parentId;
+ });
  }
- console.log("Uploading to google drive");
- const client = await authorizeGoogleDriveClient(false);
-
- const credentialsToSave = {
- storage: "google-drive",
- ...client.credentials
- };
- fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));

- const drive = google.drive({ version: 'v3', auth: client });
- for (const file of files) {
- const rootFolder = path.basename(process.cwd());
- const pathToFile = path.join(rootFolder, file.name);
- const requestBody = {
- name: path.basename(file.name),
- parents: [await getOrCreateFolderId(pathToFile.split("/").slice(0, -1).join("/"), drive)],
- fields: 'id',
- };
- const media = {
- mimeType: file["mime-type"],
- body: fs.createReadStream(path.join(process.cwd(), file.name)),
- };
- try {
- await drive.files.create({
- requestBody,
- media: media,
- });
- console.log(chalk.green(`Pushed ${file.name} to storage.`));
- } catch (err) {
- console.log(err);
- throw err;
- }
- }
+ meta["uploaded-files"] = { ...meta["uploaded-files"], ...meta["stage-1-files"] };
+ writeJson(METADATA_PATH, meta);
  }

  async function pull() {
- checkIfDorkyProject();
- if (!(await checkCredentials())) {
- console.log(chalk.red("Please setup credentials in environment variables or in .dorky/credentials.json"));
- return;
- }
- console.log("Pulling files from storage");
- const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
- const filesToPull = metaData["uploaded-files"];
- const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
- switch (credentials.storage) {
- case "aws":
- pullFromS3(filesToPull, credentials);
- break;
- case "google-drive":
- pullFromGoogleDrive(filesToPull);
- break;
- default:
- console.log("Please provide a valid storage option <aws|google-drive>");
- break;
- }
- }
-
- function pullFromS3(files, credentials) {
- const s3 = new S3Client({
- credentials: {
- accessKeyId: credentials.accessKey ?? process.env.AWS_ACCESS_KEY,
- secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
- },
- region: credentials.awsRegion ?? process.env.AWS_REGION
- });
- const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
- Promise.all(Object.keys(files).map(async (file) => {
- const rootFolder = path.basename(process.cwd());
- const pathToFile = path.join(rootFolder, file);
- const { Body } = await s3.send(
- new GetObjectCommand({
- Bucket: bucketName,
- Key: pathToFile,
- })
- );
- const dir = path.dirname(file);
- if (!fs.existsSync(dir)) {
- fs.mkdirSync(dir, { recursive: true });
- }
- fs.writeFileSync(file, await Body.transformToString());
- console.log(chalk.green(`Pulled ${file} from storage.`));
- }));
- }
-
- async function pullFromGoogleDrive(files) {
- console.log("Downloading from google drive");
- files = Object.keys(files).map((file) => {
- return { name: file, ...files[file] };
- });
-
- const client = await authorizeGoogleDriveClient(false);
-
- const credentialsToSave = {
- storage: "google-drive",
- ...client.credentials
- };
- fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
-
- const drive = google.drive({ version: "v3", auth: client });
- try {
- files.map(async (file) => {
- const res = await drive.files.list({
- q: `name='${path.basename(file.name)}' and mimeType!='application/vnd.google-apps.folder'`,
- fields: 'files(id, name)',
- spaces: 'drive'
- });
- if (res.data.files.length === 0) {
- console.log(chalk.red(`File ${file.name} not found in Google Drive.`));
- return;
- }
- const _file = await drive.files.get({ fileId: res.data.files[0].id, alt: "media" });
- const dir = path.dirname(file.name);
- if (!fs.existsSync(dir)) {
- fs.mkdirSync(dir, { recursive: true });
- }
- fs.writeFileSync(file.name, await _file.data.text(), "utf-8");
- console.log(chalk.green(`Pulled ${file.name} from storage.`));
+ checkDorkyProject();
+ if (!await checkCredentials()) return;
+ const meta = readJson(METADATA_PATH);
+ const files = meta["uploaded-files"];
+ const creds = readJson(CREDENTIALS_PATH);
+
+ if (creds.storage === "aws") {
+ await runS3(creds, async (s3, bucket) => {
+ await Promise.all(Object.keys(files).map(async f => {
+ const key = path.join(path.basename(process.cwd()), f);
+ const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
+ const dir = path.dirname(f);
+ if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
+ writeFileSync(f, await Body.transformToString());
+ console.log(chalk.green(`✔ Downloaded: ${f}`));
+ }));
+ });
+ } else if (creds.storage === "google-drive") {
+ await runDrive(async (drive) => {
+ const fileList = Object.keys(files).map(k => ({ name: k, ...files[k] }));
+ await Promise.all(fileList.map(async f => {
+ const res = await drive.files.list({ q: `name='${path.basename(f.name)}' and mimeType!='application/vnd.google-apps.folder'`, fields: 'files(id)' });
+ if (!res.data.files[0]) return console.log(chalk.red(`✖ Missing remote file: ${f.name}`));
+ const data = await drive.files.get({ fileId: res.data.files[0].id, alt: 'media' });
+ if (!existsSync(path.dirname(f.name))) mkdirSync(path.dirname(f.name), { recursive: true });
+ writeFileSync(f.name, await data.data.text());
+ console.log(chalk.green(`✔ Downloaded: ${f.name}`));
+ }));
  });
- } catch (err) {
- throw err;
  }
  }

- if (Object.keys(args).includes("init")) init(args.init);
- if (Object.keys(args).includes("list")) list(args.list);
- if (Object.keys(args).includes("add")) add(args.add);
- if (Object.keys(args).includes("rm")) rm(args.rm);
- if (Object.keys(args).includes("push")) push();
- if (Object.keys(args).includes("pull")) pull();
+ if (args.init !== undefined) init(args.init);
+ if (args.list !== undefined) list(args.list);
+ if (args.add !== undefined) add(args.add);
+ if (args.rm !== undefined) rm(args.rm);
+ if (args.push !== undefined) push();
+ if (args.pull !== undefined) pull();
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "dorky",
- "version": "2.3.8",
+ "version": "2.3.9",
  "description": "DevOps Records Keeper.",
  "bin": {
  "dorky": "bin/index.js"