dorky 2.3.7 → 2.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +31 -4
- package/bin/index.js +256 -477
- package/package.json +1 -1
package/README.md
CHANGED
@@ -35,8 +35,32 @@ npx dorky --help
 ### AWS S3
 
 1. Create an S3 bucket in your AWS account
-2.
-3.
+2. Create an IAM user with programmatic access
+3. Attach the following IAM policy to the user (replace `your-bucket-name` with your actual bucket name):
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Action": [
+        "s3:PutObject",
+        "s3:GetObject",
+        "s3:DeleteObject",
+        "s3:ListBucket"
+      ],
+      "Resource": [
+        "arn:aws:s3:::your-bucket-name",
+        "arn:aws:s3:::your-bucket-name/*"
+      ]
+    }
+  ]
+}
+```
+
+4. Generate AWS credentials (Access Key ID and Secret Access Key) for the IAM user
+5. Set up environment variables:
 
 ```bash
 export AWS_ACCESS_KEY="your-access-key"
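The new bin/index.js in this release checks four environment variables before initializing AWS storage: AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION and BUCKET_NAME. The following standalone sketch is not part of the package; it only shows one way to confirm that the bucket and the IAM policy above are reachable before running `npx dorky --init aws` (the file name `check-s3.js` and the `MaxKeys: 1` choice are illustrative assumptions):

```js
// check-s3.js — hypothetical pre-flight check, not shipped with dorky.
// Assumes the environment variables documented above are exported.
const { S3Client, ListObjectsV2Command } = require("@aws-sdk/client-s3");

async function main() {
  const s3 = new S3Client({
    region: process.env.AWS_REGION,
    credentials: {
      accessKeyId: process.env.AWS_ACCESS_KEY,
      secretAccessKey: process.env.AWS_SECRET_KEY,
    },
  });
  // The s3:ListBucket permission from the policy above is sufficient for this call.
  const res = await s3.send(
    new ListObjectsV2Command({ Bucket: process.env.BUCKET_NAME, MaxKeys: 1 })
  );
  console.log(`Bucket reachable; sampled ${res.KeyCount ?? 0} object(s).`);
}

main().catch((err) => {
  console.error("S3 check failed:", err.name, err.message);
  process.exit(1);
});
```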
@@ -356,9 +380,12 @@ ISC License - see [LICENSE](LICENSE) file for details.
 
 ## Roadmap
 
-- [
-- [
+- [x] Update README with AWS IAM policy (bug fix release)
+- [*] Handle invalid access token for Google Drive and AWS (edge cases)
+- [ ] rm + push should delete file from storage (minor release)
 - [ ] Uninitialize dorky setup (Bug fix release)
+- [ ] dorky --list remote --update should sync metadata according to remote (Minor release)
+- [ ] Extension for VS Code to list and highlight them like git (Major release)
 - [ ] MCP server (Minor release)
 - [ ] Encryption of files (Minor release)
 - [ ] Add stages for variables (Major release)
package/bin/index.js
CHANGED
@@ -1,23 +1,34 @@
 #!/usr/bin/env node
 
 const yargs = require("yargs");
-const { existsSync, mkdirSync, writeFileSync } = require("fs");
+const { existsSync, mkdirSync, writeFileSync, readFileSync, createReadStream, unlinkSync } = require("fs");
 const chalk = require("chalk");
 const { glob } = require("glob");
 const path = require("path");
-const fs = require("fs");
 const mimeTypes = require("mime-types");
 const md5 = require('md5');
 const EOL = require("os").type() == "Darwin" ? "\r\n" : "\n";
-const { GetObjectCommand, PutObjectCommand, S3Client } = require("@aws-sdk/client-s3");
+const { GetObjectCommand, PutObjectCommand, ListObjectsV2Command, S3Client } = require("@aws-sdk/client-s3");
 const { authenticate } = require('@google-cloud/local-auth');
 const { google } = require('googleapis');
 
-//
+// Constants & Config
+const DORKY_DIR = ".dorky";
+const METADATA_PATH = path.join(DORKY_DIR, "metadata.json");
+const CREDENTIALS_PATH = path.join(DORKY_DIR, "credentials.json");
+const GD_CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
 const SCOPES = ['https://www.googleapis.com/auth/drive'];
- … [old lines 18-20 not shown in this view]
+
+// Helpers
+const readJson = (p) => existsSync(p) ? JSON.parse(readFileSync(p)) : {};
+const writeJson = (p, d) => writeFileSync(p, JSON.stringify(d, null, 2));
+
+const checkDorkyProject = () => {
+  if (!existsSync(DORKY_DIR) && !existsSync(".dorkyignore")) {
+    console.log(chalk.red("✖ Not a dorky project. Please run ") + chalk.cyan("dorky --init [aws|google-drive]"));
+    process.exit(1);
+  }
+};
 
 const figlet = `
  __ __ \t
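Both versions of the CLI keep their state in `.dorky/metadata.json`; the new constants and `readJson`/`writeJson` helpers above are what the rewritten commands in the next hunk use to read and write it. A sketch of that file's shape, inferred from `init()`, `add()` and `push()` in this diff (the file name and hash below are illustrative values, not real package output):

```js
// Shape of .dorky/metadata.json as the code in this diff reads and writes it.
// "notes.txt" and the md5 value are example data only.
const exampleMetadata = {
  "stage-1-files": {
    // written by add(): md5 hash of the file contents plus its MIME type
    "notes.txt": { "mime-type": "text/plain", hash: "9e107d9d372bb6826bd81d3542a419d6" },
  },
  "uploaded-files": {
    // push() copies stage-1 entries here after a successful upload,
    // so files whose hash is unchanged are skipped on the next push
    "notes.txt": { "mime-type": "text/plain", hash: "9e107d9d372bb6826bd81d3542a419d6" },
  },
};

module.exports = exampleMetadata;
```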
@@ -26,533 +37,301 @@ const figlet = `
 |_____|_____|__| |__|__|___ |\t
 |_____|\t
 `;
- … [old lines 29-39 not shown in this view]
-}
- … [old lines 41-43 not shown in this view]
-  .option("add", { alias: "a", describe: "Add files to push or pull", type: "array", demandOption: false })
-  .option("rm", { alias: "r", describe: "Remove files from push or pull", type: "array", demandOption: false })
-  .option("push", { alias: "ph", describe: "Push files to storage", type: "string", demandOption: false })
-  .option("pull", { alias: "pl", describe: "Pull files from storage", type: "string", demandOption: false })
-  .option("migrate", { alias: "m", describe: "Migrate dorky project to another storage", type: "string", demandOption: false })
-  .example('$0 --init aws', 'Initialize a dorky project with AWS storage')
-  .example('$0 --init google-drive', 'Initialize a dorky project with Google Drive storage')
-  .example('$0 --list', 'List local files that can be added and already added files')
-  .example('$0 --list remote', 'List files in remote storage')
-  .example('$0 --add file1.txt file2.js', 'Add specific files to stage-1')
-  .example('$0 --rm file1.txt', 'Remove a file from stage-1')
-  .example('$0 --push', 'Push staged files to storage')
-  .example('$0 --pull', 'Pull files from storage')
-  .example('$0 --migrate aws', 'Migrate the project to AWS storage')
-  .help('help')
-  .strict()
-  .argv
-
-if (Object.keys(args).length == 2) {
-  yargs.showHelp()
-}
-
-function checkIfDorkyProject() {
-  if (!existsSync(".dorky") && !existsSync(".dorkyignore")) {
-    console.log(chalk.red("This is not a dorky project. Please run `dorky --init [aws|google-drive]` to initialize a dorky project."));
-    process.exit(1);
-  }
-}
-
-function setupFilesAndFolders(metaData, credentials) {
-  console.log("Initializing dorky project");
-  if (existsSync(".dorky")) {
-    console.log("Dorky is already initialised in this project.");
-  } else {
-    mkdirSync(".dorky");
-    console.log(chalk.bgGreen("Created .dorky folder."));
-    writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
-    console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
-    writeFileSync(".dorkyignore", "");
-    console.log(chalk.bgGreen("Created .dorkyignore file."));
-    writeFileSync(".dorky/credentials.json", JSON.stringify(credentials, null, 2));
-    console.log(chalk.bgGreen("Created .dorky/credentials.json file."));
-  }
-}
+let randomColor = `#${Math.floor(Math.random() * 16777215).toString(16)}`;
+while (randomColor[2] === "f" || randomColor[3] === "f") randomColor = `#${Math.floor(Math.random() * 16777215).toString(16)}`;
+console.log(chalk.bgHex(randomColor)(figlet));
+
+const args = yargs
+  .option("init", { alias: "i", describe: "Initialize dorky", type: "string" })
+  .option("list", { alias: "l", describe: "List files", type: "string" })
+  .option("add", { alias: "a", describe: "Add files", type: "array" })
+  .option("rm", { alias: "r", describe: "Remove files", type: "array" })
+  .option("push", { alias: "ph", describe: "Push files", type: "string" })
+  .option("pull", { alias: "pl", describe: "Pull files", type: "string" })
+  .option("migrate", { alias: "m", describe: "Migrate project", type: "string" })
+  .help('help').strict().argv;
+
+if (Object.keys(args).length === 2 && args._.length === 0) yargs.showHelp();
 
 function updateGitIgnore() {
-  let
-  if (
-
-
-  const dorkyIgnoreEntry = ".dorky/credentials.json";
-  if (!gitignoreContent.includes(dorkyIgnoreEntry)) {
-    gitignoreContent += EOL + dorkyIgnoreEntry + EOL;
-    fs.writeFileSync(".gitignore", gitignoreContent);
-    console.log(`${chalk.bgGreen("Updated .gitignore to ignore .dorky/credentials.json.")} ${chalk.red("⚠️ This is done to protect your credentials.")}`);
+  let content = existsSync(".gitignore") ? readFileSync(".gitignore").toString() : "";
+  if (!content.includes(CREDENTIALS_PATH)) {
+    writeFileSync(".gitignore", content + EOL + CREDENTIALS_PATH + EOL);
+    console.log(chalk.cyan("ℹ Updated .gitignore to secure credentials."));
   }
 }
 
 async function authorizeGoogleDriveClient(forceReauth = false) {
- … [old lines 103-105 not shown in this view]
-  const
-
-  if (!savedCredentials.access_token && !savedCredentials.refresh_token) {
-    return null;
-  }
-
-  const keys = JSON.parse(fs.readFileSync(CREDENTIALS_PATH));
+  if (!forceReauth && existsSync(CREDENTIALS_PATH)) {
+    const saved = readJson(CREDENTIALS_PATH);
+    if (saved.storage === 'google-drive' && saved.expiry_date) {
+      const keys = readJson(GD_CREDENTIALS_PATH);
       const key = keys.installed || keys.web;
-    const
-
-      key.client_secret,
-      key.redirect_uris[0]
-    );
-
-    const { storage, ...authCredentials } = savedCredentials;
-    oAuth2Client.setCredentials(authCredentials);
-
-    return oAuth2Client;
-  } catch (err) {
-    return null;
-  }
-}
-
-async function isTokenExpired(credentials) {
-  if (!credentials.expiry_date) {
-    return true;
-  }
-  const expiryBuffer = 300000;
-  const currentTimeUTC = Date.now();
-  const expiryTimeUTC = credentials.expiry_date;
-
-  return currentTimeUTC >= (expiryTimeUTC - expiryBuffer);
-}
-
-async function refreshAndSaveToken(client) {
-  try {
-    await client.getAccessToken();
-    const newCredentials = client.credentials;
-    const credentialsToSave = {
-      storage: "google-drive",
-      ...newCredentials
-    };
-    fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
-    return client;
-  } catch (err) {
-    return null;
-  }
-}
+      const client = new google.auth.OAuth2(key.client_id, key.client_secret, key.redirect_uris[0]);
+      client.setCredentials(saved);
 
- … [old lines 155-162 not shown in this view]
-  client = await refreshAndSaveToken(client);
-  if (client) {
-    return client;
+      if (Date.now() >= saved.expiry_date - 300000) {
+        try {
+          const { credentials } = await client.refreshAccessToken();
+          writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...credentials });
+          client.setCredentials(credentials);
+        } catch (e) {
+          console.log(chalk.yellow("Token refresh failed. Re-authenticating..."));
+          return authorizeGoogleDriveClient(true);
         }
-  } else {
-    return client;
       }
+      return client;
     }
   }
 
-  client = await authenticate({
-
-
-  });
-
-  if (client && client.credentials && existsSync(path.dirname(TOKEN_PATH))) {
-    const credentialsToSave = {
-      storage: "google-drive",
-      ...client.credentials
-    };
-    fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
+  const client = await authenticate({ scopes: SCOPES, keyfilePath: GD_CREDENTIALS_PATH });
+  if (client?.credentials && existsSync(path.dirname(CREDENTIALS_PATH))) {
+    writeJson(CREDENTIALS_PATH, { storage: "google-drive", ...client.credentials });
   }
-
   return client;
 }
 
 async function init(storage) {
- … [old lines 190-202 not shown in this view]
-      console.log("Please provide a valid storage option <aws|google-drive>");
-      break;
+  if (existsSync(DORKY_DIR)) return console.log(chalk.yellow("⚠ Dorky is already initialized."));
+  if (!["aws", "google-drive"].includes(storage)) return console.log(chalk.red("✖ Invalid storage. Use 'aws' or 'google-drive'."));
+
+  let credentials = {};
+  if (storage === "aws") {
+    if (!process.env.AWS_ACCESS_KEY || !process.env.AWS_SECRET_KEY || !process.env.AWS_REGION || !process.env.BUCKET_NAME) {
+      console.log(chalk.red("✖ Missing AWS environment variables."));
+      return;
+    }
+    credentials = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
+  } else {
+    const client = await authorizeGoogleDriveClient(true);
+    credentials = { storage: "google-drive", ...client.credentials };
   }
+
+  mkdirSync(DORKY_DIR);
+  writeJson(METADATA_PATH, { "stage-1-files": {}, "uploaded-files": {} });
+  writeFileSync(".dorkyignore", "");
+  writeJson(CREDENTIALS_PATH, credentials);
+  console.log(chalk.green("✔ Dorky project initialized successfully."));
   updateGitIgnore();
 }
 
 async function list(type) {
-
-  const
- … [old lines 212-222 not shown in this view]
-  console.log(chalk.red("Listing files that can be added:"));
-  var exclusions = fs.readFileSync(".dorkyignore").toString().split(EOL);
-  exclusions = exclusions.filter((exclusion) => exclusion !== "");
-  const src = process.cwd();
-  const files = await glob(path.join(src, "**/*"), { dot: true });
-  const filteredFiles = files.filter((file) => {
-    for (let i = 0; i < exclusions.length; i++) {
-      if (file.includes(exclusions[i])) return false;
-    }
-    if (file.includes(".dorky/")) return false;
-    if (file.endsWith(".dorky") && fs.lstatSync(file).isDirectory()) return false;
-    if (file.endsWith(".dorkyignore")) return false;
-    return true;
+  checkDorkyProject();
+  const meta = readJson(METADATA_PATH);
+  if (type === "remote") {
+    if (!await checkCredentials()) return;
+    const creds = readJson(CREDENTIALS_PATH);
+    const root = path.basename(process.cwd());
+    console.log(chalk.blue.bold("\n☁ Remote Files:"));
+
+    if (creds.storage === "aws") {
+      await runS3(creds, async (s3, bucket) => {
+        const data = await s3.send(new ListObjectsV2Command({ Bucket: bucket, Prefix: root + "/" }));
+        if (!data.Contents?.length) return console.log(chalk.yellow("ℹ No remote files found."));
+        data.Contents.forEach(o => console.log(chalk.cyan(` ${o.Key.replace(root + "/", "")}`)));
       });
- … [old lines 237-243 not shown in this view]
+    } else {
+      await runDrive(async (drive) => {
+        const q = `name='${root}' and mimeType='application/vnd.google-apps.folder' and 'root' in parents and trashed=false`;
+        const { data: { files: [folder] } } = await drive.files.list({ q, fields: 'files(id)' });
+        if (!folder) return console.log(chalk.yellow("ℹ Remote folder not found."));
+        const walk = async (pid, p = '') => {
+          const { data: { files } } = await drive.files.list({ q: `'${pid}' in parents and trashed=false`, fields: 'files(id, name, mimeType)' });
+          for (const f of files) {
+            if (f.mimeType === 'application/vnd.google-apps.folder') await walk(f.id, path.join(p, f.name));
+            else console.log(chalk.cyan(` ${path.join(p, f.name)}`));
+          }
+        };
+        await walk(folder.id);
       });
- … [old lines 245-248 not shown in this view]
+    }
+  } else {
+    console.log(chalk.blue.bold("\n📂 Untracked Files:"));
+    const exclusions = existsSync(".dorkyignore") ? readFileSync(".dorkyignore").toString().split(EOL).filter(Boolean) : [];
+    const files = await glob("**/*", { dot: true, ignore: [...exclusions.map(e => `**/${e}/**`), ...exclusions, ".dorky/**", ".dorkyignore", ".git/**", "node_modules/**"] });
+
+    files.forEach(f => {
+      const rel = path.relative(process.cwd(), f);
+      if (rel.includes('.env') || rel.includes('.config')) console.log(chalk.yellow(` ⚠ ${rel} (Potential sensitive file)`));
+      else console.log(chalk.gray(` ${rel}`));
+    });
+    console.log(chalk.blue.bold("\n📦 Staged Files:"));
+    Object.keys(meta["stage-1-files"]).forEach(f => console.log(chalk.green(` ✔ ${f}`)));
   }
 }
 
-function add(
-
-
-  const
- … [old lines 256-261 not shown in this view]
-    const fileContents = fs.readFileSync(file);
-    const fileType = mimeTypes.lookup(file);
-    const newHash = md5(fileContents);
-    const existingEntry = metaData["stage-1-files"][file];
-    if (existingEntry && existingEntry.hash === newHash) {
-      console.log(chalk.yellow(`File ${file} has no changes, skipping.`));
-      return;
-    }
-    metaData["stage-1-files"][file] = {
-      "mime-type": fileType ? fileType : "application/octet-stream",
-      "hash": newHash
-    };
-    addedFiles.push(file);
+function add(files) {
+  checkDorkyProject();
+  const meta = readJson(METADATA_PATH);
+  const added = [];
+  files.forEach(f => {
+    if (!existsSync(f)) return console.log(chalk.red(`✖ File not found: ${f}`));
+    const hash = md5(readFileSync(f));
+    if (meta["stage-1-files"][f]?.hash === hash) return console.log(chalk.gray(`• ${f} (unchanged)`));
+    meta["stage-1-files"][f] = { "mime-type": mimeTypes.lookup(f) || "application/octet-stream", hash };
+    added.push(f);
   });
-
-
+  writeJson(METADATA_PATH, meta);
+  added.forEach(f => console.log(chalk.green(`✔ Staged: ${f}`)));
 }
 
-function rm(
-
-
-  const
-
-
-    delete metaData["stage-1-files"][file];
+function rm(files) {
+  checkDorkyProject();
+  const meta = readJson(METADATA_PATH);
+  const removed = files.filter(f => {
+    if (!meta["stage-1-files"][f]) return false;
+    delete meta["stage-1-files"][f];
     return true;
   });
-
-
-  else console.log(chalk.red("No files found that can be removed."));
+  writeJson(METADATA_PATH, meta);
+  removed.length ? removed.forEach(f => console.log(chalk.yellow(`✔ Unstaged: ${f}`))) : console.log(chalk.gray("ℹ No matching files to remove."));
 }
 
 async function checkCredentials() {
+  if (existsSync(CREDENTIALS_PATH)) return true;
+  if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
+    writeJson(CREDENTIALS_PATH, {
+      storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY,
+      awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME
+    });
+    return true;
+  }
   try {
- … [old lines 296-315 not shown in this view]
-  } else {
+    const client = await authorizeGoogleDriveClient(true);
+    if (client) return true;
+  } catch { }
+  console.log(chalk.red("✖ Credentials not found. Please run --init."));
+  return false;
+}
+
+const getS3 = (c) => new S3Client({
+  credentials: { accessKeyId: c.accessKey || process.env.AWS_ACCESS_KEY, secretAccessKey: c.secretKey || process.env.AWS_SECRET_KEY },
+  region: c.awsRegion || process.env.AWS_REGION
+});
+
+async function runS3(creds, fn) {
+  try { await fn(getS3(creds), creds.bucket || process.env.BUCKET_NAME); }
+  catch (err) {
+    if (["InvalidAccessKeyId", "SignatureDoesNotMatch"].includes(err.name) || err.$metadata?.httpStatusCode === 403) {
+      if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY) {
+        console.log(chalk.yellow("AWS auth failed. Retrying with env vars..."));
+        const newCreds = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, awsRegion: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
+        writeJson(CREDENTIALS_PATH, newCreds);
         try {
- … [old lines 318-321 not shown in this view]
-          console.log(chalk.green("Credentials saved in .dorky/credentials.json"));
-          console.log(chalk.red("Please ignore the warning to set credentials below and run the command again."));
-          return false;
-        } catch (err) {
-          console.log(chalk.red("Failed to authorize Google Drive client: " + err.message));
-          console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
-          return false;
+          await fn(getS3(newCreds), newCreds.bucket);
+          return;
+        } catch (e) {
+          console.log(chalk.red("Retried with env vars but failed."));
         }
       }
+      console.log(chalk.red("AWS authentication failed."));
+      console.log(chalk.yellow("Please set correct AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION and BUCKET_NAME in environment or .dorky/credentials.json"));
+      process.exit(1);
     }
-
-    console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
-    return false;
+    throw err;
   }
 }
 
-async function
-
-  if (!
- … [old lines 341-345 not shown in this view]
-  const stage1Files = metaData["stage-1-files"];
-  const pushedFiles = metaData["uploaded-files"];
-  var filesToPush = [];
-  Object.keys(stage1Files).map((file) => {
-    if (pushedFiles[file]) {
-      if (stage1Files[file]["hash"] != pushedFiles[file]["hash"]) filesToPush.push(file);
-    } else filesToPush.push(file);
-  });
-  filesToPush = filesToPush.map((file) => {
-    return {
-      "name": file,
-      "mime-type": stage1Files[file]["mime-type"],
-      "hash": stage1Files[file]["hash"]
-    }
-  });
-  const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
-  switch (credentials.storage) {
-    case "aws":
-      pushToS3(filesToPush, credentials);
-      break;
-    case "google-drive":
-      pushToGoogleDrive(filesToPush);
-      break;
-    default:
-      console.log("Please provide a valid storage option <aws|google-drive>");
-      break;
+async function getFolderId(pathStr, drive) {
+  let parentId = 'root';
+  if (!pathStr || pathStr === '.') return parentId;
+  for (const folder of pathStr.split("/")) {
+    if (!folder) continue;
+    const res = await drive.files.list({ q: `name='${folder}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents`, fields: 'files(id)' });
+    if (res.data.files[0]) parentId = res.data.files[0].id;
+    else parentId = (await drive.files.create({ requestBody: { name: folder, mimeType: 'application/vnd.google-apps.folder', parents: [parentId] }, fields: 'id' })).data.id;
   }
-
-  fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
-  console.log(chalk.green("Pushed the following files to storage:"));
+  return parentId;
 }
 
-function
- … [old lines 379-390 not shown in this view]
-      new PutObjectCommand({
-        Bucket: bucketName,
-        Key: pathToFile,
-        Body: fs.readFileSync(file.name).toString(),
-      })
-    );
-    console.log(chalk.green(`Pushed ${pathToFile} to storage.`));
-  }));
+async function runDrive(fn) {
+  let client = await authorizeGoogleDriveClient();
+  let drive = google.drive({ version: 'v3', auth: client });
+  try { await fn(drive); }
+  catch (err) {
+    if (err.code === 401 || err.message?.includes('invalid_grant')) {
+      console.log(chalk.yellow("Drive auth failed. Re-authenticating..."));
+      if (existsSync(CREDENTIALS_PATH)) unlinkSync(CREDENTIALS_PATH);
+      client = await authorizeGoogleDriveClient(true);
+      drive = google.drive({ version: 'v3', auth: client });
+      await fn(drive);
+    } else throw err;
+  }
 }
 
- … [old lines 401-414 not shown in this view]
-  const
- … [old lines 416-422 not shown in this view]
+async function push() {
+  checkDorkyProject();
+  if (!await checkCredentials()) return;
+  const meta = readJson(METADATA_PATH);
+  const files = Object.keys(meta["stage-1-files"])
+    .filter(f => !meta["uploaded-files"][f] || meta["stage-1-files"][f].hash !== meta["uploaded-files"][f].hash)
+    .map(f => ({ name: f, ...meta["stage-1-files"][f] }));
+
+  if (files.length === 0) return console.log(chalk.yellow("ℹ Nothing to push."));
+
+  const creds = readJson(CREDENTIALS_PATH);
+  if (creds.storage === "aws") {
+    await runS3(creds, async (s3, bucket) => {
+      await Promise.all(files.map(async f => {
+        const key = path.join(path.basename(process.cwd()), f.name);
+        await s3.send(new PutObjectCommand({ Bucket: bucket, Key: key, Body: readFileSync(f.name) }));
+        console.log(chalk.green(`✔ Uploaded: ${f.name}`));
+      }));
+    });
+  } else if (creds.storage === "google-drive") {
+    await runDrive(async (drive) => {
+      for (const f of files) {
+        const root = path.basename(process.cwd());
+        const parentId = await getFolderId(path.dirname(path.join(root, f.name)), drive);
+        await drive.files.create({
+          requestBody: { name: path.basename(f.name), parents: [parentId] },
+          media: { mimeType: f["mime-type"], body: createReadStream(f.name) }
         });
-
+        console.log(chalk.green(`✔ Uploaded: ${f.name}`));
       }
-    }
-    return parentId;
+    });
   }
-  console.log("Uploading to google drive");
-  const client = await authorizeGoogleDriveClient(false);
-
-  const credentialsToSave = {
-    storage: "google-drive",
-    ...client.credentials
-  };
-  fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
 
-
-
-    const rootFolder = path.basename(process.cwd());
-    const pathToFile = path.join(rootFolder, file.name);
-    const requestBody = {
-      name: path.basename(file.name),
-      parents: [await getOrCreateFolderId(pathToFile.split("/").slice(0, -1).join("/"), drive)],
-      fields: 'id',
-    };
-    const media = {
-      mimeType: file["mime-type"],
-      body: fs.createReadStream(path.join(process.cwd(), file.name)),
-    };
-    try {
-      await drive.files.create({
-        requestBody,
-        media: media,
-      });
-      console.log(chalk.green(`Pushed ${file.name} to storage.`));
-    } catch (err) {
-      console.log(err);
-      throw err;
-    }
-  }
+  meta["uploaded-files"] = { ...meta["uploaded-files"], ...meta["stage-1-files"] };
+  writeJson(METADATA_PATH, meta);
 }
 
 async function pull() {
-
-  if (!
- … [old lines 467-492 not shown in this view]
-    region: credentials.awsRegion ?? process.env.AWS_REGION
-  });
-  const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
-  Promise.all(Object.keys(files).map(async (file) => {
-    const rootFolder = path.basename(process.cwd());
-    const pathToFile = path.join(rootFolder, file);
-    const { Body } = await s3.send(
-      new GetObjectCommand({
-        Bucket: bucketName,
-        Key: pathToFile,
-      })
-    );
-    const dir = path.dirname(file);
-    if (!fs.existsSync(dir)) {
-      fs.mkdirSync(dir, { recursive: true });
-    }
-    fs.writeFileSync(file, await Body.transformToString());
-    console.log(chalk.green(`Pulled ${file} from storage.`));
-  }));
-}
-
-async function pullFromGoogleDrive(files) {
-  console.log("Downloading from google drive");
-  files = Object.keys(files).map((file) => {
-    return { name: file, ...files[file] };
-  });
-
-  const client = await authorizeGoogleDriveClient(false);
-
-  const credentialsToSave = {
-    storage: "google-drive",
-    ...client.credentials
-  };
-  fs.writeFileSync(TOKEN_PATH, JSON.stringify(credentialsToSave, null, 2));
-
-  const drive = google.drive({ version: "v3", auth: client });
-  try {
-    files.map(async (file) => {
-      const res = await drive.files.list({
-        q: `name='${path.basename(file.name)}' and mimeType!='application/vnd.google-apps.folder'`,
-        fields: 'files(id, name)',
-        spaces: 'drive'
-      });
-      if (res.data.files.length === 0) {
-        console.log(chalk.red(`File ${file.name} not found in Google Drive.`));
-        return;
-      }
-      const _file = await drive.files.get({ fileId: res.data.files[0].id, alt: "media" });
-      const dir = path.dirname(file.name);
-      if (!fs.existsSync(dir)) {
-        fs.mkdirSync(dir, { recursive: true });
-      }
-      fs.writeFileSync(file.name, await _file.data.text(), "utf-8");
-      console.log(chalk.green(`Pulled ${file.name} from storage.`));
+  checkDorkyProject();
+  if (!await checkCredentials()) return;
+  const meta = readJson(METADATA_PATH);
+  const files = meta["uploaded-files"];
+  const creds = readJson(CREDENTIALS_PATH);
+
+  if (creds.storage === "aws") {
+    await runS3(creds, async (s3, bucket) => {
+      await Promise.all(Object.keys(files).map(async f => {
+        const key = path.join(path.basename(process.cwd()), f);
+        const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
+        const dir = path.dirname(f);
+        if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
+        writeFileSync(f, await Body.transformToString());
+        console.log(chalk.green(`✔ Downloaded: ${f}`));
+      }));
+    });
+  } else if (creds.storage === "google-drive") {
+    await runDrive(async (drive) => {
+      const fileList = Object.keys(files).map(k => ({ name: k, ...files[k] }));
+      await Promise.all(fileList.map(async f => {
+        const res = await drive.files.list({ q: `name='${path.basename(f.name)}' and mimeType!='application/vnd.google-apps.folder'`, fields: 'files(id)' });
+        if (!res.data.files[0]) return console.log(chalk.red(`✖ Missing remote file: ${f.name}`));
+        const data = await drive.files.get({ fileId: res.data.files[0].id, alt: 'media' });
+        if (!existsSync(path.dirname(f.name))) mkdirSync(path.dirname(f.name), { recursive: true });
+        writeFileSync(f.name, await data.data.text());
+        console.log(chalk.green(`✔ Downloaded: ${f.name}`));
+      }));
     });
-  } catch (err) {
-    throw err;
   }
 }
 
-if (
-if (
-if (
-if (
-if (
-if (
+if (args.init !== undefined) init(args.init);
+if (args.list !== undefined) list(args.list);
+if (args.add !== undefined) add(args.add);
+if (args.rm !== undefined) rm(args.rm);
+if (args.push !== undefined) push();
+if (args.pull !== undefined) pull();
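The dispatch block above replaces the old switch-based flow and wires every flag to the rewritten functions. A quick end-to-end smoke test of the published CLI, sketched here for orientation only (it is not part of the package; it assumes dorky is runnable via npx and that the AWS variables from the README section are exported):

```js
// smoke-test.js — hypothetical usage sketch, not shipped with dorky.
// Assumes AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION and BUCKET_NAME are set.
const { execSync } = require("child_process");

const run = (cmd) => {
  console.log(`$ ${cmd}`);
  console.log(execSync(cmd, { encoding: "utf8" }));
};

run("npx dorky --init aws");        // creates .dorky/, .dorkyignore and credentials.json
run("npx dorky --add README.md");   // stages the file under "stage-1-files"
run("npx dorky --push");            // uploads staged files to <bucket>/<project folder>/
run("npx dorky --list remote");     // lists files stored remotely via ListObjectsV2
```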