dorky 1.2.2 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/To-Do +24 -0
  2. package/bin/index.js +395 -0
  3. package/package.json +16 -6
  4. package/index.js +0 -547
package/To-Do ADDED
@@ -0,0 +1,24 @@
+ Add stages for variables.
+ Fix workflow to use npm ci instead of npm i.
+ Convert to TypeScript.
+
+
+ Possible commands to add =>
+ - node bin/index.js --init [aws|google-drive]
+ - node bin/index.js --add [file-names]
+ - node bin/index.js --list
+ - node bin/index.js --push
+ - node bin/index.js --pull
+
+ {
+   "stage-1-files": {
+     "abc.txt": {
+       "mime-type": "text/plain",
+       "checksum": ""
+     }
+   },
+   "uploaded-files": ["uid1"]
+ }
+
+ checksum of the file contents will be the md5 hash of the file contents
+ option to skip the checksum (used when the file size is large)
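
The To-Do sketches a metadata layout where each staged file carries a mime type and an md5 checksum. Below is a minimal sketch of how such an entry could be computed with the `md5` and `mime-types` packages that 2.0.0 adds as dependencies; the `stageEntry` helper and the size threshold for skipping the checksum are illustrative, not part of the package:

```js
const fs = require("fs");
const md5 = require("md5");
const mimeTypes = require("mime-types");

// Hypothetical helper: builds one "stage-1-files" entry as sketched in the
// To-Do. Past the (illustrative) size threshold the checksum is left empty,
// matching the "option to skip the checksum" note for large files.
function stageEntry(file, { maxHashedBytes = 50 * 1024 * 1024 } = {}) {
  const { size } = fs.statSync(file);
  return {
    "mime-type": mimeTypes.lookup(file) || "application/octet-stream",
    "checksum": size <= maxHashedBytes ? md5(fs.readFileSync(file)) : "",
  };
}

console.log(stageEntry("abc.txt")); // { "mime-type": "text/plain", "checksum": "..." }
```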
package/bin/index.js ADDED
@@ -0,0 +1,395 @@
+ #!/usr/bin/env node
+
+ const yargs = require("yargs");
+ const { existsSync, mkdirSync, writeFileSync } = require("fs");
+ const chalk = require("chalk");
+ const { glob } = require("glob");
+ const path = require("path");
+ const fs = require("fs");
+ const mimeTypes = require("mime-types");
+ const md5 = require("md5");
+ // os.type() is "Windows_NT" on Windows; macOS ("Darwin") and Linux use "\n".
+ const EOL = require("os").type() === "Windows_NT" ? "\r\n" : "\n";
+ const { GetObjectCommand, PutObjectCommand, S3Client } = require("@aws-sdk/client-s3");
+ const { authenticate } = require("@google-cloud/local-auth");
+ const { google } = require("googleapis");
+
+ // Google Drive config ************************************************************
+ const SCOPES = ["https://www.googleapis.com/auth/drive"];
+ const CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
+ const TOKEN_PATH = path.join(process.cwd(), ".dorky/credentials.json");
+ // ********************************************************************************
+
+ const figlet = `
+ __ __ \t
+ .--| |-----.----| |--.--.--.\t
+ | _ | _ | _| <| | |\t
+ |_____|_____|__| |__|__|___ |\t
+ |_____|\t
+ `;
+ // AWS storage expects AWS_ACCESS_KEY, AWS_SECRET_KEY, AWS_REGION and BUCKET_NAME to be set in the environment.
+ const usage = `${figlet}`;
+ let randomColor = null;
+ do {
+   // Pad to six digits so small random values still form a valid hex color.
+   const randomHex = Math.floor(Math.random() * 16777215).toString(16).padStart(6, "0");
+   randomColor = `#${randomHex}`;
+ } while (randomColor[2] === "f" || randomColor[3] === "f"); // Re-roll overly bright backgrounds.
+ console.log(chalk.bgHex(randomColor)(usage));
+
+ if (process.argv.slice(2).length === 0) {
+   process.argv.push("--help");
+ }
+
+ const args = yargs
+   .option("init", { alias: "i", describe: "Initialize dorky project", type: "string", demandOption: false })
+   .option("list", { alias: "l", describe: "List files in dorky", type: "string", demandOption: false })
+   .option("add", { alias: "a", describe: "Add files to push or pull", type: "array", demandOption: false })
+   .option("rm", { alias: "r", describe: "Remove files from push or pull", type: "array", demandOption: false })
+   .option("push", { alias: "ph", describe: "Push files to storage", type: "string", demandOption: false })
+   .option("pull", { alias: "pl", describe: "Pull files from storage", type: "string", demandOption: false })
+   .option("migrate", { alias: "m", describe: "Migrate dorky project to another storage", type: "string", demandOption: false })
+   .help("help")
+   .strict()
+   .argv;
+
+ // yargs always sets "_" and "$0"; exactly two keys means no options were given.
+ if (Object.keys(args).length == 2) {
+   yargs.showHelp();
+ }
+
+ function setupFilesAndFolders(metaData, credentials) {
+   console.log("Initializing dorky project");
+   if (existsSync(".dorky")) {
+     console.log("Dorky is already initialised in this project.");
+   } else {
+     mkdirSync(".dorky");
+     console.log(chalk.bgGreen("Created .dorky folder."));
+     writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+     console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
+     writeFileSync(".dorkyignore", "");
+     console.log(chalk.bgGreen("Created .dorkyignore file."));
+     writeFileSync(".dorky/credentials.json", JSON.stringify(credentials, null, 2));
+     console.log(chalk.bgGreen("Created .dorky/credentials.json file."));
+   }
+ }
+
+ async function authorizeGoogleDriveClient() {
+   function loadSavedCredentialsIfExist() {
+     try {
+       // readFileSync is synchronous; no await needed.
+       const content = fs.readFileSync(TOKEN_PATH);
+       const credentials = JSON.parse(content);
+       return google.auth.fromJSON(credentials);
+     } catch (err) {
+       return null;
+     }
+   }
+   let client = loadSavedCredentialsIfExist();
+   if (client) {
+     return client;
+   }
+   client = await authenticate({
+     scopes: SCOPES,
+     keyfilePath: CREDENTIALS_PATH,
+   });
+   return client;
+ }
+
+ async function init(storage) {
+   const metaData = { "stage-1-files": {}, "uploaded-files": {} };
+   let credentials;
+   switch (storage) {
+     case "aws":
+       credentials = { storage: "aws", accessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, region: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME };
+       setupFilesAndFolders(metaData, credentials);
+       break;
+     case "google-drive": {
+       const client = await authorizeGoogleDriveClient();
+       credentials = { storage: "google-drive", ...client.credentials };
+       setupFilesAndFolders(metaData, credentials);
+       break;
+     }
+     default:
+       console.log("Please provide a valid storage option <aws|google-drive>");
+       break;
+   }
+ }
+
+ async function list() {
+   console.log(chalk.red("Listing files that can be added:"));
+   // Drop empty lines: file.includes("") is always true and would exclude every file.
+   const exclusions = fs.readFileSync(".dorkyignore").toString().split(EOL).filter((exclusion) => exclusion !== "");
+   const src = process.cwd();
+   const files = await glob(path.join(src, "**/*"));
+   const filteredFiles = files.filter((file) => {
+     for (let i = 0; i < exclusions.length; i++) {
+       if (file.includes(exclusions[i])) return false;
+     }
+     return true;
+   });
+   filteredFiles.forEach((file) => console.log(chalk.red(`- ${file}`)));
+   console.log(chalk.green("\nList of files that are already added:"));
+   const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+   const addedFiles = Object.keys(metaData["stage-1-files"]);
+   addedFiles.forEach((file) => console.log(chalk.green(`- ${file}`)));
+ }
+
+ function add(listOfFiles) {
+   console.log("Adding files to stage-1 to push to storage");
+   const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+   // Skip missing files up front so the success log below only covers real ones.
+   listOfFiles = listOfFiles.filter((file) => {
+     if (!fs.existsSync(file)) {
+       console.log(chalk.red(`File ${file} does not exist.`));
+       return false;
+     }
+     return true;
+   });
+   listOfFiles.forEach((file) => {
+     const fileContents = fs.readFileSync(file);
+     const fileType = mimeTypes.lookup(file);
+     metaData["stage-1-files"][file] = {
+       "mime-type": fileType ? fileType : "application/octet-stream",
+       "hash": md5(fileContents)
+     };
+   });
+   fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+   listOfFiles.map((file) => console.log(chalk.green(`Added ${file} to stage-1.`)));
+ }
+
+ function rm(listOfFiles) {
+   console.log(chalk.red("Removing files from stage-1"));
+   const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+   listOfFiles = listOfFiles.filter((file) => {
+     if (metaData["stage-1-files"][file] == undefined) return false;
+     delete metaData["stage-1-files"][file];
+     return true;
+   });
+   fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+   if (listOfFiles.length) listOfFiles.map((file) => console.log(chalk.red(`Removed ${file} from stage-1.`)));
+   else console.log(chalk.red("No files found that can be removed."));
+ }
+
+ function checkCredentials() {
+   const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
+   // This only checks AWS S3; add a credential checker for google-drive also, fix this => TP | 2024-09-28 16:04:41
+   if (credentials.storage !== "aws") return true;
+   if (credentials.accessKey && credentials.secretKey && credentials.region && credentials.bucket) return true;
+   if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY && process.env.AWS_REGION && process.env.BUCKET_NAME) return true;
+   console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
+   return false;
+ }
+
+ function push() {
+   if (!checkCredentials()) {
+     console.log(chalk.red("Please setup credentials in environment variables or in .dorky/credentials.json"));
+     return;
+   }
+   console.log("Pushing files to storage");
+   const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+   const stage1Files = metaData["stage-1-files"];
+   const pushedFiles = metaData["uploaded-files"];
+   let filesToPush = [];
+   // Push a file only when it is new or its hash changed since the last push.
+   Object.keys(stage1Files).map((file) => {
+     if (pushedFiles[file]) {
+       if (stage1Files[file]["hash"] != pushedFiles[file]["hash"]) filesToPush.push(file);
+     } else filesToPush.push(file);
+   });
+   filesToPush = filesToPush.map((file) => {
+     return {
+       "name": file,
+       "mime-type": stage1Files[file]["mime-type"],
+       "hash": stage1Files[file]["hash"]
+     };
+   });
+   const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
+   switch (credentials.storage) {
+     case "aws":
+       pushToS3(filesToPush, credentials);
+       break;
+     case "google-drive":
+       pushToGoogleDrive(filesToPush);
+       break;
+     default:
+       console.log("Please provide a valid storage option <aws|google-drive>");
+       return;
+   }
+   metaData["uploaded-files"] = metaData["stage-1-files"];
+   fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+   console.log(chalk.green("Pushed files to storage"));
+ }
+
+ function pushToS3(files, credentials) {
+   const s3 = new S3Client({
+     credentials: {
+       accessKeyId: credentials.accessKey ?? process.env.AWS_ACCESS_KEY,
+       secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
+     },
+     region: credentials.region ?? process.env.AWS_REGION
+   });
+   const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
+   Promise.all(files.map(async (file) => {
+     // Objects are keyed under the project folder name, e.g. "my-project/src/a.txt".
+     const rootFolder = path.basename(process.cwd());
+     const pathToFile = path.join(rootFolder, file.name);
+     await s3.send(
+       new PutObjectCommand({
+         Bucket: bucketName,
+         Key: pathToFile,
+         Body: fs.readFileSync(file.name).toString(),
+       })
+     );
+     console.log(chalk.green(`Pushed ${pathToFile} to storage.`));
+   }));
+ }
+
+ async function pushToGoogleDrive(files) {
+   // Walks the folder path segment by segment, creating missing Drive folders.
+   async function getOrCreateFolderId(folderPath, drive) {
+     const folders = folderPath.split(path.sep);
+     let parentId = "root";
+     for (const folder of folders) {
+       const res = await drive.files.list({
+         q: `name='${folder}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents`,
+         fields: "files(id, name)",
+         spaces: "drive"
+       });
+       if (res.data.files.length > 0) {
+         parentId = res.data.files[0].id;
+       } else {
+         const folderMetadata = {
+           name: folder,
+           mimeType: "application/vnd.google-apps.folder",
+           parents: [parentId],
+         };
+         const folderRes = await drive.files.create({
+           requestBody: folderMetadata,
+           fields: "id",
+         });
+         parentId = folderRes.data.id;
+       }
+     }
+     return parentId;
+   }
+   console.log("Uploading to google drive");
+   const client = await authorizeGoogleDriveClient();
+   const drive = google.drive({ version: "v3", auth: client });
+   for (const file of files) {
+     const rootFolder = path.basename(process.cwd());
+     const pathToFile = path.join(rootFolder, file.name);
+     const requestBody = {
+       name: path.basename(file.name),
+       // path.dirname keeps this working with Windows path separators too.
+       parents: [await getOrCreateFolderId(path.dirname(pathToFile), drive)],
+     };
+     const media = {
+       mimeType: file["mime-type"],
+       body: fs.createReadStream(path.join(process.cwd(), file.name)),
+     };
+     try {
+       await drive.files.create({
+         requestBody,
+         media: media,
+         fields: "id",
+       });
+       console.log(chalk.green(`Pushed ${file.name} to storage.`));
+     } catch (err) {
+       console.log(err);
+       throw err;
+     }
+   }
+ }
+
+ function pull() {
+   if (!checkCredentials()) {
+     console.log(chalk.red("Please setup credentials in environment variables or in .dorky/credentials.json"));
+     return;
+   }
+   console.log("Pulling files from storage");
+   const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+   const filesToPull = metaData["uploaded-files"];
+   const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
+   switch (credentials.storage) {
+     case "aws":
+       pullFromS3(filesToPull, credentials);
+       break;
+     case "google-drive":
+       pullFromGoogleDrive(filesToPull);
+       break;
+     default:
+       console.log("Please provide a valid storage option <aws|google-drive>");
+       break;
+   }
+ }
+
+ function pullFromS3(files, credentials) {
+   const s3 = new S3Client({
+     credentials: {
+       accessKeyId: credentials.accessKey ?? process.env.AWS_ACCESS_KEY,
+       secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
+     },
+     region: credentials.region ?? process.env.AWS_REGION
+   });
+   const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
+   Promise.all(Object.keys(files).map(async (file) => {
+     const rootFolder = path.basename(process.cwd());
+     const pathToFile = path.join(rootFolder, file);
+     const { Body } = await s3.send(
+       new GetObjectCommand({
+         Bucket: bucketName,
+         Key: pathToFile,
+       })
+     );
+     // Recreate missing parent directories before writing the file.
+     const dir = path.dirname(file);
+     if (!fs.existsSync(dir)) {
+       fs.mkdirSync(dir, { recursive: true });
+     }
+     fs.writeFileSync(file, await Body.transformToString());
+     console.log(chalk.green(`Pulled ${file} from storage.`));
+   }));
+ }
+
+ async function pullFromGoogleDrive(files) {
+   console.log("Downloading from google drive");
+   files = Object.keys(files).map((file) => {
+     return { name: file, ...files[file] };
+   });
+
+   const client = await authorizeGoogleDriveClient();
+   const drive = google.drive({ version: "v3", auth: client });
+   try {
+     files.map(async (file) => {
+       const res = await drive.files.list({
+         q: `name='${path.basename(file.name)}' and mimeType!='application/vnd.google-apps.folder'`,
+         fields: "files(id, name)",
+         spaces: "drive"
+       });
+       if (res.data.files.length === 0) {
+         console.log(chalk.red(`File ${file.name} not found in Google Drive.`));
+         return;
+       }
+       const _file = await drive.files.get({ fileId: res.data.files[0].id, alt: "media" });
+       const dir = path.dirname(file.name);
+       if (!fs.existsSync(dir)) {
+         fs.mkdirSync(dir, { recursive: true });
+       }
+       fs.writeFileSync(file.name, await _file.data.text(), "utf-8");
+       console.log(chalk.green(`Pulled ${file.name} from storage.`));
+     });
+   } catch (err) {
+     throw err;
+   }
+ }
+
+ if (Object.keys(args).includes("init")) init(args.init);
+ if (Object.keys(args).includes("list")) list();
+ if (Object.keys(args).includes("add")) add(args.add);
+ if (Object.keys(args).includes("rm")) rm(args.rm);
+ if (Object.keys(args).includes("push")) push();
+ if (Object.keys(args).includes("pull")) pull();
+ // TODO: handle --migrate (declared above but not implemented, see To-Do).
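
The TODO inside checkCredentials notes that only AWS credentials are validated. A minimal sketch of what a google-drive branch could look like, assuming the credentials file written by `--init google-drive` (which spreads the OAuth client's token, so a `refresh_token` field should be present after a successful `authenticate` run); this helper is hypothetical and not part of the package:

```js
const fs = require("fs");

// Hypothetical companion to checkCredentials(): validates the google-drive
// credentials that `--init google-drive` writes to .dorky/credentials.json.
function checkGoogleDriveCredentials() {
  const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
  if (credentials.storage !== "google-drive") return false;
  // The refresh token is what lets later runs mint access tokens without
  // prompting the user through the browser again.
  return Boolean(credentials.refresh_token);
}
```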
package/package.json CHANGED
@@ -1,13 +1,13 @@
  {
    "name": "dorky",
-   "version": "1.2.2",
+   "version": "2.0.0",
    "description": "DevOps Records Keeper.",
-   "main": "index.js",
    "bin": {
-     "dorky": "index.js"
+     "dorky": "bin/index.js"
    },
    "scripts": {
-     "test": "echo \"Error: no test specified\""
+     "start": "node bin/index.js",
+     "test": "mocha"
    },
    "repository": {
      "type": "git",
@@ -25,9 +25,19 @@
      "url": "https://github.com/trishantpahwa/dorky/issues"
    },
    "homepage": "https://github.com/trishantpahwa/dorky#readme",
+   "devDependencies": {
+     "tsc": "^2.0.4",
+     "typescript": "^5.5.4"
+   },
    "dependencies": {
-     "@aws-sdk/client-s3": "^3.309.0",
+     "@aws-sdk/client-s3": "^3.658.1",
+     "@google-cloud/local-auth": "^3.0.1",
      "chalk": "^4.1.2",
-     "glob": "^7.2.0"
+     "glob": "^11.0.0",
+     "googleapis": "^144.0.0",
+     "md5": "^2.3.0",
+     "mime-type": "^4.0.0",
+     "mime-types": "^2.1.35",
+     "yargs": "^17.7.2"
    }
  }
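
The new `test` script invokes mocha, but no test files appear in this diff and mocha itself is not listed in devDependencies. A sketch of what a first test could look like, assuming mocha is installed and run from an initialized project; the file name and assertions are illustrative:

```js
// test/metadata.test.js — hypothetical; 2.0.0 ships no tests.
const assert = require("assert");
const fs = require("fs");

describe("metadata.json", function () {
  it("starts with empty stage-1 and uploaded file sets", function () {
    const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
    assert.deepStrictEqual(metaData["stage-1-files"], {});
    assert.deepStrictEqual(metaData["uploaded-files"], {});
  });
});
```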
package/index.js DELETED
@@ -1,547 +0,0 @@
- #!/usr/bin/env node
-
- const glob = require("glob");
- const path = require("path");
- const chalk = require("chalk");
- const fs = require("fs");
- const { EOL } = require("os");
- const {
-   S3Client,
-   ListObjectsCommand,
-   PutObjectCommand,
-   DeleteObjectsCommand,
-   GetObjectCommand,
- } = require("@aws-sdk/client-s3");
- const { exit } = require("process");
- const { createHash } = require("crypto");
-
- let s3Client, bucketName;
-
- // Initializes project, creates a new .dorky folder, and adds a metadata file to it, and creates a .dorkyignore file.
- function initializeProject() {
-   if (fs.existsSync(".dorky")) {
-     console.log(
-       "Dorky project already initialized. Remove .dorky folder to reinitialize."
-     );
-   } else {
-     fs.mkdirSync(".dorky");
-     console.log(chalk.bgGreen("Created .dorky folder."));
-     fs.writeFileSync(
-       ".dorky/metadata.json",
-       JSON.stringify({ "stage-1-files": [], "uploaded-files": [] })
-     );
-     console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
-     fs.writeFileSync(".dorkyignore", "");
-     console.log(chalk.bgGreen("Created .dorkyignore file."));
-     fs.writeFileSync(".dorky/.dorkyhash", "");
-     console.log(chalk.bgGreen("Created .dorkyhash file."));
-   }
- }
-
- // Lists all the files that are not excluded explicitly.
- function listFiles() {
-   let exclusions = fs.readFileSync("./.dorkyignore").toString().split(EOL);
-   exclusions = exclusions.filter((exclusion) => exclusion !== "");
-   if (exclusions[0] == "") exclusions = [];
-   var getDirectories = function (src, callback) {
-     glob(src + "/**/*", callback);
-   };
-
-   function excludeIsPresent(element) {
-     let present = false;
-     let i = 0;
-     while (i < exclusions.length) {
-       if (element.includes(exclusions[i])) present = true;
-       i += 1;
-     }
-     return present;
-   }
-   getDirectories(process.cwd(), function (err, res) {
-     if (err) {
-       console.log("Error", err);
-     } else {
-       let listOfFiles;
-       listOfFiles = res
-         .filter((element) => !excludeIsPresent(element))
-         .map((file) => path.relative(process.cwd(), file));
-       console.log(chalk.green("Found files:"));
-       listOfFiles.map((file) => console.log("\t" + chalk.bgGrey(file)));
-     }
-   });
- }
-
- // Pushes changes to S3 bucket.
- function pushChanges() {
-   console.log("Pushing files to server.");
-   let rootFolder;
-   if (process.cwd().includes("\\")) {
-     rootFolder = process.cwd().split("\\").pop();
-   } else if (process.cwd().includes("/")) {
-     rootFolder = process.cwd().split("/").pop();
-   } else rootFolder = process.cwd();
-   console.log(rootFolder);
-   async function rootFolderExists(rootFolder) {
-     const bucketParams = { Bucket: bucketName };
-     const response = await s3Client.send(new ListObjectsCommand(bucketParams));
-     if (
-       response.Contents.filter(
-         (object) => object.Key.split("/")[0] == rootFolder
-       ).length > 0
-     ) {
-       let metaData = JSON.parse(
-         fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
-       );
-       // Get removed files
-       let removed = metaData["uploaded-files"].filter(
-         (x) => !metaData["stage-1-files"].includes(x)
-       );
-       // Uploaded added files.
-       let added = metaData["stage-1-files"].filter(
-         (x) => !metaData["uploaded-files"].includes(x)
-       );
-
-       added.map(async (file) => {
-         if (metaData["uploaded-files"].includes(file)) return;
-         else {
-           const putObjectParams = {
-             Bucket: bucketName,
-             Key: path
-               .join(rootFolder, path.relative(process.cwd(), file))
-               .split("\\")
-               .join("/"),
-             Body: fs
-               .readFileSync(path.relative(process.cwd(), file))
-               .toString(),
-           };
-           // Upload records
-           try {
-             const uploadResponse = await s3Client.send(
-               new PutObjectCommand(putObjectParams)
-             );
-             if (uploadResponse) console.log(chalk.green("Uploaded " + file));
-           } catch (err) {
-             console.log(
-               "Unable to upload file " +
-                 path
-                   .join(rootFolder, path.relative(process.cwd(), file))
-                   .replace(/\\/g, "/")
-             );
-             console.log(err);
-           }
-           metaData["uploaded-files"].push(file);
-         }
-       });
-
-       if (removed.length) {
-         const removedObjectParams = {
-           Bucket: bucketName,
-           Delete: {
-             Objects: removed.map((file) => {
-               return { Key: file };
-             }),
-             Quiet: true,
-           },
-         };
-
-         // Delete removed records, doesn't delete immediately.
-         try {
-           const deleteResponse = s3Client.send(
-             new DeleteObjectsCommand(removedObjectParams)
-           );
-           if (deleteResponse) {
-             console.log("Deleted removed files:");
-             removed.map((file) => console.log(chalk.bgRed(file)));
-           }
-         } catch (err) {
-           console.log("Unable to delete files.");
-           console.log(err);
-         }
-       }
-       if (metaData["uploaded-files"] != metaData["stage-1-files"]) {
-         metaData["uploaded-files"] = Array.from(
-           new Set(metaData["stage-1-files"])
-         );
-         fs.writeFileSync(
-           path.join(".dorky", "metadata.json"),
-           JSON.stringify(metaData)
-         );
-         putObjectParams = {
-           Bucket: bucketName,
-           Key: path
-             .relative(
-               process.cwd(),
-               path.join(rootFolder.toString(), "metadata.json")
-             )
-             .replace(/\\/g, "/"),
-           Body: JSON.stringify(metaData),
-         };
-         try {
-           const uploadResponse = await s3Client.send(
-             new PutObjectCommand(putObjectParams)
-           );
-           if (uploadResponse)
-             console.log(
-               chalk.green(
-                 "Uploaded " + path.join(rootFolder.toString(), "metadata.json")
-               )
-             );
-         } catch (err) {
-           console.log(
-             "Unable to upload file " +
-               path
-                 .join(
-                   rootFolder,
-                   path.relative(
-                     process.cwd(),
-                     path.join(rootFolder.toString(), "metadata.json")
-                   )
-                 )
-                 .replace(/\\/g, "/")
-           );
-           console.log(err);
-         }
-       } else {
-         console.log("Nothing to push");
-       }
-     } else {
-       let metaData = JSON.parse(
-         fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
-       );
-       metaData["stage-1-files"].map(async (file) => {
-         if (metaData["uploaded-files"].includes(file)) return;
-         else {
-           const putObjectParams = {
-             Bucket: bucketName,
-             Key: path
-               .join(rootFolder, path.relative(process.cwd(), file))
-               .replace(/\\/g, "/"),
-             Body: fs
-               .readFileSync(path.relative(process.cwd(), file))
-               .toString(),
-           };
-           // Upload records
-           try {
-             const uploadResponse = await s3Client.send(
-               new PutObjectCommand(putObjectParams)
-             );
-             if (uploadResponse) console.log(chalk.green("Uploaded " + file));
-           } catch (err) {
-             console.log(
-               "Unable to upload file " +
-                 path
-                   .join(rootFolder, path.relative(process.cwd(), file))
-                   .replace(/\\/g, "/")
-             );
-             console.log(err);
-           }
-           metaData["uploaded-files"].push(file);
-         }
-       });
-       metaData["uploaded-files"] = Array.from(
-         new Set(metaData["uploaded-files"])
-       );
-       fs.writeFileSync(
-         path.join(".dorky", "metadata.json"),
-         JSON.stringify(metaData)
-       );
-       putObjectParams = {
-         Bucket: bucketName,
-         Key: path
-           .relative(
-             process.cwd(),
-             path.join(rootFolder.toString(), "metadata.json")
-           )
-           .replace(/\\/g, "/"),
-         Body: JSON.stringify(metaData),
-       };
-       // Upload metadata.json
-       try {
-         const uploadResponse = await s3Client.send(
-           new PutObjectCommand(putObjectParams)
-         );
-         if (uploadResponse)
-           console.log(
-             chalk.green(
-               "Uploaded " + path.join(rootFolder.toString(), "metadata.json")
-             )
-           );
-       } catch (err) {
-         console.log(
-           "Unable to upload file " +
-             path
-               .join(rootFolder, path.relative(process.cwd(), file))
-               .replace(/\\/g, "/")
-         );
-         console.log(err);
-       }
-     }
-   }
-   rootFolderExists(rootFolder);
- }
-
- async function pullChanges() {
-   console.log("Pulling files from server.");
-   let rootFolder;
-   if (process.cwd().includes("\\")) {
-     rootFolder = process.cwd().split("\\").pop();
-   } else if (process.cwd().includes("/")) {
-     rootFolder = process.cwd().split("/").pop();
-   } else rootFolder = process.cwd();
-   const bucketParams = { Bucket: bucketName };
-   const getObjectsResponse = await s3Client.send(
-     new ListObjectsCommand(bucketParams)
-   );
-   if (
-     getObjectsResponse.Contents.filter(
-       (object) => object.Key.split("/")[0] == rootFolder
-     ).length > 0
-   ) {
-     if (
-       getObjectsResponse.Contents.filter(
-         (object) => object.Key == rootFolder + "/metadata.json"
-       ).length > 0
-     ) {
-       const params = {
-         Bucket: bucketName,
-         Key: rootFolder + "/metadata.json",
-       };
-       s3Client.send(new GetObjectCommand(params), async (err, data) => {
-         if (err) console.error(err);
-         else {
-           let metaData = JSON.parse(await data.Body.transformToString());
-           // Pull metadata.json
-           const METADATA_FILE = ".dorky/metadata.json";
-           fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-           let pullFileParams;
-           metaData["uploaded-files"].map((file) => {
-             pullFileParams = {
-               Bucket: bucketName,
-               Key: rootFolder + "/" + file,
-             };
-             s3Client.send(
-               new GetObjectCommand(pullFileParams),
-               async (err, data) => {
-                 if (err) console.log(err);
-                 else {
-                   console.log("Creating file " + file);
-                   let fileData = await data.Body.transformToString();
-                   let subDirectories;
-                   if (process.cwd().includes("\\")) {
-                     subDirectories = path
-                       .relative(process.cwd(), file)
-                       .split("\\");
-                   } else if (process.cwd().includes("/")) {
-                     subDirectories = path
-                       .relative(process.cwd(), file)
-                       .split("/");
-                   } else subDirectories = path.relative(process.cwd(), file);
-                   subDirectories.pop();
-                   if (process.platform === "win32") {
-                     subDirectories = subDirectories.join("\\");
-                   } else if (
-                     process.platform === "linux" ||
-                     process.platform === "darwin"
-                   ) {
-                     subDirectories = subDirectories.join("/");
-                   }
-                   if (subDirectories.length)
-                     fs.mkdirSync(subDirectories, { recursive: true });
-                   fs.writeFileSync(
-                     path.relative(process.cwd(), file),
-                     fileData
-                   );
-                 }
-               }
-             );
-           });
-         }
-       });
-     } else {
-       console.log("Metadata doesn't exist");
-     }
-   } else {
-     console.error(chalk.red("Failed to pull folder, as it doesn't exist"));
-   }
- }
-
- if (
-   process.env.BUCKET_NAME &&
-   process.env.AWS_ACCESS_KEY &&
-   process.env.AWS_SECRET_KEY &&
-   process.env.AWS_REGION
- ) {
-   bucketName = process.env.BUCKET_NAME;
-   s3Client = new S3Client({
-     region: process.env.AWS_REGION,
-     credentials: {
-       accessKeyId: process.env.AWS_ACCESS_KEY,
-       secretAccessKey: process.env.AWS_SECRET_KEY,
-     },
-   });
-   if (fs.existsSync(".dorky")) {
-     const credentials = [
-       `AWS_ACCESS_KEY=${process.env.AWS_ACCESS_KEY}`,
-       `AWS_SECRET_KEY=${process.env.AWS_SECRET_KEY}`,
-       `AWS_REGION=${process.env.AWS_REGION}`,
-       `BUCKET_NAME=${process.env.BUCKET_NAME}`,
-     ];
-     fs.writeFileSync(".dorky/.credentials", credentials.join("\n"));
-   }
- } else {
-   if (fs.existsSync(".dorky")) {
-     if (fs.existsSync(".dorky/.credentials")) {
-       const credentials = fs
-         .readFileSync(".dorky/.credentials", "utf8")
-         .toString()
-         .split("\n");
-       if (credentials.length < 4) {
-         console.log(
-           chalk.red(
-             "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
-           )
-         );
-         exit();
-       }
-       const region = credentials
-         .filter((credential) => credential.includes("AWS_REGION"))[0]
-         .split("=")[1];
-       const accessKey = credentials
-         .filter((credential) => credential.includes("AWS_ACCESS_KEY"))[0]
-         .split("=")[1];
-       const secretKey = credentials
-         .filter((credential) => credential.includes("AWS_SECRET_KEY"))[0]
-         .split("=")[1];
-       bucketName = credentials
-         .filter((credential) => credential.includes("BUCKET_NAME"))[0]
-         .split("=")[1];
-       s3Client = new S3Client({
-         region: region,
-         credentials: {
-           accessKeyId: accessKey,
-           secretAccessKey: secretKey,
-         },
-       });
-       console.log(chalk.blue("Set credentials from file."));
-     } else {
-       console.log(
-         chalk.red(
-           "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
-         )
-       );
-       exit();
-     }
-   } else {
-     console.log(
-       chalk.red(
-         "Unable to find .dorky folder, please reinitialize the project in the root folder or set the BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION in environment variables."
-       )
-     );
-     exit();
-   }
- }
-
- const args = process.argv.splice(2, 2);
-
- if (args.length == 0) {
-   const figlet = `
- __ __
- .--| .-----.----| |--.--.--.
- | _ | _ | _| <| | |
- |_____|_____|__| |__|__|___ |
- |_____|
- `;
-   console.log(figlet);
-   const helpMessage = `Help message:\ninit\t Initializes a dorky project.\nlist\t Lists files in current root directory.\npush\t Pushes changes to S3 bucket.\npull\t Pulls changes from S3 bucket to local root folder.`;
-   console.log(helpMessage);
- } else if (args.length == 1) {
-   if (args[0] == "init") initializeProject();
-   if (args[0] == "list") listFiles();
-   if (args[0] == "push") pushChanges();
-   if (args[0] == "pull") pullChanges();
- } else if (args.length == 2) {
-   if (args[0] == "add") {
-     const METADATA_FILE = ".dorky/metadata.json";
-     const HASHES_FILE = ".dorky/.dorkyhash";
-     const file = args[1];
-     if (fs.existsSync(file)) {
-       const hashes = {};
-       fs.readFileSync(HASHES_FILE)
-         .toString()
-         .split("\n")
-         .filter((hash) => hash)
-         .map((hash) => {
-           hashes[hash.split("=")[0]] = hash.split("=")[1];
-         });
-       if (Object.keys(hashes).includes(file)) {
-         // File already staged
-         const fileContent = fs.readFileSync(file).toString();
-         const currentHash = createHash("md5")
-           .update(fileContent)
-           .digest("base64")
-           .split("==")[0];
-         const hashToCompare = hashes[file];
-         if (currentHash == hashToCompare) {
-           console.log(
-             chalk.red(
-               `File ${chalk.bgRed(
-                 chalk.white(file)
-               )} hasn't been modified since last push.`
-             )
-           );
-           return;
-         } else {
-           console.log(chalk.green(`Staging ${file} since has been modified.`));
-           hashes[file] = currentHash;
-           const updatedFileContent = Object.entries(hashes).map(
-             (fileAndHash) => {
-               return fileAndHash.join("=");
-             }
-           );
-           fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
-           const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
-           // Clear from uploaded files
-           const uploadedFiles = new Set(metaData["uploaded-files"]);
-           uploadedFiles.delete(file);
-           metaData["uploaded-files"] = Array.from(uploadedFiles);
-           fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-           console.log(
-             `Updated ${chalk.bgGreen(
-               chalk.white(file)
-             )}, ready to push the updates from it.`
-           );
-         }
-       } else {
-         // New file
-         const fileContent = fs.readFileSync(file).toString();
-         hashes[file] = createHash("md5")
-           .update(fileContent)
-           .digest("base64")
-           .split("==")[0];
-         const updatedFileContent = Object.entries(hashes).map((fileAndHash) => {
-           return fileAndHash.join("=");
-         });
-         fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
-         console.log(
-           `Tracking updates from ${chalk.bgGreen(chalk.white(file))}`
-         );
-       }
-       const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
-       const stage1Files = new Set(metaData["stage-1-files"]);
-       stage1Files.add(file);
-       metaData["stage-1-files"] = Array.from(stage1Files);
-       fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-       console.log(chalk.bgGreen("Success"));
-       console.log(chalk.green(`Added file ${file} successfully to stage-1.`));
-     } else {
-       console.log(chalk.bgRed("Error"));
-       console.log(chalk.red(`\tFile ${file} doesn't exist`));
-     }
-   } else if (args[0] == "reset") {
-     const METADATA_FILE = ".dorky/metadata.json";
-     const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
-     const file = args[1];
-     resetFileIndex = metaData["stage-1-files"].indexOf(file);
-     metaData["stage-1-files"].splice(resetFileIndex, 1);
-     fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-   }
- }
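
For contrast with the hex digests that `md5()` produces in 2.0.0, the deleted 1.2.2 implementation stored base64 md5 digests with the trailing `==` padding stripped, one `file=hash` line per entry in `.dorky/.dorkyhash`. A minimal sketch of that legacy scheme, reproduced from the deleted code above:

```js
const { createHash } = require("crypto");

// 1.2.2 hash format: base64-encoded md5 digest with "==" padding removed.
function legacyHash(fileContent) {
  return createHash("md5").update(fileContent).digest("base64").split("==")[0];
}

// One .dorkyhash line per tracked file, e.g. "abc.txt=rL0Y20zC+Fzt72VPzMSk2A"
const line = `abc.txt=${legacyHash("foo")}`;
console.log(line);
```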