dorky 1.0.5 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +510 -264
  2. package/package.json +2 -2
package/index.js CHANGED
@@ -1,293 +1,539 @@
  #!/usr/bin/env node

- const glob = require('glob');
- const path = require('path');
- const chalk = require('chalk');
- const fs = require('fs');
- const { EOL } = require('os');
- var AWS = require('aws-sdk');
- const { exit } = require('process');
+ const glob = require("glob");
+ const path = require("path");
+ const chalk = require("chalk");
+ const fs = require("fs");
+ const { EOL } = require("os");
+ const {
+ S3Client,
+ ListObjectsCommand,
+ PutObjectCommand,
+ DeleteObjectsCommand,
+ GetObjectCommand,
+ } = require("@aws-sdk/client-s3");
+ const { exit } = require("process");
+ const { createHash } = require("crypto");

+ let s3Client, bucketName;

  // Initializes project, creates a new .dorky folder, and adds a metadata file to it, and creates a .dorkyignore file.
  function initializeProject() {
- if (fs.existsSync('.dorky')) {
- console.log('Dorky project already initialized. Remove .dorky folder to reinitialize.')
- } else {
- fs.mkdirSync('.dorky');
- fs.writeFileSync('.dorky/metadata.json', JSON.stringify({ 'stage-1-files': [], 'uploaded-files': [] }));
- if (fs.existsSync('.dorkyignore')) {
- fs.rmdirSync('.dorky');
- console.log('Dorky project already initialized. Remove .dorkyignore file to reinitialize.');
- } else {
- fs.writeFileSync('.dorkyignore', '');
- console.log('Initialized project in current folder(.dorky).');
- }
- }
+ if (fs.existsSync(".dorky")) {
+ console.log(
+ "Dorky project already initialized. Remove .dorky folder to reinitialize."
+ );
+ } else {
+ fs.mkdirSync(".dorky");
+ console.log(chalk.bgGreen("Created .dorky folder."));
+ fs.writeFileSync(
+ ".dorky/metadata.json",
+ JSON.stringify({ "stage-1-files": [], "uploaded-files": [] })
+ );
+ console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
+ fs.writeFileSync(".dorkyignore", "");
+ console.log(chalk.bgGreen("Created .dorkyignore file."));
+ fs.writeFileSync(".dorky/.dorkyhash", "");
+ console.log(chalk.bgGreen("Created .dorkyhash file."));
+ }
  }

  // Lists all the files that are not excluded explicitly.
  function listFiles() {
- let exclusions = fs.readFileSync('./.dorkyignore').toString().split(EOL);
- exclusions = exclusions.filter((exclusion) => exclusion !== '');
- if (exclusions[0] == '') exclusions = [];
- var getDirectories = function (src, callback) {
- glob(src + '/**/*', callback);
- };
+ let exclusions = fs.readFileSync("./.dorkyignore").toString().split(EOL);
+ exclusions = exclusions.filter((exclusion) => exclusion !== "");
+ if (exclusions[0] == "") exclusions = [];
+ var getDirectories = function (src, callback) {
+ glob(src + "/**/*", callback);
+ };
+
+ function excludeIsPresent(element) {
+ let present = false;
+ let i = 0;
+ while (i < exclusions.length) {
+ if (element.includes(exclusions[i])) present = true;
+ i += 1;
+ }
+ return present;
+ }
+ getDirectories(process.cwd(), function (err, res) {
+ if (err) {
+ console.log("Error", err);
+ } else {
+ let listOfFiles;
+ listOfFiles = res
+ .filter((element) => !excludeIsPresent(element))
+ .map((file) => path.relative(process.cwd(), file));
+ console.log(chalk.green("Found files:"));
+ listOfFiles.map((file) => console.log("\t" + chalk.bgGrey(file)));
+ }
+ });
+ }
+
+ // Pushes changes to S3 bucket.
+ function pushChanges() {
+ console.log("Pushing files to server.");
+ let rootFolder;
+ if (process.cwd().includes("\\")) {
+ rootFolder = process.cwd().split("\\").pop();
+ } else if (process.cwd().includes("/")) {
+ rootFolder = process.cwd().split("/").pop();
+ } else rootFolder = process.cwd();
+ console.log(rootFolder);
+ async function rootFolderExists(rootFolder) {
+ const bucketParams = { Bucket: bucketName };
+ const response = await s3Client.send(new ListObjectsCommand(bucketParams));
+ if (
+ response.Contents.filter(
+ (object) => object.Key.split("/")[0] == rootFolder
+ ).length > 0
+ ) {
+ let metaData = JSON.parse(
+ fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
+ );
+ // Get removed files
+ let removed = metaData["uploaded-files"].filter(
+ (x) => !metaData["stage-1-files"].includes(x)
+ );
+ // Uploaded added files.
+ let added = metaData["stage-1-files"].filter(
+ (x) => !metaData["uploaded-files"].includes(x)
+ );
+
+ added.map(async (file) => {
+ if (metaData["uploaded-files"].includes(file)) return;
+ else {
+ const putObjectParams = {
+ Bucket: bucketName,
+ Key: path
+ .join(rootFolder, path.relative(process.cwd(), file))
+ .split("\\")
+ .join("/"),
+ Body: fs
+ .readFileSync(path.relative(process.cwd(), file))
+ .toString(),
+ };
+ // Upload records
+ try {
+ const uploadResponse = await s3Client.send(
+ new PutObjectCommand(putObjectParams)
+ );
+ if (uploadResponse) console.log(chalk.green("Uploaded " + file));
+ } catch (err) {
+ console.log(
+ "Unable to upload file " +
+ path
+ .join(rootFolder, path.relative(process.cwd(), file))
+ .replace(/\\/g, "/")
+ );
+ console.log(err);
+ }
+ metaData["uploaded-files"].push(file);
+ }
+ });

- function excludeIsPresent(element) {
- let present = false;
- let i = 0;
- while (i < exclusions.length) {
- if (element.includes(exclusions[i])) present = true;
- i += 1;
+ if (removed.length) {
+ const removedObjectParams = {
+ Bucket: bucketName,
+ Delete: {
+ Objects: removed.map((file) => {
+ return { Key: file };
+ }),
+ Quiet: true,
+ },
+ };
+
+ // Delete removed records, doesn't delete immediately.
+ try {
+ const deleteResponse = s3Client.send(
+ new DeleteObjectsCommand(removedObjectParams)
+ );
+ if (deleteResponse) {
+ console.log("Deleted removed files:");
+ removed.map((file) => console.log(chalk.bgRed(file)));
+ }
+ } catch (err) {
+ console.log("Unable to delete files.");
+ console.log(err);
+ }
+ }
+ if (metaData["uploaded-files"] != metaData["stage-1-files"]) {
+ metaData["uploaded-files"] = Array.from(
+ new Set(metaData["stage-1-files"])
+ );
+ fs.writeFileSync(
+ path.join(".dorky", "metadata.json"),
+ JSON.stringify(metaData)
+ );
+ putObjectParams = {
+ Bucket: bucketName,
+ Key: path
+ .relative(
+ process.cwd(),
+ path.join(rootFolder.toString(), "metadata.json")
+ )
+ .replace(/\\/g, "/"),
+ Body: JSON.stringify(metaData),
+ };
+ try {
+ const uploadResponse = await s3Client.send(
+ new PutObjectCommand(putObjectParams)
+ );
+ if (uploadResponse)
+ console.log(
+ chalk.green(
+ "Uploaded " + path.join(rootFolder.toString(), "metadata.json")
+ )
+ );
+ } catch (err) {
+ console.log(
+ "Unable to upload file " +
+ path
+ .join(
+ rootFolder,
+ path.relative(
+ process.cwd(),
+ path.join(rootFolder.toString(), "metadata.json")
+ )
+ )
+ .replace(/\\/g, "/")
+ );
+ console.log(err);
+ }
+ } else {
+ console.log("Nothing to push");
+ }
+ } else {
+ let metaData = JSON.parse(
+ fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
+ );
+ metaData["stage-1-files"].map(async (file) => {
+ if (metaData["uploaded-files"].includes(file)) return;
+ else {
+ const putObjectParams = {
+ Bucket: bucketName,
+ Key: path
+ .join(rootFolder, path.relative(process.cwd(), file))
+ .replace(/\\/g, "/"),
+ Body: fs
+ .readFileSync(path.relative(process.cwd(), file))
+ .toString(),
+ };
+ // Upload records
+ try {
+ const uploadResponse = await s3Client.send(
+ new PutObjectCommand(putObjectParams)
+ );
+ if (uploadResponse) console.log(chalk.green("Uploaded " + file));
+ } catch (err) {
+ console.log(
+ "Unable to upload file " +
+ path
+ .join(rootFolder, path.relative(process.cwd(), file))
+ .replace(/\\/g, "/")
+ );
+ console.log(err);
+ }
+ metaData["uploaded-files"].push(file);
  }
- return present;
+ });
+ metaData["uploaded-files"] = Array.from(
+ new Set(metaData["uploaded-files"])
+ );
+ fs.writeFileSync(
+ path.join(".dorky", "metadata.json"),
+ JSON.stringify(metaData)
+ );
+ putObjectParams = {
+ Bucket: bucketName,
+ Key: path
+ .relative(
+ process.cwd(),
+ path.join(rootFolder.toString(), "metadata.json")
+ )
+ .replace(/\\/g, "/"),
+ Body: JSON.stringify(metaData),
+ };
+ // Upload metadata.json
+ try {
+ const uploadResponse = await s3Client.send(
+ new PutObjectCommand(putObjectParams)
+ );
+ if (uploadResponse)
+ console.log(
+ chalk.green(
+ "Uploaded " + path.join(rootFolder.toString(), "metadata.json")
+ )
+ );
+ } catch (err) {
+ console.log(
+ "Unable to upload file " +
+ path
+ .join(rootFolder, path.relative(process.cwd(), file))
+ .replace(/\\/g, "/")
+ );
+ console.log(err);
+ }
  }
- getDirectories(process.cwd(), function (err, res) {
- if (err) {
- console.log('Error', err);
- } else {
- let listOfFiles;
- listOfFiles = res.filter(element => !excludeIsPresent(element)).map(file => path.relative(process.cwd(), file));
- console.log(chalk.green('Found files:'))
- listOfFiles.map((file) => console.log('\t' + chalk.bgGrey(file)));
+ }
+ rootFolderExists(rootFolder);
+ }
+
+ async function pullChanges() {
+ console.log("Pulling files from server.");
+ let rootFolder;
+ if (process.cwd().includes("\\")) {
+ rootFolder = process.cwd().split("\\").pop();
+ } else if (process.cwd().includes("/")) {
+ rootFolder = process.cwd().split("/").pop();
+ } else rootFolder = process.cwd();
+ const bucketParams = { Bucket: bucketName };
+ const getObjectsResponse = await s3Client.send(
+ new ListObjectsCommand(bucketParams)
+ );
+ if (
+ getObjectsResponse.Contents.filter(
+ (object) => object.Key.split("/")[0] == rootFolder
+ ).length > 0
+ ) {
+ if (
+ getObjectsResponse.Contents.filter(
+ (object) => object.Key == rootFolder + "/metadata.json"
+ ).length > 0
+ ) {
+ const params = {
+ Bucket: bucketName,
+ Key: rootFolder + "/metadata.json",
+ };
+ s3Client.send(new GetObjectCommand(params), async (err, data) => {
+ if (err) console.error(err);
+ else {
+ let metaData = JSON.parse(await data.Body.transformToString());
+ // Pull metadata.json
+ const METADATA_FILE = ".dorky/metadata.json";
+ fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+ let pullFileParams;
+ metaData["uploaded-files"].map((file) => {
+ pullFileParams = {
+ Bucket: bucketName,
+ Key: rootFolder + "/" + file,
+ };
+ s3Client.send(
+ new GetObjectCommand(pullFileParams),
+ async (err, data) => {
+ if (err) console.log(err);
+ else {
+ console.log("Creating file " + file);
+ let fileData = await data.Body.transformToString();
+ let subDirectories;
+ if (process.cwd().includes("\\")) {
+ subDirectories = path
+ .relative(process.cwd(), file)
+ .split("\\");
+ } else if (process.cwd().includes("/")) {
+ subDirectories = path
+ .relative(process.cwd(), file)
+ .split("/");
+ } else subDirectories = path.relative(process.cwd(), file);
+ subDirectories.pop();
+ if (process.platform === "win32") {
+ subDirectories = subDirectories.join("\\");
+ } else if (
+ process.platform === "linux" ||
+ process.platform === "darwin"
+ ) {
+ subDirectories = subDirectories.join("/");
+ }
+ if (subDirectories.length)
+ fs.mkdirSync(subDirectories, { recursive: true });
+ fs.writeFileSync(
+ path.relative(process.cwd(), file),
+ fileData
+ );
+ }
+ }
+ );
+ });
  }
- });
+ });
+ } else {
+ console.log("Metadata doesn't exist");
+ }
+ } else {
+ console.error(chalk.red("Failed to pull folder, as it doesn't exist"));
+ }
  }

- if (process.env.BUCKET_NAME && process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY && process.env.AWS_REGION) {
- AWS.config.update({
- accessKeyId: process.env.AWS_ACCESS_KEY,
- secretAccessKey: process.env.AWS_SECRET_KEY,
- region: process.env.AWS_REGION
- });
+ if (
+ process.env.BUCKET_NAME &&
+ process.env.AWS_ACCESS_KEY &&
+ process.env.AWS_SECRET_KEY &&
+ process.env.AWS_REGION
+ ) {
+ bucketName = process.env.BUCKET_NAME;
+ s3Client = new S3Client({
+ region: process.env.AWS_REGION,
+ credentials: {
+ accessKeyId: process.env.AWS_ACCESS_KEY,
+ secretAccessKey: process.env.AWS_SECRET_KEY,
+ },
+ });
+ if (fs.existsSync(".dorky")) {
+ const credentials = [
+ `AWS_ACCESS_KEY=${process.env.AWS_ACCESS_KEY}`,
+ `AWS_SECRET_KEY=${process.env.AWS_SECRET_KEY}`,
+ `AWS_REGION=${process.env.AWS_REGION}`,
+ `BUCKET_NAME=${process.env.BUCKET_NAME}`,
+ ];
+ fs.writeFileSync(".dorky/.credentials", credentials.join("\n"));
+ }
  } else {
- console.log('Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first.')
+ if (fs.existsSync(".dorky")) {
+ if (fs.existsSync(".dorky/.credentials")) {
+ const credentials = fs
+ .readFileSync(".dorky/.credentials", "utf8")
+ .toString()
+ .split("\n");
+ if (credentials.length < 4) {
+ console.log(
+ chalk.red(
+ "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
+ )
+ );
+ exit();
+ }
+ const region = credentials
+ .filter((credential) => credential.includes("AWS_REGION"))[0]
+ .split("=")[1];
+ const accessKey = credentials
+ .filter((credential) => credential.includes("AWS_ACCESS_KEY"))[0]
+ .split("=")[1];
+ const secretKey = credentials
+ .filter((credential) => credential.includes("AWS_SECRET_KEY"))[0]
+ .split("=")[1];
+ bucketName = credentials
+ .filter((credential) => credential.includes("BUCKET_NAME"))[0]
+ .split("=")[1];
+ s3Client = new S3Client({
+ region: region,
+ credentials: {
+ accessKeyId: accessKey,
+ secretAccessKey: secretKey,
+ },
+ });
+ console.log(chalk.blue("Set credentials from file."));
+ } else {
+ console.log(
+ chalk.red(
+ "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
+ )
+ );
+ exit();
+ }
+ } else {
+ console.log(
+ chalk.red(
+ "Unable to find .dorky folder, please reinitialize the project in the root folder or set the BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION in environment variables."
+ )
+ );
  exit();
+ }
  }

  const args = process.argv.splice(2, 2);

  if (args.length == 0) {
- const helpMessage = `Help message:\ninit\t Initializes a dorky project.\nlist\t Lists files in current root directory.\npush\t Pushes changes to S3 bucket.\npull\t Pulls changes from S3 bucket to local root folder.`
- console.log(helpMessage);
+ const helpMessage = `Help message:\ninit\t Initializes a dorky project.\nlist\t Lists files in current root directory.\npush\t Pushes changes to S3 bucket.\npull\t Pulls changes from S3 bucket to local root folder.`;
+ console.log(helpMessage);
  } else if (args.length == 1) {
- if (args[0] == 'init') {
- initializeProject();
- }
- if (args[0] == 'list') {
- listFiles();
- }
- if (args[0] == 'push') {
- console.log('Pushing files to server.');
- let rootFolder;
- if (process.cwd().includes('\\')) {
- rootFolder = process.cwd().split('\\').pop()
- } else if (process.cwd().includes('/')) {
- rootFolder = process.cwd().split('/').pop()
- } else rootFolder = process.cwd()
- console.log(rootFolder)
- function rootFolderExists(rootFolder) {
- let s3 = new AWS.S3();
- const bucketParams = { Bucket: process.env.BUCKET_NAME };
- s3.listObjects(bucketParams, (err, s3Objects) => {
- if (err) console.log(err);
- else {
- if (s3Objects.Contents.filter((object) => object.Key.split('/')[0] == rootFolder).length > 0) {
- let metaData = JSON.parse(fs.readFileSync(path.join('.dorky', 'metadata.json')).toString());
- // Get removed files
- let removed = metaData['uploaded-files'].filter(x => !metaData['stage-1-files'].includes(x));
- // Uploaded added files.
- let added = metaData['stage-1-files'].filter(x => !metaData['uploaded-files'].includes(x));
-
- added.map((file) => {
- if (metaData['uploaded-files'].includes(file)) return;
- else {
- const putObjectParams = {
- Bucket: process.env.BUCKET_NAME,
- Key: path.join(rootFolder, path.relative(process.cwd(), file)).split('\\').join('/'),
- Body: fs.readFileSync(path.relative(process.cwd(), file)).toString()
- }
- // Upload records
- s3.putObject(putObjectParams, (err, data) => {
- if (err) {
- console.log('Unable to upload file ' + path.join(rootFolder, path.relative(process.cwd(), file)).replace(/\\/g, '/'))
- console.log(err);
- }
- else console.log(chalk.green('Uploaded ' + file));
- });
- metaData['uploaded-files'].push(file);
- }
- });
-
- if (removed.length) {
- const removedObjectParams = {
- Bucket: process.env.BUCKET_NAME,
- Delete: {
- Objects: removed.map((file) => {
- return { Key: file };
- }),
- Quiet: true
- }
- }
- // Delete removed records, doesn't delete immediately.
- s3.deleteObjects(removedObjectParams, (err, data) => {
- if (err) console.log(err.stack);
- else console.log('Deleted removed files.');
- });
- }
- if (metaData['uploaded-files'] != metaData['stage-1-files']) {
- metaData['uploaded-files'] = Array.from(new Set(metaData['stage-1-files']));
- fs.writeFileSync(path.join('.dorky', 'metadata.json'), JSON.stringify(metaData));
- putObjectParams = {
- Bucket: process.env.BUCKET_NAME,
- Key: path.relative(process.cwd(), path.join(rootFolder.toString(), 'metadata.json')).replace(/\\/g, '/'),
- Body: JSON.stringify(metaData)
- }
- // Upload metadata.json
- s3.putObject(putObjectParams, (err, data) => {
- if (err) console.log(err);
- else console.log(chalk.green('Uploaded metadata'));
- });
- } else {
- console.log('Nothing to push');
- }
-
- } else {
-
- let metaData = JSON.parse(fs.readFileSync(path.join('.dorky', 'metadata.json')).toString());
- metaData['stage-1-files'].map((file) => {
- if (metaData['uploaded-files'].includes(file)) return;
- else {
- const putObjectParams = {
- Bucket: process.env.BUCKET_NAME,
- Key: path.join(rootFolder, path.relative(process.cwd(), file)).replace(/\\/g, '/'),
- Body: fs.readFileSync(path.relative(process.cwd(), file)).toString()
- }
- // Upload records
- s3.putObject(putObjectParams, (err, data) => {
- if (err) {
- console.log('Unable to upload file ' + path.join(rootFolder, path.relative(process.cwd(), file)).replace(/\\/g, '/'))
- console.log(err);
- }
- else console.log(chalk.green('Uploaded ' + file));
- });
- metaData['uploaded-files'].push(file);
- }
- });
- metaData['uploaded-files'] = Array.from(new Set(metaData['uploaded-files']));
- fs.writeFileSync(path.join('.dorky', 'metadata.json'), JSON.stringify(metaData));
- putObjectParams = {
- Bucket: process.env.BUCKET_NAME,
- Key: path.relative(process.cwd(), path.join(rootFolder.toString(), 'metadata.json')).replace(/\\/g, '/'),
- Body: JSON.stringify(metaData)
- }
- // Upload metadata.json
- s3.putObject(putObjectParams, (err, data) => {
- if (err) console.log(err);
- else console.log(chalk.green('Uploaded metadata'));
- });
- }
- }
- })
-
- }
- rootFolderExists(rootFolder);
- }
- if (args[0] == 'help') {
- const helpMessage = `Help message:\ninit\t Initializes a dorky project.\nlist\t Lists files in current root directory.\npush\t Pushes changes to S3 bucket.\npull\t Pulls changes from S3 bucket to local root folder.`
- console.log(helpMessage);
- }
- if (args[0] == 'pull') {
- console.log('Pulling files from server.')
- let rootFolder;
- if (process.cwd().includes('\\')) {
- rootFolder = process.cwd().split('\\').pop()
- } else if (process.cwd().includes('/')) {
- rootFolder = process.cwd().split('/').pop()
- } else rootFolder = process.cwd()
- let s3 = new AWS.S3();
- const bucketParams = { Bucket: process.env.BUCKET_NAME };
- s3.listObjects(bucketParams, (err, s3Objects) => {
- if (err) console.log(err);
- else {
- if (s3Objects.Contents.filter((object) => object.Key.split('/')[0] == rootFolder).length > 0) {
- if (s3Objects.Contents.filter((object) => object.Key == (rootFolder + '/metadata.json')).length > 0) {
- const params = {
- Bucket: process.env.BUCKET_NAME,
- Key: rootFolder + '/metadata.json'
- }
- s3.getObject(params, (err, data) => {
- if (err) console.error(err);
- else {
- let metaData = JSON.parse(data.Body.toString());
- // Pull metadata.json
- const METADATA_FILE = '.dorky/metadata.json';
- fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
- let pullFileParams;
- metaData['uploaded-files'].map((file) => {
- pullFileParams = {
- Bucket: process.env.BUCKET_NAME,
- Key: rootFolder + '/' + file
- }
- console.log(pullFileParams)
- s3.getObject(pullFileParams, (err, data) => {
- if (err) console.log(err);
- else {
- console.log('Creating file ' + file);
- let fileData = data.Body.toString();
- let subDirectories;
- if (process.cwd().includes('\\')) {
- subDirectories = path.relative(process.cwd(), file).split('\\');
- } else if (process.cwd().includes('/')) {
- subDirectories = path.relative(process.cwd(), file).split('/');
- } else subDirectories = path.relative(process.cwd(), file)
- subDirectories.pop()
- if (process.platform === "win32") {
- subDirectories = subDirectories.join('\\')
- } else if (process.platform === "linux" || process.platform === "darwin") {
- subDirectories = subDirectories.join('/');
- }
- if (subDirectories.length) fs.mkdirSync(subDirectories, { recursive: true });
- fs.writeFileSync(path.relative(process.cwd(), file), fileData);
- }
- })
- });
- }
- });
- } else {
- console.log('Metadata doesn\'t exist')
- }
- } else {
- console.error(chalk.red('Failed to pull folder, as it doesn\'t exist'));
- }
- }
- });
- }
+ if (args[0] == "init") initializeProject();
+ if (args[0] == "list") listFiles();
+ if (args[0] == "push") pushChanges();
+ if (args[0] == "pull") pullChanges();
  } else if (args.length == 2) {
- if (args[0] == 'add') {
- const METADATA_FILE = '.dorky/metadata.json';
- const file = args[1];
- if (fs.existsSync(file)) {
- const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
- const stage1Files = new Set(metaData['stage-1-files']);
- stage1Files.add(file);
- metaData['stage-1-files'] = Array.from(stage1Files);
- fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
- console.log(chalk.bgGreen('Success'));
- console.log(chalk.green(`Added file ${file} successfully to stage-1.`))
+ if (args[0] == "add") {
+ const METADATA_FILE = ".dorky/metadata.json";
+ const HASHES_FILE = ".dorky/.dorkyhash";
+ const file = args[1];
+ if (fs.existsSync(file)) {
+ const hashes = {};
+ fs.readFileSync(HASHES_FILE)
+ .toString()
+ .split("\n")
+ .filter((hash) => hash)
+ .map((hash) => {
+ hashes[hash.split("=")[0]] = hash.split("=")[1];
+ });
+ if (Object.keys(hashes).includes(file)) {
+ // File already staged
+ const fileContent = fs.readFileSync(file).toString();
+ const currentHash = createHash("md5")
+ .update(fileContent)
+ .digest("base64")
+ .split("==")[0];
+ const hashToCompare = hashes[file];
+ if (currentHash == hashToCompare) {
+ console.log(
+ chalk.red(
+ `File ${chalk.bgRed(
+ chalk.white(file)
+ )} hasn\'t been modified since last push.`
+ )
+ );
+ return;
  } else {
- console.log(chalk
- .bgRed('Error'))
- console.log(chalk.red(`\tFile ${file} doesn\'t exist`))
+ console.log(chalk.green(`Staging ${file} since has been modified.`));
+ hashes[file] = currentHash;
+ const updatedFileContent = Object.entries(hashes).map(
+ (fileAndHash) => {
+ return fileAndHash.join("=");
+ }
+ );
+ fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
+ const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
+ // Clear from uploaded files
+ const uploadedFiles = new Set(metaData["uploaded-files"]);
+ uploadedFiles.delete(file);
+ metaData["uploaded-files"] = Array.from(uploadedFiles);
+ fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+ console.log(
+ `Updated ${chalk.bgGreen(
+ chalk.white(file)
+ )}, ready to push the updates from it.`
+ );
  }
- } else if (args[0] == 'reset') {
- const METADATA_FILE = '.dorky/metadata.json';
- const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
- const file = args[1];
- resetFileIndex = metaData['stage-1-files'].indexOf(file);
- metaData['stage-1-files'].splice(resetFileIndex, 1);
- fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+ } else {
+ // New file
+ const fileContent = fs.readFileSync(file).toString();
+ hashes[file] = createHash("md5")
+ .update(fileContent)
+ .digest("base64")
+ .split("==")[0];
+ const updatedFileContent = Object.entries(hashes).map((fileAndHash) => {
+ return fileAndHash.join("=");
+ });
+ fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
+ console.log(
+ `Tracking updates from ${chalk.bgGreen(chalk.white(file))}`
+ );
+ }
+ const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
+ const stage1Files = new Set(metaData["stage-1-files"]);
+ stage1Files.add(file);
+ metaData["stage-1-files"] = Array.from(stage1Files);
+ fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+ console.log(chalk.bgGreen("Success"));
+ console.log(chalk.green(`Added file ${file} successfully to stage-1.`));
+ } else {
+ console.log(chalk.bgRed("Error"));
+ console.log(chalk.red(`\tFile ${file} doesn\'t exist`));
  }
+ } else if (args[0] == "reset") {
+ const METADATA_FILE = ".dorky/metadata.json";
+ const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
+ const file = args[1];
+ resetFileIndex = metaData["stage-1-files"].indexOf(file);
+ metaData["stage-1-files"].splice(resetFileIndex, 1);
+ fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+ }
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "dorky",
- "version": "1.0.5",
+ "version": "1.1.0",
  "description": "DevOps Records Keeper.",
  "main": "index.js",
  "bin": {
@@ -26,7 +26,7 @@
  },
  "homepage": "https://github.com/trishantpahwa/dorky#readme",
  "dependencies": {
- "aws-sdk": "^2.1062.0",
+ "@aws-sdk/client-s3": "^3.309.0",
  "chalk": "^4.1.2",
  "glob": "^7.2.0"
  }
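
Note on the dependency change: aws-sdk v2 exposes callback-style service methods, while @aws-sdk/client-s3 v3 sends command objects through a client and returns promises, which is the pattern the rewritten index.js follows. A minimal, illustrative sketch of the two call styles (the bucket, key, and region values are placeholders, not values taken from this package):

// aws-sdk v2: callback-based service object
const AWS = require("aws-sdk");
const s3 = new AWS.S3({ region: "us-east-1" });
s3.putObject({ Bucket: "example-bucket", Key: "hello.txt", Body: "hi" }, (err, data) => {
  if (err) console.error(err);
  else console.log("uploaded");
});

// @aws-sdk/client-s3 v3: command objects sent through a client, promise-based
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
const client = new S3Client({ region: "us-east-1" });
client
  .send(new PutObjectCommand({ Bucket: "example-bucket", Key: "hello.txt", Body: "hi" }))
  .then(() => console.log("uploaded"))
  .catch((err) => console.error(err));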