dorky 1.0.5 → 1.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +505 -264
- package/package.json +2 -2
package/index.js
CHANGED
@@ -1,293 +1,534 @@
 #!/usr/bin/env node
 
-const glob = require(
-const path = require(
-const chalk = require(
-const fs = require(
-const { EOL } = require(
-
-
+const glob = require("glob");
+const path = require("path");
+const chalk = require("chalk");
+const fs = require("fs");
+const { EOL } = require("os");
+const {
+  S3Client,
+  ListObjectsCommand,
+  PutObjectCommand,
+  DeleteObjectsCommand,
+  GetObjectCommand,
+} = require("@aws-sdk/client-s3");
+const { exit } = require("process");
+const { createHash } = require("crypto");
 
+let s3Client, bucketName;
 
 // Initializes project, creates a new .dorky folder, and adds a metadata file to it, and creates a .dorkyignore file.
 function initializeProject() {
-
-
-
-
-
-
-
-
-
-
-
-
-
+  if (fs.existsSync(".dorky")) {
+    console.log(
+      "Dorky project already initialized. Remove .dorky folder to reinitialize."
+    );
+  } else {
+    fs.mkdirSync(".dorky");
+    console.log(chalk.bgGreen("Created .dorky folder."));
+    fs.writeFileSync(
+      ".dorky/metadata.json",
+      JSON.stringify({ "stage-1-files": [], "uploaded-files": [] })
+    );
+    console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
+    fs.writeFileSync(".dorkyignore", "");
+    console.log(chalk.bgGreen("Created .dorkyignore file."));
+    fs.writeFileSync(".dorky/.dorkyhash", "");
+    console.log(chalk.bgGreen("Created .dorkyhash file."));
+  }
 }
 
 // Lists all the files that are not excluded explicitly.
 function listFiles() {
-
-
-
-
-
-
+  let exclusions = fs.readFileSync("./.dorkyignore").toString().split(EOL);
+  exclusions = exclusions.filter((exclusion) => exclusion !== "");
+  if (exclusions[0] == "") exclusions = [];
+  var getDirectories = function (src, callback) {
+    glob(src + "/**/*", callback);
+  };
+
+  function excludeIsPresent(element) {
+    let present = false;
+    let i = 0;
+    while (i < exclusions.length) {
+      if (element.includes(exclusions[i])) present = true;
+      i += 1;
+    }
+    return present;
+  }
+  getDirectories(process.cwd(), function (err, res) {
+    if (err) {
+      console.log("Error", err);
+    } else {
+      let listOfFiles;
+      listOfFiles = res
+        .filter((element) => !excludeIsPresent(element))
+        .map((file) => path.relative(process.cwd(), file));
+      console.log(chalk.green("Found files:"));
+      listOfFiles.map((file) => console.log("\t" + chalk.bgGrey(file)));
+    }
+  });
+}
+
+// Pushes changes to S3 bucket.
+function pushChanges() {
+  console.log("Pushing files to server.");
+  let rootFolder;
+  if (process.cwd().includes("\\")) {
+    rootFolder = process.cwd().split("\\").pop();
+  } else if (process.cwd().includes("/")) {
+    rootFolder = process.cwd().split("/").pop();
+  } else rootFolder = process.cwd();
+  console.log(rootFolder);
+  async function rootFolderExists(rootFolder) {
+    const bucketParams = { Bucket: bucketName };
+    const response = await s3Client.send(new ListObjectsCommand(bucketParams));
+    if (
+      response.Contents.filter(
+        (object) => object.Key.split("/")[0] == rootFolder
+      ).length > 0
+    ) {
+      let metaData = JSON.parse(
+        fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
+      );
+      // Get removed files
+      let removed = metaData["uploaded-files"].filter(
+        (x) => !metaData["stage-1-files"].includes(x)
+      );
+      // Uploaded added files.
+      let added = metaData["stage-1-files"].filter(
+        (x) => !metaData["uploaded-files"].includes(x)
+      );
+
+      added.map(async (file) => {
+        if (metaData["uploaded-files"].includes(file)) return;
+        else {
+          const putObjectParams = {
+            Bucket: bucketName,
+            Key: path
+              .join(rootFolder, path.relative(process.cwd(), file))
+              .split("\\")
+              .join("/"),
+            Body: fs
+              .readFileSync(path.relative(process.cwd(), file))
+              .toString(),
+          };
+          // Upload records
+          try {
+            const uploadResponse = await s3Client.send(
+              new PutObjectCommand(putObjectParams)
+            );
+            if (uploadResponse) console.log(chalk.green("Uploaded " + file));
+          } catch (err) {
+            console.log(
+              "Unable to upload file " +
+                path
+                  .join(rootFolder, path.relative(process.cwd(), file))
+                  .replace(/\\/g, "/")
+            );
+            console.log(err);
+          }
+          metaData["uploaded-files"].push(file);
+        }
+      });
 
-
-
-
-
-
-
+      if (removed.length) {
+        const removedObjectParams = {
+          Bucket: bucketName,
+          Delete: {
+            Objects: removed.map((file) => {
+              return { Key: file };
+            }),
+            Quiet: true,
+          },
+        };
+
+        // Delete removed records, doesn't delete immediately.
+        try {
+          const deleteResponse = s3Client.send(
+            new DeleteObjectsCommand(removedObjectParams)
+          );
+          if (deleteResponse) {
+            console.log("Deleted removed files:");
+            removed.map((file) => console.log(chalk.bgRed(file)));
+          }
+        } catch (err) {
+          console.log("Unable to delete files.");
+          console.log(err);
+        }
+      }
+      if (metaData["uploaded-files"] != metaData["stage-1-files"]) {
+        metaData["uploaded-files"] = Array.from(
+          new Set(metaData["stage-1-files"])
+        );
+        fs.writeFileSync(
+          path.join(".dorky", "metadata.json"),
+          JSON.stringify(metaData)
+        );
+        putObjectParams = {
+          Bucket: bucketName,
+          Key: path
+            .relative(
+              process.cwd(),
+              path.join(rootFolder.toString(), "metadata.json")
+            )
+            .replace(/\\/g, "/"),
+          Body: JSON.stringify(metaData),
+        };
+        try {
+          const uploadResponse = await s3Client.send(
+            new PutObjectCommand(putObjectParams)
+          );
+          if (uploadResponse)
+            console.log(
+              chalk.green(
+                "Uploaded " + path.join(rootFolder.toString(), "metadata.json")
+              )
+            );
+        } catch (err) {
+          console.log(
+            "Unable to upload file " +
+              path
+                .join(
+                  rootFolder,
+                  path.relative(
+                    process.cwd(),
+                    path.join(rootFolder.toString(), "metadata.json")
+                  )
+                )
+                .replace(/\\/g, "/")
+          );
+          console.log(err);
+        }
+      } else {
+        console.log("Nothing to push");
+      }
+    } else {
+      let metaData = JSON.parse(
+        fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
+      );
+      metaData["stage-1-files"].map(async (file) => {
+        if (metaData["uploaded-files"].includes(file)) return;
+        else {
+          const putObjectParams = {
+            Bucket: bucketName,
+            Key: path
+              .join(rootFolder, path.relative(process.cwd(), file))
+              .replace(/\\/g, "/"),
+            Body: fs
+              .readFileSync(path.relative(process.cwd(), file))
+              .toString(),
+          };
+          // Upload records
+          try {
+            const uploadResponse = await s3Client.send(
+              new PutObjectCommand(putObjectParams)
+            );
+            if (uploadResponse) console.log(chalk.green("Uploaded " + file));
+          } catch (err) {
+            console.log(
+              "Unable to upload file " +
+                path
+                  .join(rootFolder, path.relative(process.cwd(), file))
+                  .replace(/\\/g, "/")
+            );
+            console.log(err);
+          }
+          metaData["uploaded-files"].push(file);
         }
-
+      });
+      metaData["uploaded-files"] = Array.from(
+        new Set(metaData["uploaded-files"])
+      );
+      fs.writeFileSync(
+        path.join(".dorky", "metadata.json"),
+        JSON.stringify(metaData)
+      );
+      putObjectParams = {
+        Bucket: bucketName,
+        Key: path
+          .relative(
+            process.cwd(),
+            path.join(rootFolder.toString(), "metadata.json")
+          )
+          .replace(/\\/g, "/"),
+        Body: JSON.stringify(metaData),
+      };
+      // Upload metadata.json
+      try {
+        const uploadResponse = await s3Client.send(
+          new PutObjectCommand(putObjectParams)
+        );
+        if (uploadResponse) console.log(chalk.green("Uploaded " + file));
+      } catch (err) {
+        console.log(
+          "Unable to upload file " +
+            path
+              .join(rootFolder, path.relative(process.cwd(), file))
+              .replace(/\\/g, "/")
+        );
+        console.log(err);
+      }
     }
-
-
-
-
-
-
-
-
+  }
+  rootFolderExists(rootFolder);
+}
+
+async function pullChanges() {
+  console.log("Pulling files from server.");
+  let rootFolder;
+  if (process.cwd().includes("\\")) {
+    rootFolder = process.cwd().split("\\").pop();
+  } else if (process.cwd().includes("/")) {
+    rootFolder = process.cwd().split("/").pop();
+  } else rootFolder = process.cwd();
+  const bucketParams = { Bucket: bucketName };
+  const getObjectsResponse = await s3Client.send(
+    new ListObjectsCommand(bucketParams)
+  );
+  if (
+    getObjectsResponse.Contents.filter(
+      (object) => object.Key.split("/")[0] == rootFolder
+    ).length > 0
+  ) {
+    if (
+      getObjectsResponse.Contents.filter(
+        (object) => object.Key == rootFolder + "/metadata.json"
+      ).length > 0
+    ) {
+      const params = {
+        Bucket: bucketName,
+        Key: rootFolder + "/metadata.json",
+      };
+      s3Client.send(new GetObjectCommand(params), async (err, data) => {
+        if (err) console.error(err);
+        else {
+          let metaData = JSON.parse(await data.Body.transformToString());
+          // Pull metadata.json
+          const METADATA_FILE = ".dorky/metadata.json";
+          fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+          let pullFileParams;
+          metaData["uploaded-files"].map((file) => {
+            pullFileParams = {
+              Bucket: bucketName,
+              Key: rootFolder + "/" + file,
+            };
+            s3Client.send(
+              new GetObjectCommand(pullFileParams),
+              async (err, data) => {
+                if (err) console.log(err);
+                else {
+                  console.log("Creating file " + file);
+                  let fileData = await data.Body.transformToString();
+                  let subDirectories;
+                  if (process.cwd().includes("\\")) {
+                    subDirectories = path
+                      .relative(process.cwd(), file)
+                      .split("\\");
+                  } else if (process.cwd().includes("/")) {
+                    subDirectories = path
+                      .relative(process.cwd(), file)
+                      .split("/");
+                  } else subDirectories = path.relative(process.cwd(), file);
+                  subDirectories.pop();
+                  if (process.platform === "win32") {
+                    subDirectories = subDirectories.join("\\");
+                  } else if (
+                    process.platform === "linux" ||
+                    process.platform === "darwin"
+                  ) {
+                    subDirectories = subDirectories.join("/");
+                  }
+                  if (subDirectories.length)
+                    fs.mkdirSync(subDirectories, { recursive: true });
+                  fs.writeFileSync(
+                    path.relative(process.cwd(), file),
+                    fileData
+                  );
+                }
+              }
+            );
+          });
         }
-
+      });
+    } else {
+      console.log("Metadata doesn't exist");
+    }
+  } else {
+    console.error(chalk.red("Failed to pull folder, as it doesn't exist"));
+  }
 }
 
-if (
-
-
-
-
-
+if (
+  process.env.BUCKET_NAME &&
+  process.env.AWS_ACCESS_KEY &&
+  process.env.AWS_SECRET_KEY &&
+  process.env.AWS_REGION
+) {
+  bucketName = process.env.BUCKET_NAME;
+  s3Client = new S3Client({
+    region: process.env.AWS_REGION,
+    credentials: {
+      accessKeyId: process.env.AWS_ACCESS_KEY,
+      secretAccessKey: process.env.AWS_SECRET_KEY,
+    },
+  });
+  if (fs.existsSync(".dorky")) {
+    const credentials = [
+      `AWS_ACCESS_KEY=${process.env.AWS_ACCESS_KEY}`,
+      `AWS_SECRET_KEY=${process.env.AWS_SECRET_KEY}`,
+      `AWS_REGION=${process.env.AWS_REGION}`,
+      `BUCKET_NAME=${process.env.BUCKET_NAME}`,
+    ];
+    fs.writeFileSync(".dorky/.credentials", credentials.join("\n"));
+  }
 } else {
-
+  if (fs.existsSync(".dorky")) {
+    if (fs.existsSync(".dorky/.credentials")) {
+      const credentials = fs
+        .readFileSync(".dorky/.credentials", "utf8")
+        .toString()
+        .split("\n");
+      if (credentials.length < 4) {
+        console.log(
+          chalk.red(
+            "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
+          )
+        );
+        exit();
+      }
+      const region = credentials
+        .filter((credential) => credential.includes("AWS_REGION"))[0]
+        .split("=")[1];
+      const accessKey = credentials
+        .filter((credential) => credential.includes("AWS_ACCESS_KEY"))[0]
+        .split("=")[1];
+      const secretKey = credentials
+        .filter((credential) => credential.includes("AWS_SECRET_KEY"))[0]
+        .split("=")[1];
+      bucketName = credentials
+        .filter((credential) => credential.includes("BUCKET_NAME"))[0]
+        .split("=")[1];
+      s3Client = new S3Client({
+        region: region,
+        credentials: {
+          accessKeyId: accessKey,
+          secretAccessKey: secretKey,
+        },
+      });
+      console.log(chalk.blue("Set credentials from file."));
+    } else {
+      console.log(
+        chalk.red(
+          "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
+        )
+      );
+      exit();
+    }
+  } else {
+    console.log(
+      chalk.red(
+        "Unable to find .dorky folder, please reinitialize the project in the root folder."
+      )
+    );
     exit();
+  }
 }
 
 const args = process.argv.splice(2, 2);
 
 if (args.length == 0) {
-
-
+  const helpMessage = `Help message:\ninit\t Initializes a dorky project.\nlist\t Lists files in current root directory.\npush\t Pushes changes to S3 bucket.\npull\t Pulls changes from S3 bucket to local root folder.`;
+  console.log(helpMessage);
 } else if (args.length == 1) {
-
-
-
-
-        listFiles();
-    }
-    if (args[0] == 'push') {
-        console.log('Pushing files to server.');
-        let rootFolder;
-        if (process.cwd().includes('\\')) {
-            rootFolder = process.cwd().split('\\').pop()
-        } else if (process.cwd().includes('/')) {
-            rootFolder = process.cwd().split('/').pop()
-        } else rootFolder = process.cwd()
-        console.log(rootFolder)
-        function rootFolderExists(rootFolder) {
-            let s3 = new AWS.S3();
-            const bucketParams = { Bucket: process.env.BUCKET_NAME };
-            s3.listObjects(bucketParams, (err, s3Objects) => {
-                if (err) console.log(err);
-                else {
-                    if (s3Objects.Contents.filter((object) => object.Key.split('/')[0] == rootFolder).length > 0) {
-                        let metaData = JSON.parse(fs.readFileSync(path.join('.dorky', 'metadata.json')).toString());
-                        // Get removed files
-                        let removed = metaData['uploaded-files'].filter(x => !metaData['stage-1-files'].includes(x));
-                        // Uploaded added files.
-                        let added = metaData['stage-1-files'].filter(x => !metaData['uploaded-files'].includes(x));
-
-                        added.map((file) => {
-                            if (metaData['uploaded-files'].includes(file)) return;
-                            else {
-                                const putObjectParams = {
-                                    Bucket: process.env.BUCKET_NAME,
-                                    Key: path.join(rootFolder, path.relative(process.cwd(), file)).split('\\').join('/'),
-                                    Body: fs.readFileSync(path.relative(process.cwd(), file)).toString()
-                                }
-                                // Upload records
-                                s3.putObject(putObjectParams, (err, data) => {
-                                    if (err) {
-                                        console.log('Unable to upload file ' + path.join(rootFolder, path.relative(process.cwd(), file)).replace(/\\/g, '/'))
-                                        console.log(err);
-                                    }
-                                    else console.log(chalk.green('Uploaded ' + file));
-                                });
-                                metaData['uploaded-files'].push(file);
-                            }
-                        });
-
-                        if (removed.length) {
-                            const removedObjectParams = {
-                                Bucket: process.env.BUCKET_NAME,
-                                Delete: {
-                                    Objects: removed.map((file) => {
-                                        return { Key: file };
-                                    }),
-                                    Quiet: true
-                                }
-                            }
-                            // Delete removed records, doesn't delete immediately.
-                            s3.deleteObjects(removedObjectParams, (err, data) => {
-                                if (err) console.log(err.stack);
-                                else console.log('Deleted removed files.');
-                            });
-                        }
-                        if (metaData['uploaded-files'] != metaData['stage-1-files']) {
-                            metaData['uploaded-files'] = Array.from(new Set(metaData['stage-1-files']));
-                            fs.writeFileSync(path.join('.dorky', 'metadata.json'), JSON.stringify(metaData));
-                            putObjectParams = {
-                                Bucket: process.env.BUCKET_NAME,
-                                Key: path.relative(process.cwd(), path.join(rootFolder.toString(), 'metadata.json')).replace(/\\/g, '/'),
-                                Body: JSON.stringify(metaData)
-                            }
-                            // Upload metadata.json
-                            s3.putObject(putObjectParams, (err, data) => {
-                                if (err) console.log(err);
-                                else console.log(chalk.green('Uploaded metadata'));
-                            });
-                        } else {
-                            console.log('Nothing to push');
-                        }
-
-                    } else {
-
-                        let metaData = JSON.parse(fs.readFileSync(path.join('.dorky', 'metadata.json')).toString());
-                        metaData['stage-1-files'].map((file) => {
-                            if (metaData['uploaded-files'].includes(file)) return;
-                            else {
-                                const putObjectParams = {
-                                    Bucket: process.env.BUCKET_NAME,
-                                    Key: path.join(rootFolder, path.relative(process.cwd(), file)).replace(/\\/g, '/'),
-                                    Body: fs.readFileSync(path.relative(process.cwd(), file)).toString()
-                                }
-                                // Upload records
-                                s3.putObject(putObjectParams, (err, data) => {
-                                    if (err) {
-                                        console.log('Unable to upload file ' + path.join(rootFolder, path.relative(process.cwd(), file)).replace(/\\/g, '/'))
-                                        console.log(err);
-                                    }
-                                    else console.log(chalk.green('Uploaded ' + file));
-                                });
-                                metaData['uploaded-files'].push(file);
-                            }
-                        });
-                        metaData['uploaded-files'] = Array.from(new Set(metaData['uploaded-files']));
-                        fs.writeFileSync(path.join('.dorky', 'metadata.json'), JSON.stringify(metaData));
-                        putObjectParams = {
-                            Bucket: process.env.BUCKET_NAME,
-                            Key: path.relative(process.cwd(), path.join(rootFolder.toString(), 'metadata.json')).replace(/\\/g, '/'),
-                            Body: JSON.stringify(metaData)
-                        }
-                        // Upload metadata.json
-                        s3.putObject(putObjectParams, (err, data) => {
-                            if (err) console.log(err);
-                            else console.log(chalk.green('Uploaded metadata'));
-                        });
-                    }
-                }
-            })
-
-        }
-        rootFolderExists(rootFolder);
-    }
-    if (args[0] == 'help') {
-        const helpMessage = `Help message:\ninit\t Initializes a dorky project.\nlist\t Lists files in current root directory.\npush\t Pushes changes to S3 bucket.\npull\t Pulls changes from S3 bucket to local root folder.`
-        console.log(helpMessage);
-    }
-    if (args[0] == 'pull') {
-        console.log('Pulling files from server.')
-        let rootFolder;
-        if (process.cwd().includes('\\')) {
-            rootFolder = process.cwd().split('\\').pop()
-        } else if (process.cwd().includes('/')) {
-            rootFolder = process.cwd().split('/').pop()
-        } else rootFolder = process.cwd()
-        let s3 = new AWS.S3();
-        const bucketParams = { Bucket: process.env.BUCKET_NAME };
-        s3.listObjects(bucketParams, (err, s3Objects) => {
-            if (err) console.log(err);
-            else {
-                if (s3Objects.Contents.filter((object) => object.Key.split('/')[0] == rootFolder).length > 0) {
-                    if (s3Objects.Contents.filter((object) => object.Key == (rootFolder + '/metadata.json')).length > 0) {
-                        const params = {
-                            Bucket: process.env.BUCKET_NAME,
-                            Key: rootFolder + '/metadata.json'
-                        }
-                        s3.getObject(params, (err, data) => {
-                            if (err) console.error(err);
-                            else {
-                                let metaData = JSON.parse(data.Body.toString());
-                                // Pull metadata.json
-                                const METADATA_FILE = '.dorky/metadata.json';
-                                fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-                                let pullFileParams;
-                                metaData['uploaded-files'].map((file) => {
-                                    pullFileParams = {
-                                        Bucket: process.env.BUCKET_NAME,
-                                        Key: rootFolder + '/' + file
-                                    }
-                                    console.log(pullFileParams)
-                                    s3.getObject(pullFileParams, (err, data) => {
-                                        if (err) console.log(err);
-                                        else {
-                                            console.log('Creating file ' + file);
-                                            let fileData = data.Body.toString();
-                                            let subDirectories;
-                                            if (process.cwd().includes('\\')) {
-                                                subDirectories = path.relative(process.cwd(), file).split('\\');
-                                            } else if (process.cwd().includes('/')) {
-                                                subDirectories = path.relative(process.cwd(), file).split('/');
-                                            } else subDirectories = path.relative(process.cwd(), file)
-                                            subDirectories.pop()
-                                            if (process.platform === "win32") {
-                                                subDirectories = subDirectories.join('\\')
-                                            } else if (process.platform === "linux" || process.platform === "darwin") {
-                                                subDirectories = subDirectories.join('/');
-                                            }
-                                            if (subDirectories.length) fs.mkdirSync(subDirectories, { recursive: true });
-                                            fs.writeFileSync(path.relative(process.cwd(), file), fileData);
-                                        }
-                                    })
-                                });
-                            }
-                        });
-                    } else {
-                        console.log('Metadata doesn\'t exist')
-                    }
-                } else {
-                    console.error(chalk.red('Failed to pull folder, as it doesn\'t exist'));
-                }
-            }
-        });
-    }
+  if (args[0] == "init") initializeProject();
+  if (args[0] == "list") listFiles();
+  if (args[0] == "push") pushChanges();
+  if (args[0] == "pull") pullChanges();
 } else if (args.length == 2) {
-
-
-
-
-
-
-
-
-
-
-
+  if (args[0] == "add") {
+    const METADATA_FILE = ".dorky/metadata.json";
+    const HASHES_FILE = ".dorky/.dorkyhash";
+    const file = args[1];
+    if (fs.existsSync(file)) {
+      const hashes = {};
+      fs.readFileSync(HASHES_FILE)
+        .toString()
+        .split("\n")
+        .filter((hash) => hash)
+        .map((hash) => {
+          hashes[hash.split("=")[0]] = hash.split("=")[1];
+        });
+      if (Object.keys(hashes).includes(file)) {
+        // File already staged
+        const fileContent = fs.readFileSync(file).toString();
+        const currentHash = createHash("md5")
+          .update(fileContent)
+          .digest("base64")
+          .split("==")[0];
+        const hashToCompare = hashes[file];
+        if (currentHash == hashToCompare) {
+          console.log(
+            chalk.red(
+              `File ${chalk.bgRed(
+                chalk.white(file)
+              )} hasn\'t been modified since last push.`
+            )
+          );
+          return;
         } else {
-
-
-
+          console.log(chalk.green(`Staging ${file} since has been modified.`));
+          hashes[file] = currentHash;
+          const updatedFileContent = Object.entries(hashes).map(
+            (fileAndHash) => {
+              return fileAndHash.join("=");
+            }
+          );
+          fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
+          const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
+          // Clear from uploaded files
+          const uploadedFiles = new Set(metaData["uploaded-files"]);
+          uploadedFiles.delete(file);
+          metaData["uploaded-files"] = Array.from(uploadedFiles);
+          fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+          console.log(
+            `Updated ${chalk.bgGreen(
+              chalk.white(file)
+            )}, ready to push the updates from it.`
+          );
         }
-
-
-const
-
-
-
-
+      } else {
+        // New file
+        const fileContent = fs.readFileSync(file).toString();
+        hashes[file] = createHash("md5")
+          .update(fileContent)
+          .digest("base64")
+          .split("==")[0];
+        const updatedFileContent = Object.entries(hashes).map((fileAndHash) => {
+          return fileAndHash.join("=");
+        });
+        fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
+        console.log(
+          `Tracking updates from ${chalk.bgGreen(chalk.white(file))}`
+        );
+      }
+      const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
+      const stage1Files = new Set(metaData["stage-1-files"]);
+      stage1Files.add(file);
+      metaData["stage-1-files"] = Array.from(stage1Files);
+      fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+      console.log(chalk.bgGreen("Success"));
+      console.log(chalk.green(`Added file ${file} successfully to stage-1.`));
+    } else {
+      console.log(chalk.bgRed("Error"));
+      console.log(chalk.red(`\tFile ${file} doesn\'t exist`));
     }
+  } else if (args[0] == "reset") {
+    const METADATA_FILE = ".dorky/metadata.json";
+    const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
+    const file = args[1];
+    resetFileIndex = metaData["stage-1-files"].indexOf(file);
+    metaData["stage-1-files"].splice(resetFileIndex, 1);
+    fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
+  }
 }
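The substance of the index.js rewrite is a migration from the callback-style aws-sdk v2 client (s3.listObjects, s3.putObject, s3.getObject, s3.deleteObjects) to the modular AWS SDK for JavaScript v3: each operation becomes a command object dispatched through S3Client.send(), and GetObject response bodies are streams read with transformToString() instead of data.Body.toString(). A minimal sketch of that v3 call pattern follows; the region, bucket, and key are placeholders, not values taken from the package:

    const {
      S3Client,
      PutObjectCommand,
      GetObjectCommand,
    } = require("@aws-sdk/client-s3");

    // Placeholder configuration; dorky resolves its own values from
    // environment variables or the .dorky/.credentials file.
    const s3Client = new S3Client({ region: "us-east-1" });
    const Bucket = "example-bucket";

    async function roundTrip() {
      // v2: s3.putObject(params, callback)
      // v3: await s3Client.send(new PutObjectCommand(params))
      await s3Client.send(
        new PutObjectCommand({ Bucket, Key: "demo.txt", Body: "hello" })
      );
      // v3 response bodies are streams, so transformToString()
      // replaces the v2 data.Body.toString() idiom.
      const { Body } = await s3Client.send(
        new GetObjectCommand({ Bucket, Key: "demo.txt" })
      );
      console.log(await Body.transformToString());
    }

    roundTrip().catch(console.error);

Because send() returns a promise, the new push and pull paths in the diff are wrapped in async functions with try/catch blocks where the v2 code passed error-first callbacks.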
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "dorky",
-  "version": "1.0.5",
+  "version": "1.0.6",
   "description": "DevOps Records Keeper.",
   "main": "index.js",
   "bin": {
@@ -26,7 +26,7 @@
   },
   "homepage": "https://github.com/trishantpahwa/dorky#readme",
   "dependencies": {
-    "aws-sdk": "^
+    "@aws-sdk/client-s3": "^3.309.0",
     "chalk": "^4.1.2",
     "glob": "^7.2.0"
   }