dorky 1.2.2 → 2.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +61 -14
- package/bin/index.js +394 -0
- package/google-drive-credentials.json +16 -0
- package/package.json +17 -7
- package/index.js +0 -547
package/README.md
CHANGED

@@ -1,6 +1,7 @@
 # dorky
+
 ```
- __ __
+ __ __
 .--| .-----.----| |--.--.--.
 | _ | _ | _| <| | |
 |_____|_____|__| |__|__|___ |
@@ -17,9 +18,11 @@ Let us assume that we need to create a project.
 
 The project obviously contains some code and some secret variables like database information, API keys, etc. This data cannot be shared on public VCS (GitHub), but at times is required to be accessible remotely to be shared among reliable sources.
 
-Anyhow, we shall store it on a private storage, using **dorky**, that stores it on a S3.
+Anyhow, we shall store it on a private storage, using **dorky**, that stores it on a S3 or Google-Drive.
+
+## AWS S3
 
-
+### Steps to use with S3:
 
 > Create a S3 bucket, AWS_ACCESS_KEY and AWS_SECRET_KEY.
 
@@ -27,16 +30,60 @@ Anyhow, we shall store it on a private storage, using **dorky**, that stores it
 
 > Please use your own repository, this repository `sample_project` is just for demonstration.
 
-
-
-
-
-
+#### To list files in the project.
+
+1. Initialize dorky setup in the root folder of your project, using `dorky --init aws`.
+2. List the files using `dorky --list`.
+> This command will not list the files that are excluded in `.dorkyignore`.
+
+#### To push files to S3 bucket.
+
+1. Initialize dorky setup in the root folder of your project, using `dorky --init aws`.
+2. List the files using `dorky --list`, (make sure to add excluded file or folder patterns to .dorkyignore, to minimize the list).
+3. Add files to stage-1 using `dorky --add file-name`.
+4. Push files to S3 bucket using `dorky --push`.
+
+#### To remove a file from project.
+
+1. Remove files using `dorky --rm file-name`. [Removes file from stage-1 <local>]
+2. Push files to S3 bucket using `dorky --push`. [Removes file from S3 bucket <remote>]
+
+#### To pull files from S3 bucket.
+
+1. Use `dorky --pull` to pull the files from S3 bucket.
+
+## Google Drive
+
+### Steps to use with Google Drive:
+
+
+
+> Please use your own repository, this repository `sample_project` is just for demonstration.
+
+#### To list files in the project.
+
+1. Initialize dorky setup in the root folder of your project, using `dorky --init google-drive`.
+2. List the files using `dorky --list`.
+> This command will not list the files that are excluded in `.dorkyignore`.
+
+#### To push files to Google Drive.
+
+1. Initialize dorky setup in the root folder of your project, using `dorky --init google-drive`.
+2. List the files using `dorky --list`, (make sure to add excluded file or folder patterns to .dorkyignore, to minimize the list).
+3. Add files to stage-1 using `dorky --add file-name`.
+4. Push files to Google Drive using `dorky --push`.
+
+#### To remove a file from project.
+
+1. Remove files using `dorky --rm file-name`. [Removes file from stage-1 <local>]
+2. Push files to Google Drive using `dorky --push`. [Removes file from Google Drive <remote>]
+
+#### To pull files from Google Drive.
+
+1. Use `dorky --pull` to pull the files from Google Drive.
+
+## To-Do
 
-
-
-2. Push files to S3 bucket using `dorky push`.
+[ ] Add stages for variables.
+[ ] Convert to TypeScript.
 
-### To pull files from S3 bucket.
-1. Initialize dorky project using `dorky init`.
-2. Use `dorky pull` to pull the files from S3 bucket.
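The workflow the new README describes is bookkept in `.dorky/metadata.json`. A minimal sketch of its shape after `dorky --add`, matching the `add()` and `push()` implementations in `bin/index.js` below (the file name and hash value are illustrative):

```js
// Sketch of .dorky/metadata.json after `dorky --add .env` (illustrative values).
// add() fills "stage-1-files" with a mime type and an md5 of the file contents;
// push() copies the staged entries into "uploaded-files" once uploads finish.
const metadata = {
    "stage-1-files": {
        ".env": {
            "mime-type": "application/octet-stream", // mimeTypes.lookup() fallback
            "hash": "d41d8cd98f00b204e9800998ecf8427e" // md5 of the file contents
        }
    },
    "uploaded-files": {}
};
```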
package/bin/index.js
ADDED

@@ -0,0 +1,394 @@
+#!/usr/bin/env node
+
+const yargs = require("yargs");
+const { existsSync, mkdirSync, writeFileSync } = require("fs");
+const chalk = require("chalk");
+const { glob } = require("glob");
+const path = require("path");
+const fs = require("fs");
+const mimeTypes = require("mime-types");
+const md5 = require('md5');
+const EOL = require("os").type() == "Darwin" ? "\r\n" : "\n";
+const { GetObjectCommand, PutObjectCommand, S3Client } = require("@aws-sdk/client-s3");
+const { authenticate } = require('@google-cloud/local-auth');
+const { google } = require('googleapis');
+
+// Google Drive config ************************************************************
+const SCOPES = ['https://www.googleapis.com/auth/drive'];
+const CREDENTIALS_PATH = path.join(__dirname, "../google-drive-credentials.json");
+const TOKEN_PATH = path.join(process.cwd(), '.dorky/credentials.json');
+// ********************************************************************************
+
+const figlet = `
+ __ __ \t
+.--| |-----.----| |--.--.--.\t
+| _ | _ | _| <| | |\t
+|_____|_____|__| |__|__|___ |\t
+ |_____|\t
+`;
+// Should display the process.env.AWS_ACCESS_KEY, process.env.AWS_SECRET_KEY, process.env.AWS_REGION, process.env.BUCKET_NAME to be set
+const usage = `${figlet}`;
+let randomColor = null;
+do {
+    const randomHex = Math.floor(Math.random() * 16777215).toString(16);
+    randomColor = `#${randomHex}`;
+} while (randomColor[2] === "f" || randomColor[3] === "f");
+console.log(chalk.bgHex(randomColor)(usage));
+
+if (process.argv.slice(2).length === 0) {
+    process.argv.push("--help");
+}
+
+var args = yargs
+    .option("init", { alias: "i", describe: "Initialize dorky project", type: "string", demandOption: false })
+    .option("list", { alias: "l", describe: "List files in dorky", type: "string", demandOption: false })
+    .option("add", { alias: "a", describe: "Add files to push or pull", type: "array", demandOption: false })
+    .option("rm", { alias: "r", describe: "Remove files from push or pull", type: "array", demandOption: false })
+    .option("push", { alias: "ph", describe: "Push files to storage", type: "string", demandOption: false })
+    .option("pull", { alias: "pl", describe: "Pull files from storage", type: "string", demandOption: false })
+    .option("migrate", { alias: "m", describe: "Migrate dorky project to another storage", type: "string", demandOption: false })
+    .help('help')
+    .strict()
+    .argv
+
+if (Object.keys(args).length == 2) {
+    yargs.showHelp()
+}
+
+function checkIfDorkyProject() {
+    if (!existsSync(".dorky") && !existsSync(".dorkyignore")) {
+        console.log(chalk.red("This is not a dorky project. Please run `dorky --init [aws|google-drive]` to initialize a dorky project."));
+        process.exit(1);
+    }
+}
+
+function setupFilesAndFolders(metaData, credentials) {
+    console.log("Initializing dorky project");
+    if (existsSync(".dorky")) {
+        console.log("Dorky is already initialised in this project.");
+    } else {
+        mkdirSync(".dorky");
+        console.log(chalk.bgGreen("Created .dorky folder."));
+        writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+        console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
+        writeFileSync(".dorkyignore", "");
+        console.log(chalk.bgGreen("Created .dorkyignore file."));
+        writeFileSync(".dorky/credentials.json", JSON.stringify(credentials, null, 2));
+        console.log(chalk.bgGreen("Created .dorky/credentials.json file."));
+    }
+}
+
+async function authorizeGoogleDriveClient() {
+    async function loadSavedCredentialsIfExist() {
+        try {
+            const content = await fs.readFileSync(TOKEN_PATH);
+            const credentials = JSON.parse(content);
+            return google.auth.fromJSON(credentials);
+        } catch (err) {
+            return null;
+        }
+    }
+    let client = await loadSavedCredentialsIfExist();
+    if (client) {
+        return client;
+    }
+    client = await authenticate({
+        scopes: SCOPES,
+        keyfilePath: CREDENTIALS_PATH,
+    });
+    return client;
+}
+
+async function init(storage) {
+    const metaData = { "stage-1-files": {}, "uploaded-files": {} };
+    var credentials;
+    switch (storage) {
+        case "aws":
+            credentials = { storage: "aws", acessKey: process.env.AWS_ACCESS_KEY, secretKey: process.env.AWS_SECRET_KEY, region: process.env.AWS_REGION, bucket: process.env.BUCKET_NAME }
+            setupFilesAndFolders(metaData, credentials);
+            break;
+        case "google-drive":
+            const client = await authorizeGoogleDriveClient();
+            credentials = { storage: "google-drive", ...client.credentials };
+            setupFilesAndFolders(metaData, credentials);
+            break;
+        default:
+            console.log("Please provide a valid storage option <aws|google-drive>");
+            break;
+    }
+}
+
+async function list() {
+    checkIfDorkyProject();
+    console.log(chalk.red("Listing files that can be added:"));
+    var exclusions = fs.readFileSync(".dorkyignore").toString().split(EOL);
+    exclusions = exclusions.filter((exclusion) => exclusion !== "");
+    const src = process.cwd();
+    const files = await glob(path.join(src, "**/*"), { dot: true });
+    const filteredFiles = files.filter((file) => {
+        for (let i = 0; i < exclusions.length; i++) {
+            if (file.includes(exclusions[i])) return false;
+        }
+        return true;
+    });
+    filteredFiles.forEach((file) => console.log(chalk.red(`- ${path.relative(process.cwd(), file)}`)));
+    console.log(chalk.green("\nList of files that are already added:"));
+    const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+    const addedFiles = Object.keys(metaData["stage-1-files"]);
+    addedFiles.forEach((file) => console.log(chalk.green(`- ${file}`)));
+}
+
+function add(listOfFiles) {
+    checkIfDorkyProject();
+    console.log("Adding files to stage-1 to push to storage");
+    const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+    listOfFiles.forEach((file) => {
+        if (!fs.existsSync(file)) {
+            console.log(chalk.red(`File ${file} does not exist.`));
+            return;
+        }
+        const fileContents = fs.readFileSync(file);
+        const fileType = mimeTypes.lookup(file);
+        metaData["stage-1-files"][file] = {
+            "mime-type": fileType ? fileType : "application/octet-stream",
+            "hash": md5(fileContents)
+        };
+    });
+    fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+    listOfFiles.map((file) => console.log(chalk.green(`Added ${file} to stage-1.`)));
+}
+
+function rm(listOfFiles) {
+    checkIfDorkyProject();
+    console.log(chalk.red("Removing files from stage-1"));
+    const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+    listOfFiles = listOfFiles.filter((file) => {
+        if (metaData["stage-1-files"][file] == undefined) return false;
+        delete metaData["stage-1-files"][file];
+        return true;
+    });
+    fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+    if (listOfFiles.length) listOfFiles.map((file) => console.log(chalk.red(`Removed ${file} from stage-1.`)));
+    else console.log(chalk.red("No files found that can be removed."));
+}
+
+function checkCredentials() {
+    const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
+    // This only works for AWS S3, add credential checker for google drive also, fix this => TP | 2024-09-28 16:04:41
+    if (credentials.accessKey && credentials.secretKey && credentials.region && credentials.bucket) {
+        if (process.env.AWS_ACCESS_KEY && process.env.AWS_SECRET_KEY && process.env.AWS_REGION && process.env.BUCKET_NAME) {
+            return true;
+        } else {
+            console.log(chalk.red("Please provide credentials in .dorky/credentials.json"));
+            return false;
+        }
+    } else return true;
+}
+
+function push() {
+    checkIfDorkyProject();
+    if (!checkCredentials()) {
+        console.log(chalk.red("Please setup credentials in environment variables or in .dorky/credentials.json"));
+        return;
+    }
+    console.log("Pushing files to storage");
+    const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+    const stage1Files = metaData["stage-1-files"];
+    const pushedFiles = metaData["uploaded-files"];
+    var filesToPush = [];
+    Object.keys(stage1Files).map((file) => {
+        if (pushedFiles[file]) {
+            if (stage1Files[file]["hash"] != pushedFiles[file]["hash"]) filesToPush.push(file);
+        } else filesToPush.push(file);
+    });
+    filesToPush = filesToPush.map((file) => {
+        return {
+            "name": file,
+            "mime-type": stage1Files[file]["mime-type"],
+            "hash": stage1Files[file]["hash"]
+        }
+    });
+    const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
+    switch (credentials.storage) {
+        case "aws":
+            pushToS3(filesToPush, credentials);
+            break;
+        case "google-drive":
+            pushToGoogleDrive(filesToPush);
+            break;
+        default:
+            console.log("Please provide a valid storage option <aws|google-drive>");
+            break;
+    }
+    metaData["uploaded-files"] = metaData["stage-1-files"];
+    fs.writeFileSync(".dorky/metadata.json", JSON.stringify(metaData, null, 2));
+    console.log(chalk.green("Pushed files to storage"));
+}
+
+function pushToS3(files, credentials) {
+    const s3 = new S3Client({
+        credentials: {
+            accessKeyId: credentials.acessKey ?? process.env.AWS_ACCESS_KEY,
+            secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
+        },
+        region: credentials.awsRegion ?? process.env.AWS_REGION
+    });
+    const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
+    Promise.all(files.map(async (file) => {
+        const rootFolder = path.basename(process.cwd());
+        const pathToFile = path.join(rootFolder, file.name);
+        await s3.send(
+            new PutObjectCommand({
+                Bucket: bucketName,
+                Key: pathToFile,
+                Body: fs.readFileSync(file.name).toString(),
+            })
+        );
+        console.log(chalk.green(`Pushed ${pathToFile} to storage.`));
+    }));
+}
+
+
+async function pushToGoogleDrive(files) {
+    async function getOrCreateFolderId(folderPath, drive) {
+        const folders = folderPath.split(path.sep);
+        let parentId = 'root';
+        for (const folder of folders) {
+            const res = await drive.files.list({
+                q: `name='${folder}' and mimeType='application/vnd.google-apps.folder' and '${parentId}' in parents`,
+                fields: 'files(id, name)',
+                spaces: 'drive'
+            });
+            if (res.data.files.length > 0) {
+                parentId = res.data.files[0].id;
+            } else {
+                const folderMetadata = {
+                    name: folder,
+                    mimeType: 'application/vnd.google-apps.folder',
+                    parents: [parentId],
+                };
+                const folderRes = await drive.files.create({
+                    requestBody: folderMetadata,
+                    fields: 'id',
+                });
+                parentId = folderRes.data.id;
+            }
+        }
+        return parentId;
+    }
+    console.log("Uploading to google drive");
+    const client = await authorizeGoogleDriveClient();
+    const drive = google.drive({ version: 'v3', auth: client });
+    for (const file of files) {
+        const rootFolder = path.basename(process.cwd());
+        const pathToFile = path.join(rootFolder, file.name);
+        const requestBody = {
+            name: path.basename(file.name),
+            parents: [await getOrCreateFolderId(pathToFile.split("/").slice(0, -1).join("/"), drive)],
+            fields: 'id',
+        };
+        const media = {
+            mimeType: file["mime-type"],
+            body: fs.createReadStream(path.join(process.cwd(), file.name)),
+        };
+        try {
+            await drive.files.create({
+                requestBody,
+                media: media,
+            });
+            console.log(chalk.green(`Pushed ${file.name} to storage.`));
+        } catch (err) {
+            console.log(err);
+            throw err;
+        }
+    }
+}
+
+function pull() {
+    checkIfDorkyProject();
+    if (!checkCredentials()) {
+        console.log(chalk.red("Please setup credentials in environment variables or in .dorky/credentials.json"));
+        return;
+    }
+    console.log("Pulling files from storage");
+    const metaData = JSON.parse(fs.readFileSync(".dorky/metadata.json"));
+    const filesToPull = metaData["uploaded-files"];
+    const credentials = JSON.parse(fs.readFileSync(".dorky/credentials.json"));
+    switch (credentials.storage) {
+        case "aws":
+            pullFromS3(filesToPull, credentials);
+            break;
+        case "google-drive":
+            pullFromGoogleDrive(filesToPull);
+            break;
+        default:
+            console.log("Please provide a valid storage option <aws|google-drive>");
+            break;
+    }
+}
+
+function pullFromS3(files, credentials) {
+    const s3 = new S3Client({
+        credentials: {
+            accessKeyId: credentials.acessKey ?? process.env.AWS_ACCESS_KEY,
+            secretAccessKey: credentials.secretKey ?? process.env.AWS_SECRET_KEY
+        },
+        region: credentials.awsRegion ?? process.env.AWS_REGION
+    });
+    const bucketName = credentials.bucket ?? process.env.BUCKET_NAME;
+    Promise.all(Object.keys(files).map(async (file) => {
+        const rootFolder = path.basename(process.cwd());
+        const pathToFile = path.join(rootFolder, file);
+        const { Body } = await s3.send(
+            new GetObjectCommand({
+                Bucket: bucketName,
+                Key: pathToFile,
+            })
+        );
+        const dir = path.dirname(file);
+        if (!fs.existsSync(dir)) {
+            fs.mkdirSync(dir, { recursive: true });
+        }
+        fs.writeFileSync(file, await Body.transformToString());
+        console.log(chalk.green(`Pulled ${file} from storage.`));
+    }));
+}
+
+async function pullFromGoogleDrive(files) {
+    console.log("Downloading from google drive");
+    files = Object.keys(files).map((file) => {
+        return { name: file, ...files[file] };
+    });
+
+    const client = await authorizeGoogleDriveClient();
+    const drive = google.drive({ version: "v3", auth: client });
+    try {
+        files.map(async (file) => {
+            const res = await drive.files.list({
+                q: `name='${path.basename(file.name)}' and mimeType!='application/vnd.google-apps.folder'`,
+                fields: 'files(id, name)',
+                spaces: 'drive'
+            });
+            if (res.data.files.length === 0) {
+                console.log(chalk.red(`File ${file.name} not found in Google Drive.`));
+                return;
+            }
+            const _file = await drive.files.get({ fileId: res.data.files[0].id, alt: "media" });
+            const dir = path.dirname(file.name);
+            if (!fs.existsSync(dir)) {
+                fs.mkdirSync(dir, { recursive: true });
+            }
+            fs.writeFileSync(file.name, await _file.data.text(), "utf-8");
+            console.log(chalk.green(`Pulled ${file.name} from storage.`));
+        });
+    } catch (err) {
+        throw err;
+    }
+}
+
+if (Object.keys(args).includes("init")) init(args.init);
+if (Object.keys(args).includes("list")) list(args.list);
+if (Object.keys(args).includes("add")) add(args.add);
+if (Object.keys(args).includes("rm")) rm(args.rm);
+if (Object.keys(args).includes("push")) push();
+if (Object.keys(args).includes("pull")) pull();
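The push path above only re-uploads what changed: `push()` compares the md5 recorded at `--add` time against the hash stored when the file was last pushed. A condensed restatement of that check (the helper name is illustrative, not part of the package):

```js
// A staged file is uploaded only if it has never been pushed,
// or if its md5 hash changed since the last push.
function filesNeedingPush(stage1Files, uploadedFiles) {
    return Object.keys(stage1Files).filter(
        (file) =>
            !uploadedFiles[file] ||
            stage1Files[file]["hash"] !== uploadedFiles[file]["hash"]
    );
}

// Example: only b.txt is re-pushed.
const staged = { "a.txt": { hash: "h1" }, "b.txt": { hash: "h3" } };
const uploaded = { "a.txt": { hash: "h1" }, "b.txt": { hash: "h2" } };
console.log(filesNeedingPush(staged, uploaded)); // ["b.txt"]
```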
package/google-drive-credentials.json
ADDED

@@ -0,0 +1,16 @@
+{
+    "web": {
+        "client_id": "624017989162-l9r3hqnv0urve3e3eg0eika5oq81mgin.apps.googleusercontent.com",
+        "project_id": "sonic-mile-426408-u2",
+        "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+        "token_uri": "https://oauth2.googleapis.com/token",
+        "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+        "client_secret": "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",
+        "javascript_origins": [
+            "http://localhost:3000"
+        ],
+        "redirect_uris": [
+            "http://localhost:3000/oauth2callback"
+        ]
+    }
+}
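This OAuth client file is what `bin/index.js` resolves at `CREDENTIALS_PATH` and hands to `@google-cloud/local-auth`; the resulting token is cached in `.dorky/credentials.json` for later runs. A minimal sketch of that flow, assuming the same file layout (the wrapper function is illustrative):

```js
// Condensed from authorizeGoogleDriveClient() above: authenticate() reads the
// client id and secret from the keyfile and runs a local OAuth consent flow.
const { authenticate } = require("@google-cloud/local-auth");
const path = require("path");

async function demoAuthorize() {
    const client = await authenticate({
        scopes: ["https://www.googleapis.com/auth/drive"],
        keyfilePath: path.join(__dirname, "google-drive-credentials.json"),
    });
    return client; // an authorized OAuth2 client usable with googleapis
}
```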
package/package.json
CHANGED

@@ -1,13 +1,13 @@
 {
     "name": "dorky",
-    "version": "1.2.2",
+    "version": "2.1.1",
     "description": "DevOps Records Keeper.",
-    "main": "index.js",
     "bin": {
-        "dorky": "index.js"
+        "dorky": "bin/index.js"
     },
     "scripts": {
-        "
+        "start": "node bin/index.js",
+        "test": "mocha"
     },
     "repository": {
         "type": "git",
@@ -25,9 +25,19 @@
         "url": "https://github.com/trishantpahwa/dorky/issues"
     },
     "homepage": "https://github.com/trishantpahwa/dorky#readme",
+    "devDependencies": {
+        "tsc": "^2.0.4",
+        "typescript": "^5.5.4"
+    },
     "dependencies": {
-        "@aws-sdk/client-s3": "^3.
+        "@aws-sdk/client-s3": "^3.658.1",
+        "@google-cloud/local-auth": "^3.0.1",
         "chalk": "^4.1.2",
-        "glob": "^
+        "glob": "^11.0.0",
+        "googleapis": "^144.0.0",
+        "md5": "^2.3.0",
+        "mime-type": "^4.0.0",
+        "mime-types": "^2.1.35",
+        "yargs": "^17.7.2"
     }
-}
+}
package/index.js
DELETED

@@ -1,547 +0,0 @@
-#!/usr/bin/env node
-
-const glob = require("glob");
-const path = require("path");
-const chalk = require("chalk");
-const fs = require("fs");
-const { EOL } = require("os");
-const {
-  S3Client,
-  ListObjectsCommand,
-  PutObjectCommand,
-  DeleteObjectsCommand,
-  GetObjectCommand,
-} = require("@aws-sdk/client-s3");
-const { exit } = require("process");
-const { createHash } = require("crypto");
-
-let s3Client, bucketName;
-
-// Initializes project, creates a new .dorky folder, and adds a metadata file to it, and creates a .dorkyignore file.
-function initializeProject() {
-  if (fs.existsSync(".dorky")) {
-    console.log(
-      "Dorky project already initialized. Remove .dorky folder to reinitialize."
-    );
-  } else {
-    fs.mkdirSync(".dorky");
-    console.log(chalk.bgGreen("Created .dorky folder."));
-    fs.writeFileSync(
-      ".dorky/metadata.json",
-      JSON.stringify({ "stage-1-files": [], "uploaded-files": [] })
-    );
-    console.log(chalk.bgGreen("Created .dorky/metadata.json file."));
-    fs.writeFileSync(".dorkyignore", "");
-    console.log(chalk.bgGreen("Created .dorkyignore file."));
-    fs.writeFileSync(".dorky/.dorkyhash", "");
-    console.log(chalk.bgGreen("Created .dorkyhash file."));
-  }
-}
-
-// Lists all the files that are not excluded explicitly.
-function listFiles() {
-  let exclusions = fs.readFileSync("./.dorkyignore").toString().split(EOL);
-  exclusions = exclusions.filter((exclusion) => exclusion !== "");
-  if (exclusions[0] == "") exclusions = [];
-  var getDirectories = function (src, callback) {
-    glob(src + "/**/*", callback);
-  };
-
-  function excludeIsPresent(element) {
-    let present = false;
-    let i = 0;
-    while (i < exclusions.length) {
-      if (element.includes(exclusions[i])) present = true;
-      i += 1;
-    }
-    return present;
-  }
-  getDirectories(process.cwd(), function (err, res) {
-    if (err) {
-      console.log("Error", err);
-    } else {
-      let listOfFiles;
-      listOfFiles = res
-        .filter((element) => !excludeIsPresent(element))
-        .map((file) => path.relative(process.cwd(), file));
-      console.log(chalk.green("Found files:"));
-      listOfFiles.map((file) => console.log("\t" + chalk.bgGrey(file)));
-    }
-  });
-}
-
-// Pushes changes to S3 bucket.
-function pushChanges() {
-  console.log("Pushing files to server.");
-  let rootFolder;
-  if (process.cwd().includes("\\")) {
-    rootFolder = process.cwd().split("\\").pop();
-  } else if (process.cwd().includes("/")) {
-    rootFolder = process.cwd().split("/").pop();
-  } else rootFolder = process.cwd();
-  console.log(rootFolder);
-  async function rootFolderExists(rootFolder) {
-    const bucketParams = { Bucket: bucketName };
-    const response = await s3Client.send(new ListObjectsCommand(bucketParams));
-    if (
-      response.Contents.filter(
-        (object) => object.Key.split("/")[0] == rootFolder
-      ).length > 0
-    ) {
-      let metaData = JSON.parse(
-        fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
-      );
-      // Get removed files
-      let removed = metaData["uploaded-files"].filter(
-        (x) => !metaData["stage-1-files"].includes(x)
-      );
-      // Uploaded added files.
-      let added = metaData["stage-1-files"].filter(
-        (x) => !metaData["uploaded-files"].includes(x)
-      );
-
-      added.map(async (file) => {
-        if (metaData["uploaded-files"].includes(file)) return;
-        else {
-          const putObjectParams = {
-            Bucket: bucketName,
-            Key: path
-              .join(rootFolder, path.relative(process.cwd(), file))
-              .split("\\")
-              .join("/"),
-            Body: fs
-              .readFileSync(path.relative(process.cwd(), file))
-              .toString(),
-          };
-          // Upload records
-          try {
-            const uploadResponse = await s3Client.send(
-              new PutObjectCommand(putObjectParams)
-            );
-            if (uploadResponse) console.log(chalk.green("Uploaded " + file));
-          } catch (err) {
-            console.log(
-              "Unable to upload file " +
-                path
-                  .join(rootFolder, path.relative(process.cwd(), file))
-                  .replace(/\\/g, "/")
-            );
-            console.log(err);
-          }
-          metaData["uploaded-files"].push(file);
-        }
-      });
-
-      if (removed.length) {
-        const removedObjectParams = {
-          Bucket: bucketName,
-          Delete: {
-            Objects: removed.map((file) => {
-              return { Key: file };
-            }),
-            Quiet: true,
-          },
-        };
-
-        // Delete removed records, doesn't delete immediately.
-        try {
-          const deleteResponse = s3Client.send(
-            new DeleteObjectsCommand(removedObjectParams)
-          );
-          if (deleteResponse) {
-            console.log("Deleted removed files:");
-            removed.map((file) => console.log(chalk.bgRed(file)));
-          }
-        } catch (err) {
-          console.log("Unable to delete files.");
-          console.log(err);
-        }
-      }
-      if (metaData["uploaded-files"] != metaData["stage-1-files"]) {
-        metaData["uploaded-files"] = Array.from(
-          new Set(metaData["stage-1-files"])
-        );
-        fs.writeFileSync(
-          path.join(".dorky", "metadata.json"),
-          JSON.stringify(metaData)
-        );
-        putObjectParams = {
-          Bucket: bucketName,
-          Key: path
-            .relative(
-              process.cwd(),
-              path.join(rootFolder.toString(), "metadata.json")
-            )
-            .replace(/\\/g, "/"),
-          Body: JSON.stringify(metaData),
-        };
-        try {
-          const uploadResponse = await s3Client.send(
-            new PutObjectCommand(putObjectParams)
-          );
-          if (uploadResponse)
-            console.log(
-              chalk.green(
-                "Uploaded " + path.join(rootFolder.toString(), "metadata.json")
-              )
-            );
-        } catch (err) {
-          console.log(
-            "Unable to upload file " +
-              path
-                .join(
-                  rootFolder,
-                  path.relative(
-                    process.cwd(),
-                    path.join(rootFolder.toString(), "metadata.json")
-                  )
-                )
-                .replace(/\\/g, "/")
-          );
-          console.log(err);
-        }
-      } else {
-        console.log("Nothing to push");
-      }
-    } else {
-      let metaData = JSON.parse(
-        fs.readFileSync(path.join(".dorky", "metadata.json")).toString()
-      );
-      metaData["stage-1-files"].map(async (file) => {
-        if (metaData["uploaded-files"].includes(file)) return;
-        else {
-          const putObjectParams = {
-            Bucket: bucketName,
-            Key: path
-              .join(rootFolder, path.relative(process.cwd(), file))
-              .replace(/\\/g, "/"),
-            Body: fs
-              .readFileSync(path.relative(process.cwd(), file))
-              .toString(),
-          };
-          // Upload records
-          try {
-            const uploadResponse = await s3Client.send(
-              new PutObjectCommand(putObjectParams)
-            );
-            if (uploadResponse) console.log(chalk.green("Uploaded " + file));
-          } catch (err) {
-            console.log(
-              "Unable to upload file " +
-                path
-                  .join(rootFolder, path.relative(process.cwd(), file))
-                  .replace(/\\/g, "/")
-            );
-            console.log(err);
-          }
-          metaData["uploaded-files"].push(file);
-        }
-      });
-      metaData["uploaded-files"] = Array.from(
-        new Set(metaData["uploaded-files"])
-      );
-      fs.writeFileSync(
-        path.join(".dorky", "metadata.json"),
-        JSON.stringify(metaData)
-      );
-      putObjectParams = {
-        Bucket: bucketName,
-        Key: path
-          .relative(
-            process.cwd(),
-            path.join(rootFolder.toString(), "metadata.json")
-          )
-          .replace(/\\/g, "/"),
-        Body: JSON.stringify(metaData),
-      };
-      // Upload metadata.json
-      try {
-        const uploadResponse = await s3Client.send(
-          new PutObjectCommand(putObjectParams)
-        );
-        if (uploadResponse)
-          console.log(
-            chalk.green(
-              "Uploaded " + path.join(rootFolder.toString(), "metadata.json")
-            )
-          );
-      } catch (err) {
-        console.log(
-          "Unable to upload file " +
-            path
-              .join(rootFolder, path.relative(process.cwd(), file))
-              .replace(/\\/g, "/")
-        );
-        console.log(err);
-      }
-    }
-  }
-  rootFolderExists(rootFolder);
-}
-
-async function pullChanges() {
-  console.log("Pulling files from server.");
-  let rootFolder;
-  if (process.cwd().includes("\\")) {
-    rootFolder = process.cwd().split("\\").pop();
-  } else if (process.cwd().includes("/")) {
-    rootFolder = process.cwd().split("/").pop();
-  } else rootFolder = process.cwd();
-  const bucketParams = { Bucket: bucketName };
-  const getObjectsResponse = await s3Client.send(
-    new ListObjectsCommand(bucketParams)
-  );
-  if (
-    getObjectsResponse.Contents.filter(
-      (object) => object.Key.split("/")[0] == rootFolder
-    ).length > 0
-  ) {
-    if (
-      getObjectsResponse.Contents.filter(
-        (object) => object.Key == rootFolder + "/metadata.json"
-      ).length > 0
-    ) {
-      const params = {
-        Bucket: bucketName,
-        Key: rootFolder + "/metadata.json",
-      };
-      s3Client.send(new GetObjectCommand(params), async (err, data) => {
-        if (err) console.error(err);
-        else {
-          let metaData = JSON.parse(await data.Body.transformToString());
-          // Pull metadata.json
-          const METADATA_FILE = ".dorky/metadata.json";
-          fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-          let pullFileParams;
-          metaData["uploaded-files"].map((file) => {
-            pullFileParams = {
-              Bucket: bucketName,
-              Key: rootFolder + "/" + file,
-            };
-            s3Client.send(
-              new GetObjectCommand(pullFileParams),
-              async (err, data) => {
-                if (err) console.log(err);
-                else {
-                  console.log("Creating file " + file);
-                  let fileData = await data.Body.transformToString();
-                  let subDirectories;
-                  if (process.cwd().includes("\\")) {
-                    subDirectories = path
-                      .relative(process.cwd(), file)
-                      .split("\\");
-                  } else if (process.cwd().includes("/")) {
-                    subDirectories = path
-                      .relative(process.cwd(), file)
-                      .split("/");
-                  } else subDirectories = path.relative(process.cwd(), file);
-                  subDirectories.pop();
-                  if (process.platform === "win32") {
-                    subDirectories = subDirectories.join("\\");
-                  } else if (
-                    process.platform === "linux" ||
-                    process.platform === "darwin"
-                  ) {
-                    subDirectories = subDirectories.join("/");
-                  }
-                  if (subDirectories.length)
-                    fs.mkdirSync(subDirectories, { recursive: true });
-                  fs.writeFileSync(
-                    path.relative(process.cwd(), file),
-                    fileData
-                  );
-                }
-              }
-            );
-          });
-        }
-      });
-    } else {
-      console.log("Metadata doesn't exist");
-    }
-  } else {
-    console.error(chalk.red("Failed to pull folder, as it doesn't exist"));
-  }
-}
-
-if (
-  process.env.BUCKET_NAME &&
-  process.env.AWS_ACCESS_KEY &&
-  process.env.AWS_SECRET_KEY &&
-  process.env.AWS_REGION
-) {
-  bucketName = process.env.BUCKET_NAME;
-  s3Client = new S3Client({
-    region: process.env.AWS_REGION,
-    credentials: {
-      accessKeyId: process.env.AWS_ACCESS_KEY,
-      secretAccessKey: process.env.AWS_SECRET_KEY,
-    },
-  });
-  if (fs.existsSync(".dorky")) {
-    const credentials = [
-      `AWS_ACCESS_KEY=${process.env.AWS_ACCESS_KEY}`,
-      `AWS_SECRET_KEY=${process.env.AWS_SECRET_KEY}`,
-      `AWS_REGION=${process.env.AWS_REGION}`,
-      `BUCKET_NAME=${process.env.BUCKET_NAME}`,
-    ];
-    fs.writeFileSync(".dorky/.credentials", credentials.join("\n"));
-  }
-} else {
-  if (fs.existsSync(".dorky")) {
-    if (fs.existsSync(".dorky/.credentials")) {
-      const credentials = fs
-        .readFileSync(".dorky/.credentials", "utf8")
-        .toString()
-        .split("\n");
-      if (credentials.length < 4) {
-        console.log(
-          chalk.red(
-            "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
-          )
-        );
-        exit();
-      }
-      const region = credentials
-        .filter((credential) => credential.includes("AWS_REGION"))[0]
-        .split("=")[1];
-      const accessKey = credentials
-        .filter((credential) => credential.includes("AWS_ACCESS_KEY"))[0]
-        .split("=")[1];
-      const secretKey = credentials
-        .filter((credential) => credential.includes("AWS_SECRET_KEY"))[0]
-        .split("=")[1];
-      bucketName = credentials
-        .filter((credential) => credential.includes("BUCKET_NAME"))[0]
-        .split("=")[1];
-      s3Client = new S3Client({
-        region: region,
-        credentials: {
-          accessKeyId: accessKey,
-          secretAccessKey: secretKey,
-        },
-      });
-      console.log(chalk.blue("Set credentials from file."));
-    } else {
-      console.log(
-        chalk.red(
-          "Set BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION first."
-        )
-      );
-      exit();
-    }
-  } else {
-    console.log(
-      chalk.red(
-        "Unable to find .dorky folder, please reinitialize the project in the root folder or set the BUCKET_NAME, AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_REGION in environment variables."
-      )
-    );
-    exit();
-  }
-}
-
-const args = process.argv.splice(2, 2);
-
-if (args.length == 0) {
-  const figlet = `
- __ __
-.--| .-----.----| |--.--.--.
-| _ | _ | _| <| | |
-|_____|_____|__| |__|__|___ |
- |_____|
-`;
-  console.log(figlet);
-  const helpMessage = `Help message:\ninit\t Initializes a dorky project.\nlist\t Lists files in current root directory.\npush\t Pushes changes to S3 bucket.\npull\t Pulls changes from S3 bucket to local root folder.`;
-  console.log(helpMessage);
-} else if (args.length == 1) {
-  if (args[0] == "init") initializeProject();
-  if (args[0] == "list") listFiles();
-  if (args[0] == "push") pushChanges();
-  if (args[0] == "pull") pullChanges();
-} else if (args.length == 2) {
-  if (args[0] == "add") {
-    const METADATA_FILE = ".dorky/metadata.json";
-    const HASHES_FILE = ".dorky/.dorkyhash";
-    const file = args[1];
-    if (fs.existsSync(file)) {
-      const hashes = {};
-      fs.readFileSync(HASHES_FILE)
-        .toString()
-        .split("\n")
-        .filter((hash) => hash)
-        .map((hash) => {
-          hashes[hash.split("=")[0]] = hash.split("=")[1];
-        });
-      if (Object.keys(hashes).includes(file)) {
-        // File already staged
-        const fileContent = fs.readFileSync(file).toString();
-        const currentHash = createHash("md5")
-          .update(fileContent)
-          .digest("base64")
-          .split("==")[0];
-        const hashToCompare = hashes[file];
-        if (currentHash == hashToCompare) {
-          console.log(
-            chalk.red(
-              `File ${chalk.bgRed(
-                chalk.white(file)
-              )} hasn\'t been modified since last push.`
-            )
-          );
-          return;
-        } else {
-          console.log(chalk.green(`Staging ${file} since has been modified.`));
-          hashes[file] = currentHash;
-          const updatedFileContent = Object.entries(hashes).map(
-            (fileAndHash) => {
-              return fileAndHash.join("=");
-            }
-          );
-          fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
-          const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
-          // Clear from uploaded files
-          const uploadedFiles = new Set(metaData["uploaded-files"]);
-          uploadedFiles.delete(file);
-          metaData["uploaded-files"] = Array.from(uploadedFiles);
-          fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-          console.log(
-            `Updated ${chalk.bgGreen(
-              chalk.white(file)
-            )}, ready to push the updates from it.`
-          );
-        }
-      } else {
-        // New file
-        const fileContent = fs.readFileSync(file).toString();
-        hashes[file] = createHash("md5")
-          .update(fileContent)
-          .digest("base64")
-          .split("==")[0];
-        const updatedFileContent = Object.entries(hashes).map((fileAndHash) => {
-          return fileAndHash.join("=");
-        });
-        fs.writeFileSync(HASHES_FILE, updatedFileContent.join("\n"));
-        console.log(
-          `Tracking updates from ${chalk.bgGreen(chalk.white(file))}`
-        );
-      }
-      const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
-      const stage1Files = new Set(metaData["stage-1-files"]);
-      stage1Files.add(file);
-      metaData["stage-1-files"] = Array.from(stage1Files);
-      fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-      console.log(chalk.bgGreen("Success"));
-      console.log(chalk.green(`Added file ${file} successfully to stage-1.`));
-    } else {
-      console.log(chalk.bgRed("Error"));
-      console.log(chalk.red(`\tFile ${file} doesn\'t exist`));
-    }
-  } else if (args[0] == "reset") {
-    const METADATA_FILE = ".dorky/metadata.json";
-    const metaData = JSON.parse(fs.readFileSync(METADATA_FILE));
-    const file = args[1];
-    resetFileIndex = metaData["stage-1-files"].indexOf(file);
-    metaData["stage-1-files"].splice(resetFileIndex, 1);
-    fs.writeFileSync(METADATA_FILE, JSON.stringify(metaData));
-  }
-}