enver-cli 1.0.1 → 1.0.2
- package/dist/auth.js +97 -0
- package/dist/config.js +64 -0
- package/dist/crypto.js +48 -0
- package/dist/drive.js +280 -0
- package/dist/{bin/index.js → index.js} +3 -3
- package/package.json +9 -5
package/dist/auth.js
ADDED
@@ -0,0 +1,97 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.logout = logout;
+exports.getAuthenticatedClient = getAuthenticatedClient;
+const googleapis_1 = require("googleapis");
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const path_1 = __importDefault(require("path"));
+const os_1 = __importDefault(require("os"));
+const http_1 = __importDefault(require("http"));
+const url_1 = __importDefault(require("url"));
+const open_1 = __importDefault(require("open"));
+// Placeholder for your backend URL
+const BACKEND_AUTH_URL = 'https://enver.elight.lk/auth/login';
+const CONFIG_DIR = path_1.default.join(os_1.default.homedir(), '.env-manager');
+const TOKEN_PATH = path_1.default.join(process.cwd(), 'token.json');
+const CREDENTIALS_PATH = path_1.default.join(process.cwd(), 'credentials.json');
+/**
+ * Deletes the token.json file to log the user out.
+ */
+async function logout() {
+    if (await fs_extra_1.default.pathExists(TOKEN_PATH)) {
+        await fs_extra_1.default.remove(TOKEN_PATH);
+    }
+}
+/**
+ * Ensures the global config directory exists.
+ */
+async function ensureConfigDir() {
+    if (!(await fs_extra_1.default.pathExists(CONFIG_DIR))) {
+        await fs_extra_1.default.ensureDir(CONFIG_DIR);
+    }
+}
+/**
+ * Returns an authenticated OAuth2Client.
+ * Orchestrates local server for token capture if no token is found.
+ */
+async function getAuthenticatedClient() {
+    await ensureConfigDir();
+    // 1. Check if token exists globally
+    if (await fs_extra_1.default.pathExists(TOKEN_PATH)) {
+        const tokenData = await fs_extra_1.default.readFile(TOKEN_PATH, 'utf8');
+        const tokens = JSON.parse(tokenData);
+        // We don't have client_id/secret anymore, but the drive-env-manager
+        // needs an OAuth2Client. Your backend should ideally provide these
+        // or the CLI should use a shared Client ID.
+        // Assuming the backend returns tokens for a predefined client.
+        const oAuth2Client = new googleapis_1.google.auth.OAuth2();
+        oAuth2Client.setCredentials(tokens);
+        return oAuth2Client;
+    }
+    // 2. Start local server to wait for callback
+    return new Promise((resolve, reject) => {
+        const server = http_1.default.createServer(async (req, res) => {
+            const parsedUrl = url_1.default.parse(req.url || '', true);
+            if (parsedUrl.pathname === '/callback') {
+                const { access_token, refresh_token, expiry_date } = parsedUrl.query;
+                if (access_token) {
+                    const tokens = {
+                        access_token: access_token,
+                        refresh_token: refresh_token,
+                        expiry_date: expiry_date ? parseInt(expiry_date) : undefined,
+                    };
+                    await fs_extra_1.default.writeFile(TOKEN_PATH, JSON.stringify(tokens));
+                    res.writeHead(200, { 'Content-Type': 'text/html' });
+                    res.end('<h1>Login Successful!</h1><p>You can close this tab and return to the CLI.</p>');
+                    server.close();
+                    const oAuth2Client = new googleapis_1.google.auth.OAuth2();
+                    oAuth2Client.setCredentials(tokens);
+                    resolve(oAuth2Client);
+                }
+                else {
+                    res.writeHead(400);
+                    res.end('Authentication failed: No access token received.');
+                    server.close();
+                    reject(new Error('Authentication failed: No access token received.'));
+                }
+            }
+        });
+        // Listen on a random free port
+        server.listen(0, 'localhost', async () => {
+            const address = server.address();
+            const port = address.port;
+            const authUrlWithPort = `${BACKEND_AUTH_URL}?port=${port}`;
+            console.log('Opening browser for authentication...');
+            console.log(`If the browser does not open, visit: ${authUrlWithPort}`);
+            await (0, open_1.default)(authUrlWithPort);
+            // 5-minute timeout
+            setTimeout(() => {
+                server.close();
+                reject(new Error('Authentication timed out after 5 minutes.'));
+            }, 5 * 60 * 1000);
+        });
+    });
+}
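
For orientation, a minimal usage sketch of the new auth module, assuming the package is installed and the compiled file resolves as ./dist/auth; the require path and the surrounding script are illustrative, not part of the package:

// Hypothetical usage sketch: obtain an OAuth2 client via the new loopback
// flow and list a few Drive files with it.
const { google } = require('googleapis');
const { getAuthenticatedClient, logout } = require('./dist/auth');

async function main() {
    // Reuses ./token.json if present; otherwise opens the browser and waits
    // for the backend to redirect to http://localhost:<random port>/callback.
    const auth = await getAuthenticatedClient();
    const drive = google.drive({ version: 'v3', auth });
    const res = await drive.files.list({ pageSize: 5, fields: 'files(id, name)' });
    console.log(res.data.files);
    // logout() simply deletes token.json in the current working directory.
    // await logout();
}

main().catch(console.error);
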
package/dist/config.js
ADDED
@@ -0,0 +1,64 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getConfig = getConfig;
+exports.updateConfig = updateConfig;
+exports.setPassword = setPassword;
+exports.setEncryption = setEncryption;
+exports.resetConfig = resetConfig;
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const path_1 = __importDefault(require("path"));
+const os_1 = __importDefault(require("os"));
+const CONFIG_DIR = path_1.default.join(os_1.default.homedir(), '.enver');
+const CONFIG_FILE = path_1.default.join(CONFIG_DIR, 'config.json');
+/**
+ * Ensures the config directory exists.
+ */
+async function ensureConfigDir() {
+    await fs_extra_1.default.ensureDir(CONFIG_DIR);
+}
+/**
+ * Gets the current global configuration.
+ */
+async function getConfig() {
+    await ensureConfigDir();
+    if (!(await fs_extra_1.default.pathExists(CONFIG_FILE))) {
+        return { encryptionEnabled: false };
+    }
+    try {
+        const config = await fs_extra_1.default.readJson(CONFIG_FILE);
+        return { encryptionEnabled: false, ...config };
+    }
+    catch (err) {
+        return { encryptionEnabled: false };
+    }
+}
+/**
+ * Updates the global configuration.
+ */
+async function updateConfig(update) {
+    await ensureConfigDir();
+    const current = await getConfig();
+    const next = { ...current, ...update };
+    await fs_extra_1.default.writeJson(CONFIG_FILE, next, { spaces: 2 });
+}
+/**
+ * Sets the default encryption password.
+ */
+async function setPassword(password) {
+    await updateConfig({ defaultPassword: password });
+}
+/**
+ * Toggles encryption globally.
+ */
+async function setEncryption(enabled) {
+    await updateConfig({ encryptionEnabled: enabled });
+}
+/**
+ * Resets all configurations to defaults.
+ */
+async function resetConfig() {
+    await fs_extra_1.default.remove(CONFIG_FILE);
+}
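
A rough sketch of how the new config helpers behave, assuming ./dist/config resolves to the file above; the values and the resulting ~/.enver/config.json contents shown in the comments are hypothetical:

// Hypothetical sketch: enabling encryption and storing a default password
// writes ~/.enver/config.json; getConfig() merges it over the defaults.
const { getConfig, setEncryption, setPassword, resetConfig } = require('./dist/config');

async function demo() {
    await setEncryption(true);            // config.json: { "encryptionEnabled": true }
    await setPassword('correct horse');   // adds "defaultPassword"
    console.log(await getConfig());       // { encryptionEnabled: true, defaultPassword: 'correct horse' }
    await resetConfig();                  // removes config.json
    console.log(await getConfig());       // { encryptionEnabled: false }
}

demo().catch(console.error);
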
package/dist/crypto.js
ADDED
@@ -0,0 +1,48 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encrypt = encrypt;
+exports.decrypt = decrypt;
+const crypto_1 = __importDefault(require("crypto"));
+const ALGORITHM = 'aes-256-cbc';
+const IV_LENGTH = 16;
+const SALT_LENGTH = 64;
+const KEY_LENGTH = 32;
+const ITERATIONS = 100000;
+/**
+ * Encrypts a string using a password.
+ */
+function encrypt(text, password) {
+    const iv = crypto_1.default.randomBytes(IV_LENGTH);
+    const salt = crypto_1.default.randomBytes(SALT_LENGTH);
+    const key = crypto_1.default.pbkdf2Sync(password, salt, ITERATIONS, KEY_LENGTH, 'sha512');
+    const cipher = crypto_1.default.createCipheriv(ALGORITHM, key, iv);
+    let encrypted = cipher.update(text, 'utf8', 'hex');
+    encrypted += cipher.final('hex');
+    // Return format: salt:iv:encrypted
+    return `${salt.toString('hex')}:${iv.toString('hex')}:${encrypted}`;
+}
+/**
+ * Decrypts a string using a password.
+ */
+function decrypt(encryptedData, password) {
+    const parts = encryptedData.split(':');
+    if (parts.length !== 3) {
+        throw new Error('Invalid encrypted data format.');
+    }
+    const salt = Buffer.from(parts[0], 'hex');
+    const iv = Buffer.from(parts[1], 'hex');
+    const encryptedText = parts[2];
+    const key = crypto_1.default.pbkdf2Sync(password, salt, ITERATIONS, KEY_LENGTH, 'sha512');
+    const decipher = crypto_1.default.createDecipheriv(ALGORITHM, key, iv);
+    try {
+        let decrypted = decipher.update(encryptedText, 'hex', 'utf8');
+        decrypted += decipher.final('utf8');
+        return decrypted;
+    }
+    catch (err) {
+        throw new Error('Decryption failed. Incorrect password?');
+    }
+}
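
A quick round-trip sketch of the new crypto helpers, assuming ./dist/crypto resolves to the file above. Each encrypt call draws a fresh 64-byte salt and 16-byte IV and derives an AES-256-CBC key via PBKDF2-SHA512 (100,000 iterations), so the same plaintext produces a different blob every time; the example values are illustrative:

const { encrypt, decrypt } = require('./dist/crypto');

const secret = 'API_KEY=abc123\n';
const blob = encrypt(secret, 'hunter2');
// blob has the shape '<salt>:<iv>:<ciphertext>' in hex:
// 128 hex chars of salt, 32 hex chars of IV, then the ciphertext.
console.log(blob.split(':').map(p => p.length));

console.log(decrypt(blob, 'hunter2') === secret);   // true
try {
    decrypt(blob, 'wrong password');
} catch (err) {
    // A wrong password normally fails padding validation and is rethrown as:
    console.log(err.message);                        // 'Decryption failed. Incorrect password?'
}
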
package/dist/drive.js
ADDED
@@ -0,0 +1,280 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.uploadEnv = uploadEnv;
+exports.downloadEnv = downloadEnv;
+const googleapis_1 = require("googleapis");
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const path_1 = __importDefault(require("path"));
+const ora_1 = __importDefault(require("ora"));
+const inquirer_1 = __importDefault(require("inquirer"));
+const chalk_1 = __importDefault(require("chalk"));
+const diff = __importStar(require("diff"));
+const auth_1 = require("./auth");
+const crypto_1 = require("./crypto");
+const config_1 = require("./config");
+/**
+ * Retrieves and sanitizes project metadata.
+ */
+async function getProjectDetails() {
+    let name = '';
+    let version = 'v0.0.0';
+    const packageJsonPath = path_1.default.join(process.cwd(), 'package.json');
+    if (await fs_extra_1.default.pathExists(packageJsonPath)) {
+        try {
+            const pkg = await fs_extra_1.default.readJson(packageJsonPath);
+            if (pkg.name)
+                name = pkg.name;
+            if (pkg.version)
+                version = `v${pkg.version}`;
+        }
+        catch (err) { }
+    }
+    if (!name)
+        name = path_1.default.basename(process.cwd());
+    const sanitizedName = name
+        .toLowerCase()
+        .replace(/\s+/g, '-')
+        .replace(/[^a-z0-9-]/g, '');
+    return { name: sanitizedName, version };
+}
+function getTimestamp() {
+    const now = new Date();
+    const pad = (n) => n.toString().padStart(2, '0');
+    const date = `${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}`;
+    const time = `${pad(now.getHours())}-${pad(now.getMinutes())}-${pad(now.getSeconds())}`;
+    return `${date}_${time}`;
+}
+async function getOrCreateFolder(drive, folderName, parentId) {
+    let query = `name = '${folderName}' and mimeType = 'application/vnd.google-apps.folder' and trashed = false`;
+    if (parentId)
+        query += ` and '${parentId}' in parents`;
+    const response = await drive.files.list({
+        q: query,
+        fields: 'files(id, name)',
+        supportsAllDrives: true,
+        includeItemsFromAllDrives: true
+    });
+    if (response.data.files && response.data.files.length > 0)
+        return response.data.files[0].id;
+    const folder = await drive.files.create({
+        requestBody: { name: folderName, mimeType: 'application/vnd.google-apps.folder', parents: parentId ? [parentId] : [] },
+        fields: 'id',
+    });
+    return folder.data.id;
+}
+async function getEncryptionPassword(spinner) {
+    const config = await (0, config_1.getConfig)();
+    if (config.defaultPassword)
+        return config.defaultPassword;
+    const wasSpinning = spinner?.isSpinning;
+    if (spinner)
+        spinner.stop();
+    const { password } = await inquirer_1.default.prompt([{
+            type: 'password',
+            name: 'password',
+            message: 'Enter encryption password:',
+            mask: '*',
+        }]);
+    if (wasSpinning && spinner)
+        spinner.start();
+    return password;
+}
+/**
+ * Prunes backups in a folder, keeping only the latest 5.
+ */
+async function pruneBackups(drive, folderId) {
+    const response = await drive.files.list({
+        q: `'${folderId}' in parents and trashed = false`,
+        fields: 'files(id, name, createdTime)',
+        orderBy: 'createdTime desc',
+        supportsAllDrives: true,
+        includeItemsFromAllDrives: true
+    });
+    const files = response.data.files || [];
+    if (files.length > 5) {
+        const toDelete = files.slice(5);
+        for (const file of toDelete) {
+            await drive.files.delete({ fileId: file.id });
+        }
+        console.log(chalk_1.default.yellow(`\nAuto-pruned ${toDelete.length} old backups.`));
+    }
+}
+async function uploadEnv(options = {}) {
+    const fileName = options.file || '.env';
+    const filePath = path_1.default.join(process.cwd(), fileName);
+    if (!(await fs_extra_1.default.pathExists(filePath))) {
+        console.error(chalk_1.default.red(`Error: ${fileName} not found.`));
+        return;
+    }
+    const spinner = (0, ora_1.default)('Authenticating...').start();
+    try {
+        const auth = await (0, auth_1.getAuthenticatedClient)();
+        const drive = googleapis_1.google.drive({ version: 'v3', auth });
+        const config = await (0, config_1.getConfig)();
+        const isEncryptionEnabled = config.encryptionEnabled !== false;
+        let contentToUpload = await fs_extra_1.default.readFile(filePath, 'utf8');
+        let extension = '';
+        if (isEncryptionEnabled) {
+            spinner.text = 'Encrypting content...';
+            const password = await getEncryptionPassword(spinner);
+            contentToUpload = (0, crypto_1.encrypt)(contentToUpload, password);
+            extension = '.enc';
+        }
+        const { name, version } = await getProjectDetails();
+        const folderName = `${name}-${version}`;
+        spinner.text = `Ensuring folder: Enver > ${folderName}...`;
+        const rootId = await getOrCreateFolder(drive, 'Enver');
+        const folderId = await getOrCreateFolder(drive, folderName, rootId);
+        const backupName = `${name}_${fileName}_${getTimestamp()}${extension}`;
+        spinner.text = `Uploading ${backupName}...`;
+        await drive.files.create({
+            requestBody: { name: backupName, mimeType: 'text/plain', parents: [folderId] },
+            media: { mimeType: 'text/plain', body: contentToUpload },
+            supportsAllDrives: true
+        });
+        spinner.succeed(chalk_1.default.green(`Successfully uploaded: ${backupName}`));
+        await pruneBackups(drive, folderId);
+    }
+    catch (error) {
+        spinner.fail(chalk_1.default.red(`Upload failed: ${error.message}`));
+    }
+}
+async function downloadEnv(options = {}) {
+    const targetFileName = options.file || '.env';
+    const spinner = (0, ora_1.default)('Authenticating...').start();
+    try {
+        const auth = await (0, auth_1.getAuthenticatedClient)();
+        const drive = googleapis_1.google.drive({ version: 'v3', auth });
+        const { name, version } = await getProjectDetails();
+        const folderName = options.name || `${name}-${version}`;
+        spinner.text = `Looking for folder: ${folderName}...`;
+        const rootId = await getOrCreateFolder(drive, 'Enver');
+        // Find projector folder inside EnvManager
+        const res = await drive.files.list({
+            q: `'${rootId}' in parents and name = '${folderName}' and mimeType = 'application/vnd.google-apps.folder' and trashed = false`,
+            fields: 'files(id, name)',
+            supportsAllDrives: true,
+            includeItemsFromAllDrives: true
+        });
+        const folderId = res.data.files?.[0]?.id;
+        if (!folderId) {
+            spinner.fail(chalk_1.default.red(`\nNo backups found for project folder: ${folderName}.`));
+            console.log(chalk_1.default.gray(`\nPlease ensure you have pushed a backup for this project version first.`));
+            return;
+        }
+        spinner.text = 'Fetching backups...';
+        if (!spinner.isSpinning)
+            spinner.start();
+        const backupRes = await drive.files.list({
+            q: `'${folderId}' in parents and trashed = false`,
+            fields: 'files(id, name, createdTime)',
+            orderBy: 'createdTime desc',
+            supportsAllDrives: true,
+            includeItemsFromAllDrives: true
+        });
+        const backups = backupRes.data.files || [];
+        if (backups.length === 0) {
+            spinner.info(chalk_1.default.yellow('No backup files found.'));
+            return;
+        }
+        let selectedFileId = '';
+        if (options.latest) {
+            selectedFileId = backups[0].id;
+        }
+        else {
+            spinner.stop();
+            const { fileId } = await inquirer_1.default.prompt([{
+                    type: 'list',
+                    name: 'fileId',
+                    message: 'Select a backup to restore:',
+                    choices: backups.map(f => ({ name: `${f.name} (${f.createdTime})`, value: f.id })),
+                }]);
+            selectedFileId = fileId;
+        }
+        const selectedFile = backups.find(f => f.id === selectedFileId);
+        const isEncrypted = selectedFile?.name?.endsWith('.enc');
+        spinner.text = `Downloading ${selectedFile?.name}...`;
+        if (!spinner.isSpinning)
+            spinner.start();
+        const response = await drive.files.get({
+            fileId: selectedFileId,
+            alt: 'media',
+            supportsAllDrives: true
+        });
+        let downloadedContent = response.data;
+        if (isEncrypted) {
+            spinner.text = 'Decrypting backup...';
+            const password = await getEncryptionPassword(spinner);
+            downloadedContent = (0, crypto_1.decrypt)(downloadedContent, password);
+        }
+        spinner.stop();
+        const localPath = path_1.default.join(process.cwd(), targetFileName);
+        if (await fs_extra_1.default.pathExists(localPath)) {
+            const localContent = await fs_extra_1.default.readFile(localPath, 'utf8');
+            const changes = diff.diffLines(localContent, downloadedContent);
+            const added = changes.filter(c => c.added).length;
+            const removed = changes.filter(c => c.removed).length;
+            console.log(chalk_1.default.blue(`\nSummary: ${chalk_1.default.green(`+${added}`)} ${chalk_1.default.red(`-${removed}`)} lines changed.`));
+            const { action } = await inquirer_1.default.prompt([{
+                    type: 'list',
+                    name: 'action',
+                    message: 'How would you like to proceed?',
+                    choices: ['Show Diff', 'Overwrite/Restore', 'Cancel'],
+                }]);
+            if (action === 'Show Diff') {
+                changes.forEach((part) => {
+                    const color = part.added ? chalk_1.default.green : part.removed ? chalk_1.default.red : chalk_1.default.gray;
+                    process.stdout.write(color(part.value));
+                });
+                console.log('\n');
+                const { confirm } = await inquirer_1.default.prompt([{ type: 'confirm', name: 'confirm', message: 'Proceed with restore?', default: false }]);
+                if (!confirm)
+                    return;
+            }
+            else if (action === 'Cancel') {
+                return;
+            }
+        }
+        await fs_extra_1.default.writeFile(localPath, downloadedContent);
+        console.log(chalk_1.default.green(`\nSuccessfully restored to ${targetFileName}`));
+    }
+    catch (error) {
+        spinner.fail(chalk_1.default.red(`Download failed: ${error.message}`));
+    }
+}
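
Finally, a sketch of how the exported uploadEnv/downloadEnv pair could be called directly, assuming ./dist/drive resolves to the file above; the option values are illustrative, and the real CLI wiring lives in dist/index.js (next hunk):

// Hypothetical sketch: push the local .env to Drive, then restore the latest
// backup into .env.restored without the interactive picker.
const { uploadEnv, downloadEnv } = require('./dist/drive');

async function run() {
    // Uploads to Drive under Enver/<name>-<version>/; if encryption is enabled
    // in ~/.enver/config.json the content is encrypted first (prompting for a
    // password unless a default one is stored), then backups beyond the five
    // newest are pruned.
    await uploadEnv({ file: '.env' });

    // Pulls the most recent backup for this project folder; omit `latest`
    // to get the interactive "Select a backup to restore" prompt.
    await downloadEnv({ file: '.env.restored', latest: true });
}

run().catch(console.error);
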
package/dist/index.js (moved from package/dist/bin/index.js)
CHANGED
@@ -8,9 +8,9 @@ const commander_1 = require("commander");
 const dotenv_1 = __importDefault(require("dotenv"));
 const inquirer_1 = __importDefault(require("inquirer"));
 const chalk_1 = __importDefault(require("chalk"));
-const auth_1 = require("
-const drive_1 = require("
-const config_1 = require("
+const auth_1 = require("./auth");
+const drive_1 = require("./drive");
+const config_1 = require("./config");
 dotenv_1.default.config();
 const program = new commander_1.Command();
 program
package/package.json
CHANGED
@@ -1,15 +1,19 @@
 {
   "name": "enver-cli",
-  "version": "1.0.
+  "version": "1.0.2",
   "description": "A CLI tool to backup and restore .env files using Google Drive",
-  "main": "dist/
+  "main": "dist/index.js",
   "bin": {
-    "enver": "./dist/
+    "enver": "./dist/index.js"
   },
+  "files": [
+    "dist"
+  ],
   "scripts": {
     "build": "tsc",
-    "
-    "
+    "prepublishOnly": "npm run build",
+    "start": "node dist/index.js",
+    "dev": "ts-node src/index.ts"
   },
   "keywords": [
     "cli",