ante-erp-cli 1.9.7 → 1.9.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/ante-cli.js +13 -0
- package/package.json +1 -1
- package/src/commands/clone-mongodb.js +290 -0
- package/src/commands/install.js +18 -3
- package/src/utils/mongodb.js +463 -0
package/bin/ante-cli.js
CHANGED
|
@@ -32,6 +32,7 @@ import { doctor } from '../src/commands/doctor.js';
|
|
|
32
32
|
import { uninstall } from '../src/commands/uninstall.js';
|
|
33
33
|
import { migrate, seed, shell, optimize, reset as dbReset, info } from '../src/commands/database.js';
|
|
34
34
|
import { cloneDb } from '../src/commands/clone-db.js';
|
|
35
|
+
import { cloneMongodb } from '../src/commands/clone-mongodb.js';
|
|
35
36
|
import { setDomain } from '../src/commands/set-domain.js';
|
|
36
37
|
import { sslEnable, sslStatus } from '../src/commands/ssl-enable.js';
|
|
37
38
|
import { regenerateCompose } from '../src/commands/regenerate-compose.js';
|
|
@@ -47,6 +48,10 @@ program
|
|
|
47
48
|
.option('--preset <type>', 'Installation preset (minimal, standard, enterprise)', 'standard')
|
|
48
49
|
.option('--no-interactive', 'Non-interactive mode with defaults')
|
|
49
50
|
.option('--skip-checks', 'Skip system requirements check')
|
|
51
|
+
.option('--with-facial', 'Install Facial Recognition Web app')
|
|
52
|
+
.option('--with-gate', 'Install Gate App')
|
|
53
|
+
.option('--with-guardian', 'Install Guardian App')
|
|
54
|
+
.option('--with-all-frontends', 'Install all frontend applications')
|
|
50
55
|
.action(install);
|
|
51
56
|
|
|
52
57
|
// Update & Maintenance
|
|
@@ -152,6 +157,14 @@ dbCmd
|
|
|
152
157
|
.option('--no-prisma', 'Skip Prisma operations')
|
|
153
158
|
.action(cloneDb);
|
|
154
159
|
|
|
160
|
+
// Register `ante db clone-mongodb <mongodb-url>`: clones a remote MongoDB
// database into the local installed instance (handler: src/commands/clone-mongodb.js).
dbCmd
  .command('clone-mongodb <mongodb-url>')
  .description('Clone remote MongoDB database to local')
  .option('--skip-dump', 'Use existing backup directory')
  .option('--backup-dir <path>', 'Path to existing backup directory')
  .option('--force', 'Skip confirmation prompts')
  .action(cloneMongodb);
|
|
167
|
+
|
|
155
168
|
// Status & Monitoring
|
|
156
169
|
program
|
|
157
170
|
.command('status')
|
package/src/commands/clone-mongodb.js
CHANGED
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
import chalk from 'chalk';
|
|
2
|
+
import ora from 'ora';
|
|
3
|
+
import inquirer from 'inquirer';
|
|
4
|
+
import { join } from 'path';
|
|
5
|
+
import { existsSync, readFileSync, readdirSync } from 'fs';
|
|
6
|
+
import { stat } from 'fs/promises';
|
|
7
|
+
import { getInstallDir } from '../utils/config.js';
|
|
8
|
+
import {
|
|
9
|
+
parseMongoUrl,
|
|
10
|
+
buildMongoUrl,
|
|
11
|
+
testConnection,
|
|
12
|
+
dumpDatabase,
|
|
13
|
+
restoreDatabase,
|
|
14
|
+
verifyDataRestored
|
|
15
|
+
} from '../utils/mongodb.js';
|
|
16
|
+
|
|
17
|
+
/**
 * Read the local MongoDB connection string from an installed instance's .env file.
 *
 * Looks for a MONGODB_URI entry first and falls back to MONGODB_URL.
 *
 * @param {string} installDir - Installation directory containing the .env file
 * @returns {string} MongoDB connection string (mongodb:// or mongodb+srv://)
 * @throws {Error} When the .env file is missing, neither variable is set,
 *                 or the value is not a MongoDB URI
 */
function readMongoDbUri(installDir) {
  const envPath = join(installDir, '.env');

  if (!existsSync(envPath)) {
    throw new Error(
      '.env file not found. This command is for installed ANTE instances only.\n' +
      'For development environments, use the appropriate script.'
    );
  }

  const contents = readFileSync(envPath, 'utf-8');

  // Prefer MONGODB_URI; fall back to MONGODB_URL.
  let found = null;
  for (const pattern of [/^MONGODB_URI=(.+)$/m, /^MONGODB_URL=(.+)$/m]) {
    found = contents.match(pattern);
    if (found) {
      break;
    }
  }

  if (!found) {
    throw new Error('MONGODB_URI or MONGODB_URL not found in .env file');
  }

  const uri = found[1].trim();

  // Sanity-check the scheme before handing the value to the clone pipeline.
  const looksLikeMongo = uri.startsWith('mongodb://') || uri.startsWith('mongodb+srv://');
  if (!looksLikeMongo) {
    throw new Error('Invalid MongoDB URI format in .env file');
  }

  return uri;
}
|
|
53
|
+
|
|
54
|
+
/**
 * Clone remote MongoDB database to local database.
 *
 * Orchestrates the full pipeline: parse source URL, read the local MONGODB_URI
 * from the installed instance's .env, ping both databases via Docker, confirm
 * with the user, mongodump the source, mongorestore into the local Docker
 * Compose 'mongo' service (dropping existing collections), and verify counts.
 * Exits the process with code 1 on any failure.
 *
 * @param {string} sourceUrl - Source MongoDB URL
 * @param {Object} options - Command options
 * @param {boolean} [options.force] - Skip the destructive-operation confirmation prompt
 * @param {boolean} [options.skipDump] - Reuse an existing backup instead of dumping
 * @param {string} [options.backupDir] - Path to the existing backup (required with skipDump)
 */
export async function cloneMongodb(sourceUrl, options = {}) {
  try {
    console.log('');
    console.log(chalk.blue('╔════════════════════════════════════════════════════════════╗'));
    console.log(chalk.blue('║ ANTE CLI - Clone MongoDB Database ║'));
    console.log(chalk.blue('╚════════════════════════════════════════════════════════════╝'));
    console.log('');

    // Get ANTE installation directory
    const installDir = getInstallDir();
    const backupDir = join(installDir, 'backups', 'mongodb');
    const composeFile = join(installDir, 'docker-compose.yml');

    // Step 1: Parse source database URL
    console.log(chalk.yellow('Step 1/5: Parsing source database URL...'));
    let sourceInfo;
    try {
      sourceInfo = parseMongoUrl(sourceUrl);
      console.log(chalk.green('✓ Source URL parsed successfully'));
      console.log(chalk.gray(` Host: ${sourceInfo.host}${sourceInfo.port ? ':' + sourceInfo.port : ' (SRV)'}`));
      console.log(chalk.gray(` Database: ${sourceInfo.database}`));
      console.log(chalk.gray(` User: ${sourceInfo.user || '(none)'}`));
      if (sourceInfo.authDatabase && sourceInfo.authDatabase !== sourceInfo.database) {
        console.log(chalk.gray(` Auth Database: ${sourceInfo.authDatabase}`));
      }
      console.log('');
    } catch (error) {
      throw new Error(`Failed to parse source URL: ${error.message}`);
    }

    // Step 2: Parse local database URL
    console.log(chalk.yellow('Step 2/5: Reading local database configuration...'));
    const localMongoUri = readMongoDbUri(installDir);
    // NOTE(review): readMongoDbUri throws rather than returning a falsy value,
    // so this guard looks unreachable — confirm before relying on it.
    if (!localMongoUri) {
      throw new Error('.env file not found or MONGODB_URI not set');
    }

    let localInfo;
    try {
      localInfo = parseMongoUrl(localMongoUri);
      console.log(chalk.green('✓ Local database configuration loaded'));
      console.log(chalk.gray(` Host: ${localInfo.host}${localInfo.port ? ':' + localInfo.port : ' (SRV)'}`));
      console.log(chalk.gray(` Database: ${localInfo.database}`));
      console.log(chalk.gray(` User: ${localInfo.user || '(none)'}`));
      console.log('');
    } catch (error) {
      throw new Error(`Failed to parse local MONGODB_URI: ${error.message}`);
    }

    // Step 3: Test connections
    console.log(chalk.yellow('Step 3/5: Testing database connections...'));

    // Test source connection (standalone docker run — source is remote)
    const sourceSpinner = ora('Testing source database connection...').start();
    const sourceTest = await testConnection(sourceInfo);
    if (!sourceTest.success) {
      sourceSpinner.fail(chalk.red('Source database connection failed'));
      throw new Error(`Cannot connect to source database: ${sourceTest.error}`);
    }
    sourceSpinner.succeed(chalk.green('Source database connection successful'));

    // Test local connection (use Docker Compose for installed ANTE)
    const localSpinner = ora('Testing local database connection...').start();
    const localTest = await testConnection(localInfo, composeFile);
    if (!localTest.success) {
      localSpinner.fail(chalk.red('Local database connection failed'));
      throw new Error(`Cannot connect to local database: ${localTest.error}`);
    }
    localSpinner.succeed(chalk.green('Local database connection successful'));
    console.log('');

    // Step 4: Confirmation — the restore below drops local collections, so
    // require an explicit yes unless --force was given.
    if (!options.force) {
      console.log(chalk.yellow('⚠️ WARNING: This will DROP and REPLACE your local database collections!'));
      console.log(chalk.yellow('⚠️ All existing local data will be LOST!'));
      console.log('');
      console.log(chalk.blue('Configuration Summary:'));
      console.log(chalk.gray('┌─ Source Database:'));
      console.log(chalk.gray(`│ Host: ${sourceInfo.host}${sourceInfo.port ? ':' + sourceInfo.port : ' (SRV)'}`));
      console.log(chalk.gray(`│ Database: ${sourceInfo.database}`));
      console.log(chalk.gray('│'));
      console.log(chalk.gray('└─ Target Database (Local):'));
      console.log(chalk.gray(` Host: ${localInfo.host}${localInfo.port ? ':' + localInfo.port : ' (SRV)'}`));
      console.log(chalk.gray(` Database: ${localInfo.database}`));
      console.log('');

      const { confirm } = await inquirer.prompt([
        {
          type: 'confirm',
          name: 'confirm',
          message: 'Do you want to continue?',
          default: false
        }
      ]);

      if (!confirm) {
        console.log(chalk.gray('\nClone cancelled by user.'));
        return;
      }
      console.log('');
    }

    // Step 5: Dump source database (if not skipped)
    // NOTE(review): --skip-dump without --backup-dir silently falls through to
    // a fresh dump instead of erroring — confirm this is intended.
    let dumpDirectory;
    if (options.skipDump && options.backupDir) {
      console.log(chalk.yellow('Step 4/5: Using existing backup directory...'));
      dumpDirectory = options.backupDir;

      if (!existsSync(dumpDirectory)) {
        throw new Error(`Backup directory not found: ${dumpDirectory}`);
      }

      // Verify backup contains database folder with BSON files
      const dbDumpDir = join(dumpDirectory, sourceInfo.database);
      if (!existsSync(dbDumpDir)) {
        throw new Error(`Database dump folder not found: ${dbDumpDir}`);
      }

      const files = readdirSync(dbDumpDir);
      const bsonFiles = files.filter(f => f.endsWith('.bson'));

      if (bsonFiles.length === 0) {
        throw new Error(`No BSON files found in backup directory: ${dbDumpDir}`);
      }

      // Calculate total size (BSON + metadata files) for the summary output
      let totalSize = 0;
      for (const file of files) {
        const filePath = join(dbDumpDir, file);
        const stats = await stat(filePath);
        totalSize += stats.size;
      }

      const sizeInMB = (totalSize / (1024 * 1024)).toFixed(2);
      const size = sizeInMB >= 1 ? `${sizeInMB} MB` : `${(totalSize / 1024).toFixed(2)} KB`;

      console.log(chalk.green('✓ Using existing backup'));
      console.log(chalk.gray(` Directory: ${dumpDirectory}`));
      console.log(chalk.gray(` Collections: ${bsonFiles.length}`));
      console.log(chalk.gray(` Size: ${size}`));
      console.log('');
    } else {
      console.log(chalk.yellow('Step 4/5: Dumping source database...'));
      console.log(chalk.gray('This may take several minutes depending on database size...'));
      console.log('');

      // Timestamped folder, e.g. backups/mongodb/clone-backup-2024-01-01_12-00-00
      const timestamp = new Date().toISOString().split('.')[0].replace(/:/g, '-').replace('T', '_');
      dumpDirectory = join(backupDir, `clone-backup-${timestamp}`);

      const dumpSpinner = ora('Creating database dump...').start();
      const dumpResult = await dumpDatabase(sourceInfo, dumpDirectory);

      if (!dumpResult.success) {
        dumpSpinner.fail(chalk.red('Database dump failed'));
        throw new Error(dumpResult.error);
      }

      dumpSpinner.succeed(chalk.green('Database dump completed'));
      console.log(chalk.gray(` Directory: ${dumpResult.dir}`));
      console.log(chalk.gray(` Collections: ${dumpResult.collections}`));
      console.log(chalk.gray(` Size: ${dumpResult.size}`));
      console.log('');
    }

    // Step 6: Restore to local database (drop-and-recreate via mongorestore)
    console.log(chalk.yellow('Step 5/5: Restoring to local database...'));
    console.log(chalk.gray('This may take several minutes...'));
    console.log('');

    const restoreSpinner = ora('Restoring database (collections will be dropped and recreated)...').start();
    const restoreResult = await restoreDatabase(dumpDirectory, localInfo, true, composeFile);

    if (!restoreResult.success) {
      restoreSpinner.fail(chalk.red('Database restore failed'));
      throw new Error(restoreResult.error);
    }

    restoreSpinner.succeed(chalk.green('Database restore completed'));
    console.log('');

    // Step 6A: Verify data was restored — restoreDatabase runs mongorestore
    // with reject:false, so this count check is the real success signal.
    const verifySpinner = ora('Verifying data restoration...').start();
    const verifyResult = await verifyDataRestored(localInfo, composeFile);

    if (!verifyResult.success) {
      verifySpinner.fail(chalk.red('Data verification failed'));
      console.log(chalk.yellow(' Warning: Could not verify data restoration'));
    } else {
      verifySpinner.succeed(chalk.green(`Verified: ${verifyResult.collectionCount} collections, ${verifyResult.totalDocuments.toLocaleString()} documents restored`));

      // Warn if no data was restored
      if (verifyResult.totalDocuments === 0) {
        console.log(chalk.red('\n⚠️ WARNING: No documents were restored!'));
        console.log(chalk.yellow(' Possible causes:'));
        console.log(chalk.gray(' - Source database was empty'));
        console.log(chalk.gray(' - Authentication issues prevented data restore'));
        console.log(chalk.gray(' - Permission issues with target database'));
        console.log(chalk.gray(' - Data restore errors were silently ignored\n'));
      }
    }
    console.log('');

    // Final summary
    console.log(chalk.green('╔════════════════════════════════════════════════════════════╗'));
    console.log(chalk.green('║ Database Clone Completed Successfully! ║'));
    console.log(chalk.green('╚════════════════════════════════════════════════════════════╝'));
    console.log('');
    console.log(chalk.blue('Summary:'));
    console.log(chalk.gray(` ✓ Source database dumped to: ${dumpDirectory}`));
    console.log(chalk.gray(` ✓ Restored to local database: ${localInfo.database}`));

    // Show data restoration summary
    if (verifyResult && verifyResult.success) {
      if (verifyResult.totalDocuments > 0) {
        console.log(chalk.green(` ✓ Data restored: ${verifyResult.totalDocuments.toLocaleString()} documents across ${verifyResult.collectionCount} collections`));
      } else {
        console.log(chalk.red(` ✗ No data restored (${verifyResult.collectionCount} collections created but empty)`));
      }
    }

    console.log('');
    console.log(chalk.cyan('Your local MongoDB database is now cloned from the source!'));
    console.log(chalk.gray('You can start your services with: ante start'));
    console.log('');

  } catch (error) {
    // Any failure in the pipeline lands here; exit non-zero for scripting.
    console.log('');
    console.error(chalk.red('✗ MongoDB clone failed:'), error.message);
    console.log('');
    process.exit(1);
  }
}
|
package/src/commands/install.js
CHANGED
|
@@ -483,6 +483,18 @@ export async function install(options) {
|
|
|
483
483
|
apiDomain = buildURL(manualConfig.hostIp, 3001);
|
|
484
484
|
}
|
|
485
485
|
|
|
486
|
+
// Determine which frontends to install based on flags
|
|
487
|
+
const installAllFrontends = options.withAllFrontends;
|
|
488
|
+
const installGate = installAllFrontends || options.withGate;
|
|
489
|
+
const installGuardian = installAllFrontends || options.withGuardian;
|
|
490
|
+
const installFacial = installAllFrontends || options.withFacial;
|
|
491
|
+
|
|
492
|
+
// Build frontends array
|
|
493
|
+
const frontends = ['main'];
|
|
494
|
+
if (installGate) frontends.push('gate');
|
|
495
|
+
if (installGuardian) frontends.push('guardian');
|
|
496
|
+
if (installFacial) frontends.push('facial');
|
|
497
|
+
|
|
486
498
|
config = {
|
|
487
499
|
installDir: options.dir,
|
|
488
500
|
preset: options.preset || 'standard',
|
|
@@ -492,12 +504,15 @@ export async function install(options) {
|
|
|
492
504
|
apiPort: 3001,
|
|
493
505
|
gateAppPort: 8081,
|
|
494
506
|
guardianAppPort: 8082,
|
|
507
|
+
facialWebPort: 8083,
|
|
495
508
|
gateAppDomain: buildURL(detectedIP || 'localhost', 8081),
|
|
496
509
|
guardianAppDomain: buildURL(detectedIP || 'localhost', 8082),
|
|
510
|
+
facialAppDomain: buildURL(detectedIP || 'localhost', 8083),
|
|
497
511
|
companyId: 1,
|
|
498
|
-
installGate
|
|
499
|
-
installGuardian
|
|
500
|
-
|
|
512
|
+
installGate,
|
|
513
|
+
installGuardian,
|
|
514
|
+
installFacial,
|
|
515
|
+
frontends
|
|
501
516
|
};
|
|
502
517
|
}
|
|
503
518
|
|
|
package/src/utils/mongodb.js
ADDED
@@ -0,0 +1,463 @@
|
|
|
1
|
+
import { execa } from 'execa';
|
|
2
|
+
import { existsSync, mkdirSync, readdirSync } from 'fs';
|
|
3
|
+
import { join, dirname, resolve } from 'path';
|
|
4
|
+
import { stat } from 'fs/promises';
|
|
5
|
+
|
|
6
|
+
/**
 * Parse a MongoDB connection URL into its components.
 *
 * Supports standard (mongodb://) and SRV (mongodb+srv://) URLs, with or
 * without credentials. Credentials are returned exactly as they appear in the
 * URL (no percent-decoding) so they round-trip through buildMongoUrl().
 *
 * @param {string} url - MongoDB connection URL
 *   (e.g. mongodb://user:pass@host:port/db or mongodb+srv://...)
 * @returns {Object} Connection info { user, password, host, port, database, authDatabase, isSrv }
 * @throws {Error} When the URL lacks a host or a database name
 */
export function parseMongoUrl(url) {
  try {
    // Check if it's an SRV connection string
    const isSrv = url.startsWith('mongodb+srv://');

    // Remove protocol prefix (mongodb://, mongodb+srv://)
    const withoutProtocol = url.replace(/^mongodb(\+srv)?:\/\//, '');

    // Split the authority (credentials + host[:port]) from the path/query part.
    const slashIdx = withoutProtocol.indexOf('/');
    const authority = slashIdx === -1 ? withoutProtocol : withoutProtocol.slice(0, slashIdx);
    const pathAndQuery = slashIdx === -1 ? '' : withoutProtocol.slice(slashIdx + 1);

    // FIX: the previous regex-based extraction required an '@' to find the host,
    // so valid credential-less URLs like mongodb://localhost:27017/db were
    // rejected, and user-only URLs (user@host) were mis-parsed.
    let user = null;
    let password = null;
    let hostPort = authority;
    const atIdx = authority.lastIndexOf('@');
    if (atIdx !== -1) {
      const credentials = authority.slice(0, atIdx);
      hostPort = authority.slice(atIdx + 1);
      const sepIdx = credentials.indexOf(':');
      if (sepIdx === -1) {
        user = credentials || null;
      } else {
        user = credentials.slice(0, sepIdx) || null;
        password = credentials.slice(sepIdx + 1) || null;
      }
    }

    const portIdx = hostPort.indexOf(':');
    const host = (portIdx === -1 ? hostPort : hostPort.slice(0, portIdx)) || null;
    // SRV connection strings never carry an explicit port; plain URLs default to 27017.
    let port = portIdx === -1 ? null : hostPort.slice(portIdx + 1);
    if (!port) {
      port = isSrv ? null : '27017';
    }

    // Database name is the path segment before '?'; authSource comes from the query.
    const queryIdx = pathAndQuery.indexOf('?');
    const database = (queryIdx === -1 ? pathAndQuery : pathAndQuery.slice(0, queryIdx)) || null;
    const query = queryIdx === -1 ? '' : pathAndQuery.slice(queryIdx + 1);
    const authSourceMatch = query.match(/(?:^|&)authSource=([^&]+)/);
    const authDatabase = authSourceMatch ? authSourceMatch[1] : (database || 'admin');

    // Validate required fields
    if (!host || !database) {
      throw new Error('Invalid MongoDB URL format. Expected: mongodb://[user:password@]host[:port]/database or mongodb+srv://...');
    }

    return {
      user,
      password,
      host,
      port,
      database,
      authDatabase,
      isSrv
    };
  } catch (error) {
    throw new Error(`Failed to parse MongoDB URL: ${error.message}`);
  }
}
|
|
53
|
+
|
|
54
|
+
/**
 * Assemble a MongoDB connection string from parsed connection info.
 *
 * Inverse of parseMongoUrl(): credentials are included only when both user
 * and password are present, the port is omitted for SRV URLs, and authSource
 * is appended only when it differs from the target database.
 *
 * @param {Object} connectionInfo - Connection details from parseMongoUrl
 * @returns {string} MongoDB connection string
 */
export function buildMongoUrl(connectionInfo) {
  const { user, password, host, port, database, authDatabase, isSrv } = connectionInfo;

  let url = isSrv ? 'mongodb+srv://' : 'mongodb://';

  // Credentials appear only when both parts are available.
  if (user && password) {
    url += `${user}:${password}@`;
  }

  url += host;

  // SRV URLs never carry an explicit port.
  if (port && !isSrv) {
    url += `:${port}`;
  }

  url += `/${database}`;

  // authSource only matters when it differs from the target database.
  if (authDatabase && authDatabase !== database) {
    url += `?authSource=${authDatabase}`;
  }

  return url;
}
|
|
69
|
+
|
|
70
|
+
/**
 * Test MongoDB connectivity by running `mongosh --eval db.adminCommand("ping")`
 * inside Docker.
 *
 * @param {Object} connectionInfo - Connection details from parseMongoUrl
 * @param {string} composeFile - Optional path to docker-compose.yml; when given,
 *   the check runs via `docker compose exec` inside the 'mongo' service so
 *   Docker-network hostnames resolve
 * @returns {Promise<{success: boolean, error?: string}>} Never throws; failures
 *   are reported via the returned object
 */
export async function testConnection(connectionInfo, composeFile = null) {
  try {
    const mongoUrl = buildMongoUrl(connectionInfo);

    // If composeFile provided, use Docker Compose exec (for local database in Docker network)
    if (composeFile) {
      await execa('docker', [
        'compose',
        '-f', composeFile,
        'exec',
        '-T',
        'mongo',
        'mongosh',
        mongoUrl,
        '--quiet',
        '--eval', 'db.adminCommand("ping")'
      ], {
        timeout: 10000 // 10 second timeout
      });

      return { success: true };
    }

    // Standalone Docker for remote databases
    let testUrl = mongoUrl;
    const dockerArgs = ['run', '--rm'];

    // A container cannot reach the host's loopback directly, so remap
    // localhost to the Docker host gateway.
    if (connectionInfo.host === 'localhost' || connectionInfo.host === '127.0.0.1') {
      // FIX: build the rewritten URL with buildMongoUrl() instead of duplicating
      // its string-assembly logic inline, so the two code paths cannot drift.
      testUrl = buildMongoUrl({ ...connectionInfo, host: 'host.docker.internal' });
      dockerArgs.push('--add-host=host.docker.internal:host-gateway');
    }

    dockerArgs.push(
      'mongo:latest',
      'mongosh',
      testUrl,
      '--quiet',
      '--eval', 'db.adminCommand("ping")'
    );

    // Use Docker to test connection
    await execa('docker', dockerArgs, {
      timeout: 10000 // 10 second timeout
    });

    return { success: true };
  } catch (error) {
    return {
      success: false,
      error: error.message || 'Connection failed'
    };
  }
}
|
|
137
|
+
|
|
138
|
+
/**
 * Dump a MongoDB database to a directory using a throwaway Docker container.
 *
 * The output directory is bind-mounted into the container at /backups and
 * mongodump writes a `<database>/` folder of .bson + metadata files there.
 *
 * @param {Object} connectionInfo - Connection details from parseMongoUrl
 * @param {string} outputDir - Path to output directory (created if missing)
 * @returns {Promise<{success: boolean, dir: string, size: string, collections: number, error?: string}>}
 *   Never throws; failures are reported via the returned object
 */
export async function dumpDatabase(connectionInfo, outputDir) {
  try {
    const { user, password, host, port, database, authDatabase, isSrv } = connectionInfo;

    // Ensure backup directory exists
    if (!existsSync(outputDir)) {
      mkdirSync(outputDir, { recursive: true });
    }

    // Convert to absolute path for Docker volume mount
    const absoluteOutputDir = resolve(outputDir);

    // Build mongodump arguments
    const dumpArgs = ['run', '--rm', '-v', `${absoluteOutputDir}:/backups`, 'mongo:latest', 'mongodump'];

    // A container cannot reach the host's loopback directly; route localhost
    // through the Docker host gateway instead.
    let dumpHost = host;
    if (host === 'localhost' || host === '127.0.0.1') {
      dumpHost = 'host.docker.internal';
      dumpArgs.splice(1, 0, '--add-host=host.docker.internal:host-gateway');
    }

    // Add connection parameters
    if (isSrv) {
      // For SRV connections, use --uri
      const protocol = 'mongodb+srv://';
      const auth = user && password ? `${user}:${password}@` : '';
      const authDbPart = authDatabase && authDatabase !== database ? `?authSource=${authDatabase}` : '';
      const uri = `${protocol}${auth}${dumpHost}/${database}${authDbPart}`;
      dumpArgs.push('--uri', uri);
    } else {
      // For standard connections, use individual parameters
      dumpArgs.push('--host', dumpHost);
      if (port) dumpArgs.push('--port', port);
      if (user) dumpArgs.push('--username', user);
      if (password) dumpArgs.push('--password', password);
      if (authDatabase) dumpArgs.push('--authenticationDatabase', authDatabase);
      dumpArgs.push('--db', database);
    }

    // Output directory (inside the container; bind-mounted to outputDir)
    dumpArgs.push('--out', '/backups');

    // Execute mongodump
    await execa('docker', dumpArgs, {
      timeout: 600000 // 10 minute timeout
    });

    // Verify dump directory was created and has content
    const dumpDbDir = join(outputDir, database);
    if (!existsSync(dumpDbDir)) {
      throw new Error('Dump directory was not created');
    }

    // Count collections (BSON files)
    const files = readdirSync(dumpDbDir);
    const bsonFiles = files.filter(f => f.endsWith('.bson'));
    const collections = bsonFiles.length;

    if (collections === 0) {
      throw new Error('No collections were dumped. Database might be empty or connection failed.');
    }

    // Calculate total size (BSON + metadata files)
    let totalSize = 0;
    for (const file of files) {
      const filePath = join(dumpDbDir, file);
      const stats = await stat(filePath);
      totalSize += stats.size;
    }

    // FIX: compare bytes numerically; the previous `sizeInMB >= 1` compared the
    // string returned by toFixed() against a number.
    const size = totalSize >= 1024 * 1024
      ? `${(totalSize / (1024 * 1024)).toFixed(2)} MB`
      : `${(totalSize / 1024).toFixed(2)} KB`;

    return {
      success: true,
      dir: outputDir,
      size,
      collections
    };
  } catch (error) {
    return {
      success: false,
      dir: outputDir,
      size: '0 KB',
      collections: 0,
      error: error.message || 'Dump failed'
    };
  }
}
|
|
235
|
+
|
|
236
|
+
/**
 * Restore MongoDB database from dump directory using Docker.
 *
 * Two paths: with a composeFile the dump is `docker cp`'d into the running
 * mongo container and restored via `docker compose exec`; otherwise a
 * throwaway mongo:latest container bind-mounts the dump and restores over
 * the network.
 *
 * @param {string} dumpDir - Path to dump directory (contains database folder with .bson files)
 * @param {Object} connectionInfo - Target database connection details
 * @param {boolean} dropBeforeRestore - Whether to drop collections before restore (default: true)
 * @param {string} composeFile - Optional path to docker-compose.yml for Docker Compose network access
 * @returns {Promise<{success: boolean, error?: string}>} Never throws; failures
 *   are reported via the returned object. NOTE(review): mongorestore runs with
 *   reject:false, so a failed restore still returns success:true — callers must
 *   verify the data afterwards (cloneMongodb does, via verifyDataRestored).
 */
export async function restoreDatabase(dumpDir, connectionInfo, dropBeforeRestore = true, composeFile = null) {
  try {
    const { user, password, host, port, database, authDatabase, isSrv } = connectionInfo;

    // Verify dump directory exists
    const dumpDbDir = join(dumpDir, database);
    if (!existsSync(dumpDbDir)) {
      throw new Error(`Dump directory not found: ${dumpDbDir}`);
    }

    // Verify BSON files exist
    const files = readdirSync(dumpDbDir);
    const bsonFiles = files.filter(f => f.endsWith('.bson'));
    if (bsonFiles.length === 0) {
      throw new Error(`No BSON files found in dump directory: ${dumpDbDir}`);
    }

    // If using Docker Compose (for local database in Docker network)
    if (composeFile) {
      // Copy dump directory into mongo container.
      // NOTE(review): the container name 'ante-mongo' is hard-coded here while
      // the exec calls use the compose service name 'mongo' — confirm these
      // always refer to the same container for every install.
      const containerDumpDir = `/tmp/mongodump_${Date.now()}`;
      await execa('docker', [
        'cp',
        dumpDir,
        `ante-mongo:${containerDumpDir}`
      ], {
        timeout: 120000 // 2 minute timeout for large files
      });

      try {
        // Build mongorestore command
        const restoreArgs = [
          'compose',
          '-f', composeFile,
          'exec',
          '-T',
          'mongo',
          'mongorestore'
        ];

        // Add connection parameters
        if (user) restoreArgs.push('--username', user);
        if (password) restoreArgs.push('--password', password);
        if (authDatabase) restoreArgs.push('--authenticationDatabase', authDatabase);
        restoreArgs.push('--db', database);

        // Drop collections before restore if requested
        if (dropBeforeRestore) {
          restoreArgs.push('--drop');
        }

        // Add dump directory path (inside the container, from the `docker cp` above)
        restoreArgs.push(`${containerDumpDir}/${database}`);

        // Restore from inside the container
        await execa('docker', restoreArgs, {
          timeout: 600000, // 10 minute timeout
          reject: false // Don't reject on non-zero exit (mongorestore might have warnings)
        });
      } finally {
        // Clean up: Remove dump directory from container
        try {
          await execa('docker', [
            'compose',
            '-f', composeFile,
            'exec',
            '-T',
            'mongo',
            'rm',
            '-rf',
            containerDumpDir
          ], {
            timeout: 10000,
            reject: false
          });
        } catch {
          // Ignore cleanup errors
        }
      }

      return { success: true };
    }

    // Standalone Docker for remote databases
    const absoluteDumpDir = resolve(dumpDir);
    const restoreArgs = ['run', '--rm', '-v', `${absoluteDumpDir}:/backups`, 'mongo:latest', 'mongorestore'];

    // Build connection URI or parameters; localhost is remapped to the Docker
    // host gateway because the container cannot reach the host's loopback.
    let restoreHost = host;
    if (host === 'localhost' || host === '127.0.0.1') {
      restoreHost = 'host.docker.internal';
      restoreArgs.splice(1, 0, '--add-host=host.docker.internal:host-gateway');
    }

    // Add connection parameters
    if (isSrv) {
      // For SRV connections, use --uri
      const protocol = 'mongodb+srv://';
      const auth = user && password ? `${user}:${password}@` : '';
      const authDbPart = authDatabase && authDatabase !== database ? `?authSource=${authDatabase}` : '';
      const uri = `${protocol}${auth}${restoreHost}/${database}${authDbPart}`;
      restoreArgs.push('--uri', uri);
    } else {
      // For standard connections, use individual parameters
      restoreArgs.push('--host', restoreHost);
      if (port) restoreArgs.push('--port', port);
      if (user) restoreArgs.push('--username', user);
      if (password) restoreArgs.push('--password', password);
      if (authDatabase) restoreArgs.push('--authenticationDatabase', authDatabase);
      restoreArgs.push('--db', database);
    }

    // Drop collections before restore if requested
    if (dropBeforeRestore) {
      restoreArgs.push('--drop');
    }

    // Add dump directory path (inside the container, via the bind mount)
    restoreArgs.push(`/backups/${database}`);

    // Execute mongorestore
    await execa('docker', restoreArgs, {
      timeout: 600000, // 10 minute timeout
      reject: false // Don't reject on non-zero exit (mongorestore might have warnings)
    });

    return { success: true };
  } catch (error) {
    return {
      success: false,
      error: error.message || 'Restore failed'
    };
  }
}
|
|
378
|
+
|
|
379
|
+
/**
 * Verify data was restored by counting collections and documents.
 *
 * Runs a small JS snippet via `mongosh --eval` inside Docker: either through
 * `docker compose exec` against the project's `mongo` service (when a compose
 * file is given) or through a throwaway `docker run mongo:latest` container.
 *
 * @param {Object} connectionInfo - Connection details (host, port, user, password, authDatabase, database, isSrv)
 * @param {string} composeFile - Optional path to docker-compose.yml
 * @returns {Promise<{success: boolean, collectionCount: number, totalDocuments: number, error?: string}>}
 */
export async function verifyDataRestored(connectionInfo, composeFile = null) {
  try {
    const mongoUrl = buildMongoUrl(connectionInfo);

    // JavaScript to execute in MongoDB: count collections and total documents,
    // then print a single JSON line we can parse from stdout.
    const verifyScript = `
      const collections = db.getCollectionNames();
      let totalDocs = 0;
      for (const coll of collections) {
        totalDocs += db.getCollection(coll).countDocuments();
      }
      print(JSON.stringify({ collections: collections.length, documents: totalDocs }));
    `;

    let result;

    if (composeFile) {
      // Use Docker Compose exec against the stack's `mongo` service.
      result = await execa('docker', [
        'compose',
        '-f', composeFile,
        'exec',
        '-T',
        'mongo',
        'mongosh',
        mongoUrl,
        '--quiet',
        '--eval', verifyScript
      ], {
        timeout: 30000 // 30 second timeout
      });
    } else {
      // Standalone Docker: run a one-off mongo container.
      let testUrl = mongoUrl;
      const dockerArgs = ['run', '--rm'];

      if (connectionInfo.host === 'localhost' || connectionInfo.host === '127.0.0.1') {
        // From inside a container, "localhost" is the container itself; rewrite
        // the target to host.docker.internal and map it to the host gateway.
        const protocol = connectionInfo.isSrv ? 'mongodb+srv://' : 'mongodb://';
        // Percent-encode credentials: passwords containing '@', ':', '/' or '?'
        // would otherwise corrupt the connection URI (MongoDB URI spec).
        const auth = connectionInfo.user && connectionInfo.password
          ? `${encodeURIComponent(connectionInfo.user)}:${encodeURIComponent(connectionInfo.password)}@`
          : '';
        const portPart = connectionInfo.port && !connectionInfo.isSrv ? `:${connectionInfo.port}` : '';
        const authDbPart = connectionInfo.authDatabase && connectionInfo.authDatabase !== connectionInfo.database
          ? `?authSource=${connectionInfo.authDatabase}`
          : '';

        testUrl = `${protocol}${auth}host.docker.internal${portPart}/${connectionInfo.database}${authDbPart}`;
        dockerArgs.push('--add-host=host.docker.internal:host-gateway');
      }

      dockerArgs.push(
        'mongo:latest',
        'mongosh',
        testUrl,
        '--quiet',
        '--eval', verifyScript
      );

      result = await execa('docker', dockerArgs, {
        timeout: 30000 // 30 second timeout
      });
    }

    // Parse output. mongosh can emit banner/warning lines even with --quiet
    // (e.g. TLS or deprecation notices), so parse only the last non-empty
    // line, which is the JSON printed by verifyScript.
    const lines = result.stdout.trim().split('\n').filter((line) => line.trim() !== '');
    if (lines.length === 0) {
      return {
        success: false,
        collectionCount: 0,
        totalDocuments: 0,
        error: 'mongosh produced no output'
      };
    }
    const stats = JSON.parse(lines[lines.length - 1]);

    return {
      success: true,
      collectionCount: stats.collections,
      totalDocuments: stats.documents
    };
  } catch (error) {
    // Any failure (docker missing, connection refused, unparseable output)
    // is reported as a soft failure rather than thrown to the caller.
    return {
      success: false,
      collectionCount: 0,
      totalDocuments: 0,
      error: error.message || 'Failed to verify data'
    };
  }
}
|