genbox 1.0.48 → 1.0.50
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api.js +62 -0
- package/dist/commands/create.js +186 -1
- package/dist/commands/help.js +15 -13
- package/dist/commands/init.js +120 -17
- package/dist/commands/push.js +52 -9
- package/dist/db-utils.js +416 -0
- package/dist/index.js +1 -1
- package/dist/profile-resolver.js +22 -13
- package/package.json +3 -2
package/dist/api.js
CHANGED
|
@@ -8,6 +8,14 @@ exports.handleApiError = handleApiError;
|
|
|
8
8
|
exports.isAuthError = isAuthError;
|
|
9
9
|
exports.fetchApi = fetchApi;
|
|
10
10
|
exports.checkNameAvailability = checkNameAvailability;
|
|
11
|
+
exports.syncProject = syncProject;
|
|
12
|
+
exports.getProjectByName = getProjectByName;
|
|
13
|
+
exports.initiateSnapshotUpload = initiateSnapshotUpload;
|
|
14
|
+
exports.completeSnapshotUpload = completeSnapshotUpload;
|
|
15
|
+
exports.failSnapshotUpload = failSnapshotUpload;
|
|
16
|
+
exports.listProjectSnapshots = listProjectSnapshots;
|
|
17
|
+
exports.getLatestSnapshot = getLatestSnapshot;
|
|
18
|
+
exports.getSnapshotDownloadUrl = getSnapshotDownloadUrl;
|
|
11
19
|
const chalk_1 = __importDefault(require("chalk"));
|
|
12
20
|
const config_store_1 = require("./config-store");
|
|
13
21
|
const API_URL = process.env.GENBOX_API_URL || 'https://api.genbox.dev';
|
|
@@ -97,3 +105,57 @@ async function fetchApi(endpoint, options = {}) {
|
|
|
97
105
|
async function checkNameAvailability(name, workspace) {
|
|
98
106
|
return fetchApi(`/genboxes/check-name?name=${encodeURIComponent(name)}&workspace=${encodeURIComponent(workspace)}`);
|
|
99
107
|
}
|
|
108
|
+
async function syncProject(payload) {
|
|
109
|
+
return fetchApi('/projects', {
|
|
110
|
+
method: 'POST',
|
|
111
|
+
body: JSON.stringify(payload),
|
|
112
|
+
});
|
|
113
|
+
}
|
|
114
|
+
async function getProjectByName(name) {
|
|
115
|
+
return fetchApi(`/projects/by-name/${encodeURIComponent(name)}`);
|
|
116
|
+
}
|
|
117
|
+
/**
|
|
118
|
+
* Initiate a snapshot upload - get pre-signed URL
|
|
119
|
+
*/
|
|
120
|
+
async function initiateSnapshotUpload(payload) {
|
|
121
|
+
return fetchApi('/database-snapshots/initiate-upload', {
|
|
122
|
+
method: 'POST',
|
|
123
|
+
body: JSON.stringify(payload),
|
|
124
|
+
});
|
|
125
|
+
}
|
|
126
|
+
/**
|
|
127
|
+
* Complete a snapshot upload after file is uploaded to S3
|
|
128
|
+
*/
|
|
129
|
+
async function completeSnapshotUpload(snapshotId, payload) {
|
|
130
|
+
return fetchApi(`/database-snapshots/${snapshotId}/complete`, {
|
|
131
|
+
method: 'POST',
|
|
132
|
+
body: JSON.stringify(payload),
|
|
133
|
+
});
|
|
134
|
+
}
|
|
135
|
+
/**
|
|
136
|
+
* Mark snapshot upload as failed
|
|
137
|
+
*/
|
|
138
|
+
async function failSnapshotUpload(snapshotId, error) {
|
|
139
|
+
await fetchApi(`/database-snapshots/${snapshotId}/fail`, {
|
|
140
|
+
method: 'POST',
|
|
141
|
+
body: JSON.stringify({ error }),
|
|
142
|
+
});
|
|
143
|
+
}
|
|
144
|
+
/**
|
|
145
|
+
* List snapshots for a project
|
|
146
|
+
*/
|
|
147
|
+
async function listProjectSnapshots(projectId) {
|
|
148
|
+
return fetchApi(`/database-snapshots/project/${projectId}`);
|
|
149
|
+
}
|
|
150
|
+
/**
|
|
151
|
+
* Get latest snapshot for a project by source
|
|
152
|
+
*/
|
|
153
|
+
async function getLatestSnapshot(projectId, source) {
|
|
154
|
+
return fetchApi(`/database-snapshots/project/${projectId}/latest?source=${source}`);
|
|
155
|
+
}
|
|
156
|
+
/**
|
|
157
|
+
* Get download URL for a snapshot
|
|
158
|
+
*/
|
|
159
|
+
async function getSnapshotDownloadUrl(snapshotId) {
|
|
160
|
+
return fetchApi(`/database-snapshots/${snapshotId}/download`);
|
|
161
|
+
}
|
package/dist/commands/create.js
CHANGED
|
@@ -51,6 +51,7 @@ const ssh_config_1 = require("../ssh-config");
|
|
|
51
51
|
const schema_v4_1 = require("../schema-v4");
|
|
52
52
|
const child_process_1 = require("child_process");
|
|
53
53
|
const random_name_1 = require("../random-name");
|
|
54
|
+
const db_utils_1 = require("../db-utils");
|
|
54
55
|
// Credits consumed per hour for each size (matches API billing.config.ts)
|
|
55
56
|
const CREDITS_PER_HOUR = {
|
|
56
57
|
cx22: 1,
|
|
@@ -80,6 +81,23 @@ function spawnSshConfigSetup(genboxId, name) {
|
|
|
80
81
|
// Allow parent to exit independently
|
|
81
82
|
child.unref();
|
|
82
83
|
}
|
|
84
|
+
const DETECTED_DIR = '.genbox';
|
|
85
|
+
const PROJECT_CACHE_FILENAME = 'project.json';
|
|
86
|
+
/**
|
|
87
|
+
* Load project cache from local storage
|
|
88
|
+
*/
|
|
89
|
+
function loadProjectCache(rootDir) {
|
|
90
|
+
const cachePath = path.join(rootDir, DETECTED_DIR, PROJECT_CACHE_FILENAME);
|
|
91
|
+
if (!fs.existsSync(cachePath))
|
|
92
|
+
return null;
|
|
93
|
+
try {
|
|
94
|
+
const content = fs.readFileSync(cachePath, 'utf8');
|
|
95
|
+
return JSON.parse(content);
|
|
96
|
+
}
|
|
97
|
+
catch {
|
|
98
|
+
return null;
|
|
99
|
+
}
|
|
100
|
+
}
|
|
83
101
|
async function provisionGenbox(payload) {
|
|
84
102
|
return (0, api_1.fetchApi)('/genboxes', {
|
|
85
103
|
method: 'POST',
|
|
@@ -220,6 +238,8 @@ exports.createCommand = new commander_1.Command('create')
|
|
|
220
238
|
.option('--api <mode>', 'API mode: local, staging, production')
|
|
221
239
|
.option('--db <mode>', 'Database mode: none, local, copy, remote')
|
|
222
240
|
.option('--db-source <source>', 'Database source: staging, production')
|
|
241
|
+
.option('--db-dump <path>', 'Use existing mongodump file instead of creating one')
|
|
242
|
+
.option('--db-copy-remote', 'Copy database on the server (requires publicly accessible DB)')
|
|
223
243
|
.option('-s, --size <size>', 'Server size: small, medium, large, xl')
|
|
224
244
|
.option('-b, --branch <branch>', 'Use existing git branch (skips new branch creation)')
|
|
225
245
|
.option('-n, --new-branch <name>', 'Create a new branch with this name (defaults to env name)')
|
|
@@ -410,8 +430,102 @@ exports.createCommand = new commander_1.Command('create')
|
|
|
410
430
|
}
|
|
411
431
|
}
|
|
412
432
|
}
|
|
433
|
+
// Handle database copy - dump locally and upload to S3
|
|
434
|
+
let snapshotId;
|
|
435
|
+
let snapshotS3Key;
|
|
436
|
+
let localDumpPath;
|
|
437
|
+
const projectCache = loadProjectCache(process.cwd());
|
|
438
|
+
const needsLocalDbCopy = resolved.database.mode === 'copy' &&
|
|
439
|
+
resolved.database.url &&
|
|
440
|
+
!options.dbCopyRemote;
|
|
441
|
+
if (needsLocalDbCopy) {
|
|
442
|
+
// Check for user-provided dump file
|
|
443
|
+
if (options.dbDump) {
|
|
444
|
+
if (!fs.existsSync(options.dbDump)) {
|
|
445
|
+
console.log(chalk_1.default.red(`Database dump file not found: ${options.dbDump}`));
|
|
446
|
+
return;
|
|
447
|
+
}
|
|
448
|
+
localDumpPath = options.dbDump;
|
|
449
|
+
console.log(chalk_1.default.dim(` Using provided dump file: ${options.dbDump}`));
|
|
450
|
+
}
|
|
451
|
+
else {
|
|
452
|
+
// Need to run mongodump locally
|
|
453
|
+
if (!(0, db_utils_1.isMongoDumpAvailable)()) {
|
|
454
|
+
console.log(chalk_1.default.red('mongodump not found. Required for database copy.'));
|
|
455
|
+
console.log('');
|
|
456
|
+
console.log((0, db_utils_1.getMongoDumpInstallInstructions)());
|
|
457
|
+
console.log('');
|
|
458
|
+
console.log(chalk_1.default.dim('Alternatively:'));
|
|
459
|
+
console.log(chalk_1.default.dim(' • Use --db-dump <path> to provide an existing dump file'));
|
|
460
|
+
console.log(chalk_1.default.dim(' • Use --db-copy-remote if your database is publicly accessible'));
|
|
461
|
+
return;
|
|
462
|
+
}
|
|
463
|
+
const dbUrl = resolved.database.url;
|
|
464
|
+
console.log('');
|
|
465
|
+
console.log(chalk_1.default.blue('=== Database Copy ==='));
|
|
466
|
+
console.log(chalk_1.default.dim(` Source: ${resolved.database.source}`));
|
|
467
|
+
console.log(chalk_1.default.dim(` URL: ${dbUrl.replace(/\/\/[^:]+:[^@]+@/, '//***:***@')}`));
|
|
468
|
+
const dumpSpinner = (0, ora_1.default)('Creating database dump...').start();
|
|
469
|
+
const dumpResult = await (0, db_utils_1.runLocalMongoDump)(dbUrl, {
|
|
470
|
+
onProgress: (msg) => dumpSpinner.text = msg,
|
|
471
|
+
});
|
|
472
|
+
if (!dumpResult.success) {
|
|
473
|
+
dumpSpinner.fail(chalk_1.default.red('Database dump failed'));
|
|
474
|
+
console.log(chalk_1.default.red(` ${dumpResult.error}`));
|
|
475
|
+
console.log('');
|
|
476
|
+
console.log(chalk_1.default.dim('You can:'));
|
|
477
|
+
console.log(chalk_1.default.dim(' • Fix the database connection and try again'));
|
|
478
|
+
console.log(chalk_1.default.dim(' • Use --db local to start with an empty database'));
|
|
479
|
+
console.log(chalk_1.default.dim(' • Use --db-dump <path> to provide an existing dump'));
|
|
480
|
+
return;
|
|
481
|
+
}
|
|
482
|
+
dumpSpinner.succeed(chalk_1.default.green(`Database dump created (${(0, db_utils_1.formatBytes)(dumpResult.sizeBytes || 0)})`));
|
|
483
|
+
localDumpPath = dumpResult.dumpPath;
|
|
484
|
+
}
|
|
485
|
+
// Upload to S3 if we have a project ID
|
|
486
|
+
if (localDumpPath && projectCache?._id) {
|
|
487
|
+
const uploadSpinner = (0, ora_1.default)('Uploading database snapshot...').start();
|
|
488
|
+
const snapshotSource = (resolved.database.source === 'staging' ? 'staging' :
|
|
489
|
+
resolved.database.source === 'production' ? 'production' : 'local');
|
|
490
|
+
const snapshotResult = await (0, db_utils_1.createAndUploadSnapshot)(localDumpPath, projectCache._id, snapshotSource, {
|
|
491
|
+
sourceUrl: resolved.database.url?.replace(/\/\/[^:]+:[^@]+@/, '//***:***@'),
|
|
492
|
+
onProgress: (msg) => uploadSpinner.text = msg,
|
|
493
|
+
});
|
|
494
|
+
if (snapshotResult.success) {
|
|
495
|
+
uploadSpinner.succeed(chalk_1.default.green('Database snapshot uploaded'));
|
|
496
|
+
snapshotId = snapshotResult.snapshotId;
|
|
497
|
+
snapshotS3Key = snapshotResult.s3Key;
|
|
498
|
+
// Cleanup local dump since it's now in S3
|
|
499
|
+
(0, db_utils_1.cleanupDump)(localDumpPath);
|
|
500
|
+
localDumpPath = undefined;
|
|
501
|
+
}
|
|
502
|
+
else {
|
|
503
|
+
uploadSpinner.warn(chalk_1.default.yellow(`Snapshot upload failed: ${snapshotResult.error}`));
|
|
504
|
+
console.log(chalk_1.default.dim(' Will fall back to direct SCP upload after genbox creation'));
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
}
|
|
413
508
|
// Build payload
|
|
414
|
-
const
|
|
509
|
+
const payloadResolved = { ...resolved };
|
|
510
|
+
if (snapshotId && snapshotS3Key) {
|
|
511
|
+
// Use S3 snapshot mode
|
|
512
|
+
payloadResolved.database = {
|
|
513
|
+
...resolved.database,
|
|
514
|
+
url: undefined,
|
|
515
|
+
mode: 'snapshot',
|
|
516
|
+
snapshotId,
|
|
517
|
+
s3Key: snapshotS3Key,
|
|
518
|
+
};
|
|
519
|
+
}
|
|
520
|
+
else if (localDumpPath) {
|
|
521
|
+
// Fall back to SCP upload mode (no project or S3 upload failed)
|
|
522
|
+
payloadResolved.database = {
|
|
523
|
+
...resolved.database,
|
|
524
|
+
url: undefined,
|
|
525
|
+
mode: 'copy-local',
|
|
526
|
+
};
|
|
527
|
+
}
|
|
528
|
+
const payload = buildPayload(payloadResolved, config, publicKey, privateKeyContent, configLoader);
|
|
415
529
|
// Create genbox
|
|
416
530
|
const spinner = (0, ora_1.default)(`Creating Genbox '${name}'...`).start();
|
|
417
531
|
try {
|
|
@@ -431,12 +545,69 @@ exports.createCommand = new commander_1.Command('create')
|
|
|
431
545
|
}
|
|
432
546
|
// Display results
|
|
433
547
|
displayGenboxInfo(genbox, resolved);
|
|
548
|
+
// Handle database upload if we have a local dump
|
|
549
|
+
if (localDumpPath && genbox.ipAddress) {
|
|
550
|
+
console.log('');
|
|
551
|
+
console.log(chalk_1.default.blue('=== Database Restore ==='));
|
|
552
|
+
// Wait for SSH access
|
|
553
|
+
const sshSpinner = (0, ora_1.default)('Waiting for SSH access...').start();
|
|
554
|
+
const sshReady = await (0, db_utils_1.waitForSshAccess)(genbox.ipAddress, 300, (msg) => {
|
|
555
|
+
sshSpinner.text = msg;
|
|
556
|
+
});
|
|
557
|
+
if (!sshReady) {
|
|
558
|
+
sshSpinner.fail(chalk_1.default.yellow('SSH not ready - database will need manual restore'));
|
|
559
|
+
console.log(chalk_1.default.dim(' Run `genbox db sync` after the server is ready'));
|
|
560
|
+
(0, db_utils_1.cleanupDump)(localDumpPath);
|
|
561
|
+
}
|
|
562
|
+
else {
|
|
563
|
+
sshSpinner.succeed('SSH connected');
|
|
564
|
+
// Upload dump
|
|
565
|
+
const uploadSpinner = (0, ora_1.default)('Uploading database dump...').start();
|
|
566
|
+
const uploadResult = await (0, db_utils_1.uploadDumpToGenbox)(localDumpPath, genbox.ipAddress, {
|
|
567
|
+
onProgress: (msg) => uploadSpinner.text = msg,
|
|
568
|
+
});
|
|
569
|
+
if (!uploadResult.success) {
|
|
570
|
+
uploadSpinner.fail(chalk_1.default.yellow('Upload failed - database will need manual restore'));
|
|
571
|
+
console.log(chalk_1.default.dim(` ${uploadResult.error}`));
|
|
572
|
+
console.log(chalk_1.default.dim(' Run `genbox db sync` after the server is ready'));
|
|
573
|
+
(0, db_utils_1.cleanupDump)(localDumpPath);
|
|
574
|
+
}
|
|
575
|
+
else {
|
|
576
|
+
uploadSpinner.succeed('Dump uploaded');
|
|
577
|
+
// Trigger restore
|
|
578
|
+
const restoreSpinner = (0, ora_1.default)('Restoring database...').start();
|
|
579
|
+
const restoreResult = await (0, db_utils_1.runRemoteMongoRestore)(genbox.ipAddress, config.project.name, { onProgress: (msg) => restoreSpinner.text = msg });
|
|
580
|
+
if (!restoreResult.success) {
|
|
581
|
+
restoreSpinner.fail(chalk_1.default.yellow('Restore failed'));
|
|
582
|
+
console.log(chalk_1.default.dim(` ${restoreResult.error}`));
|
|
583
|
+
console.log(chalk_1.default.dim(' The dump file is on the server - you can restore manually'));
|
|
584
|
+
}
|
|
585
|
+
else {
|
|
586
|
+
restoreSpinner.succeed(chalk_1.default.green('Database restored successfully!'));
|
|
587
|
+
}
|
|
588
|
+
// Cleanup local dump
|
|
589
|
+
(0, db_utils_1.cleanupDump)(localDumpPath);
|
|
590
|
+
}
|
|
591
|
+
}
|
|
592
|
+
}
|
|
593
|
+
else if (localDumpPath) {
|
|
594
|
+
// No IP yet - inform user to run db sync later
|
|
595
|
+
console.log('');
|
|
596
|
+
console.log(chalk_1.default.yellow('Database dump created but IP not yet assigned.'));
|
|
597
|
+
console.log(chalk_1.default.dim(' Run `genbox db sync` once the server is ready to restore the database.'));
|
|
598
|
+
// Keep the dump file - user might want to use it
|
|
599
|
+
console.log(chalk_1.default.dim(` Dump file: ${localDumpPath}`));
|
|
600
|
+
}
|
|
434
601
|
// Inform user about server provisioning
|
|
435
602
|
console.log('');
|
|
436
603
|
console.log(chalk_1.default.dim('Server is provisioning. Run `genbox connect` once ready.'));
|
|
437
604
|
}
|
|
438
605
|
catch (error) {
|
|
439
606
|
spinner.fail(chalk_1.default.red(`Failed to create Genbox: ${error.message}`));
|
|
607
|
+
// Cleanup dump on failure
|
|
608
|
+
if (localDumpPath) {
|
|
609
|
+
(0, db_utils_1.cleanupDump)(localDumpPath);
|
|
610
|
+
}
|
|
440
611
|
if (error instanceof api_1.AuthenticationError) {
|
|
441
612
|
console.log('');
|
|
442
613
|
console.log(chalk_1.default.yellow(' Please authenticate first:'));
|
|
@@ -509,6 +680,16 @@ function displayResolvedConfig(resolved) {
|
|
|
509
680
|
if (hasBackendApps && resolved.database.mode !== 'none') {
|
|
510
681
|
console.log('');
|
|
511
682
|
console.log(` ${chalk_1.default.bold('Database:')} ${resolved.database.mode}${resolved.database.source ? ` (from ${resolved.database.source})` : ''}`);
|
|
683
|
+
// Show info for database copy mode
|
|
684
|
+
if (resolved.database.mode === 'copy' && resolved.database.source) {
|
|
685
|
+
if (resolved.database.url) {
|
|
686
|
+
console.log(chalk_1.default.dim(` Source URL: ${resolved.database.url.replace(/\/\/[^:]+:[^@]+@/, '//***:***@')}`));
|
|
687
|
+
console.log(chalk_1.default.dim(` Database will be dumped locally and uploaded to the genbox`));
|
|
688
|
+
}
|
|
689
|
+
else {
|
|
690
|
+
console.log(chalk_1.default.yellow(` ⚠ No database URL found in environments.${resolved.database.source}.urls.mongodb`));
|
|
691
|
+
}
|
|
692
|
+
}
|
|
512
693
|
}
|
|
513
694
|
if (Object.keys(resolved.env).length > 0) {
|
|
514
695
|
console.log('');
|
|
@@ -843,6 +1024,8 @@ function buildPayload(resolved, config, publicKey, privateKey, configLoader) {
|
|
|
843
1024
|
}
|
|
844
1025
|
// Get local git config for commits
|
|
845
1026
|
const gitConfig = getGitConfig();
|
|
1027
|
+
// Load project cache to get project ID
|
|
1028
|
+
const projectCache = loadProjectCache(process.cwd());
|
|
846
1029
|
return {
|
|
847
1030
|
name: resolved.name,
|
|
848
1031
|
size: resolved.size,
|
|
@@ -882,6 +1065,8 @@ function buildPayload(resolved, config, publicKey, privateKey, configLoader) {
|
|
|
882
1065
|
branch: resolved.repos[0]?.branch,
|
|
883
1066
|
newBranch: resolved.repos[0]?.newBranch,
|
|
884
1067
|
sourceBranch: resolved.repos[0]?.sourceBranch,
|
|
1068
|
+
// Project reference
|
|
1069
|
+
project: projectCache?._id,
|
|
885
1070
|
};
|
|
886
1071
|
}
|
|
887
1072
|
/**
|
package/dist/commands/help.js
CHANGED
|
@@ -14,6 +14,8 @@ exports.helpCommand = new commander_1.Command('help')
|
|
|
14
14
|
console.log(chalk_1.default.bold('Genbox CLI - AI-Powered Development Environments'));
|
|
15
15
|
console.log(chalk_1.default.dim('Create isolated cloud development environments on demand'));
|
|
16
16
|
console.log('');
|
|
17
|
+
console.log(chalk_1.default.green('TIP:') + ' Use ' + chalk_1.default.cyan('gb') + ' as a shorthand for ' + chalk_1.default.cyan('genbox') + ' (e.g., ' + chalk_1.default.dim('gb list') + ')');
|
|
18
|
+
console.log('');
|
|
17
19
|
if (command) {
|
|
18
20
|
showCommandHelp(command);
|
|
19
21
|
}
|
|
@@ -45,34 +47,34 @@ function showGeneralHelp() {
|
|
|
45
47
|
console.log(chalk_1.default.bold('QUICK START'));
|
|
46
48
|
console.log('');
|
|
47
49
|
console.log(' 1. Initialize your project:');
|
|
48
|
-
console.log(chalk_1.default.dim(' $
|
|
50
|
+
console.log(chalk_1.default.dim(' $ gb init'));
|
|
49
51
|
console.log('');
|
|
50
52
|
console.log(' 2. Create a development environment:');
|
|
51
|
-
console.log(chalk_1.default.dim(' $
|
|
53
|
+
console.log(chalk_1.default.dim(' $ gb create feature-auth'));
|
|
52
54
|
console.log('');
|
|
53
55
|
console.log(' 3. Check progress (setup takes a few minutes):');
|
|
54
|
-
console.log(chalk_1.default.dim(' $
|
|
56
|
+
console.log(chalk_1.default.dim(' $ gb status feature-auth'));
|
|
55
57
|
console.log('');
|
|
56
58
|
console.log(' 4. Connect and start coding:');
|
|
57
|
-
console.log(chalk_1.default.dim(' $
|
|
59
|
+
console.log(chalk_1.default.dim(' $ gb connect feature-auth'));
|
|
58
60
|
console.log('');
|
|
59
61
|
console.log(' 5. Clean up when done:');
|
|
60
|
-
console.log(chalk_1.default.dim(' $
|
|
62
|
+
console.log(chalk_1.default.dim(' $ gb destroy feature-auth'));
|
|
61
63
|
console.log('');
|
|
62
64
|
console.log(chalk_1.default.bold('WORKFLOW EXAMPLES'));
|
|
63
65
|
console.log('');
|
|
64
66
|
console.log(chalk_1.default.yellow(' Feature Development:'));
|
|
65
|
-
console.log(chalk_1.default.dim(' $
|
|
66
|
-
console.log(chalk_1.default.dim(' $
|
|
67
|
-
console.log(chalk_1.default.dim(' $
|
|
68
|
-
console.log(chalk_1.default.dim(' $
|
|
67
|
+
console.log(chalk_1.default.dim(' $ gb create feat-new-checkout'));
|
|
68
|
+
console.log(chalk_1.default.dim(' $ gb status feat-new-checkout # Wait for setup'));
|
|
69
|
+
console.log(chalk_1.default.dim(' $ gb urls feat-new-checkout # Get service URLs'));
|
|
70
|
+
console.log(chalk_1.default.dim(' $ gb connect feat-new-checkout # SSH to code'));
|
|
69
71
|
console.log('');
|
|
70
72
|
console.log(chalk_1.default.yellow(' Local Debugging with Port Forwarding:'));
|
|
71
|
-
console.log(chalk_1.default.dim(' $
|
|
73
|
+
console.log(chalk_1.default.dim(' $ gb forward feat-new-checkout'));
|
|
72
74
|
console.log(chalk_1.default.dim(' # Now access remote services at localhost:3050, etc.'));
|
|
73
75
|
console.log('');
|
|
74
76
|
console.log(chalk_1.default.yellow(' Database Sync:'));
|
|
75
|
-
console.log(chalk_1.default.dim(' $
|
|
77
|
+
console.log(chalk_1.default.dim(' $ gb restore-db feat-new-checkout'));
|
|
76
78
|
console.log(chalk_1.default.dim(' # Copies your local MongoDB to the Genbox'));
|
|
77
79
|
console.log('');
|
|
78
80
|
console.log(chalk_1.default.bold('CONFIGURATION FILES'));
|
|
@@ -80,12 +82,12 @@ function showGeneralHelp() {
|
|
|
80
82
|
console.log(` ${chalk_1.default.cyan('genbox.yaml')} Project configuration (services, ports, repos)`);
|
|
81
83
|
console.log(` ${chalk_1.default.cyan('.env.genbox')} Environment variables (secrets, tokens)`);
|
|
82
84
|
console.log('');
|
|
83
|
-
console.log(chalk_1.default.dim(' Run `
|
|
85
|
+
console.log(chalk_1.default.dim(' Run `gb init` to create these files interactively.'));
|
|
84
86
|
console.log('');
|
|
85
87
|
console.log(chalk_1.default.bold('MORE HELP'));
|
|
86
88
|
console.log('');
|
|
87
89
|
console.log(' Get help for a specific command:');
|
|
88
|
-
console.log(chalk_1.default.dim(' $
|
|
90
|
+
console.log(chalk_1.default.dim(' $ gb help <command>'));
|
|
89
91
|
console.log('');
|
|
90
92
|
console.log(' Documentation: https://genbox.dev/docs');
|
|
91
93
|
console.log(' Issues: https://github.com/goodpass-co/genbox/issues');
|
package/dist/commands/init.js
CHANGED
|
@@ -59,12 +59,63 @@ const fs_1 = __importDefault(require("fs"));
|
|
|
59
59
|
const yaml = __importStar(require("js-yaml"));
|
|
60
60
|
const process = __importStar(require("process"));
|
|
61
61
|
const scanner_1 = require("../scanner");
|
|
62
|
+
const api_1 = require("../api");
|
|
63
|
+
const config_store_1 = require("../config-store");
|
|
62
64
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
|
63
65
|
const { version } = require('../../package.json');
|
|
64
66
|
const CONFIG_FILENAME = 'genbox.yaml';
|
|
65
67
|
const ENV_FILENAME = '.env.genbox';
|
|
66
68
|
const DETECTED_DIR = '.genbox';
|
|
67
69
|
const DETECTED_FILENAME = 'detected.yaml';
|
|
70
|
+
const PROJECT_CACHE_FILENAME = 'project.json';
|
|
71
|
+
/**
|
|
72
|
+
* Save project ID and info to local cache
|
|
73
|
+
*/
|
|
74
|
+
function saveProjectCache(rootDir, cache) {
|
|
75
|
+
const genboxDir = path_1.default.join(rootDir, DETECTED_DIR);
|
|
76
|
+
if (!fs_1.default.existsSync(genboxDir)) {
|
|
77
|
+
fs_1.default.mkdirSync(genboxDir, { recursive: true });
|
|
78
|
+
}
|
|
79
|
+
const cachePath = path_1.default.join(genboxDir, PROJECT_CACHE_FILENAME);
|
|
80
|
+
fs_1.default.writeFileSync(cachePath, JSON.stringify(cache, null, 2));
|
|
81
|
+
}
|
|
82
|
+
/**
|
|
83
|
+
* Load project cache from local storage
|
|
84
|
+
*/
|
|
85
|
+
function loadProjectCache(rootDir) {
|
|
86
|
+
const cachePath = path_1.default.join(rootDir, DETECTED_DIR, PROJECT_CACHE_FILENAME);
|
|
87
|
+
if (!fs_1.default.existsSync(cachePath))
|
|
88
|
+
return null;
|
|
89
|
+
try {
|
|
90
|
+
const content = fs_1.default.readFileSync(cachePath, 'utf8');
|
|
91
|
+
return JSON.parse(content);
|
|
92
|
+
}
|
|
93
|
+
catch {
|
|
94
|
+
return null;
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
/**
|
|
98
|
+
* Convert GenboxConfig to ProjectSyncPayload
|
|
99
|
+
*/
|
|
100
|
+
function configToSyncPayload(config, envVars, privateKey) {
|
|
101
|
+
return {
|
|
102
|
+
name: config.project.name,
|
|
103
|
+
version: config.version,
|
|
104
|
+
project: config.project,
|
|
105
|
+
apps: config.apps,
|
|
106
|
+
provides: config.provides,
|
|
107
|
+
environments: config.environments,
|
|
108
|
+
profiles: config.profiles,
|
|
109
|
+
defaults: config.defaults,
|
|
110
|
+
repos: config.repos,
|
|
111
|
+
hooks: config.hooks,
|
|
112
|
+
scripts: config.scripts,
|
|
113
|
+
strict: config.strict,
|
|
114
|
+
git_auth: config.git_auth,
|
|
115
|
+
envVars,
|
|
116
|
+
privateKey,
|
|
117
|
+
};
|
|
118
|
+
}
|
|
68
119
|
// =============================================================================
|
|
69
120
|
// Scan Phase
|
|
70
121
|
// =============================================================================
|
|
@@ -935,15 +986,16 @@ function generateDefaultProfiles(detected, environments) {
|
|
|
935
986
|
};
|
|
936
987
|
}
|
|
937
988
|
}
|
|
938
|
-
// Full local development
|
|
939
|
-
if (frontendApps.length >
|
|
940
|
-
const [frontendName]
|
|
941
|
-
|
|
942
|
-
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
|
|
989
|
+
// Full local development - only create if multiple frontends exist (otherwise full-stack covers it)
|
|
990
|
+
if (frontendApps.length > 1 && backendApps.length > 0) {
|
|
991
|
+
for (const [frontendName] of frontendApps.slice(0, 2)) {
|
|
992
|
+
profiles[`${frontendName}-full`] = {
|
|
993
|
+
description: `${frontendName} + local backend` + (remoteEnv ? ' + DB copy' : ''),
|
|
994
|
+
size: 'large',
|
|
995
|
+
apps: [frontendName, ...backendApps.map(([n]) => n)],
|
|
996
|
+
database: remoteEnv ? { mode: 'copy', source: remoteEnv } : { mode: 'local' },
|
|
997
|
+
};
|
|
998
|
+
}
|
|
947
999
|
}
|
|
948
1000
|
// Backend development
|
|
949
1001
|
for (const [name] of backendApps.slice(0, 2)) {
|
|
@@ -954,11 +1006,11 @@ function generateDefaultProfiles(detected, environments) {
|
|
|
954
1006
|
database: { mode: 'local' },
|
|
955
1007
|
};
|
|
956
1008
|
}
|
|
957
|
-
// Full stack
|
|
1009
|
+
// Full stack - includes all runnable apps
|
|
958
1010
|
if (allRunnableApps.length > 1) {
|
|
959
1011
|
profiles['full-stack'] = {
|
|
960
1012
|
description: 'Everything local' + (remoteEnv ? ' with DB copy' : ''),
|
|
961
|
-
size: '
|
|
1013
|
+
size: 'large',
|
|
962
1014
|
apps: allRunnableApps.map(([n]) => n),
|
|
963
1015
|
database: remoteEnv ? { mode: 'copy', source: remoteEnv } : { mode: 'local' },
|
|
964
1016
|
};
|
|
@@ -1500,14 +1552,14 @@ exports.initCommand = new commander_1.Command('init')
|
|
|
1500
1552
|
let detected;
|
|
1501
1553
|
const existingDetected = loadDetectedConfig(rootDir);
|
|
1502
1554
|
if (existingDetected && !nonInteractive) {
|
|
1503
|
-
console.log(chalk_1.default.dim(`Found
|
|
1504
|
-
const
|
|
1505
|
-
message: '
|
|
1506
|
-
default:
|
|
1555
|
+
console.log(chalk_1.default.dim(`Found cached project scan from ${existingDetected._meta.generated_at}`));
|
|
1556
|
+
const rescan = await prompts.confirm({
|
|
1557
|
+
message: 'Rescan the project?',
|
|
1558
|
+
default: false,
|
|
1507
1559
|
});
|
|
1508
|
-
if (
|
|
1560
|
+
if (!rescan) {
|
|
1509
1561
|
detected = existingDetected;
|
|
1510
|
-
console.log(chalk_1.default.dim('Using
|
|
1562
|
+
console.log(chalk_1.default.dim('Using cached scan results.'));
|
|
1511
1563
|
}
|
|
1512
1564
|
else {
|
|
1513
1565
|
const spinner = (0, ora_1.default)('Scanning project...').start();
|
|
@@ -1554,6 +1606,25 @@ exports.initCommand = new commander_1.Command('init')
|
|
|
1554
1606
|
const envContent = generateEnvFile(settings.projectName, detected, { ...gitEnvVars, LOCAL_API_URL: 'http://localhost:3050' }, []);
|
|
1555
1607
|
fs_1.default.writeFileSync(path_1.default.join(rootDir, ENV_FILENAME), envContent);
|
|
1556
1608
|
console.log(chalk_1.default.green(`✔ Created ${ENV_FILENAME}`));
|
|
1609
|
+
// Sync project to API if logged in
|
|
1610
|
+
if (config_store_1.ConfigStore.getToken()) {
|
|
1611
|
+
try {
|
|
1612
|
+
const payload = configToSyncPayload(config, { ...gitEnvVars, LOCAL_API_URL: 'http://localhost:3050' });
|
|
1613
|
+
const result = await (0, api_1.syncProject)(payload);
|
|
1614
|
+
saveProjectCache(rootDir, {
|
|
1615
|
+
_id: result._id,
|
|
1616
|
+
name: result.name,
|
|
1617
|
+
lastSyncedAt: new Date().toISOString(),
|
|
1618
|
+
action: result.action,
|
|
1619
|
+
});
|
|
1620
|
+
console.log(chalk_1.default.green(`✔ Project synced to Genbox (${result.action})`));
|
|
1621
|
+
}
|
|
1622
|
+
catch (error) {
|
|
1623
|
+
if (!(0, api_1.isAuthError)(error)) {
|
|
1624
|
+
console.log(chalk_1.default.yellow(` Warning: Could not sync project - ${error.message}`));
|
|
1625
|
+
}
|
|
1626
|
+
}
|
|
1627
|
+
}
|
|
1557
1628
|
return;
|
|
1558
1629
|
}
|
|
1559
1630
|
// =========================================
|
|
@@ -1604,6 +1675,38 @@ exports.initCommand = new commander_1.Command('init')
|
|
|
1604
1675
|
}
|
|
1605
1676
|
}
|
|
1606
1677
|
// =========================================
|
|
1678
|
+
// PHASE 8: Sync Project to API
|
|
1679
|
+
// =========================================
|
|
1680
|
+
if (config_store_1.ConfigStore.getToken()) {
|
|
1681
|
+
const syncSpinner = (0, ora_1.default)('Syncing project to Genbox...').start();
|
|
1682
|
+
try {
|
|
1683
|
+
const payload = configToSyncPayload(config, allEnvVars);
|
|
1684
|
+
const result = await (0, api_1.syncProject)(payload);
|
|
1685
|
+
saveProjectCache(rootDir, {
|
|
1686
|
+
_id: result._id,
|
|
1687
|
+
name: result.name,
|
|
1688
|
+
lastSyncedAt: new Date().toISOString(),
|
|
1689
|
+
action: result.action,
|
|
1690
|
+
});
|
|
1691
|
+
syncSpinner.succeed(`Project synced to Genbox (${result.action})`);
|
|
1692
|
+
if (result.changes && result.changes.length > 0) {
|
|
1693
|
+
console.log(chalk_1.default.dim(` Changes: ${result.changes.join(', ')}`));
|
|
1694
|
+
}
|
|
1695
|
+
}
|
|
1696
|
+
catch (error) {
|
|
1697
|
+
if ((0, api_1.isAuthError)(error)) {
|
|
1698
|
+
syncSpinner.warn('Not logged in - project not synced');
|
|
1699
|
+
console.log(chalk_1.default.dim(' Run `genbox login` to sync projects to your account'));
|
|
1700
|
+
}
|
|
1701
|
+
else {
|
|
1702
|
+
syncSpinner.warn(`Could not sync project - ${error.message}`);
|
|
1703
|
+
}
|
|
1704
|
+
}
|
|
1705
|
+
}
|
|
1706
|
+
else {
|
|
1707
|
+
console.log(chalk_1.default.dim(' Project not synced (not logged in). Run `genbox login` to sync.'));
|
|
1708
|
+
}
|
|
1709
|
+
// =========================================
|
|
1607
1710
|
// Show Instructions
|
|
1608
1711
|
// =========================================
|
|
1609
1712
|
const hasBackend = Object.values(detected.apps).some(a => a.type === 'backend' || a.type === 'gateway');
|
package/dist/commands/push.js
CHANGED
|
@@ -45,6 +45,19 @@ const path = __importStar(require("path"));
|
|
|
45
45
|
const os = __importStar(require("os"));
|
|
46
46
|
const config_loader_1 = require("../config-loader");
|
|
47
47
|
const api_1 = require("../api");
|
|
48
|
+
const DETECTED_DIR = '.genbox';
|
|
49
|
+
const PROJECT_CACHE_FILENAME = 'project.json';
|
|
50
|
+
/**
|
|
51
|
+
* Save project ID and info to local cache
|
|
52
|
+
*/
|
|
53
|
+
function saveProjectCache(rootDir, cache) {
|
|
54
|
+
const genboxDir = path.join(rootDir, DETECTED_DIR);
|
|
55
|
+
if (!fs.existsSync(genboxDir)) {
|
|
56
|
+
fs.mkdirSync(genboxDir, { recursive: true });
|
|
57
|
+
}
|
|
58
|
+
const cachePath = path.join(genboxDir, PROJECT_CACHE_FILENAME);
|
|
59
|
+
fs.writeFileSync(cachePath, JSON.stringify(cache, null, 2));
|
|
60
|
+
}
|
|
48
61
|
function getPrivateSshKey() {
|
|
49
62
|
const home = os.homedir();
|
|
50
63
|
const potentialKeys = [
|
|
@@ -58,6 +71,28 @@ function getPrivateSshKey() {
|
|
|
58
71
|
}
|
|
59
72
|
return undefined;
|
|
60
73
|
}
|
|
74
|
+
/**
|
|
75
|
+
* Convert GenboxConfig to ProjectSyncPayload
|
|
76
|
+
*/
|
|
77
|
+
function configToSyncPayload(config, envVars, privateKey) {
|
|
78
|
+
return {
|
|
79
|
+
name: config.project.name,
|
|
80
|
+
version: config.version,
|
|
81
|
+
project: config.project,
|
|
82
|
+
apps: config.apps,
|
|
83
|
+
provides: config.provides,
|
|
84
|
+
environments: config.environments,
|
|
85
|
+
profiles: config.profiles,
|
|
86
|
+
defaults: config.defaults,
|
|
87
|
+
repos: config.repos,
|
|
88
|
+
hooks: config.hooks,
|
|
89
|
+
scripts: config.scripts,
|
|
90
|
+
strict: config.strict,
|
|
91
|
+
git_auth: config.git_auth,
|
|
92
|
+
envVars,
|
|
93
|
+
privateKey,
|
|
94
|
+
};
|
|
95
|
+
}
|
|
61
96
|
function buildLegacyPayload(config, envVars) {
|
|
62
97
|
return {
|
|
63
98
|
name: config.project_name,
|
|
@@ -232,18 +267,26 @@ exports.pushCommand = new commander_1.Command('push')
|
|
|
232
267
|
console.log(JSON.stringify(previewPayload, null, 2));
|
|
233
268
|
return;
|
|
234
269
|
}
|
|
235
|
-
// Upload to server
|
|
236
|
-
const spinner = (0, ora_1.default)('
|
|
270
|
+
// Upload to server using the new projects endpoint
|
|
271
|
+
const spinner = (0, ora_1.default)('Syncing project configuration...').start();
|
|
237
272
|
try {
|
|
238
|
-
const
|
|
239
|
-
|
|
240
|
-
|
|
273
|
+
const v4Config = config;
|
|
274
|
+
const syncPayload = configToSyncPayload(v4Config, envVars, options.includeKey ? payload.privateKey : undefined);
|
|
275
|
+
const result = await (0, api_1.syncProject)(syncPayload);
|
|
276
|
+
// Save project cache locally
|
|
277
|
+
saveProjectCache(process.cwd(), {
|
|
278
|
+
_id: result._id,
|
|
279
|
+
name: result.name,
|
|
280
|
+
lastSyncedAt: new Date().toISOString(),
|
|
281
|
+
action: result.action,
|
|
241
282
|
});
|
|
242
|
-
spinner.succeed(chalk_1.default.green(
|
|
283
|
+
spinner.succeed(chalk_1.default.green(`Project synced successfully (${result.action})!`));
|
|
284
|
+
if (result.changes && result.changes.length > 0) {
|
|
285
|
+
console.log(chalk_1.default.dim(` Changes: ${result.changes.join(', ')}`));
|
|
286
|
+
}
|
|
243
287
|
console.log('');
|
|
244
288
|
console.log(chalk_1.default.dim('Your configuration is now available in the cloud.'));
|
|
245
289
|
console.log(chalk_1.default.dim('Team members can run "genbox create <name>" to provision environments.'));
|
|
246
|
-
const v4Config = config;
|
|
247
290
|
const profileNames = Object.keys(v4Config.profiles || {});
|
|
248
291
|
if (profileNames.length > 0) {
|
|
249
292
|
console.log('');
|
|
@@ -263,11 +306,11 @@ exports.pushCommand = new commander_1.Command('push')
|
|
|
263
306
|
}
|
|
264
307
|
}
|
|
265
308
|
catch (error) {
|
|
266
|
-
spinner.fail(chalk_1.default.red('Failed to
|
|
309
|
+
spinner.fail(chalk_1.default.red('Failed to sync project configuration'));
|
|
267
310
|
console.error(chalk_1.default.red(`Error: ${error.message}`));
|
|
268
311
|
if (error instanceof api_1.AuthenticationError || error.message.includes('401') || error.message.includes('Unauthorized')) {
|
|
269
312
|
console.log('');
|
|
270
|
-
console.log(chalk_1.default.yellow('You need to be logged in to push
|
|
313
|
+
console.log(chalk_1.default.yellow('You need to be logged in to push project configuration.'));
|
|
271
314
|
console.log(chalk_1.default.dim('Run "genbox login" to authenticate.'));
|
|
272
315
|
}
|
|
273
316
|
}
|
package/dist/db-utils.js
ADDED
|
@@ -0,0 +1,416 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Database Utilities
|
|
4
|
+
*
|
|
5
|
+
* Handles local mongodump/mongorestore operations and
|
|
6
|
+
* uploading database dumps to genboxes.
|
|
7
|
+
*/
|
|
8
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
9
|
+
if (k2 === undefined) k2 = k;
|
|
10
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
11
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
12
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
13
|
+
}
|
|
14
|
+
Object.defineProperty(o, k2, desc);
|
|
15
|
+
}) : (function(o, m, k, k2) {
|
|
16
|
+
if (k2 === undefined) k2 = k;
|
|
17
|
+
o[k2] = m[k];
|
|
18
|
+
}));
|
|
19
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
20
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
21
|
+
}) : function(o, v) {
|
|
22
|
+
o["default"] = v;
|
|
23
|
+
});
|
|
24
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
25
|
+
var ownKeys = function(o) {
|
|
26
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
27
|
+
var ar = [];
|
|
28
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
29
|
+
return ar;
|
|
30
|
+
};
|
|
31
|
+
return ownKeys(o);
|
|
32
|
+
};
|
|
33
|
+
return function (mod) {
|
|
34
|
+
if (mod && mod.__esModule) return mod;
|
|
35
|
+
var result = {};
|
|
36
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
37
|
+
__setModuleDefault(result, mod);
|
|
38
|
+
return result;
|
|
39
|
+
};
|
|
40
|
+
})();
|
|
41
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
42
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
43
|
+
};
|
|
44
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
45
|
+
exports.isMongoDumpAvailable = isMongoDumpAvailable;
|
|
46
|
+
exports.getMongoDumpInstallInstructions = getMongoDumpInstallInstructions;
|
|
47
|
+
exports.runLocalMongoDump = runLocalMongoDump;
|
|
48
|
+
exports.uploadDumpToGenbox = uploadDumpToGenbox;
|
|
49
|
+
exports.runRemoteMongoRestore = runRemoteMongoRestore;
|
|
50
|
+
exports.cleanupDump = cleanupDump;
|
|
51
|
+
exports.formatBytes = formatBytes;
|
|
52
|
+
exports.waitForSshAccess = waitForSshAccess;
|
|
53
|
+
exports.uploadDumpToS3 = uploadDumpToS3;
|
|
54
|
+
exports.createAndUploadSnapshot = createAndUploadSnapshot;
|
|
55
|
+
const child_process_1 = require("child_process");
|
|
56
|
+
const fs = __importStar(require("fs"));
|
|
57
|
+
const path = __importStar(require("path"));
|
|
58
|
+
const os = __importStar(require("os"));
|
|
59
|
+
const chalk_1 = __importDefault(require("chalk"));
|
|
60
|
+
/**
 * Report whether the `mongodump` binary is usable on this machine by
 * invoking `mongodump --version`; any spawn failure or non-zero exit is
 * treated as "not available".
 *
 * @returns {boolean} true when `mongodump --version` ran successfully.
 */
function isMongoDumpAvailable() {
    let available = true;
    try {
        (0, child_process_1.execSync)('mongodump --version', { stdio: 'ignore' });
    }
    catch {
        available = false;
    }
    return available;
}
|
|
72
|
+
/**
 * Get instructions for installing mongodb-database-tools
 * (the package that provides `mongodump`/`mongorestore`).
 *
 * Branches on `process.platform`: Homebrew steps on macOS, apt repo setup on
 * Linux (Ubuntu/Debian commands only), and a download link everywhere else.
 * Commands are colorized with chalk; the returned string is multi-line and
 * meant to be printed verbatim.
 *
 * @returns {string} Human-readable, chalk-colored install instructions.
 */
function getMongoDumpInstallInstructions() {
    const platform = process.platform;
    if (platform === 'darwin') {
        return `Install MongoDB Database Tools:
  ${chalk_1.default.cyan('brew tap mongodb/brew')}
  ${chalk_1.default.cyan('brew install mongodb-database-tools')}`;
    }
    else if (platform === 'linux') {
        // NOTE(review): these apt commands pin the "jammy" (Ubuntu 22.04) repo and
        // MongoDB 7.0 — presumably fine for most users, but they will not match
        // other distros/releases; verify before relying on them.
        return `Install MongoDB Database Tools:
  ${chalk_1.default.cyan('# For Ubuntu/Debian:')}
  ${chalk_1.default.cyan('wget -qO- https://www.mongodb.org/static/pgp/server-7.0.asc | sudo gpg --dearmor -o /usr/share/keyrings/mongodb-server-7.0.gpg')}
  ${chalk_1.default.cyan('echo "deb [ signed-by=/usr/share/keyrings/mongodb-server-7.0.gpg ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/7.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-7.0.list')}
  ${chalk_1.default.cyan('sudo apt-get update && sudo apt-get install -y mongodb-database-tools')}`;
    }
    else {
        // Windows and any other platform: point at the official download page.
        return `Install MongoDB Database Tools from:
  ${chalk_1.default.cyan('https://www.mongodb.com/try/download/database-tools')}`;
    }
}
|
|
94
|
+
/**
 * Run mongodump locally against `sourceUrl`, producing a single gzipped
 * archive in a fresh temp directory (`<tmp>/genbox-dbdump-XXXX/dump.gz`).
 *
 * The returned promise ALWAYS resolves (never rejects) with a result object:
 *   - success: `{ success: true, dumpPath, sizeBytes }` — caller owns the temp
 *     dir and should eventually call `cleanupDump(dumpPath)`.
 *   - failure: `{ success: false, error }` — the temp dir has already been
 *     removed; `error` is a friendlier message parsed from mongodump stderr.
 *
 * @param {string} sourceUrl - MongoDB connection URI passed as `--uri`.
 * @param {{collections?: string[], excludeCollections?: string[], onProgress?: (msg: string) => void}} [options]
 * @returns {Promise<{success: boolean, dumpPath?: string, sizeBytes?: number, error?: string}>}
 */
async function runLocalMongoDump(sourceUrl, options = {}) {
    const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'genbox-dbdump-'));
    const dumpPath = path.join(tempDir, 'dump.gz');
    return new Promise((resolve) => {
        // --archive + --gzip produce one compressed stream file instead of a dump dir.
        const args = [
            `--uri=${sourceUrl}`,
            `--archive=${dumpPath}`,
            '--gzip',
        ];
        // Add collection filters
        // NOTE(review): this pushes one `--collection` flag per entry; mongodump
        // documents `--collection` as a single-collection option, so multiple
        // entries may not behave as an include-list — TODO confirm against the
        // mongodump version in use.
        if (options.collections && options.collections.length > 0) {
            for (const col of options.collections) {
                args.push(`--collection=${col}`);
            }
        }
        if (options.excludeCollections && options.excludeCollections.length > 0) {
            for (const col of options.excludeCollections) {
                args.push(`--excludeCollection=${col}`);
            }
        }
        options.onProgress?.('Starting mongodump...');
        const proc = (0, child_process_1.spawn)('mongodump', args, {
            stdio: ['ignore', 'pipe', 'pipe'],
        });
        let stderr = '';
        proc.stderr?.on('data', (data) => {
            const line = data.toString();
            stderr += line;
            // Parse progress from mongodump output (mongodump logs to stderr).
            if (line.includes('done dumping')) {
                options.onProgress?.(line.trim());
            }
        });
        proc.on('close', (code) => {
            // Require both a clean exit AND an archive on disk before reporting success.
            if (code === 0 && fs.existsSync(dumpPath)) {
                const stats = fs.statSync(dumpPath);
                resolve({
                    success: true,
                    dumpPath,
                    sizeBytes: stats.size,
                });
            }
            else {
                // Cleanup on failure
                fs.rmSync(tempDir, { recursive: true, force: true });
                // Parse common errors into actionable messages; order matters —
                // first matching pattern wins, raw stderr's first line is the fallback.
                let errorMessage = 'mongodump failed';
                if (stderr.includes('authentication failed')) {
                    errorMessage = 'Authentication failed - check your database credentials';
                }
                else if (stderr.includes('connection refused') || stderr.includes('no reachable servers')) {
                    errorMessage = 'Connection failed - check if the database URL is correct and accessible';
                }
                else if (stderr.includes('timed out')) {
                    errorMessage = 'Connection timed out - the database may not be accessible from your network';
                }
                else if (stderr) {
                    errorMessage = stderr.split('\n')[0] || errorMessage;
                }
                resolve({
                    success: false,
                    error: errorMessage,
                });
            }
        });
        // 'error' fires when mongodump cannot be spawned at all (e.g. not on PATH).
        proc.on('error', (err) => {
            fs.rmSync(tempDir, { recursive: true, force: true });
            resolve({
                success: false,
                error: `Failed to run mongodump: ${err.message}`,
            });
        });
    });
}
|
|
171
|
+
/**
 * Copy a local dump file onto a genbox over SCP, staging it at the fixed
 * remote path `/home/dev/.db-dump.gz` for a subsequent remote restore.
 *
 * The returned promise always resolves (never rejects) with a result object;
 * SCP stderr is mapped onto friendlier, actionable error messages.
 *
 * @param {string} dumpPath - Local path of the gzipped archive to upload.
 * @param {string} ipAddress - Address of the target genbox (SSH user `dev`).
 * @param {{onProgress?: (msg: string) => void}} [options]
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function uploadDumpToGenbox(dumpPath, ipAddress, options = {}) {
    return new Promise((resolve) => {
        const remotePath = '/home/dev/.db-dump.gz';
        options.onProgress?.(`Uploading dump to genbox (${formatBytes(fs.statSync(dumpPath).size)})...`);
        // Non-interactive SCP: skip host-key prompts and bail after 30s if unreachable.
        const scpArgs = [
            '-o', 'StrictHostKeyChecking=no',
            '-o', 'UserKnownHostsFile=/dev/null',
            '-o', 'ConnectTimeout=30',
            dumpPath,
            `dev@${ipAddress}:${remotePath}`,
        ];
        const proc = (0, child_process_1.spawn)('scp', scpArgs, {
            stdio: ['ignore', 'pipe', 'pipe'],
        });
        let stderr = '';
        proc.stderr?.on('data', (chunk) => {
            stderr += chunk.toString();
        });
        // 'error' fires when scp itself cannot be spawned (e.g. not installed).
        proc.on('error', (err) => {
            resolve({ success: false, error: `Failed to run scp: ${err.message}` });
        });
        proc.on('close', (exitCode) => {
            if (exitCode === 0) {
                resolve({ success: true });
                return;
            }
            // Map raw scp stderr onto a friendlier message; first match wins.
            let errorMessage = 'SCP upload failed';
            if (stderr.includes('Connection refused') || stderr.includes('Connection timed out')) {
                errorMessage = 'Cannot connect to genbox - it may still be provisioning';
            }
            else if (stderr.includes('Permission denied')) {
                errorMessage = 'Permission denied - SSH key may not be configured';
            }
            else if (stderr) {
                errorMessage = stderr.split('\n')[0] || errorMessage;
            }
            resolve({ success: false, error: errorMessage });
        });
    });
}
|
|
214
|
+
/**
 * Run mongorestore on genbox via SSH.
 *
 * Executes a small shell script on the remote host that (1) waits up to ~60s
 * for the MongoDB container to answer a ping, then (2) restores the dump
 * previously uploaded to `/home/dev/.db-dump.gz` (with `--drop`, replacing
 * existing collections) and deletes the dump afterwards.
 *
 * The returned promise always resolves (never rejects) with a result object.
 *
 * @param {string} ipAddress - Address of the target genbox (SSH user `dev`).
 * @param {string} dbName - Database name to restore into (interpolated into the remote command).
 * @param {{onProgress?: (msg: string) => void}} [options]
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function runRemoteMongoRestore(ipAddress, dbName, options = {}) {
    return new Promise((resolve) => {
        options.onProgress?.('Restoring database on genbox...');
        // The restore command - uses the uploaded dump file.
        // NOTE(review): the container name `goodpass-mongodb` looks project-specific
        // rather than generic genbox infrastructure — TODO confirm it exists on all
        // provisioned genboxes.
        const restoreCmd = `
    # Wait for MongoDB to be ready
    for i in {1..30}; do
      if docker exec goodpass-mongodb mongosh --quiet --eval "db.runCommand({ping:1})" 2>/dev/null; then
        break
      fi
      echo "Waiting for MongoDB... ($i/30)"
      sleep 2
    done

    # Restore the database
    if [ -f /home/dev/.db-dump.gz ]; then
      echo "Restoring database..."
      mongorestore --uri="mongodb://localhost:27017" --db="${dbName}" --archive=/home/dev/.db-dump.gz --gzip --drop
      rm -f /home/dev/.db-dump.gz
      echo "Database restored successfully"
    else
      echo "Error: Dump file not found"
      exit 1
    fi
  `;
        // Single-quote the script for the remote shell; embedded quotes are escaped
        // via the standard '\'' trick. ssh joins its trailing args with spaces, so
        // the remote side sees: bash -c '<script>'.
        const proc = (0, child_process_1.spawn)('ssh', [
            '-o', 'StrictHostKeyChecking=no',
            '-o', 'UserKnownHostsFile=/dev/null',
            '-o', 'ConnectTimeout=30',
            `dev@${ipAddress}`,
            'bash', '-c', `'${restoreCmd.replace(/'/g, "'\\''")}'`,
        ], {
            stdio: ['ignore', 'pipe', 'pipe'],
        });
        let stdout = '';
        let stderr = '';
        proc.stdout?.on('data', (data) => {
            const line = data.toString();
            stdout += line;
            // Surface only restore-related lines as progress updates.
            if (line.includes('Restoring') || line.includes('restored')) {
                options.onProgress?.(line.trim());
            }
        });
        proc.stderr?.on('data', (data) => {
            stderr += data.toString();
        });
        proc.on('close', (code) => {
            if (code === 0) {
                resolve({ success: true });
            }
            else {
                // Prefer stderr for the error, fall back to stdout, then a generic message.
                resolve({
                    success: false,
                    error: stderr || stdout || 'Restore failed',
                });
            }
        });
        // 'error' fires when ssh itself cannot be spawned.
        proc.on('error', (err) => {
            resolve({ success: false, error: `Failed to run ssh: ${err.message}` });
        });
    });
}
|
|
279
|
+
/**
 * Best-effort removal of a temporary dump file. When the file sits inside a
 * temp directory created by runLocalMongoDump (its path contains
 * 'genbox-dbdump-'), the whole directory is deleted; otherwise only the file
 * itself is removed. All errors are intentionally swallowed.
 *
 * @param {string} dumpPath - Path to the dump file to clean up.
 */
function cleanupDump(dumpPath) {
    try {
        const parent = path.dirname(dumpPath);
        if (!parent.includes('genbox-dbdump-')) {
            // Not one of our temp dirs — only remove the file itself.
            fs.rmSync(dumpPath, { force: true });
            return;
        }
        fs.rmSync(parent, { recursive: true, force: true });
    }
    catch {
        // Ignore cleanup errors — this is best-effort housekeeping.
    }
}
|
|
296
|
+
/**
 * Format a byte count as a human-readable string: plain bytes below 1 KiB,
 * otherwise one decimal place in KB / MB / GB (1024-based units).
 *
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} e.g. "512 B", "1.5 KB", "3.2 MB", "1.0 GB".
 */
function formatBytes(bytes) {
    const KB = 1024;
    const MB = 1024 * KB;
    const GB = 1024 * MB;
    if (bytes >= GB)
        return `${(bytes / GB).toFixed(1)} GB`;
    if (bytes >= MB)
        return `${(bytes / MB).toFixed(1)} MB`;
    if (bytes >= KB)
        return `${(bytes / KB).toFixed(1)} KB`;
    return `${bytes} B`;
}
|
|
308
|
+
/**
 * Poll a genbox over SSH until it accepts a connection or the time budget
 * runs out. Each probe is a short `ssh ... echo ok` with a 5s connect
 * timeout; failed probes report elapsed time via `onProgress` and retry
 * every 5 seconds.
 *
 * @param {string} ipAddress - Address of the genbox (SSH user `dev`).
 * @param {number} [maxWaitSeconds=300] - Total time budget before giving up.
 * @param {(msg: string) => void} [onProgress] - Called after each failed probe.
 * @returns {Promise<boolean>} true once SSH succeeds, false on timeout.
 */
async function waitForSshAccess(ipAddress, maxWaitSeconds = 300, onProgress) {
    const startTime = Date.now();
    const retryDelayMs = 5000; // 5 seconds between probes
    const budgetMs = maxWaitSeconds * 1000;
    const probe = `ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=5 dev@${ipAddress} echo ok`;
    while (Date.now() - startTime < budgetMs) {
        try {
            (0, child_process_1.execSync)(probe, { stdio: 'ignore' });
            return true;
        }
        catch {
            const elapsed = Math.floor((Date.now() - startTime) / 1000);
            onProgress?.(`Waiting for genbox to be ready (${elapsed}s)...`);
            await new Promise((r) => setTimeout(r, retryDelayMs));
        }
    }
    return false;
}
|
|
327
|
+
/**
 * Upload dump file to S3 using pre-signed URL.
 *
 * Reads the whole file into memory and PUTs it via global `fetch`. Never
 * throws: every failure path is converted into `{ success: false, error }`.
 *
 * NOTE(review): `readFileSync` buffers the entire dump in memory — fine for
 * modest dumps, but large snapshots may want a streamed body instead.
 * NOTE(review): explicitly setting `Content-Length` alongside a Buffer body
 * may interact with Node's fetch (undici) computing the length itself —
 * presumably consistent here since both come from the same file, but verify.
 *
 * @param {string} dumpPath - Local path of the gzipped archive.
 * @param {string} uploadUrl - Pre-signed S3 PUT URL.
 * @param {{onProgress?: (msg: string, pct?: number) => void}} [options]
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function uploadDumpToS3(dumpPath, uploadUrl, options = {}) {
    const fileSize = fs.statSync(dumpPath).size;
    options.onProgress?.(`Uploading snapshot to cloud (${formatBytes(fileSize)})...`, 0);
    try {
        // Read file as buffer
        const fileBuffer = fs.readFileSync(dumpPath);
        // Upload to S3 using pre-signed URL
        const response = await fetch(uploadUrl, {
            method: 'PUT',
            headers: {
                'Content-Type': 'application/gzip',
                'Content-Length': fileSize.toString(),
            },
            body: fileBuffer,
        });
        if (!response.ok) {
            // Include S3's response body — it usually carries the XML error reason.
            const errorText = await response.text();
            return {
                success: false,
                error: `Upload failed: ${response.status} ${response.statusText} - ${errorText}`,
            };
        }
        options.onProgress?.('Upload complete', 100);
        return { success: true };
    }
    catch (error) {
        // Network errors, DNS failures, aborted connections, fs errors, etc.
        return {
            success: false,
            error: `Upload failed: ${error.message}`,
        };
    }
}
|
|
362
|
+
/**
 * Create a snapshot record on the server and upload a local dump to it.
 *
 * Three-step flow against the snapshots API:
 *   1. `initiateSnapshotUpload` — registers the snapshot and returns a
 *      pre-signed S3 URL plus `snapshotId`/`s3Key`;
 *   2. `uploadDumpToS3` — PUTs the file to that URL;
 *   3. `completeSnapshotUpload` — finalizes the record.
 * Any failure after step 1 attempts to mark the snapshot failed via
 * `failSnapshotUpload` (best-effort). Never throws: all outcomes are
 * returned as a result object.
 *
 * @param {string} dumpPath - Local path of the gzipped archive.
 * @param {string} projectId - Project the snapshot belongs to.
 * @param {string} source - Origin label (also used in the default snapshot name).
 * @param {{name?: string, sourceUrl?: string, onProgress?: (msg: string, pct?: number) => void}} [options]
 * @returns {Promise<{success: boolean, snapshotId?: string, s3Key?: string, error?: string}>}
 */
async function createAndUploadSnapshot(dumpPath, projectId, source, options = {}) {
    // Import API functions dynamically to avoid circular deps
    const { initiateSnapshotUpload, completeSnapshotUpload, failSnapshotUpload, } = await Promise.resolve().then(() => __importStar(require('./api')));
    const fileSize = fs.statSync(dumpPath).size;
    // Default name: "<source>-YYYY-MM-DD" (date portion of the ISO timestamp).
    const snapshotName = options.name || `${source}-${new Date().toISOString().split('T')[0]}`;
    let snapshotId;
    try {
        // Step 1: Initiate upload to get pre-signed URL
        options.onProgress?.('Preparing snapshot upload...');
        const initResult = await initiateSnapshotUpload({
            name: snapshotName,
            projectId,
            source,
            sizeBytes: fileSize,
            contentType: 'application/gzip',
        });
        snapshotId = initResult.snapshotId;
        // Step 2: Upload to S3
        const uploadResult = await uploadDumpToS3(dumpPath, initResult.uploadUrl, {
            onProgress: options.onProgress,
        });
        if (!uploadResult.success) {
            // Mark as failed so the server doesn't keep a dangling "pending" snapshot.
            await failSnapshotUpload(snapshotId, uploadResult.error || 'Upload failed');
            return { success: false, error: uploadResult.error };
        }
        // Step 3: Complete the upload
        options.onProgress?.('Finalizing snapshot...');
        await completeSnapshotUpload(snapshotId, {
            sizeBytes: fileSize,
            compressedSizeBytes: fileSize, // Already gzipped
            sourceUrl: options.sourceUrl,
        });
        return {
            success: true,
            snapshotId,
            s3Key: initResult.s3Key,
        };
    }
    catch (error) {
        // Try to mark as failed if we have a snapshot ID (step 1 succeeded).
        if (snapshotId) {
            try {
                await failSnapshotUpload(snapshotId, error.message);
            }
            catch {
                // Ignore cleanup error
            }
        }
        return {
            success: false,
            error: error.message,
        };
    }
}
|
package/dist/index.js
CHANGED
|
@@ -8,7 +8,7 @@ const program = new commander_1.Command();
|
|
|
8
8
|
const { version } = require('../package.json');
|
|
9
9
|
program
|
|
10
10
|
.name('genbox')
|
|
11
|
-
.description('Genbox CLI - AI-Powered Development Environments')
|
|
11
|
+
.description('Genbox CLI - AI-Powered Development Environments\n\nTIP: Use "gb" as a shorthand (e.g., gb list, gb create)')
|
|
12
12
|
.version(version, '-v, --version', 'Output the current version');
|
|
13
13
|
const create_1 = require("./commands/create");
|
|
14
14
|
const list_1 = require("./commands/list");
|
package/dist/profile-resolver.js
CHANGED
|
@@ -321,11 +321,18 @@ class ProfileResolver {
|
|
|
321
321
|
* Resolve database mode
|
|
322
322
|
*/
|
|
323
323
|
async resolveDatabaseMode(config, options, profile) {
|
|
324
|
+
// Helper to get MongoDB URL for an environment
|
|
325
|
+
const getMongoUrl = (source) => {
|
|
326
|
+
const envConfig = config.environments?.[source];
|
|
327
|
+
return envConfig?.urls?.mongodb;
|
|
328
|
+
};
|
|
324
329
|
// CLI flag takes precedence
|
|
325
330
|
if (options.db) {
|
|
331
|
+
const source = options.dbSource || profile.database?.source || 'staging';
|
|
326
332
|
return {
|
|
327
333
|
mode: options.db,
|
|
328
|
-
source
|
|
334
|
+
source,
|
|
335
|
+
url: options.db === 'copy' ? getMongoUrl(source) : undefined,
|
|
329
336
|
};
|
|
330
337
|
}
|
|
331
338
|
// Profile setting
|
|
@@ -333,16 +340,16 @@ class ProfileResolver {
|
|
|
333
340
|
return {
|
|
334
341
|
mode: profile.database.mode,
|
|
335
342
|
source: profile.database.source,
|
|
343
|
+
url: profile.database.mode === 'copy' && profile.database.source
|
|
344
|
+
? getMongoUrl(profile.database.source)
|
|
345
|
+
: undefined,
|
|
336
346
|
};
|
|
337
347
|
}
|
|
338
|
-
// If
|
|
348
|
+
// If default_connection is set, use remote
|
|
339
349
|
const profileConnection = (0, config_loader_1.getProfileConnection)(profile);
|
|
340
350
|
if (profileConnection) {
|
|
341
351
|
const envConfig = config.environments?.[profileConnection];
|
|
342
|
-
|
|
343
|
-
const mongoUrl = true
|
|
344
|
-
? envConfig?.urls?.mongodb
|
|
345
|
-
: envConfig?.mongodb?.url;
|
|
352
|
+
const mongoUrl = envConfig?.urls?.mongodb;
|
|
346
353
|
return {
|
|
347
354
|
mode: 'remote',
|
|
348
355
|
source: profileConnection,
|
|
@@ -354,9 +361,12 @@ class ProfileResolver {
|
|
|
354
361
|
return await this.selectDatabaseModeInteractive(config);
|
|
355
362
|
}
|
|
356
363
|
// Default
|
|
364
|
+
const defaultMode = config.defaults?.database?.mode || 'local';
|
|
365
|
+
const defaultSource = config.defaults?.database?.source;
|
|
357
366
|
return {
|
|
358
|
-
mode:
|
|
359
|
-
source:
|
|
367
|
+
mode: defaultMode,
|
|
368
|
+
source: defaultSource,
|
|
369
|
+
url: defaultMode === 'copy' && defaultSource ? getMongoUrl(defaultSource) : undefined,
|
|
360
370
|
};
|
|
361
371
|
}
|
|
362
372
|
/**
|
|
@@ -395,15 +405,14 @@ class ProfileResolver {
|
|
|
395
405
|
}
|
|
396
406
|
else if (answer.startsWith('copy-')) {
|
|
397
407
|
const source = answer.replace('copy-', '');
|
|
398
|
-
|
|
408
|
+
const envConfig = config.environments?.[source];
|
|
409
|
+
const mongoUrl = envConfig?.urls?.mongodb;
|
|
410
|
+
return { mode: 'copy', source, url: mongoUrl };
|
|
399
411
|
}
|
|
400
412
|
else if (answer.startsWith('remote-')) {
|
|
401
413
|
const source = answer.replace('remote-', '');
|
|
402
414
|
const envConfig = config.environments?.[source];
|
|
403
|
-
|
|
404
|
-
const mongoUrl = true
|
|
405
|
-
? envConfig?.urls?.mongodb
|
|
406
|
-
: envConfig?.mongodb?.url;
|
|
415
|
+
const mongoUrl = envConfig?.urls?.mongodb;
|
|
407
416
|
return {
|
|
408
417
|
mode: 'remote',
|
|
409
418
|
source,
|
package/package.json
CHANGED
|
@@ -1,10 +1,11 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "genbox",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.50",
|
|
4
4
|
"description": "Genbox CLI - AI-Powered Development Environments",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"bin": {
|
|
7
|
-
"genbox": "./dist/index.js"
|
|
7
|
+
"genbox": "./dist/index.js",
|
|
8
|
+
"gb": "./dist/index.js"
|
|
8
9
|
},
|
|
9
10
|
"files": [
|
|
10
11
|
"dist/**/*"
|