genbox 1.0.50 → 1.0.52

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -439,69 +439,124 @@ exports.createCommand = new commander_1.Command('create')
  resolved.database.url &&
  !options.dbCopyRemote;
  if (needsLocalDbCopy) {
- // Check for user-provided dump file
- if (options.dbDump) {
- if (!fs.existsSync(options.dbDump)) {
- console.log(chalk_1.default.red(`Database dump file not found: ${options.dbDump}`));
- return;
- }
- localDumpPath = options.dbDump;
- console.log(chalk_1.default.dim(` Using provided dump file: ${options.dbDump}`));
- }
- else {
- // Need to run mongodump locally
- if (!(0, db_utils_1.isMongoDumpAvailable)()) {
- console.log(chalk_1.default.red('mongodump not found. Required for database copy.'));
- console.log('');
- console.log((0, db_utils_1.getMongoDumpInstallInstructions)());
- console.log('');
- console.log(chalk_1.default.dim('Alternatively:'));
- console.log(chalk_1.default.dim(' Use --db-dump <path> to provide an existing dump file'));
- console.log(chalk_1.default.dim(' • Use --db-copy-remote if your database is publicly accessible'));
- return;
+ const snapshotSource = (resolved.database.source === 'staging' ? 'staging' :
+ resolved.database.source === 'production' ? 'production' : 'local');
+ // Check for existing snapshot if project is synced
+ let useExistingSnapshot = false;
+ let existingSnapshot = null;
+ if (projectCache?._id && !options.dbDump) {
+ try {
+ existingSnapshot = await (0, api_1.getLatestSnapshot)(projectCache._id, snapshotSource);
+ if (existingSnapshot && existingSnapshot.status === 'ready') {
+ const snapshotAge = Date.now() - new Date(existingSnapshot.createdAt).getTime();
+ const hoursAgo = Math.floor(snapshotAge / (1000 * 60 * 60));
+ const timeAgoStr = hoursAgo < 1 ? 'less than an hour ago' :
+ hoursAgo === 1 ? '1 hour ago' :
+ hoursAgo < 24 ? `${hoursAgo} hours ago` :
+ `${Math.floor(hoursAgo / 24)} days ago`;
+ console.log('');
+ console.log(chalk_1.default.blue('=== Database Copy ==='));
+ console.log(chalk_1.default.dim(` Source: ${resolved.database.source}`));
+ if (!options.yes) {
+ const snapshotChoice = await prompts.select({
+ message: 'Database snapshot:',
+ choices: [
+ {
+ name: `Use existing snapshot (${timeAgoStr}, ${(0, db_utils_1.formatBytes)(existingSnapshot.sizeBytes)})`,
+ value: 'existing',
+ },
+ {
+ name: 'Create fresh snapshot (dump now)',
+ value: 'fresh',
+ },
+ ],
+ });
+ useExistingSnapshot = snapshotChoice === 'existing';
+ }
+ else {
+ // In non-interactive mode, use existing if less than 24 hours old
+ if (hoursAgo < 24) {
+ useExistingSnapshot = true;
+ console.log(chalk_1.default.dim(` Using existing snapshot from ${timeAgoStr}`));
+ }
+ }
+ if (useExistingSnapshot) {
+ snapshotId = existingSnapshot._id;
+ snapshotS3Key = existingSnapshot.s3Key;
+ console.log(chalk_1.default.green(` ✓ Using existing snapshot`));
+ }
+ }
  }
- const dbUrl = resolved.database.url;
- console.log('');
- console.log(chalk_1.default.blue('=== Database Copy ==='));
- console.log(chalk_1.default.dim(` Source: ${resolved.database.source}`));
- console.log(chalk_1.default.dim(` URL: ${dbUrl.replace(/\/\/[^:]+:[^@]+@/, '//***:***@')}`));
- const dumpSpinner = (0, ora_1.default)('Creating database dump...').start();
- const dumpResult = await (0, db_utils_1.runLocalMongoDump)(dbUrl, {
- onProgress: (msg) => dumpSpinner.text = msg,
- });
- if (!dumpResult.success) {
- dumpSpinner.fail(chalk_1.default.red('Database dump failed'));
- console.log(chalk_1.default.red(` ${dumpResult.error}`));
- console.log('');
- console.log(chalk_1.default.dim('You can:'));
- console.log(chalk_1.default.dim(' • Fix the database connection and try again'));
- console.log(chalk_1.default.dim(' • Use --db local to start with an empty database'));
- console.log(chalk_1.default.dim(' • Use --db-dump <path> to provide an existing dump'));
- return;
+ catch {
+ // Silently continue if we can't fetch snapshots
  }
- dumpSpinner.succeed(chalk_1.default.green(`Database dump created (${(0, db_utils_1.formatBytes)(dumpResult.sizeBytes || 0)})`));
- localDumpPath = dumpResult.dumpPath;
  }
- // Upload to S3 if we have a project ID
- if (localDumpPath && projectCache?._id) {
- const uploadSpinner = (0, ora_1.default)('Uploading database snapshot...').start();
- const snapshotSource = (resolved.database.source === 'staging' ? 'staging' :
- resolved.database.source === 'production' ? 'production' : 'local');
- const snapshotResult = await (0, db_utils_1.createAndUploadSnapshot)(localDumpPath, projectCache._id, snapshotSource, {
- sourceUrl: resolved.database.url?.replace(/\/\/[^:]+:[^@]+@/, '//***:***@'),
- onProgress: (msg) => uploadSpinner.text = msg,
- });
- if (snapshotResult.success) {
- uploadSpinner.succeed(chalk_1.default.green('Database snapshot uploaded'));
- snapshotId = snapshotResult.snapshotId;
- snapshotS3Key = snapshotResult.s3Key;
- // Cleanup local dump since it's now in S3
- (0, db_utils_1.cleanupDump)(localDumpPath);
- localDumpPath = undefined;
+ // If not using existing snapshot, create a new one
+ if (!useExistingSnapshot) {
+ // Check for user-provided dump file
+ if (options.dbDump) {
+ if (!fs.existsSync(options.dbDump)) {
+ console.log(chalk_1.default.red(`Database dump file not found: ${options.dbDump}`));
+ return;
+ }
+ localDumpPath = options.dbDump;
+ console.log(chalk_1.default.dim(` Using provided dump file: ${options.dbDump}`));
  }
  else {
- uploadSpinner.warn(chalk_1.default.yellow(`Snapshot upload failed: ${snapshotResult.error}`));
- console.log(chalk_1.default.dim(' Will fall back to direct SCP upload after genbox creation'));
+ // Need to run mongodump locally
+ if (!(0, db_utils_1.isMongoDumpAvailable)()) {
+ console.log(chalk_1.default.red('mongodump not found. Required for database copy.'));
+ console.log('');
+ console.log((0, db_utils_1.getMongoDumpInstallInstructions)());
+ console.log('');
+ console.log(chalk_1.default.dim('Alternatively:'));
+ console.log(chalk_1.default.dim(' • Use --db-dump <path> to provide an existing dump file'));
+ console.log(chalk_1.default.dim(' • Use --db-copy-remote if your database is publicly accessible'));
+ return;
+ }
+ const dbUrl = resolved.database.url;
+ if (!existingSnapshot) {
+ console.log('');
+ console.log(chalk_1.default.blue('=== Database Copy ==='));
+ console.log(chalk_1.default.dim(` Source: ${resolved.database.source}`));
+ }
+ console.log(chalk_1.default.dim(` URL: ${dbUrl.replace(/\/\/[^:]+:[^@]+@/, '//***:***@')}`));
+ const dumpSpinner = (0, ora_1.default)('Creating database dump...').start();
+ const dumpResult = await (0, db_utils_1.runLocalMongoDump)(dbUrl, {
+ onProgress: (msg) => dumpSpinner.text = msg,
+ });
+ if (!dumpResult.success) {
+ dumpSpinner.fail(chalk_1.default.red('Database dump failed'));
+ console.log(chalk_1.default.red(` ${dumpResult.error}`));
+ console.log('');
+ console.log(chalk_1.default.dim('You can:'));
+ console.log(chalk_1.default.dim(' • Fix the database connection and try again'));
+ console.log(chalk_1.default.dim(' • Use --db local to start with an empty database'));
+ console.log(chalk_1.default.dim(' • Use --db-dump <path> to provide an existing dump'));
+ return;
+ }
+ dumpSpinner.succeed(chalk_1.default.green(`Database dump created (${(0, db_utils_1.formatBytes)(dumpResult.sizeBytes || 0)})`));
+ localDumpPath = dumpResult.dumpPath;
+ }
+ // Upload to S3 if we have a project ID
+ if (localDumpPath && projectCache?._id) {
+ const uploadSpinner = (0, ora_1.default)('Uploading database snapshot...').start();
+ const snapshotResult = await (0, db_utils_1.createAndUploadSnapshot)(localDumpPath, projectCache._id, snapshotSource, {
+ sourceUrl: resolved.database.url?.replace(/\/\/[^:]+:[^@]+@/, '//***:***@'),
+ onProgress: (msg) => uploadSpinner.text = msg,
+ });
+ if (snapshotResult.success) {
+ uploadSpinner.succeed(chalk_1.default.green('Database snapshot uploaded'));
+ snapshotId = snapshotResult.snapshotId;
+ snapshotS3Key = snapshotResult.s3Key;
+ // Cleanup local dump since it's now in S3
+ (0, db_utils_1.cleanupDump)(localDumpPath);
+ localDumpPath = undefined;
+ }
+ else {
+ uploadSpinner.warn(chalk_1.default.yellow(`Snapshot upload failed: ${snapshotResult.error}`));
+ console.log(chalk_1.default.dim(' Will fall back to direct SCP upload after genbox creation'));
+ }
  }
  }
  }
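
Note on the hunk above: 1.0.52 adds snapshot reuse. Before running mongodump, the create command now asks the API for the latest uploaded snapshot and reuses it when it is ready and recent, otherwise it falls back to a fresh dump. A minimal sketch of the non-interactive reuse rule, using only the fields visible in the diff (the helper name is illustrative, not part of the package):

    // Sketch only: mirrors the `hoursAgo < 24` check applied when options.yes is set.
    // `snapshot` is assumed to carry { status, createdAt } as in the hunk above.
    function canAutoReuseSnapshot(snapshot, maxAgeHours = 24) {
        if (!snapshot || snapshot.status !== 'ready')
            return false;
        const ageMs = Date.now() - new Date(snapshot.createdAt).getTime();
        return ageMs < maxAgeHours * 60 * 60 * 1000;
    }

In interactive runs the CLI prompts instead of applying this cutoff, offering "Use existing snapshot" or "Create fresh snapshot (dump now)".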
@@ -875,9 +875,14 @@ async function setupProfiles(detected, environments) {
  value: name,
  }));
  const profilesToEdit = await prompts.checkbox({
- message: 'Select profiles to edit:',
+ message: 'Select profiles to edit (space to select, enter to confirm):',
  choices: profileChoices,
+ required: true,
  });
+ if (profilesToEdit.length === 0) {
+ console.log(chalk_1.default.dim('No profiles selected for editing.'));
+ return profiles;
+ }
  for (const profileName of profilesToEdit) {
  profiles[profileName] = await editSingleProfile(profileName, profiles[profileName], detected, environments);
  }
@@ -940,7 +945,7 @@ async function editSingleProfile(name, profile, detected, environments) {
  const dbMode = await prompts.select({
  message: 'Database mode:',
  choices: [
- { name: 'local (fresh local database)', value: 'local' },
+ { name: 'fresh (empty database)', value: 'local' },
  { name: 'copy (copy from remote)', value: 'copy' },
  { name: 'none (no database)', value: 'none' },
  ],
@@ -963,6 +968,20 @@ async function editSingleProfile(name, profile, detected, environments) {
  console.log(chalk_1.default.green(`✓ Updated profile: ${name}`));
  return result;
  }
+ /**
+ * Calculate recommended server size based on apps + infrastructure count
+ * 1-2: small, 3-5: medium, 6-9: large, 10+: xl
+ */
+ function calculateSize(appCount, infraCount = 0) {
+ const total = appCount + infraCount;
+ if (total <= 2)
+ return 'small';
+ if (total <= 5)
+ return 'medium';
+ if (total <= 9)
+ return 'large';
+ return 'xl';
+ }
  /**
  * Generate default profiles based on detected apps and environments
  */
@@ -971,16 +990,17 @@ function generateDefaultProfiles(detected, environments) {
  const frontendApps = Object.entries(detected.apps).filter(([, app]) => app.type === 'frontend');
  const backendApps = Object.entries(detected.apps).filter(([, app]) => app.type === 'backend' || app.type === 'gateway');
  const allRunnableApps = Object.entries(detected.apps).filter(([, app]) => app.type !== 'library');
+ const infraCount = detected.infrastructure?.length || 0;
  const envNames = Object.keys(environments || {});
  const remoteEnv = envNames.includes('staging') ? 'staging' :
  envNames.includes('production') ? 'production' :
  envNames[0];
- // Quick UI profiles for frontends
+ // Quick UI profiles for frontends (no local infra needed when connecting to remote)
  if (remoteEnv) {
  for (const [name] of frontendApps.slice(0, 2)) {
  profiles[`${name}-quick`] = {
  description: `${name} only, connected to ${remoteEnv}`,
- size: 'small',
+ size: calculateSize(1, 0),
  apps: [name],
  default_connection: remoteEnv,
  };
@@ -989,28 +1009,29 @@ function generateDefaultProfiles(detected, environments) {
  // Full local development - only create if multiple frontends exist (otherwise full-stack covers it)
  if (frontendApps.length > 1 && backendApps.length > 0) {
  for (const [frontendName] of frontendApps.slice(0, 2)) {
+ const appCount = 1 + backendApps.length;
  profiles[`${frontendName}-full`] = {
  description: `${frontendName} + local backend` + (remoteEnv ? ' + DB copy' : ''),
- size: 'large',
+ size: calculateSize(appCount, infraCount),
  apps: [frontendName, ...backendApps.map(([n]) => n)],
  database: remoteEnv ? { mode: 'copy', source: remoteEnv } : { mode: 'local' },
  };
  }
  }
- // Backend development
+ // Backend development (includes local infra like database)
  for (const [name] of backendApps.slice(0, 2)) {
  profiles[`${name}-dev`] = {
  description: `${name} with local infrastructure`,
- size: 'medium',
+ size: calculateSize(1, infraCount),
  apps: [name],
  database: { mode: 'local' },
  };
  }
- // Full stack - includes all runnable apps
+ // Full stack - includes all runnable apps + infra
  if (allRunnableApps.length > 1) {
  profiles['full-stack'] = {
  description: 'Everything local' + (remoteEnv ? ' with DB copy' : ''),
- size: 'large',
+ size: calculateSize(allRunnableApps.length, infraCount),
  apps: allRunnableApps.map(([n]) => n),
  database: remoteEnv ? { mode: 'copy', source: remoteEnv } : { mode: 'local' },
  };
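
Note on the sizing changes above: the hard-coded profile sizes ('small'/'medium'/'large') are replaced by calculateSize(appCount, infraCount), which buckets the combined app and infrastructure count. A few illustrative calls with hypothetical counts, just to make the thresholds concrete:

    // Thresholds from the calculateSize helper added in this release:
    // total 1-2 -> 'small', 3-5 -> 'medium', 6-9 -> 'large', 10+ -> 'xl'
    calculateSize(1, 0); // 'small'  : a single-frontend "-quick" profile
    calculateSize(1, 2); // 'medium' : one backend app plus two infrastructure services
    calculateSize(4, 3); // 'large'  : a fuller local stack
    calculateSize(8, 3); // 'xl'     : everything local in a larger monorepo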
@@ -255,7 +255,7 @@ exports.profilesCommand
  message: 'Database mode:',
  choices: [
  { name: 'None', value: 'none' },
- { name: 'Local empty', value: 'local' },
+ { name: 'Fresh (empty)', value: 'local' },
  { name: 'Copy from staging', value: 'copy-staging' },
  { name: 'Connect to staging', value: 'remote-staging' },
  ],
@@ -376,13 +376,13 @@ class ProfileResolver {
  console.log(chalk_1.default.cyan('\n🗄️ Database Configuration:\n'));
  const modeChoices = [
  { name: 'None (no database)', value: 'none' },
- { name: 'Local empty database', value: 'local' },
- { name: 'Copy from staging (snapshot)', value: 'copy-staging' },
+ { name: 'Fresh (empty database)', value: 'local' },
+ { name: 'Copy from staging', value: 'copy-staging' },
  ];
  // Add production copy if available
  if (config.environments?.production) {
  modeChoices.push({
- name: 'Copy from production (snapshot)',
+ name: 'Copy from production',
  value: 'copy-production',
  });
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "genbox",
- "version": "1.0.50",
+ "version": "1.0.52",
  "description": "Genbox CLI - AI-Powered Development Environments",
  "main": "dist/index.js",
  "bin": {