@vibecodemax/cli 0.1.5 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -1,16 +1,15 @@
1
1
  #!/usr/bin/env node
2
2
  import * as fs from "node:fs";
3
- import * as os from "node:os";
4
3
  import * as path from "node:path";
5
4
  import { spawnSync } from "node:child_process";
6
- import { checkS3Context, setupS3Storage, smokeTestS3 } from "./storageS3.js";
5
+ import { checkS3Context, setupS3Storage } from "./storageS3.js";
7
6
  const SETUP_STATE_PATH = path.join(".vibecodemax", "setup-state.json");
7
+ const SETUP_CONFIG_PATH = path.join(".vibecodemax", "setup-config.json");
8
8
  const MANAGEMENT_API_BASE = "https://api.supabase.com";
9
9
  const DEFAULT_LOCALHOST_URL = "http://localhost:3000";
10
10
  const STORAGE_PUBLIC_BUCKET = "public-assets";
11
11
  const STORAGE_PRIVATE_BUCKET = "private-uploads";
12
12
  const STORAGE_REQUIRED_FILE_SIZE_LIMIT = 10 * 1024 * 1024;
13
- const STORAGE_HEALTHCHECK_PREFIX = "_vibecodemax/healthcheck/default";
14
13
  const STORAGE_MIME_TYPES_BY_CATEGORY = {
15
14
  images: ["image/jpeg", "image/png", "image/webp", "image/gif"],
16
15
  documents: [
@@ -24,16 +23,6 @@ const STORAGE_MIME_TYPES_BY_CATEGORY = {
24
23
  video: ["video/mp4", "video/webm", "video/quicktime"],
25
24
  };
26
25
  const STORAGE_DEFAULT_MIME_CATEGORIES = ["images"];
27
- const STORAGE_REQUIRED_POLICY_NAMES = [
28
- "public_assets_select_own",
29
- "public_assets_insert_own",
30
- "public_assets_update_own",
31
- "public_assets_delete_own",
32
- "private_uploads_select_own",
33
- "private_uploads_insert_own",
34
- "private_uploads_update_own",
35
- "private_uploads_delete_own",
36
- ];
37
26
  function printJson(value) {
38
27
  process.stdout.write(`${JSON.stringify(value)}\n`);
39
28
  }
@@ -41,6 +30,19 @@ function fail(code, message, exitCode = 1, extra = {}) {
41
30
  printJson({ ok: false, code, message, ...extra });
42
31
  process.exit(exitCode);
43
32
  }
33
+ /** Extract a human-readable error message from a Supabase/provider JSON response. */
34
+ function extractErrorMessage(json) {
35
+ if (!json || typeof json !== "object")
36
+ return null;
37
+ const obj = json;
38
+ if (isNonEmptyString(obj.error))
39
+ return obj.error;
40
+ if (isNonEmptyString(obj.message))
41
+ return obj.message;
42
+ if (isNonEmptyString(obj.msg))
43
+ return obj.msg;
44
+ return null;
45
+ }
44
46
  function parseArgs(argv) {
45
47
  const [command, ...rest] = argv;
46
48
  let subcommand;
@@ -102,6 +104,18 @@ function loadLocalEnv(cwd = process.cwd()) {
102
104
  },
103
105
  };
104
106
  }
107
+ function readSetupConfig(cwd = process.cwd()) {
108
+ const raw = readFileIfExists(path.join(cwd, SETUP_CONFIG_PATH)).trim();
109
+ if (!raw)
110
+ return {};
111
+ try {
112
+ const parsed = JSON.parse(raw);
113
+ return parsed && typeof parsed === "object" && !Array.isArray(parsed) ? parsed : {};
114
+ }
115
+ catch {
116
+ return {};
117
+ }
118
+ }
105
119
  function isNonEmptyString(value) {
106
120
  return typeof value === "string" && value.trim().length > 0;
107
121
  }
@@ -630,6 +644,58 @@ function runShellCommand(command, cwd) {
630
644
  }
631
645
  return typeof result.stdout === "string" ? result.stdout.trim() : "";
632
646
  }
647
+ function runLinkedSupabaseCommand(command, cwd, failureCode, fallbackMessage) {
648
+ const result = spawnSync(process.env.SHELL || "/bin/zsh", ["-lc", command], {
649
+ cwd,
650
+ env: process.env,
651
+ encoding: "utf8",
652
+ });
653
+ if (result.status !== 0) {
654
+ const message = [result.stderr, result.stdout]
655
+ .map((value) => (typeof value === "string" ? value.trim() : ""))
656
+ .find(Boolean) || fallbackMessage;
657
+ fail(failureCode, message);
658
+ }
659
+ return typeof result.stdout === "string" ? result.stdout.trim() : "";
660
+ }
661
+ function isMigrationOrderingConflict(message) {
662
+ return /Found local migration files to be inserted before the last migration on remote database\./i.test(message);
663
+ }
664
+ function runStorageMigrationPush(commandBase, cwd) {
665
+ const firstResult = spawnSync(process.env.SHELL || "/bin/zsh", ["-lc", commandBase], {
666
+ cwd,
667
+ env: process.env,
668
+ encoding: "utf8",
669
+ });
670
+ if (firstResult.status === 0) {
671
+ return {
672
+ stdout: typeof firstResult.stdout === "string" ? firstResult.stdout.trim() : "",
673
+ includeAll: false,
674
+ };
675
+ }
676
+ const firstMessage = [firstResult.stderr, firstResult.stdout]
677
+ .map((value) => (typeof value === "string" ? value.trim() : ""))
678
+ .find(Boolean) || "Failed to apply storage migrations to the linked Supabase project.";
679
+ if (!isMigrationOrderingConflict(firstMessage)) {
680
+ fail("STORAGE_POLICY_MIGRATION_APPLY_FAILED", firstMessage);
681
+ }
682
+ const retryCommand = `${commandBase} --include-all`;
683
+ const retryResult = spawnSync(process.env.SHELL || "/bin/zsh", ["-lc", retryCommand], {
684
+ cwd,
685
+ env: process.env,
686
+ encoding: "utf8",
687
+ });
688
+ if (retryResult.status !== 0) {
689
+ const retryMessage = [retryResult.stderr, retryResult.stdout]
690
+ .map((value) => (typeof value === "string" ? value.trim() : ""))
691
+ .find(Boolean) || "Failed to apply storage migrations to the linked Supabase project.";
692
+ fail("STORAGE_POLICY_MIGRATION_APPLY_FAILED", retryMessage);
693
+ }
694
+ return {
695
+ stdout: typeof retryResult.stdout === "string" ? retryResult.stdout.trim() : "",
696
+ includeAll: true,
697
+ };
698
+ }
633
699
  function normalizeStorageMimeCategories(rawValue) {
634
700
  const requested = rawValue
635
701
  .split(",")
@@ -638,11 +704,18 @@ function normalizeStorageMimeCategories(rawValue) {
638
704
  const selected = requested.filter((value) => Object.prototype.hasOwnProperty.call(STORAGE_MIME_TYPES_BY_CATEGORY, value));
639
705
  return selected.length > 0 ? [...new Set(selected)] : [...STORAGE_DEFAULT_MIME_CATEGORIES];
640
706
  }
641
- function resolveStorageMimeCategories(flags) {
642
- const raw = readStringFlag(flags, "mime-categories");
643
- if (!raw)
707
+ function resolveStorageMimeCategories(cwd) {
708
+ const setupConfig = readSetupConfig(cwd);
709
+ const storageConfig = setupConfig.storage && typeof setupConfig.storage === "object"
710
+ ? setupConfig.storage
711
+ : {};
712
+ const rawCategories = Array.isArray(storageConfig.mimeCategories)
713
+ ? storageConfig.mimeCategories.filter((value) => isNonEmptyString(value))
714
+ : [];
715
+ if (rawCategories.length === 0) {
644
716
  return [...STORAGE_DEFAULT_MIME_CATEGORIES];
645
- return normalizeStorageMimeCategories(raw);
717
+ }
718
+ return normalizeStorageMimeCategories(rawCategories.join(","));
646
719
  }
647
720
  function expandStorageMimeCategories(categories) {
648
721
  const mimeTypes = [];
@@ -668,28 +741,47 @@ function parseStorageMigrationFilename(filename) {
668
741
  return null;
669
742
  }
670
743
  function discoverStorageMigrationFiles(cwd) {
671
- const migrationsRoot = path.join(cwd, "supabase", "migrations");
672
- if (!fs.existsSync(migrationsRoot)) {
744
+ const storageMigrationsRoot = path.join(cwd, "supabase", "storage-migrations");
745
+ const activeMigrationsRoot = path.join(cwd, "supabase", "migrations");
746
+ if (!fs.existsSync(activeMigrationsRoot)) {
673
747
  fail("MISSING_STORAGE_MIGRATIONS", "supabase/migrations is missing. Run bootstrap.base first so the local Supabase project is initialized.");
674
748
  }
675
- const selected = fs.readdirSync(migrationsRoot)
749
+ if (!fs.existsSync(storageMigrationsRoot)) {
750
+ fail("MISSING_STORAGE_MIGRATIONS", "supabase/storage-migrations is missing. Regenerate the project so storage-owned migrations are available locally.");
751
+ }
752
+ const policyFiles = fs.readdirSync(storageMigrationsRoot)
676
753
  .map((filename) => parseStorageMigrationFilename(filename))
677
- .filter((entry) => entry !== null && entry.scope.startsWith("storage_"))
754
+ .filter((entry) => entry !== null && /(^|_)storage_policies$/.test(entry.scope))
678
755
  .sort((a, b) => a.filename.localeCompare(b.filename));
679
- if (selected.length === 0) {
680
- fail("MISSING_STORAGE_MIGRATIONS", "No storage migration files were found. Add a migration matching YYYYMMDDHHMMSS_storage_*.sql in supabase/migrations.");
681
- }
682
- const policyFiles = selected
683
- .filter((entry) => /(^|_)storage_policies\.sql$/.test(entry.filename))
684
- .map((entry) => entry.filename);
685
756
  if (policyFiles.length === 0) {
686
- fail("MISSING_STORAGE_POLICY_MIGRATION", "No storage policy migration file was found. Add a migration ending in storage_policies.sql in supabase/migrations.");
757
+ fail("MISSING_STORAGE_POLICY_MIGRATION", "No storage policy migration file was found in supabase/storage-migrations. This directory is for storage policy SQL only. Buckets are created by storage bootstrap through the Supabase Storage API. Regenerate the project or add a file ending in _storage_policies.sql.");
687
758
  }
688
759
  return {
689
- files: selected.map((entry) => entry.filename),
690
- policyFiles,
760
+ files: policyFiles.map((entry) => entry.filename),
761
+ policyFiles: policyFiles.map((entry) => entry.filename),
691
762
  };
692
763
  }
764
+ function materializeStorageMigrationFiles(cwd, files) {
765
+ const storageMigrationsRoot = path.join(cwd, "supabase", "storage-migrations");
766
+ const activeMigrationsRoot = path.join(cwd, "supabase", "migrations");
767
+ const preparedFiles = [];
768
+ for (const filename of files) {
769
+ const sourcePath = path.join(storageMigrationsRoot, filename);
770
+ const targetPath = path.join(activeMigrationsRoot, filename);
771
+ const sourceSql = fs.readFileSync(sourcePath, "utf8");
772
+ if (fs.existsSync(targetPath)) {
773
+ const existingSql = fs.readFileSync(targetPath, "utf8");
774
+ if (existingSql !== sourceSql) {
775
- fail("STORAGE_MIGRATION_CONFLICT", `Active migration ${filename} does not match supabase/storage-migrations/${filename}.`);
776
+ }
777
+ }
778
+ else {
779
+ fs.copyFileSync(sourcePath, targetPath);
780
+ }
781
+ preparedFiles.push(filename);
782
+ }
783
+ return preparedFiles;
784
+ }
693
785
  async function storageRequest(params) {
694
786
  const response = await fetch(`${params.supabaseUrl}${params.endpoint}`, {
695
787
  method: params.method,
@@ -708,13 +800,7 @@ async function storageRequest(params) {
708
800
  json = null;
709
801
  }
710
802
  if (!response.ok) {
711
- const message = isNonEmptyString(json?.error)
712
- ? json.error
713
- : isNonEmptyString(json?.message)
714
- ? json.message
715
- : isNonEmptyString(json?.msg)
716
- ? json.msg
717
- : `Supabase returned ${response.status}`;
803
+ const message = extractErrorMessage(json) || `Supabase returned ${response.status}`;
718
804
  fail("SUPABASE_STORAGE_ERROR", message, 1, { status: response.status });
719
805
  }
720
806
  return json;
@@ -764,14 +850,14 @@ async function readStorageBucket(supabaseUrl, serviceRoleKey, bucketId) {
764
850
  }
765
851
  if (response.status === 404)
766
852
  return null;
853
+ // Supabase may return 400 (not 404) for non-existent buckets in some API versions
854
+ if (response.status === 400) {
855
+ const extracted = extractErrorMessage(json);
856
+ if (extracted && /bucket\s*not\s*found/i.test(extracted))
857
+ return null;
858
+ }
767
859
  if (!response.ok) {
768
- const message = isNonEmptyString(json?.error)
769
- ? json.error
770
- : isNonEmptyString(json?.message)
771
- ? json.message
772
- : isNonEmptyString(json?.msg)
773
- ? json.msg
774
- : `Supabase returned ${response.status}`;
860
+ const message = extractErrorMessage(json) || `Supabase returned ${response.status}`;
775
861
  fail("SUPABASE_STORAGE_ERROR", message, 1, { status: response.status });
776
862
  }
777
863
  return json && typeof json === "object" ? json : {};
@@ -822,16 +908,6 @@ async function ensureStorageBucket(supabaseUrl, serviceRoleKey, bucketId, isPubl
822
908
  verified: true,
823
909
  };
824
910
  }
825
- function verifyRequiredStoragePolicies(dumpContent) {
826
- const normalized = dumpContent.toLowerCase();
827
- const missing = STORAGE_REQUIRED_POLICY_NAMES.filter((policyName) => !normalized.includes(`create policy \"${policyName}\"`));
828
- if (missing.length > 0) {
829
- fail("STORAGE_POLICY_VERIFY_FAILED", `Missing required storage policies after local Supabase CLI apply: ${missing.join(", ")}.`);
830
- }
831
- }
832
- function buildStorageHealthcheckObjectPath(runId) {
833
- return `${STORAGE_HEALTHCHECK_PREFIX}/${runId}/upload.txt`;
834
- }
835
911
  async function checkSupabaseContext() {
836
912
  const { values, envLocalPath } = loadLocalEnv();
837
913
  const missingKeys = [];
@@ -867,22 +943,17 @@ async function setupSupabaseStorage(flags) {
867
943
  const serviceRoleKey = requireServiceRoleKey(values);
868
944
  const dependencyManager = detectDependencyManager(cwd, flags);
869
945
  const supabaseRunner = getSupabaseRunner(dependencyManager);
870
- const mimeCategories = resolveStorageMimeCategories(flags);
946
+ const mimeCategories = resolveStorageMimeCategories(cwd);
871
947
  const allowedMimeTypes = expandStorageMimeCategories(mimeCategories);
948
+ const migrations = discoverStorageMigrationFiles(cwd);
949
+ const preparedMigrationFiles = materializeStorageMigrationFiles(cwd, migrations.files);
950
+ const migrationPush = runStorageMigrationPush(`${supabaseRunner} db push --linked`, cwd);
872
951
  const publicBucket = await ensureStorageBucket(supabaseUrl, serviceRoleKey, STORAGE_PUBLIC_BUCKET, true, allowedMimeTypes);
873
952
  const privateBucket = await ensureStorageBucket(supabaseUrl, serviceRoleKey, STORAGE_PRIVATE_BUCKET, false, allowedMimeTypes);
874
953
  mergeEnvFile(envLocalPath, {
875
954
  SUPABASE_PUBLIC_BUCKET: STORAGE_PUBLIC_BUCKET,
876
955
  SUPABASE_PRIVATE_BUCKET: STORAGE_PRIVATE_BUCKET,
877
956
  });
878
- const migrations = discoverStorageMigrationFiles(cwd);
879
- runShellCommand(`${supabaseRunner} db push --linked`, cwd);
880
- const dumpDir = fs.mkdtempSync(path.join(os.tmpdir(), "vibecodemax-storage-dump-"));
881
- const dumpPath = path.join(dumpDir, "storage.sql");
882
- runShellCommand(`${supabaseRunner} db dump --linked --schema storage -f ${shellQuote(dumpPath)} >/dev/null`, cwd);
883
- const dumpContent = readFileIfExists(dumpPath);
884
- verifyRequiredStoragePolicies(dumpContent);
885
- fs.rmSync(dumpDir, { recursive: true, force: true });
886
957
  printJson({
887
958
  ok: true,
888
959
  command: "storage setup-supabase",
@@ -895,56 +966,13 @@ async function setupSupabaseStorage(flags) {
895
966
  fileSizeLimit: STORAGE_REQUIRED_FILE_SIZE_LIMIT,
896
967
  migrationFiles: migrations.files,
897
968
  policyFiles: migrations.policyFiles,
969
+ preparedMigrationFiles,
898
970
  policyMigrationsDiscovered: true,
899
971
  policyMigrationsApplied: true,
900
- policiesVerified: true,
972
+ migrationPushIncludeAll: migrationPush.includeAll,
901
973
  envWritten: ["SUPABASE_PUBLIC_BUCKET", "SUPABASE_PRIVATE_BUCKET"],
902
974
  });
903
975
  }
904
- async function smokeTestSupabase() {
905
- const { values } = loadLocalEnv();
906
- const supabaseUrl = requireSupabaseUrl(values);
907
- const serviceRoleKey = requireServiceRoleKey(values);
908
- const runId = `run_${Date.now()}`;
909
- const objectPath = buildStorageHealthcheckObjectPath(runId);
910
- const prefix = `${STORAGE_HEALTHCHECK_PREFIX}/${runId}/`;
911
- await storageRequest({
912
- supabaseUrl,
913
- serviceRoleKey,
914
- method: "POST",
915
- endpoint: `/storage/v1/object/${STORAGE_PUBLIC_BUCKET}/${objectPath}`,
916
- rawBody: "healthcheck probe",
917
- contentType: "text/plain",
918
- });
919
- const listResult = await storageRequest({
920
- supabaseUrl,
921
- serviceRoleKey,
922
- method: "POST",
923
- endpoint: `/storage/v1/object/list/${STORAGE_PUBLIC_BUCKET}`,
924
- body: { prefix },
925
- contentType: "application/json",
926
- });
927
- await storageRequest({
928
- supabaseUrl,
929
- serviceRoleKey,
930
- method: "DELETE",
931
- endpoint: `/storage/v1/object/${STORAGE_PUBLIC_BUCKET}`,
932
- body: { prefixes: [objectPath] },
933
- contentType: "application/json",
934
- });
935
- printJson({
936
- ok: true,
937
- command: "storage smoke-test-supabase",
938
- runId,
939
- bucketId: STORAGE_PUBLIC_BUCKET,
940
- objectPath,
941
- prefix,
942
- upload: true,
943
- list: true,
944
- delete: true,
945
- listedItems: Array.isArray(listResult) ? listResult.length : 0,
946
- });
947
- }
948
976
  async function main() {
949
977
  const { command, subcommand, flags } = parseArgs(process.argv.slice(2));
950
978
  if (!command || command === "--help" || command === "help") {
@@ -955,10 +983,8 @@ async function main() {
955
983
  "admin ensure-admin",
956
984
  "storage check-supabase-context",
957
985
  "storage setup-supabase",
958
- "storage smoke-test-supabase",
959
986
  "storage check-s3-context",
960
987
  "storage setup-s3",
961
- "storage smoke-test-s3",
962
988
  "configure-site-redirects",
963
989
  "configure-email-password",
964
990
  "enable-google-provider",
@@ -967,6 +993,12 @@ async function main() {
967
993
  });
968
994
  return;
969
995
  }
996
+ // Handle --help for any resolved command before dispatching (avoids live API calls)
997
+ if (flags.help || flags.h) {
998
+ const resolvedCommand = subcommand ? `${command} ${subcommand}` : command;
999
+ printJson({ ok: true, command: resolvedCommand, help: true, message: `Usage: npx @vibecodemax/cli ${resolvedCommand} [options]` });
1000
+ return;
1001
+ }
970
1002
  if (command === "read-setup-state")
971
1003
  return readSetupState();
972
1004
  if (command === "admin" && subcommand === "ensure-admin")
@@ -975,14 +1007,10 @@ async function main() {
975
1007
  return checkSupabaseContext();
976
1008
  if (command === "storage" && subcommand === "setup-supabase")
977
1009
  return setupSupabaseStorage(flags);
978
- if (command === "storage" && subcommand === "smoke-test-supabase")
979
- return smokeTestSupabase();
980
1010
  if (command === "storage" && subcommand === "check-s3-context")
981
1011
  return checkS3Context(flags);
982
1012
  if (command === "storage" && subcommand === "setup-s3")
983
1013
  return setupS3Storage(flags);
984
- if (command === "storage" && subcommand === "smoke-test-s3")
985
- return smokeTestS3(flags);
986
1014
  if (command === "configure-site-redirects")
987
1015
  return configureSiteRedirects(flags);
988
1016
  if (command === "configure-email-password")
package/dist/storageS3.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import * as fs from "node:fs";
2
2
  import * as path from "node:path";
3
3
  import * as crypto from "node:crypto";
4
- import { S3Client, HeadBucketCommand, CreateBucketCommand, PutPublicAccessBlockCommand, PutBucketEncryptionCommand, PutBucketOwnershipControlsCommand, PutBucketPolicyCommand, PutBucketCorsCommand, GetBucketLocationCommand, GetPublicAccessBlockCommand, GetBucketEncryptionCommand, GetBucketOwnershipControlsCommand, GetBucketPolicyCommand, GetBucketCorsCommand, PutObjectCommand, ListObjectsV2Command, DeleteObjectCommand, GetObjectCommand, } from "@aws-sdk/client-s3";
4
+ import { S3Client, HeadBucketCommand, CreateBucketCommand, PutPublicAccessBlockCommand, PutBucketEncryptionCommand, PutBucketOwnershipControlsCommand, PutBucketPolicyCommand, PutBucketCorsCommand, GetBucketLocationCommand, GetPublicAccessBlockCommand, GetBucketEncryptionCommand, GetBucketOwnershipControlsCommand, GetBucketPolicyCommand, GetBucketCorsCommand, } from "@aws-sdk/client-s3";
5
5
  import { STSClient, GetCallerIdentityCommand } from "@aws-sdk/client-sts";
6
6
  const SETUP_CONFIG_PATH = path.join(".vibecodemax", "setup-config.json");
7
7
  const DEFAULT_BUCKETS = {
@@ -10,7 +10,6 @@ const DEFAULT_BUCKETS = {
10
10
  };
11
11
  const DEFAULT_PROJECT_SLUG = "vibecodemax";
12
12
  const DEFAULT_REGION = "us-east-1";
13
- const HEALTHCHECK_PREFIX = "_vibecodemax/healthcheck/default";
14
13
  const PUBLIC_ACCESS_BLOCK_PRIVATE = {
15
14
  BlockPublicAcls: true,
16
15
  IgnorePublicAcls: true,
@@ -482,55 +481,6 @@ async function verifyTwoBuckets(region, credentials, buckets) {
482
481
  });
483
482
  }
484
483
  }
485
- function buildObjectUrl(bucket, region, key) {
486
- const encoded = key.split("/").map((segment) => encodeURIComponent(segment)).join("/");
487
- if (region === "us-east-1")
488
- return `https://${bucket}.s3.amazonaws.com/${encoded}`;
489
- return `https://${bucket}.s3.${region}.amazonaws.com/${encoded}`;
490
- }
491
- async function smokeTestBuckets(region, credentials, buckets) {
492
- const { s3Client } = createAwsClients(region, credentials);
493
- const runId = `run_${Date.now()}`;
494
- const prefix = `${HEALTHCHECK_PREFIX}/${runId}`;
495
- const publicKey = `${prefix}/public-probe.txt`;
496
- const privateKey = `${prefix}/private-probe.txt`;
497
- await s3Client.send(new PutObjectCommand({ Bucket: buckets.public, Key: publicKey, Body: "public healthcheck probe", ContentType: "text/plain" }));
498
- await s3Client.send(new PutObjectCommand({ Bucket: buckets.private, Key: privateKey, Body: "private healthcheck probe", ContentType: "text/plain" }));
499
- const publicList = await s3Client.send(new ListObjectsV2Command({ Bucket: buckets.public, Prefix: prefix }));
500
- const privateList = await s3Client.send(new ListObjectsV2Command({ Bucket: buckets.private, Prefix: prefix }));
501
- const listPassed = Number(publicList.KeyCount || 0) > 0 && Number(privateList.KeyCount || 0) > 0;
502
- if (!listPassed) {
503
- fail("AWS_SMOKE_TEST_FAILED", "AWS S3 smoke-test list operation did not return the uploaded healthcheck objects.", 1, { prefix });
504
- }
505
- const publicUrl = buildObjectUrl(buckets.public, region, publicKey);
506
- const privateUrl = buildObjectUrl(buckets.private, region, privateKey);
507
- const publicReadResponse = await fetch(publicUrl);
508
- const privateReadResponse = await fetch(privateUrl);
509
- const publicRead = publicReadResponse.ok;
510
- const privateReadBlocked = !privateReadResponse.ok;
511
- if (!publicRead || !privateReadBlocked) {
512
- fail("AWS_SMOKE_TEST_FAILED", "AWS S3 smoke-test public/private read checks failed.", 1, {
513
- publicReadStatus: publicReadResponse.status,
514
- privateReadStatus: privateReadResponse.status,
515
- });
516
- }
517
- await s3Client.send(new GetObjectCommand({ Bucket: buckets.private, Key: privateKey }));
518
- await s3Client.send(new DeleteObjectCommand({ Bucket: buckets.public, Key: publicKey }));
519
- await s3Client.send(new DeleteObjectCommand({ Bucket: buckets.private, Key: privateKey }));
520
- return {
521
- ok: true,
522
- command: "storage smoke-test-s3",
523
- runId,
524
- prefix,
525
- buckets,
526
- upload: true,
527
- list: true,
528
- publicRead: true,
529
- privateRead: true,
530
- delete: true,
531
- publicObjectUrl: publicUrl,
532
- };
533
- }
534
484
  export async function checkS3Context(flags) {
535
485
  const context = readAwsContext(flags);
536
486
  if (context.missingKeys.length > 0) {
@@ -603,28 +553,6 @@ export async function setupS3Storage(flags) {
603
553
  envWritten: ["AWS_REGION", "AWS_S3_PUBLIC_BUCKET", "AWS_S3_PRIVATE_BUCKET"],
604
554
  });
605
555
  }
606
- export async function smokeTestS3(flags) {
607
- const context = readAwsContext(flags);
608
- const publicBucket = context.localEnv.values.AWS_S3_PUBLIC_BUCKET || "";
609
- const privateBucket = context.localEnv.values.AWS_S3_PRIVATE_BUCKET || "";
610
- if (context.missingKeys.length > 0) {
611
- fail("MISSING_ENV", `Missing required AWS values: ${context.missingKeys.join(", ")}. Add them to .env.bootstrap.local.`);
612
- }
613
- if (!isNonEmptyString(publicBucket) || !isNonEmptyString(privateBucket)) {
614
- fail("MISSING_ENV", "AWS_S3_PUBLIC_BUCKET or AWS_S3_PRIVATE_BUCKET is missing. Run storage setup first so .env.local is populated.");
615
- }
616
- const credentials = {
617
- accessKeyId: context.accessKeyId,
618
- secretAccessKey: context.secretAccessKey,
619
- ...(context.sessionToken ? { sessionToken: context.sessionToken } : {}),
620
- };
621
- await validateCredentials(context.region, credentials);
622
- const result = await smokeTestBuckets(context.region, credentials, {
623
- public: publicBucket.trim(),
624
- private: privateBucket.trim(),
625
- });
626
- printJson(result);
627
- }
628
556
  export function __testOnlyDeterministicBuckets(projectSlug, accountId) {
629
557
  return deterministicBuckets({ projectSlug, accountId });
630
558
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vibecodemax/cli",
3
- "version": "0.1.5",
3
+ "version": "0.1.7",
4
4
  "description": "VibeCodeMax CLI — local provider setup for bootstrap and project configuration",
5
5
  "type": "module",
6
6
  "bin": {