hatchkit 0.1.41 → 0.1.43

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/dist/adopt.d.ts +77 -0
  2. package/dist/adopt.d.ts.map +1 -1
  3. package/dist/adopt.js +757 -170
  4. package/dist/adopt.js.map +1 -1
  5. package/dist/config.d.ts +32 -10
  6. package/dist/config.d.ts.map +1 -1
  7. package/dist/config.js +91 -38
  8. package/dist/config.js.map +1 -1
  9. package/dist/deploy/coolify-app.d.ts.map +1 -1
  10. package/dist/deploy/coolify-app.js +0 -7
  11. package/dist/deploy/coolify-app.js.map +1 -1
  12. package/dist/deploy/coolify.d.ts.map +1 -1
  13. package/dist/deploy/coolify.js +20 -1
  14. package/dist/deploy/coolify.js.map +1 -1
  15. package/dist/deploy/ghcr.d.ts +4 -2
  16. package/dist/deploy/ghcr.d.ts.map +1 -1
  17. package/dist/deploy/ghcr.js +1 -1
  18. package/dist/deploy/ghcr.js.map +1 -1
  19. package/dist/deploy/github.d.ts +4 -3
  20. package/dist/deploy/github.d.ts.map +1 -1
  21. package/dist/deploy/github.js +5 -2
  22. package/dist/deploy/github.js.map +1 -1
  23. package/dist/deploy/pages.d.ts.map +1 -1
  24. package/dist/deploy/pages.js +8 -14
  25. package/dist/deploy/pages.js.map +1 -1
  26. package/dist/deploy/regen-infra.d.ts.map +1 -1
  27. package/dist/deploy/regen-infra.js +1 -11
  28. package/dist/deploy/regen-infra.js.map +1 -1
  29. package/dist/deploy/rollback.d.ts.map +1 -1
  30. package/dist/deploy/rollback.js +30 -6
  31. package/dist/deploy/rollback.js.map +1 -1
  32. package/dist/deploy/terraform.d.ts.map +1 -1
  33. package/dist/deploy/terraform.js +20 -37
  34. package/dist/deploy/terraform.js.map +1 -1
  35. package/dist/dns.d.ts.map +1 -1
  36. package/dist/dns.js +4 -5
  37. package/dist/dns.js.map +1 -1
  38. package/dist/doctor.d.ts +15 -0
  39. package/dist/doctor.d.ts.map +1 -1
  40. package/dist/doctor.js +110 -36
  41. package/dist/doctor.js.map +1 -1
  42. package/dist/email/index.d.ts +31 -0
  43. package/dist/email/index.d.ts.map +1 -0
  44. package/dist/email/index.js +251 -0
  45. package/dist/email/index.js.map +1 -0
  46. package/dist/email/presets.d.ts +14 -0
  47. package/dist/email/presets.d.ts.map +1 -0
  48. package/dist/email/presets.js +33 -0
  49. package/dist/email/presets.js.map +1 -0
  50. package/dist/email/setup.d.ts +93 -0
  51. package/dist/email/setup.d.ts.map +1 -0
  52. package/dist/email/setup.js +263 -0
  53. package/dist/email/setup.js.map +1 -0
  54. package/dist/email/spf.d.ts +56 -0
  55. package/dist/email/spf.d.ts.map +1 -0
  56. package/dist/email/spf.js +102 -0
  57. package/dist/email/spf.js.map +1 -0
  58. package/dist/index.js +113 -4
  59. package/dist/index.js.map +1 -1
  60. package/dist/inventory.d.ts.map +1 -1
  61. package/dist/inventory.js +34 -11
  62. package/dist/inventory.js.map +1 -1
  63. package/dist/overview.d.ts.map +1 -1
  64. package/dist/overview.js +43 -15
  65. package/dist/overview.js.map +1 -1
  66. package/dist/prompts.d.ts +5 -0
  67. package/dist/prompts.d.ts.map +1 -1
  68. package/dist/prompts.js +29 -7
  69. package/dist/prompts.js.map +1 -1
  70. package/dist/provision/index.d.ts +20 -1
  71. package/dist/provision/index.d.ts.map +1 -1
  72. package/dist/provision/index.js +115 -0
  73. package/dist/provision/index.js.map +1 -1
  74. package/dist/provision/s3-buckets.d.ts.map +1 -1
  75. package/dist/provision/s3-buckets.js +45 -25
  76. package/dist/provision/s3-buckets.js.map +1 -1
  77. package/dist/provision/write-env.d.ts +6 -0
  78. package/dist/provision/write-env.d.ts.map +1 -1
  79. package/dist/provision/write-env.js +17 -0
  80. package/dist/provision/write-env.js.map +1 -1
  81. package/dist/scaffold/app.d.ts.map +1 -1
  82. package/dist/scaffold/app.js +15 -7
  83. package/dist/scaffold/app.js.map +1 -1
  84. package/dist/scaffold/build-pipeline.d.ts +40 -0
  85. package/dist/scaffold/build-pipeline.d.ts.map +1 -1
  86. package/dist/scaffold/build-pipeline.js +201 -5
  87. package/dist/scaffold/build-pipeline.js.map +1 -1
  88. package/dist/scaffold/infra.d.ts +4 -5
  89. package/dist/scaffold/infra.d.ts.map +1 -1
  90. package/dist/scaffold/infra.js +11 -56
  91. package/dist/scaffold/infra.js.map +1 -1
  92. package/dist/scaffold/manifest.d.ts.map +1 -1
  93. package/dist/scaffold/manifest.js +1 -0
  94. package/dist/scaffold/manifest.js.map +1 -1
  95. package/dist/scaffold/pages-heuristics.d.ts.map +1 -1
  96. package/dist/scaffold/pages-heuristics.js +10 -10
  97. package/dist/scaffold/pages-heuristics.js.map +1 -1
  98. package/dist/scaffold/pages-mode.js +2 -4
  99. package/dist/scaffold/pages-mode.js.map +1 -1
  100. package/dist/scaffold/pkg-json.d.ts +4 -0
  101. package/dist/scaffold/pkg-json.d.ts.map +1 -1
  102. package/dist/scaffold/pkg-json.js +17 -0
  103. package/dist/scaffold/pkg-json.js.map +1 -1
  104. package/dist/scaffold/update.js +1 -1
  105. package/dist/scaffold/update.js.map +1 -1
  106. package/dist/templates/build-pipeline/Dockerfile.nextjs-monorepo.hbs +107 -0
  107. package/dist/templates/build-pipeline/Dockerfile.nextjs.hbs +103 -0
  108. package/dist/templates/build-pipeline/docker-compose.yml.hbs +37 -8
  109. package/dist/utils/cloudflare-api.d.ts +146 -20
  110. package/dist/utils/cloudflare-api.d.ts.map +1 -1
  111. package/dist/utils/cloudflare-api.js +203 -11
  112. package/dist/utils/cloudflare-api.js.map +1 -1
  113. package/dist/utils/run-ledger.d.ts +22 -1
  114. package/dist/utils/run-ledger.d.ts.map +1 -1
  115. package/dist/utils/run-ledger.js.map +1 -1
  116. package/dist/utils/s3-admin.d.ts +9 -0
  117. package/dist/utils/s3-admin.d.ts.map +1 -0
  118. package/dist/utils/s3-admin.js +46 -0
  119. package/dist/utils/s3-admin.js.map +1 -0
  120. package/package.json +1 -1
package/dist/adopt.js CHANGED
@@ -33,7 +33,7 @@
33
33
  * a second run on the same dir notices the existing manifest and
34
34
  * exits early with a "use `hatchkit update` instead" hint.
35
35
  */
36
- import { existsSync, readFileSync, readdirSync } from "node:fs";
36
+ import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
37
37
  import { join, relative } from "node:path";
38
38
  import { Separator, confirm, input, select } from "@inquirer/prompts";
39
39
  import chalk from "chalk";
@@ -43,6 +43,7 @@ import { pushInitialBranch } from "./deploy/github.js";
43
43
  import { pushProjectKeyToCoolify, pushProjectKeyToGh } from "./deploy/keys.js";
44
44
  import { handleAdoptFailure } from "./deploy/rollback.js";
45
45
  import { runProvision } from "./provision/index.js";
46
+ import { readEnvKeys } from "./provision/write-env.js";
46
47
  import { detectBuildPipeline, scaffoldBuildPipeline } from "./scaffold/build-pipeline.js";
47
48
  import { MANIFEST_FILENAME, readManifest, writeManifest, } from "./scaffold/manifest.js";
48
49
  import { installCancelHandler, isCancelInProgress, uninstallCancelHandler, } from "./utils/cancel-handler.js";
@@ -143,7 +144,13 @@ export async function runAdopt(cwd, opts = {}) {
143
144
  // private one and produces "permission denied" on deploy.
144
145
  isPrivate: state.gitRemoteIsPrivate ?? true,
145
146
  appPort: "3000",
146
- scaffoldBuildPipeline: true,
147
+ // Pipeline scaffold defaults OFF when the layout is unrecognised.
148
+ // The Dockerfile templates assume a single-package project rooted
149
+ // at /; for a workspace with no standard server/client dirs the
150
+ // generated files build the wrong thing (or nothing). User can
151
+ // still flip this on in the review loop — buildAdoptGroups parks
152
+ // the cursor on the row in this case so the choice is explicit.
153
+ scaffoldBuildPipeline: !state.unknownWorkspaceLayout,
147
154
  // Provisioning is opt-in. Each service mints real resources on a
148
155
  // third-party (GlitchTip project, OpenPanel project, Resend API
149
156
  // key) and cleaning those up after the fact is a chore — better
@@ -209,7 +216,7 @@ export async function runAdopt(cwd, opts = {}) {
209
216
  // ---------------------------------------------------------------------------
210
217
  // Detection
211
218
  // ---------------------------------------------------------------------------
212
- async function detectProject(projectDir) {
219
+ export async function detectProject(projectDir) {
213
220
  const hasManifest = existsSync(join(projectDir, MANIFEST_FILENAME));
214
221
  const existingManifest = hasManifest ? (readManifest(projectDir) ?? undefined) : undefined;
215
222
  let packageName;
@@ -228,6 +235,31 @@ async function detectProject(projectDir) {
228
235
  catch {
229
236
  // No package.json at root — that's fine for a non-Node project.
230
237
  }
238
+ // Workspace markers — pnpm, yarn/npm, turbo, lerna, rush. When a
239
+ // marker is present but the standard server/client dir scan below
240
+ // turns up nothing, we're looking at a layout the scaffolder's
241
+ // surface-based Dockerfile templates can't handle. Flagged below as
242
+ // `unknownWorkspaceLayout`.
243
+ const workspaceMarkers = [
244
+ "pnpm-workspace.yaml",
245
+ "pnpm-workspace.yml",
246
+ "turbo.json",
247
+ "lerna.json",
248
+ "rush.json",
249
+ ];
250
+ let hasWorkspaceMarker = workspaceMarkers.some((f) => existsSync(join(projectDir, f)));
251
+ if (!hasWorkspaceMarker) {
252
+ // npm/yarn workspaces live as a `workspaces` field inside the root
253
+ // package.json (string array or { packages: [...] } object).
254
+ try {
255
+ const rootPkg = JSON.parse(readFileSync(join(projectDir, "package.json"), "utf-8"));
256
+ if (rootPkg.workspaces)
257
+ hasWorkspaceMarker = true;
258
+ }
259
+ catch {
260
+ // No / unreadable root package.json — leave hasWorkspaceMarker false.
261
+ }
262
+ }
231
263
  // Walk a generous set of common monorepo layouts.
232
264
  const serverDir = firstExisting(projectDir, [
233
265
  "packages/server",
@@ -349,6 +381,10 @@ async function detectProject(projectDir) {
349
381
  // undefined and let the stepper fall back to a sensible default.
350
382
  }
351
383
  }
384
+ const unknownWorkspaceLayout = hasWorkspaceMarker && !serverDir && !clientDir;
385
+ const standaloneBuildCandidates = unknownWorkspaceLayout
386
+ ? findStandaloneBuildCandidates(projectDir)
387
+ : [];
352
388
  return {
353
389
  projectDir,
354
390
  packageName,
@@ -356,6 +392,8 @@ async function detectProject(projectDir) {
356
392
  hasManifest,
357
393
  serverDir,
358
394
  clientDir,
395
+ unknownWorkspaceLayout,
396
+ standaloneBuildCandidates,
359
397
  features,
360
398
  prodEnvIsEncrypted,
361
399
  hasEnvKeys,
@@ -368,6 +406,57 @@ async function detectProject(projectDir) {
368
406
  existingManifest,
369
407
  };
370
408
  }
409
+ /** Scan first-level subdirs for a standalone-buildable project — own
410
+ * `package.json` AND its own lockfile (the marker that pnpm/npm/yarn
411
+ * would install it independently of the parent workspace). `.npmrc`
412
+ * with `ignore-workspace=true` is a stronger signal: that's the
413
+ * explicit "treat me as standalone" toggle Docusaurus / marketing
414
+ * sites use when they live next to a CLI workspace.
415
+ *
416
+ * Returns first-level matches only; we don't recurse because the
417
+ * intent is "show the user a starting point", not enumerate every
418
+ * buildable subtree. */
419
+ function findStandaloneBuildCandidates(projectDir) {
420
+ const out = [];
421
+ let entries;
422
+ try {
423
+ entries = readdirSync(projectDir);
424
+ }
425
+ catch {
426
+ return out;
427
+ }
428
+ for (const name of entries) {
429
+ if (name.startsWith(".") || name === "node_modules")
430
+ continue;
431
+ const dir = join(projectDir, name);
432
+ let isDir = false;
433
+ try {
434
+ isDir = statSync(dir).isDirectory();
435
+ }
436
+ catch {
437
+ continue;
438
+ }
439
+ if (!isDir)
440
+ continue;
441
+ if (!existsSync(join(dir, "package.json")))
442
+ continue;
443
+ const hasOwnLockfile = existsSync(join(dir, "pnpm-lock.yaml")) ||
444
+ existsSync(join(dir, "package-lock.json")) ||
445
+ existsSync(join(dir, "yarn.lock"));
446
+ if (!hasOwnLockfile)
447
+ continue;
448
+ let hasIgnoreWorkspace = false;
449
+ try {
450
+ const npmrc = readFileSync(join(dir, ".npmrc"), "utf-8");
451
+ hasIgnoreWorkspace = /^\s*ignore-workspace\s*=\s*true\s*$/m.test(npmrc);
452
+ }
453
+ catch {
454
+ // No .npmrc — still a candidate (own lockfile is the main signal).
455
+ }
456
+ out.push({ dir, hasIgnoreWorkspace });
457
+ }
458
+ return out;
459
+ }
371
460
  function firstExisting(root, candidates) {
372
461
  for (const c of candidates) {
373
462
  const full = join(root, c);
@@ -500,6 +589,22 @@ function printDetected(state) {
500
589
  ? chalk.yellow("repo present, no `origin` set")
501
590
  : chalk.dim("not a git repo yet")));
502
591
  lines.push(row("features (guess)", state.features.length > 0 ? state.features.join(", ") : chalk.dim("none detected")));
592
+ if (state.unknownWorkspaceLayout) {
593
+ lines.push("");
594
+ lines.push(chalk.yellow(" ! Workspace marker detected (pnpm-workspace.yaml / workspaces / turbo / lerna)"));
595
+ lines.push(chalk.yellow(" but no standard server/client dir matched. Adopt will skip the Docker"));
596
+ lines.push(chalk.yellow(" + GH Actions pipeline scaffold — the templates assume a single-package"));
597
+ lines.push(chalk.yellow(" project and would build the wrong thing here."));
598
+ if (state.standaloneBuildCandidates.length > 0) {
599
+ lines.push("");
600
+ lines.push(chalk.dim(" Standalone-buildable subdirs (own lockfile):"));
601
+ for (const c of state.standaloneBuildCandidates) {
602
+ const tag = c.hasIgnoreWorkspace ? chalk.green(" ignore-workspace=true") : "";
603
+ lines.push(chalk.dim(` · ${relativeTo(c.dir)}${tag}`));
604
+ }
605
+ lines.push(chalk.dim(" Point a hand-authored Dockerfile at one of those and re-run adopt."));
606
+ }
607
+ }
503
608
  for (const l of lines)
504
609
  console.log(l);
505
610
  console.log();
@@ -677,7 +782,10 @@ function buildAdoptGroups(state, plan) {
677
782
  {
678
783
  key: "scaffoldBuildPipeline",
679
784
  label: "Docker + GH Actions",
680
- set: true,
785
+ // Park cursor here when the layout is unrecognised — the
786
+ // user should make an explicit yes/no rather than walk
787
+ // past a defaulted-off row without seeing it.
788
+ set: !state.unknownWorkspaceLayout,
681
789
  summary: renderBuildPipelineSummary(state, plan),
682
790
  },
683
791
  ],
@@ -834,7 +942,9 @@ async function editAdoptStep(state, plan, step) {
834
942
  // Also: switching away from client-only invalidates gh-pages
835
943
  // (Pages can't host a backend). Snap deploymentMode back to
836
944
  // coolify in that case so the user doesn't keep an invalid combo.
837
- const nextDeploymentMode = plan.deploymentMode === "gh-pages" && next !== "client-only" ? "coolify" : plan.deploymentMode;
945
+ const nextDeploymentMode = plan.deploymentMode === "gh-pages" && next !== "client-only"
946
+ ? "coolify"
947
+ : plan.deploymentMode;
838
948
  if (plan.deploymentMode === "gh-pages" && next !== "client-only") {
839
949
  console.log(chalk.yellow(" ⚠ gh-pages requires client-only surfaces — switched deployment mode back to coolify."));
840
950
  }
@@ -958,10 +1068,15 @@ async function editAdoptStep(state, plan, step) {
958
1068
  checked: plan.services.includes("openpanel"),
959
1069
  },
960
1070
  {
961
- name: "Resend (email)",
1071
+ name: "Resend (transactional email)",
962
1072
  value: "resend",
963
1073
  checked: plan.services.includes("resend"),
964
1074
  },
1075
+ {
1076
+ name: "Email forwarding (Cloudflare Email Routing → your inbox)",
1077
+ value: "email",
1078
+ checked: plan.services.includes("email"),
1079
+ },
965
1080
  ],
966
1081
  });
967
1082
  return { ...plan, services };
@@ -1041,6 +1156,47 @@ async function editAdoptStep(state, plan, step) {
1041
1156
  }
1042
1157
  return plan;
1043
1158
  }
1159
+ /** Canonical env key per service — used by `filterServicesForResume`
1160
+ * to decide whether a service's credentials are already wired into
1161
+ * the project's env files. If the key is present, re-minting on a
1162
+ * resume would orphan whatever's there (Resend mints a fresh API
1163
+ * key each call; OpenPanel mints a fresh project; Stripe re-creates
1164
+ * the webhook endpoint). `email` is intentionally absent — Email
1165
+ * Routing is zone-state with no env footprint, and its provisioner
1166
+ * is already 409-idempotent. */
1167
+ const RESUME_SERVICE_ENV_KEY = {
1168
+ glitchtip: { server: "GLITCHTIP_DSN", client: "PUBLIC_GLITCHTIP_DSN" },
1169
+ openpanel: { server: "OPENPANEL_CLIENT_ID", client: "PUBLIC_OPENPANEL_CLIENT_ID" },
1170
+ resend: { server: "RESEND_API_KEY" },
1171
+ s3: { server: "R2_ENDPOINT" },
1172
+ email: {},
1173
+ };
1174
+ /** Filter the services list for `runProvision` on `--resume`: drop
1175
+ * every service whose canonical env keys are already in the target
1176
+ * env files. A non-resume run returns the list unchanged. */
1177
+ function filterServicesForResume(args) {
1178
+ if (!args.resume)
1179
+ return args.services;
1180
+ const serverKeys = args.serverEnvPath ? readEnvKeys(args.serverEnvPath) : new Set();
1181
+ const clientKeys = args.clientEnvPath ? readEnvKeys(args.clientEnvPath) : new Set();
1182
+ const kept = [];
1183
+ for (const svc of args.services) {
1184
+ const want = RESUME_SERVICE_ENV_KEY[svc];
1185
+ if (!want || (!want.server && !want.client)) {
1186
+ kept.push(svc);
1187
+ continue;
1188
+ }
1189
+ const serverOk = !want.server || serverKeys.has(want.server);
1190
+ const clientOk = !want.client || clientKeys.has(want.client);
1191
+ if (serverOk && clientOk) {
1192
+ const which = [want.server, want.client].filter(Boolean).join(" + ");
1193
+ console.log(chalk.dim(` · Skipping ${svc} on --resume — ${which} already in .env.production.`));
1194
+ continue;
1195
+ }
1196
+ kept.push(svc);
1197
+ }
1198
+ return kept;
1199
+ }
1044
1200
  async function executePlan(state, plan, opts = { resume: false }) {
1045
1201
  console.log(chalk.bold("\n ── Adopting ──────────────────────────────────────────────\n"));
1046
1202
  const caveats = [];
@@ -1127,10 +1283,14 @@ async function executePlan(state, plan, opts = { resume: false }) {
1127
1283
  // Gated on coolify mode — the Coolify-targeted Dockerfile + deploy
1128
1284
  // webhook workflow aren't useful for gh-pages, which uses its own
1129
1285
  // `gh-pages.yml` workflow written later in step 3c-pages.
1286
+ let scaffoldedAbsPaths = [];
1287
+ let overwrittenAbsPaths = [];
1130
1288
  if (plan.scaffoldBuildPipeline && plan.deploymentMode === "coolify") {
1131
1289
  const pipeResult = await scaffoldBuildPipelineNow(state, plan, remoteUrl, {
1132
1290
  force: !!opts.regeneratePipeline,
1133
1291
  });
1292
+ scaffoldedAbsPaths = pipeResult.createdAbsPaths;
1293
+ overwrittenAbsPaths = pipeResult.overwrittenAbsPaths;
1134
1294
  // Record only files we *created*. The `overwritten` list is
1135
1295
  // deliberately not recorded — those files existed before this
1136
1296
  // run (the user's), and a later `hatchkit destroy` must never
@@ -1258,16 +1418,38 @@ async function executePlan(state, plan, opts = { resume: false }) {
1258
1418
  //
1259
1419
  // Skipped for gh-pages — there's no Coolify webhook to hit.
1260
1420
  const appUuidForSecrets = coolifyResult?.appUuid ?? state.coolifyAppMatch?.uuid;
1261
- if (plan.scaffoldBuildPipeline &&
1262
- plan.deploymentMode === "coolify" &&
1263
- appUuidForSecrets) {
1421
+ if (plan.scaffoldBuildPipeline && plan.deploymentMode === "coolify" && appUuidForSecrets) {
1264
1422
  const slug = repoSlugFromRemote(remoteUrl);
1265
1423
  if (slug) {
1266
- await setCoolifyDeploySecrets({
1267
- projectDir: state.projectDir,
1268
- repoSlug: slug,
1269
- apps: [{ uuid: appUuidForSecrets }],
1270
- });
1424
+ // --resume gate: if every secret this step would push is
1425
+ // already present on the repo, skip the push. The values
1426
+ // themselves aren't readable through `gh secret list` (write-
1427
+ // only), so we trust name-presence as the signal — same
1428
+ // contract `ghSecretExists` uses for the ledger-record gate
1429
+ // below. The user's recourse for a rotated Coolify token is
1430
+ // re-running adopt *without* --resume.
1431
+ const coolifySecretNames = [
1432
+ "COOLIFY_BASE_URL",
1433
+ "COOLIFY_API_TOKEN",
1434
+ "COOLIFY_TOKEN",
1435
+ "COOLIFY_WEBHOOK_URL",
1436
+ "COOLIFY_RESOURCE_UUID",
1437
+ ];
1438
+ let skipCoolifySecrets = false;
1439
+ if (opts.resume) {
1440
+ const checks = await Promise.all(coolifySecretNames.map((n) => ghSecretExists(state.projectDir, slug, n)));
1441
+ if (checks.every(Boolean)) {
1442
+ skipCoolifySecrets = true;
1443
+ console.log(chalk.dim(` · Skipping Coolify GH Actions secrets on --resume — all ${coolifySecretNames.length} already on ${slug}.`));
1444
+ }
1445
+ }
1446
+ if (!skipCoolifySecrets) {
1447
+ await setCoolifyDeploySecrets({
1448
+ projectDir: state.projectDir,
1449
+ repoSlug: slug,
1450
+ apps: [{ uuid: appUuidForSecrets }],
1451
+ });
1452
+ }
1271
1453
  }
1272
1454
  else {
1273
1455
  console.log(chalk.dim(" · Couldn't resolve owner/repo from git remote — set the deploy secrets manually."));
@@ -1296,21 +1478,26 @@ async function executePlan(state, plan, opts = { resume: false }) {
1296
1478
  // we're the creator preserves the "destroy never deletes
1297
1479
  // pre-existing user data" invariant — see LedgerStep doc.
1298
1480
  const preExisted = await ghSecretExists(state.projectDir, slug, secretName);
1299
- try {
1300
- await pushProjectKeyToGh(plan.name, slug);
1301
- if (!preExisted) {
1302
- ledger.record({ kind: "ghActionsSecret", repo: slug, name: secretName });
1303
- }
1481
+ if (opts.resume && preExisted) {
1482
+ console.log(chalk.dim(` · Skipping ${secretName} push on --resume — secret already on ${slug}.`));
1304
1483
  }
1305
- catch (err) {
1306
- caveats.push({
1307
- title: `${secretName} not set on GitHub Actions`,
1308
- reason: err.message,
1309
- recovery: [
1310
- `hatchkit keys push ${plan.name} --target gh --repo ${slug}`,
1311
- `(or copy from \`hatchkit keys show ${plan.name}\` and run \`gh secret set ${secretName} --repo ${slug} --body <key>\`)`,
1312
- ],
1313
- });
1484
+ else {
1485
+ try {
1486
+ await pushProjectKeyToGh(plan.name, slug);
1487
+ if (!preExisted) {
1488
+ ledger.record({ kind: "ghActionsSecret", repo: slug, name: secretName });
1489
+ }
1490
+ }
1491
+ catch (err) {
1492
+ caveats.push({
1493
+ title: `${secretName} not set on GitHub Actions`,
1494
+ reason: err.message,
1495
+ recovery: [
1496
+ `hatchkit keys push ${plan.name} --target gh --repo ${slug}`,
1497
+ `(or copy from \`hatchkit keys show ${plan.name}\` and run \`gh secret set ${secretName} --repo ${slug} --body <key>\`)`,
1498
+ ],
1499
+ });
1500
+ }
1314
1501
  }
1315
1502
  }
1316
1503
  else if (remoteUrl) {
@@ -1385,13 +1572,36 @@ async function executePlan(state, plan, opts = { resume: false }) {
1385
1572
  });
1386
1573
  }
1387
1574
  }
1388
- // Step 3d: push the working branch to origin. Done AFTER secrets
1389
- // are set so the workflow's first run can hit the Coolify webhook
1390
- // without falling through to the "secret not set" branch. Skipped
1391
- // when there's no remote yet (e.g. user opted out of GitHub) or
1392
- // when origin already had history before adopt.
1393
- if (plan.setupGitHub && remoteUrl && !state.gitRemoteUrl) {
1394
- await pushInitialBranch(state.projectDir);
1575
+ // Step 3d: push to origin so GitHub Actions builds + pushes the
1576
+ // GHCR image. Done AFTER Coolify wiring + secrets so the workflow's
1577
+ // first run can hit the redeploy webhook on its own.
1578
+ //
1579
+ // Two paths:
1580
+ // · Brand-new remote (this run just ran `gh repo create`)
1581
+ // pushInitialBranch pushes the whole tree.
1582
+ // · Pre-existing remote → commitAndPushScaffold makes a
1583
+ // pathspec-scoped commit of just the files hatchkit wrote
1584
+ // (manifest + build-pipeline scaffold) and pushes it. Without
1585
+ // this push the workflow file lives only in the working tree,
1586
+ // Actions never fires, and the GHCR-wait below times out.
1587
+ //
1588
+ // `pushedThisRun` gates the GHCR step below — we only wait for a
1589
+ // new image when a push actually went out.
1590
+ let pushedThisRun = false;
1591
+ if (remoteUrl) {
1592
+ if (plan.setupGitHub && !state.gitRemoteUrl) {
1593
+ pushedThisRun = await pushInitialBranch(state.projectDir);
1594
+ }
1595
+ else if (state.gitRemoteUrl) {
1596
+ const result = await commitAndPushScaffold(state, {
1597
+ scaffoldedAbsPaths,
1598
+ overwrittenAbsPaths,
1599
+ manifestPath,
1600
+ });
1601
+ pushedThisRun = result.pushed;
1602
+ if (result.caveat)
1603
+ caveats.push(result.caveat);
1604
+ }
1395
1605
  }
1396
1606
  // Step 3e: GHCR setup. Two paths, gated on the user's earlier
1397
1607
  // public/private choice:
@@ -1414,7 +1624,25 @@ async function executePlan(state, plan, opts = { resume: false }) {
1414
1624
  remoteUrl &&
1415
1625
  coolifyResult !== undefined) {
1416
1626
  const slug = repoSlugFromRemote(remoteUrl);
1417
- if (slug) {
1627
+ if (slug && !pushedThisRun && !plan.isPrivate) {
1628
+ // No push went out (either nothing changed on disk this run,
1629
+ // or the auto-commit-push failed). Without a push the
1630
+ // build-and-deploy workflow doesn't run, so polling GHCR for
1631
+ // a brand-new image would just time out. Defer the visibility
1632
+ // PATCH to the next `--resume` once the user has pushed.
1633
+ caveats.push({
1634
+ title: "GHCR visibility not set — no push triggered",
1635
+ reason: "Adopt didn't push to origin this run, so the build-and-deploy workflow hasn't been triggered to publish the GHCR image.",
1636
+ recovery: [
1637
+ "Commit + push so the workflow runs:",
1638
+ ` cd ${state.projectDir}`,
1639
+ ` git add . && git commit -m "chore: adopt hatchkit"`,
1640
+ ` git push`,
1641
+ "Then re-run: hatchkit adopt --resume",
1642
+ ],
1643
+ });
1644
+ }
1645
+ else if (slug) {
1418
1646
  const { makeGhcrPackagePublic, registerGhcrCredsWithCoolify } = await import("./deploy/ghcr.js");
1419
1647
  if (plan.isPrivate) {
1420
1648
  // Read the full GHCR config (token + the PAT owner's GitHub
@@ -1463,7 +1691,23 @@ async function executePlan(state, plan, opts = { resume: false }) {
1463
1691
  // to a normal `hatchkit add`. Forward the surface choice — runProvision
1464
1692
  // uses the same vocabulary, so a client-only adopt produces a
1465
1693
  // client-only `add`.
1466
- if (plan.services.length > 0) {
1694
+ //
1695
+ // --resume contract: filter out services whose canonical env keys
1696
+ // are already present in the target env files. Re-minting Resend
1697
+ // keys / OpenPanel projects / Stripe webhooks on every resume
1698
+ // orphans live credentials and rotates secrets the user didn't
1699
+ // ask to rotate. The keychain caches some of these per-service,
1700
+ // but those caches don't survive a fresh machine — the env file
1701
+ // is the durable signal, so we trust it. A service is re-included
1702
+ // if it's newly in `plan.services` (added since the last attempt)
1703
+ // or its canonical env key is missing.
1704
+ const resumeServices = filterServicesForResume({
1705
+ services: plan.services,
1706
+ resume: opts.resume === true,
1707
+ serverEnvPath: plan.serverDir ? join(plan.serverDir, ".env.production") : null,
1708
+ clientEnvPath: plan.clientDir ? join(plan.clientDir, ".env.production") : null,
1709
+ });
1710
+ if (resumeServices.length > 0) {
1467
1711
  console.log();
1468
1712
  const provisionMode = plan.surfaces === "both"
1469
1713
  ? "shared"
@@ -1472,7 +1716,7 @@ async function executePlan(state, plan, opts = { resume: false }) {
1472
1716
  : "client-only";
1473
1717
  await runProvision({
1474
1718
  baseName: plan.name,
1475
- services: plan.services,
1719
+ services: resumeServices,
1476
1720
  surfaces: {
1477
1721
  mode: provisionMode,
1478
1722
  serverEnvDir: plan.serverDir,
@@ -1492,6 +1736,44 @@ async function executePlan(state, plan, opts = { resume: false }) {
1492
1736
  else if (event.service === "resend") {
1493
1737
  ledger.record({ kind: "resend", client: event.client });
1494
1738
  }
1739
+ else if (event.service === "email") {
1740
+ // Email setup creates three kinds of mutable state on
1741
+ // Cloudflare: the destination address (account-scoped), the
1742
+ // forwarding rules (zone-scoped), and the apex MX/SPF/DMARC
1743
+ // records (also zone-scoped). We only record what THIS run
1744
+ // created — `destinationCreatedThisRun` and `r.created` /
1745
+ // `dnsRecords` (which the provision orchestrator already
1746
+ // pre-filtered to `created: true` entries). MX/SPF/DMARC
1747
+ // upserts on a zone that already had them stay out of the
1748
+ // ledger so destroy never yanks pre-existing records.
1749
+ if (event.destinationCreatedThisRun) {
1750
+ ledger.record({
1751
+ kind: "cloudflareEmailDestination",
1752
+ accountId: event.accountId,
1753
+ destinationId: event.destinationId,
1754
+ email: event.destinationEmail,
1755
+ });
1756
+ }
1757
+ for (const dns of event.dnsRecords) {
1758
+ ledger.record({
1759
+ kind: "cloudflareDnsRecord",
1760
+ zoneId: event.zoneId,
1761
+ recordId: dns.id,
1762
+ name: dns.name,
1763
+ type: dns.type,
1764
+ });
1765
+ }
1766
+ for (const rule of event.rules) {
1767
+ if (!rule.created)
1768
+ continue;
1769
+ ledger.record({
1770
+ kind: "cloudflareEmailRoutingRule",
1771
+ zoneId: event.zoneId,
1772
+ ruleId: rule.id,
1773
+ address: rule.address,
1774
+ });
1775
+ }
1776
+ }
1495
1777
  },
1496
1778
  });
1497
1779
  }
@@ -1504,96 +1786,117 @@ async function executePlan(state, plan, opts = { resume: false }) {
1504
1786
  // (globally via `hatchkit config add s3 r2`, which re-pastes +
1505
1787
  // verifies it) and re-run `hatchkit provision s3` to finish.
1506
1788
  if (plan.features.includes("s3")) {
1507
- try {
1508
- const { provisionS3ForProject, defaultBucketHostname, existingCustomHostname } = await import("./provision/s3-buckets.js");
1509
- // Resolve the public assets-bucket custom domain. If a previous
1510
- // run already attached one, the manifest records it — reuse
1511
- // that without re-prompting. Only ask on first adopt (or when
1512
- // the manifest has no hostname yet, e.g. a previous run picked
1513
- // the managed r2.dev URL or never got that far). Blank answer →
1514
- // managed r2.dev.
1515
- let publicHostname;
1516
- const existingManifest = readManifest(state.projectDir);
1517
- const recordedHostname = existingManifest ? existingCustomHostname(existingManifest) : null;
1518
- if (recordedHostname) {
1519
- publicHostname = recordedHostname;
1520
- }
1521
- else if (process.stdin.isTTY) {
1522
- const answer = (await input({
1523
- message: "Custom domain for the public assets bucket (leave empty to use the managed r2.dev URL):",
1524
- default: defaultBucketHostname(plan.domain),
1525
- })).trim();
1526
- publicHostname = answer === "" ? null : answer;
1527
- }
1528
- // Only create the public assets bucket here. The private "state"
1529
- // bucket is an explicit opt-in even when the project has a
1530
- // server most don't need it, and adding one silently means
1531
- // an extra R2 bucket + env var the user has to clean up later.
1532
- // Users who want one re-run `hatchkit provision s3 --with-state-bucket`.
1533
- const r = await provisionS3ForProject({
1534
- projectDir: state.projectDir,
1535
- publicHostname,
1536
- });
1537
- // Ledger: record any *fresh* bucket creations + a fresh token
1538
- // mint so destroy can revoke them. Reused buckets/tokens (from
1539
- // a prior adopt run) stay out — those are already in the
1540
- // earlier run's ledger or pre-existed before hatchkit ran.
1541
- if (r.assets.created) {
1542
- ledger.record({
1543
- kind: "r2Bucket",
1544
- bucketName: r.assets.name,
1545
- accountId: r.accountId,
1546
- });
1547
- }
1548
- if (r.state?.created) {
1549
- ledger.record({
1550
- kind: "r2Bucket",
1551
- bucketName: r.state.name,
1552
- accountId: r.accountId,
1789
+ // --resume gate: when the manifest already records the assets
1790
+ // bucket AND .env.production has a working access/secret pair,
1791
+ // there's nothing to provision. Skip the whole step rather than
1792
+ // re-attaching custom domains / re-reconciling CORS / re-probing
1793
+ // tokens, all of which are network round-trips with no payoff
1794
+ // when nothing has changed since the last attempt.
1795
+ const s3ManifestSnapshot = readManifest(state.projectDir);
1796
+ const s3EnvPath = join(state.projectDir, ".env.production");
1797
+ const s3EnvKeys = readEnvKeys(s3EnvPath);
1798
+ const s3HasEnvCreds = (s3EnvKeys.has("R2_ACCESS_KEY_ID") && s3EnvKeys.has("R2_SECRET_ACCESS_KEY")) ||
1799
+ (s3EnvKeys.has("S3_ACCESS_KEY_ID") && s3EnvKeys.has("S3_SECRET_ACCESS_KEY")) ||
1800
+ (s3EnvKeys.has("AWS_ACCESS_KEY_ID") && s3EnvKeys.has("AWS_SECRET_ACCESS_KEY"));
1801
+ const s3ManifestComplete = !!s3ManifestSnapshot?.s3Buckets?.assets?.name;
1802
+ const s3AlreadyWired = opts.resume && s3HasEnvCreds && s3ManifestComplete;
1803
+ if (s3AlreadyWired) {
1804
+ console.log(chalk.dim(` · Skipping S3 on --resume — manifest records ${s3ManifestSnapshot?.s3Buckets?.assets?.name} and .env.production has access/secret keys.`));
1805
+ }
1806
+ else {
1807
+ try {
1808
+ const { provisionS3ForProject, defaultBucketHostname, existingCustomHostname } = await import("./provision/s3-buckets.js");
1809
+ // Resolve the public assets-bucket custom domain. If a previous
1810
+ // run already attached one, the manifest records it reuse
1811
+ // that without re-prompting. Only ask on first adopt (or when
1812
+ // the manifest has no hostname yet, e.g. a previous run picked
1813
+ // the managed r2.dev URL or never got that far). Blank answer
1814
+ // managed r2.dev.
1815
+ let publicHostname;
1816
+ const existingManifest = readManifest(state.projectDir);
1817
+ const recordedHostname = existingManifest
1818
+ ? existingCustomHostname(existingManifest)
1819
+ : null;
1820
+ if (recordedHostname) {
1821
+ publicHostname = recordedHostname;
1822
+ }
1823
+ else if (process.stdin.isTTY) {
1824
+ const answer = (await input({
1825
+ message: "Custom domain for the public assets bucket (leave empty to use the managed r2.dev URL):",
1826
+ default: defaultBucketHostname(plan.domain),
1827
+ })).trim();
1828
+ publicHostname = answer === "" ? null : answer;
1829
+ }
1830
+ // Only create the public assets bucket here. The private "state"
1831
+ // bucket is an explicit opt-in even when the project has a
1832
+ // server — most don't need it, and adding one silently means
1833
+ // an extra R2 bucket + env var the user has to clean up later.
1834
+ // Users who want one re-run `hatchkit provision s3 --with-state-bucket`.
1835
+ const r = await provisionS3ForProject({
1836
+ projectDir: state.projectDir,
1837
+ publicHostname,
1553
1838
  });
1839
+ // Ledger: record any *fresh* bucket creations + a fresh token
1840
+ // mint so destroy can revoke them. Reused buckets/tokens (from
1841
+ // a prior adopt run) stay out — those are already in the
1842
+ // earlier run's ledger or pre-existed before hatchkit ran.
1843
+ if (r.assets.created) {
1844
+ ledger.record({
1845
+ kind: "r2Bucket",
1846
+ bucketName: r.assets.name,
1847
+ accountId: r.accountId,
1848
+ });
1849
+ }
1850
+ if (r.state?.created) {
1851
+ ledger.record({
1852
+ kind: "r2Bucket",
1853
+ bucketName: r.state.name,
1854
+ accountId: r.accountId,
1855
+ });
1856
+ }
1857
+ if (r.tokenCreated) {
1858
+ ledger.record({
1859
+ kind: "r2Token",
1860
+ tokenId: r.tokenCreated.tokenId,
1861
+ accountId: r.accountId,
1862
+ audience: r.tokenCreated.audience,
1863
+ });
1864
+ }
1865
+ console.log(chalk.green(` ✓ S3 assets bucket ready — ${r.assets.publicUrl}`));
1866
+ console.log(chalk.dim(` Wrote ${r.envWritten.length} encrypted entries. ` +
1867
+ "(Need a private server-side bucket too? Run `hatchkit provision s3 --with-state-bucket`.)"));
1868
+ // The fresh bucket is empty. Existing projects almost always
1869
+ // have assets sitting in some other store — surface the one
1870
+ // command that copies them in. Cheap line to print, easy to
1871
+ // miss without it.
1872
+ console.log(chalk.dim(` Have existing assets to bring over? hatchkit assets migrate \\\n` +
1873
+ ` --from-endpoint=<old-s3-endpoint> --from-bucket=<name> \\\n` +
1874
+ ` --from-key=<access-key> --from-secret=<secret>`));
1554
1875
  }
1555
- if (r.tokenCreated) {
1556
- ledger.record({
1557
- kind: "r2Token",
1558
- tokenId: r.tokenCreated.tokenId,
1559
- accountId: r.accountId,
1560
- audience: r.tokenCreated.audience,
1876
+ catch (err) {
1877
+ console.log(chalk.yellow(`\n ✗ S3 bucket provisioning failed: ${err.message.split("\n")[0]}`));
1878
+ // Two kinds of recovery — pick based on whether the underlying
1879
+ // error looks like an admin-token problem (global) vs. a
1880
+ // bucket-side problem (per-project). Admin-token failures point
1881
+ // the user at the global config command (which validates the
1882
+ // token); everything else points at the per-project re-runner.
1883
+ const msg = err.message;
1884
+ const isAdminTokenIssue = /admin token|invalid api token|9109|10000|10001|HTTP 401|HTTP 403/i.test(msg);
1885
+ caveats.push({
1886
+ title: "S3 buckets not provisioned",
1887
+ reason: msg,
1888
+ recovery: isAdminTokenIssue
1889
+ ? [
1890
+ "Looks like an R2 admin-token problem.",
1891
+ "Fix globally with: hatchkit config add s3 r2 (re-paste + verify perms)",
1892
+ `Then re-run from the project dir: cd ${plan.name} && hatchkit provision s3`,
1893
+ ]
1894
+ : [
1895
+ "Once fixed, finish with: hatchkit provision s3",
1896
+ "(safe to re-run — bucket creation and env writes are idempotent)",
1897
+ ],
1561
1898
  });
1562
1899
  }
1563
- console.log(chalk.green(` ✓ S3 assets bucket ready — ${r.assets.publicUrl}`));
1564
- console.log(chalk.dim(` Wrote ${r.envWritten.length} encrypted entries. ` +
1565
- "(Need a private server-side bucket too? Run `hatchkit provision s3 --with-state-bucket`.)"));
1566
- // The fresh bucket is empty. Existing projects almost always
1567
- // have assets sitting in some other store — surface the one
1568
- // command that copies them in. Cheap line to print, easy to
1569
- // miss without it.
1570
- console.log(chalk.dim(` Have existing assets to bring over? hatchkit assets migrate \\\n` +
1571
- ` --from-endpoint=<old-s3-endpoint> --from-bucket=<name> \\\n` +
1572
- ` --from-key=<access-key> --from-secret=<secret>`));
1573
- }
1574
- catch (err) {
1575
- console.log(chalk.yellow(`\n ✗ S3 bucket provisioning failed: ${err.message.split("\n")[0]}`));
1576
- // Two kinds of recovery — pick based on whether the underlying
1577
- // error looks like an admin-token problem (global) vs. a
1578
- // bucket-side problem (per-project). Admin-token failures point
1579
- // the user at the global config command (which validates the
1580
- // token); everything else points at the per-project re-runner.
1581
- const msg = err.message;
1582
- const isAdminTokenIssue = /admin token|invalid api token|9109|10000|10001|HTTP 401|HTTP 403/i.test(msg);
1583
- caveats.push({
1584
- title: "S3 buckets not provisioned",
1585
- reason: msg,
1586
- recovery: isAdminTokenIssue
1587
- ? [
1588
- "Looks like an R2 admin-token problem.",
1589
- "Fix globally with: hatchkit config add s3 r2 (re-paste + verify perms)",
1590
- `Then re-run from the project dir: cd ${plan.name} && hatchkit provision s3`,
1591
- ]
1592
- : [
1593
- "Once fixed, finish with: hatchkit provision s3",
1594
- "(safe to re-run — bucket creation and env writes are idempotent)",
1595
- ],
1596
- });
1597
1900
  }
1598
1901
  }
1599
1902
  // Step 4c: Stripe — strictly separate from `create`'s Stripe block
@@ -1620,55 +1923,69 @@ async function executePlan(state, plan, opts = { resume: false }) {
1620
1923
  });
1621
1924
  }
1622
1925
  else {
1623
- const result = await provisionStripeProject({
1624
- projectName: plan.name,
1625
- domain: plan.domain,
1626
- });
1926
+ // --resume gate: if Stripe keys are already encrypted in
1927
+ // .env.production AND set in .env.development, the env is
1928
+ // wired — skip the provisioner entirely. Re-running it on
1929
+ // a cache miss (e.g. fresh machine) would reprompt for the
1930
+ // sk/pk and re-create the webhook endpoint, leaving the
1931
+ // old endpoint orphaned in the user's Stripe dashboard.
1627
1932
  const devEnvPath = join(plan.serverDir, ".env.development");
1628
1933
  const prodEnvPath = join(plan.serverDir, ".env.production");
1629
- const devLabel = relative(state.projectDir, devEnvPath);
1630
- const prodLabel = relative(state.projectDir, prodEnvPath);
1631
- if (result.test) {
1632
- if (result.test.kind === "skipped") {
1633
- appendCommentBlock(devEnvPath, renderStripeSkipComment("test", devLabel));
1634
- }
1635
- const pairs = parseEnvLines(renderStripeEnv(result.test));
1636
- writeDevEnv(devEnvPath, pairs);
1637
- if (result.test.kind === "configured") {
1638
- ledger.record({
1639
- kind: "keychain",
1640
- account: SECRET_KEYS.stripeProjectWebhookId(plan.name, "test"),
1641
- });
1642
- }
1643
- console.log(chalk.green(result.test.kind === "skipped"
1644
- ? ` ✓ Stripe sandbox placeholders → ${devLabel} (fill in later)`
1645
- : ` ✓ Stripe sandbox creds → ${devLabel} (${pairs.length} keys)`));
1934
+ const stripeAlreadyWired = opts.resume &&
1935
+ readEnvKeys(prodEnvPath).has("STRIPE_SECRET_KEY") &&
1936
+ readEnvKeys(devEnvPath).has("STRIPE_SECRET_KEY");
1937
+ if (stripeAlreadyWired) {
1938
+ console.log(chalk.dim(` · Skipping Stripe on --resume — STRIPE_SECRET_KEY present in both .env.production and .env.development.`));
1646
1939
  }
1647
- if (result.live) {
1648
- if (result.live.kind === "skipped") {
1649
- appendCommentBlock(prodEnvPath, renderStripeSkipComment("live", prodLabel));
1940
+ else {
1941
+ const result = await provisionStripeProject({
1942
+ projectName: plan.name,
1943
+ domain: plan.domain,
1944
+ });
1945
+ const devLabel = relative(state.projectDir, devEnvPath);
1946
+ const prodLabel = relative(state.projectDir, prodEnvPath);
1947
+ if (result.test) {
1948
+ if (result.test.kind === "skipped") {
1949
+ appendCommentBlock(devEnvPath, renderStripeSkipComment("test", devLabel));
1950
+ }
1951
+ const pairs = parseEnvLines(renderStripeEnv(result.test));
1952
+ writeDevEnv(devEnvPath, pairs);
1953
+ if (result.test.kind === "configured") {
1954
+ ledger.record({
1955
+ kind: "keychain",
1956
+ account: SECRET_KEYS.stripeProjectWebhookId(plan.name, "test"),
1957
+ });
1958
+ }
1959
+ console.log(chalk.green(result.test.kind === "skipped"
1960
+ ? ` ✓ Stripe sandbox placeholders → ${devLabel} (fill in later)`
1961
+ : ` ✓ Stripe sandbox creds → ${devLabel} (${pairs.length} keys)`));
1650
1962
  }
1651
- const pairs = parseEnvLines(renderStripeEnv(result.live));
1652
- writeProdEnv(prodEnvPath, pairs);
1653
- if (result.live.kind === "configured") {
1654
- ledger.record({
1655
- kind: "keychain",
1656
- account: SECRET_KEYS.stripeProjectWebhookId(plan.name, "live"),
1963
+ if (result.live) {
1964
+ if (result.live.kind === "skipped") {
1965
+ appendCommentBlock(prodEnvPath, renderStripeSkipComment("live", prodLabel));
1966
+ }
1967
+ const pairs = parseEnvLines(renderStripeEnv(result.live));
1968
+ writeProdEnv(prodEnvPath, pairs);
1969
+ if (result.live.kind === "configured") {
1970
+ ledger.record({
1971
+ kind: "keychain",
1972
+ account: SECRET_KEYS.stripeProjectWebhookId(plan.name, "live"),
1973
+ });
1974
+ }
1975
+ console.log(chalk.green(result.live.kind === "skipped"
1976
+ ? ` ✓ Stripe live placeholders → ${prodLabel} (encrypted CHANGE_ME values, fill in later)`
1977
+ : ` ✓ Stripe live creds → ${prodLabel} (encrypted, ${pairs.length} keys)`));
1978
+ }
1979
+ if (!result.test && !result.live) {
1980
+ caveats.push({
1981
+ title: "Stripe wiring skipped",
1982
+ reason: "No Stripe master key configured — neither test nor live mode could be wired.",
1983
+ recovery: [
1984
+ "Run `hatchkit config add stripe` to add at least one master key,",
1985
+ `then re-run \`hatchkit adopt --resume\` from ${state.projectDir}.`,
1986
+ ],
1657
1987
  });
1658
1988
  }
1659
- console.log(chalk.green(result.live.kind === "skipped"
1660
- ? ` ✓ Stripe live placeholders → ${prodLabel} (encrypted CHANGE_ME values, fill in later)`
1661
- : ` ✓ Stripe live creds → ${prodLabel} (encrypted, ${pairs.length} keys)`));
1662
- }
1663
- if (!result.test && !result.live) {
1664
- caveats.push({
1665
- title: "Stripe wiring skipped",
1666
- reason: "No Stripe master key configured — neither test nor live mode could be wired.",
1667
- recovery: [
1668
- "Run `hatchkit config add stripe` to add at least one master key,",
1669
- `then re-run \`hatchkit adopt --resume\` from ${state.projectDir}.`,
1670
- ],
1671
- });
1672
1989
  }
1673
1990
  }
1674
1991
  }
@@ -1942,6 +2259,267 @@ async function ensureEnvProductionCommitted(state, plan) {
1942
2259
  ` git -C ${relativeTo(state.projectDir)} commit -m "chore(dotenvx): commit encrypted .env.production" -- ${relativeTo(prodPath, state.projectDir)}`));
1943
2260
  }
1944
2261
  }
2262
+ /**
2263
+ * Sniff the working tree for state that would make auto-commit + push
2264
+ * surprising or destructive. We refuse to touch git when:
2265
+ *
2266
+ * · A merge / rebase / cherry-pick / revert / bisect is in progress
2267
+ * — adopt isn't allowed to add commits on top of half-resolved
2268
+ * conflicts.
2269
+ * · Any tracked file *outside* hatchkit's path list has unstaged or
2270
+ * staged changes. Even though the commit itself is pathspec-scoped
2271
+ * (so the user's modifications wouldn't be swept in), the push
2272
+ * would still land hatchkit's commit on top of the user's WIP on
2273
+ * the same branch — entangling their unpushed work with the
2274
+ * hatchkit commit on origin. Make them park or commit it first.
2275
+ *
2276
+ * Untracked files (status `??`) are deliberately ignored — they're
2277
+ * common debris (editor swaps, build artifacts not in gitignore, etc.)
2278
+ * and never end up in our pathspec commit anyway.
2279
+ */
2280
+ async function detectUserWip(projectDir, hatchkitAbsPaths) {
2281
+ // In-progress operations: probe the .git dir for marker files. We
2282
+ // resolve --git-dir via git itself so this works in worktrees (where
2283
+ // .git is a file, not a directory) and submodules.
2284
+ const gitDirRes = await exec("git", ["rev-parse", "--git-dir"], {
2285
+ cwd: projectDir,
2286
+ silent: true,
2287
+ });
2288
+ if (gitDirRes.exitCode === 0) {
2289
+ const raw = gitDirRes.stdout.trim();
2290
+ const gitDir = raw.startsWith("/") ? raw : join(projectDir, raw);
2291
+ const markers = [
2292
+ ["MERGE_HEAD", "merge"],
2293
+ ["CHERRY_PICK_HEAD", "cherry-pick"],
2294
+ ["REVERT_HEAD", "revert"],
2295
+ ["rebase-merge", "rebase"],
2296
+ ["rebase-apply", "rebase"],
2297
+ ["BISECT_LOG", "bisect"],
2298
+ ];
2299
+ for (const [marker, op] of markers) {
2300
+ if (existsSync(join(gitDir, marker)))
2301
+ return { kind: "in-progress", op };
2302
+ }
2303
+ }
2304
+ // Repo-root-relative path matching. `git status --porcelain` emits
2305
+ // paths relative to the repo root, not necessarily our cwd, so we
2306
+ // normalize hatchkit's absolute paths the same way before comparing.
2307
+ const rootRes = await exec("git", ["rev-parse", "--show-toplevel"], {
2308
+ cwd: projectDir,
2309
+ silent: true,
2310
+ });
2311
+ if (rootRes.exitCode !== 0) {
2312
+ return { kind: "error", reason: "git rev-parse --show-toplevel failed" };
2313
+ }
2314
+ const repoRoot = rootRes.stdout.trim();
2315
+ const hatchkitRelToRoot = new Set(hatchkitAbsPaths.map((p) => relative(repoRoot, p)));
2316
+ const status = await exec("git", ["status", "--porcelain", "--untracked-files=no"], {
2317
+ cwd: projectDir,
2318
+ silent: true,
2319
+ });
2320
+ if (status.exitCode !== 0) {
2321
+ return { kind: "error", reason: "git status failed" };
2322
+ }
2323
+ const userFiles = [];
2324
+ for (const line of status.stdout.split("\n")) {
2325
+ if (line.length < 4)
2326
+ continue;
2327
+ const code = line.slice(0, 2);
2328
+ let rest = line.slice(3);
2329
+ // Renames/copies show up as "OLD -> NEW". We want the new path.
2330
+ if (rest.includes(" -> "))
2331
+ rest = rest.split(" -> ").pop() ?? rest;
2332
+ // Git quotes paths with special chars in C-style. If a path is
2333
+ // quoted we conservatively treat it as user WIP rather than try
2334
+ // to unquote and risk a false negative.
2335
+ const path = rest.startsWith('"') && rest.endsWith('"') ? rest.slice(1, -1) : rest;
2336
+ if (!hatchkitRelToRoot.has(path)) {
2337
+ userFiles.push({ status: code, path });
2338
+ }
2339
+ }
2340
+ if (userFiles.length > 0)
2341
+ return { kind: "user-changes", files: userFiles };
2342
+ return { kind: "ok" };
2343
+ }
2344
+ /**
2345
+ * Commit + push the files hatchkit wrote this run (manifest +
2346
+ * scaffolded build pipeline) to a pre-existing remote so the
2347
+ * build-and-deploy workflow fires.
2348
+ *
2349
+ * Pathspec-scoped on purpose: a plain `git add -A` would sweep up
2350
+ * whatever WIP the user happened to have in the working tree —
2351
+ * surprising behavior for an adopt. By listing only the paths
2352
+ * hatchkit just wrote, the resulting commit is exactly "the adopt
2353
+ * step", and anything else stays staged in the user's hands.
2354
+ *
2355
+ * Hard-stops with a caveat when `detectUserWip` finds unrelated user
2356
+ * changes or an in-progress git operation. The push would otherwise
2357
+ * land hatchkit's commit on top of WIP that isn't part of adopt — a
2358
+ * surprise we explicitly refuse to do.
2359
+ *
2360
+ * Returns `{ pushed: false }` (no caveat) for the idempotent case —
2361
+ * everything hatchkit wrote was already byte-identical to HEAD, so
2362
+ * there was nothing to push. Failures during commit/push surface as
2363
+ * a caveat with a copy-pasteable manual recipe.
2364
+ */
2365
+ async function commitAndPushScaffold(state, paths) {
2366
+ const all = [
2367
+ ...paths.scaffoldedAbsPaths,
2368
+ ...paths.overwrittenAbsPaths,
2369
+ paths.manifestPath,
2370
+ ].filter((p, i, arr) => arr.indexOf(p) === i && existsSync(p));
2371
+ if (all.length === 0)
2372
+ return { pushed: false };
2373
+ // Hard stop: refuse to auto-commit on top of in-progress git ops
2374
+ // or unrelated user changes. See detectUserWip docstring for the
2375
+ // exact policy.
2376
+ const wip = await detectUserWip(state.projectDir, all);
2377
+ if (wip.kind === "in-progress") {
2378
+ return {
2379
+ pushed: false,
2380
+ caveat: {
2381
+ title: `Refusing to auto-commit — git ${wip.op} in progress`,
2382
+ reason: `A ${wip.op} is in progress in ${state.projectDir}. Adopt won't stack commits on top of half-resolved git state.`,
2383
+ recovery: [
2384
+ `Finish or abort the ${wip.op}, then re-run adopt:`,
2385
+ wip.op === "rebase"
2386
+ ? ` git rebase --continue # or: git rebase --abort`
2387
+ : ` git ${wip.op} --abort # or finish it manually and commit`,
2388
+ "Then: hatchkit adopt --resume",
2389
+ ],
2390
+ },
2391
+ };
2392
+ }
2393
+ if (wip.kind === "user-changes") {
2394
+ const preview = wip.files.slice(0, 8).map((f) => ` ${f.status} ${f.path}`);
2395
+ const extra = wip.files.length > 8 ? [` ... and ${wip.files.length - 8} more`] : [];
2396
+ return {
2397
+ pushed: false,
2398
+ caveat: {
2399
+ title: "Refusing to auto-commit — working tree has unrelated changes",
2400
+ reason: `Found ${wip.files.length} modified file(s) outside the hatchkit scaffold. Auto-committing + pushing now would land the adopt commit on top of WIP that isn't part of the adopt step.`,
2401
+ recovery: [
2402
+ "Hatchkit wanted to commit + push these files:",
2403
+ ...all.map((p) => ` + ${relativeTo(p, state.projectDir)}`),
2404
+ "",
2405
+ "Your working tree also has changes to:",
2406
+ ...preview,
2407
+ ...extra,
2408
+ "",
2409
+ "Park, commit, or discard your WIP first — whichever fits:",
2410
+ ` git stash push -u -m "pre-hatchkit-adopt" # park on the side`,
2411
+ ` # or: git add . && git commit -m "..." # keep in history`,
2412
+ ` # or: git checkout -- <file> # discard a file`,
2413
+ "Then re-run: hatchkit adopt --resume",
2414
+ ],
2415
+ },
2416
+ };
2417
+ }
2418
+ if (wip.kind === "error") {
2419
+ return {
2420
+ pushed: false,
2421
+ caveat: {
2422
+ title: "Refusing to auto-commit — couldn't verify a clean working tree",
2423
+ reason: `Working-tree detection failed: ${wip.reason}. Adopt won't auto-commit without knowing what else is in the tree.`,
2424
+ recovery: [
2425
+ "Commit + push the scaffold manually:",
2426
+ ` cd ${state.projectDir}`,
2427
+ ` git add ${all.map((p) => relativeTo(p, state.projectDir)).join(" ")}`,
2428
+ ` git commit -m "chore(hatchkit): adopt scaffold + manifest"`,
2429
+ ` git push`,
2430
+ "Then re-run: hatchkit adopt --resume",
2431
+ ],
2432
+ },
2433
+ };
2434
+ }
2435
+ // Definitive pre-commit notice. The user opted into adopt, but an
2436
+ // auto-commit-and-push on a pre-existing remote is a meaningful
2437
+ // side effect — they should see exactly what's happening before it
2438
+ // lands on origin.
2439
+ console.log();
2440
+ console.log(chalk.bold.yellow(" ⚠ hatchkit is about to commit + push to origin:"));
2441
+ for (const p of all) {
2442
+ console.log(chalk.yellow(` + ${relativeTo(p, state.projectDir)}`));
2443
+ }
2444
+ console.log(chalk.dim(" (working tree verified clean of unrelated changes — auto-commit is safe)"));
2445
+ console.log();
2446
+ // Pathspec stage: only the hatchkit-owned files. `--` separates
2447
+ // pathspecs from refs so a file named "main" doesn't get confused
2448
+ // for a branch.
2449
+ const stage = await exec("git", ["add", "--", ...all], {
2450
+ cwd: state.projectDir,
2451
+ silent: true,
2452
+ });
2453
+ if (stage.exitCode !== 0) {
2454
+ return {
2455
+ pushed: false,
2456
+ caveat: {
2457
+ title: "Couldn't stage hatchkit scaffold for commit",
2458
+ reason: (stage.stderr || stage.stdout).split(/\r?\n/)[0] || "git add failed",
2459
+ recovery: [
2460
+ "Stage + commit + push manually so the workflow runs:",
2461
+ ` cd ${state.projectDir}`,
2462
+ ` git add ${all.map((p) => relativeTo(p, state.projectDir)).join(" ")}`,
2463
+ ` git commit -m "chore(hatchkit): adopt scaffold + manifest"`,
2464
+ ` git push`,
2465
+ "Then re-run: hatchkit adopt --resume",
2466
+ ],
2467
+ },
2468
+ };
2469
+ }
2470
+ // Nothing in the staged index means every file was byte-identical
2471
+ // to HEAD — this is the idempotent re-run case. No push needed.
2472
+ const cleanStaged = await execOk("git", ["diff", "--cached", "--quiet"], {
2473
+ cwd: state.projectDir,
2474
+ });
2475
+ if (cleanStaged)
2476
+ return { pushed: false };
2477
+ // Pathspec on the commit too — anything else the user happened to
2478
+ // stage themselves before running adopt stays out of this commit.
2479
+ const commit = await exec("git", ["commit", "-m", "chore(hatchkit): adopt scaffold + manifest", "--", ...all], { cwd: state.projectDir, silent: true });
2480
+ if (commit.exitCode !== 0) {
2481
+ return {
2482
+ pushed: false,
2483
+ caveat: {
2484
+ title: "Couldn't commit hatchkit scaffold automatically",
2485
+ reason: (commit.stderr || commit.stdout).split(/\r?\n/)[0] || "git commit failed",
2486
+ recovery: [
2487
+ "Commit + push the scaffold manually:",
2488
+ ` cd ${state.projectDir}`,
2489
+ ` git commit -m "chore(hatchkit): adopt scaffold + manifest" -- ${all.map((p) => relativeTo(p, state.projectDir)).join(" ")}`,
2490
+ ` git push`,
2491
+ "Then re-run: hatchkit adopt --resume",
2492
+ ],
2493
+ },
2494
+ };
2495
+ }
2496
+ console.log(chalk.green(` ✓ Committed hatchkit scaffold (${all.length} files)`));
2497
+ const headRes = await exec("git", ["symbolic-ref", "--short", "HEAD"], {
2498
+ cwd: state.projectDir,
2499
+ silent: true,
2500
+ });
2501
+ const branch = headRes.exitCode === 0 ? headRes.stdout.trim() : "main";
2502
+ const push = await exec("git", ["push", "origin", branch], {
2503
+ cwd: state.projectDir,
2504
+ spinner: `Pushing ${branch} to origin...`,
2505
+ });
2506
+ if (push.exitCode !== 0) {
2507
+ return {
2508
+ pushed: false,
2509
+ caveat: {
2510
+ title: `Couldn't push ${branch} to origin`,
2511
+ reason: (push.stderr || push.stdout).split(/\r?\n/)[0] || `git push exited ${push.exitCode}`,
2512
+ recovery: [
2513
+ "Push the new commit so Actions can build the image:",
2514
+ ` cd ${state.projectDir}`,
2515
+ ` git push origin ${branch}`,
2516
+ "Then re-run: hatchkit adopt --resume",
2517
+ ],
2518
+ },
2519
+ };
2520
+ }
2521
+ return { pushed: true };
2522
+ }
1945
2523
  async function setupGitHubRemote(state, plan) {
1946
2524
  // Pre-flight gh CLI auth. ensureGitHub prompts the user to log in
1947
2525
  // when needed; if they cancel, surface a clear "you can do this
@@ -2092,8 +2670,11 @@ async function listStagedFiles(cwd) {
2092
2670
  * noting which files already exist (will be left alone) vs which
2093
2671
  * will be scaffolded. */
2094
2672
  function renderBuildPipelineSummary(state, plan) {
2095
- if (!plan.scaffoldBuildPipeline)
2096
- return chalk.dim("no — leave files as-is");
2673
+ if (!plan.scaffoldBuildPipeline) {
2674
+ return state.unknownWorkspaceLayout
2675
+ ? chalk.dim("no — unrecognised workspace layout, hand-author your own")
2676
+ : chalk.dim("no — leave files as-is");
2677
+ }
2097
2678
  const pipe = detectBuildPipeline(state.projectDir);
2098
2679
  const willWrite = [];
2099
2680
  const kept = [];
@@ -2113,7 +2694,13 @@ function renderBuildPipelineSummary(state, plan) {
2113
2694
  return chalk.dim("all files already present — nothing to write");
2114
2695
  const writePart = `write ${willWrite.join(", ")}`;
2115
2696
  const keepPart = kept.length > 0 ? chalk.dim(` · keep ${kept.join(", ")}`) : "";
2116
- return `${writePart}${keepPart}`;
2697
+ // Strong warning when the user has overridden the unknown-layout
2698
+ // default. We still write the files (their call), but flag that the
2699
+ // templates' single-package assumption probably doesn't fit this repo.
2700
+ const layoutWarn = state.unknownWorkspaceLayout
2701
+ ? ` ${chalk.yellow("(unrecognised workspace — templates may build the wrong thing)")}`
2702
+ : "";
2703
+ return `${writePart}${keepPart}${layoutWarn}`;
2117
2704
  }
2118
2705
  function detectDockerComposeDomainServiceName(projectDir, surfaces) {
2119
2706
  const pipe = detectBuildPipeline(projectDir);