hatchkit 0.1.39 → 0.1.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77) hide show
  1. package/dist/adopt.d.ts.map +1 -1
  2. package/dist/adopt.js +311 -77
  3. package/dist/adopt.js.map +1 -1
  4. package/dist/deploy/coolify-app.d.ts +9 -9
  5. package/dist/deploy/coolify-app.d.ts.map +1 -1
  6. package/dist/deploy/coolify-app.js +14 -19
  7. package/dist/deploy/coolify-app.js.map +1 -1
  8. package/dist/deploy/coolify.d.ts.map +1 -1
  9. package/dist/deploy/coolify.js +6 -2
  10. package/dist/deploy/coolify.js.map +1 -1
  11. package/dist/deploy/keys.d.ts +7 -2
  12. package/dist/deploy/keys.d.ts.map +1 -1
  13. package/dist/deploy/keys.js +27 -7
  14. package/dist/deploy/keys.js.map +1 -1
  15. package/dist/deploy/pages.d.ts +41 -0
  16. package/dist/deploy/pages.d.ts.map +1 -1
  17. package/dist/deploy/pages.js +360 -13
  18. package/dist/deploy/pages.js.map +1 -1
  19. package/dist/deploy/regen-infra.js +4 -0
  20. package/dist/deploy/regen-infra.js.map +1 -1
  21. package/dist/deploy/rollback.d.ts.map +1 -1
  22. package/dist/deploy/rollback.js +94 -22
  23. package/dist/deploy/rollback.js.map +1 -1
  24. package/dist/deploy/sync.d.ts +10 -7
  25. package/dist/deploy/sync.d.ts.map +1 -1
  26. package/dist/deploy/sync.js +13 -9
  27. package/dist/deploy/sync.js.map +1 -1
  28. package/dist/index.js +269 -23
  29. package/dist/index.js.map +1 -1
  30. package/dist/inventory.d.ts +144 -0
  31. package/dist/inventory.d.ts.map +1 -0
  32. package/dist/inventory.js +1980 -0
  33. package/dist/inventory.js.map +1 -0
  34. package/dist/overview.d.ts +101 -0
  35. package/dist/overview.d.ts.map +1 -0
  36. package/dist/overview.js +852 -0
  37. package/dist/overview.js.map +1 -0
  38. package/dist/prompts.d.ts +22 -7
  39. package/dist/prompts.d.ts.map +1 -1
  40. package/dist/prompts.js +240 -40
  41. package/dist/prompts.js.map +1 -1
  42. package/dist/scaffold/app.js +1 -1
  43. package/dist/scaffold/app.js.map +1 -1
  44. package/dist/scaffold/infra.d.ts.map +1 -1
  45. package/dist/scaffold/infra.js +8 -2
  46. package/dist/scaffold/infra.js.map +1 -1
  47. package/dist/scaffold/manifest.d.ts +6 -0
  48. package/dist/scaffold/manifest.d.ts.map +1 -1
  49. package/dist/scaffold/manifest.js +1 -0
  50. package/dist/scaffold/manifest.js.map +1 -1
  51. package/dist/scaffold/pages-heuristics.d.ts +17 -0
  52. package/dist/scaffold/pages-heuristics.d.ts.map +1 -0
  53. package/dist/scaffold/pages-heuristics.js +344 -0
  54. package/dist/scaffold/pages-heuristics.js.map +1 -0
  55. package/dist/scaffold/pages-mode.d.ts +10 -0
  56. package/dist/scaffold/pages-mode.d.ts.map +1 -0
  57. package/dist/scaffold/pages-mode.js +109 -0
  58. package/dist/scaffold/pages-mode.js.map +1 -0
  59. package/dist/scaffold/surfaces.d.ts.map +1 -1
  60. package/dist/scaffold/surfaces.js +12 -1
  61. package/dist/scaffold/surfaces.js.map +1 -1
  62. package/dist/utils/cloudflare-api.d.ts +19 -0
  63. package/dist/utils/cloudflare-api.d.ts.map +1 -1
  64. package/dist/utils/cloudflare-api.js +16 -0
  65. package/dist/utils/cloudflare-api.js.map +1 -1
  66. package/dist/utils/coolify-api.d.ts +20 -0
  67. package/dist/utils/coolify-api.d.ts.map +1 -1
  68. package/dist/utils/coolify-api.js +51 -0
  69. package/dist/utils/coolify-api.js.map +1 -1
  70. package/dist/utils/coolify-server-ips.d.ts +6 -12
  71. package/dist/utils/coolify-server-ips.d.ts.map +1 -1
  72. package/dist/utils/coolify-server-ips.js +26 -81
  73. package/dist/utils/coolify-server-ips.js.map +1 -1
  74. package/dist/utils/run-ledger.d.ts +20 -0
  75. package/dist/utils/run-ledger.d.ts.map +1 -1
  76. package/dist/utils/run-ledger.js.map +1 -1
  77. package/package.json +1 -1
@@ -0,0 +1,1980 @@
1
+ /*
2
+ * `hatchkit inventory` — read-only survey of what already exists for a
3
+ * project/site across every configured provider.
4
+ *
5
+ * Different beast from `status` and `doctor`:
6
+ * · `status` answers "which provider credentials have I stored?"
7
+ * · `doctor` answers "are those credentials still valid?"
8
+ * · `inventory` answers "given THIS project (cwd / name / domain /
9
+ * repo), what resources already exist on the providers?"
10
+ *
11
+ * The flow:
12
+ * 1. Infer identity from the current directory — manifest, package.json,
13
+ * git remote, CNAME file, etc. Asks the user only for what couldn't
14
+ * be inferred (and confirms inferred values unless --yes).
15
+ * 2. Scan every configured provider in parallel for resources matching
16
+ * that identity (Coolify app by name, R2 buckets, DNS zone records,
17
+ * GitHub Pages config, Resend domain verification, etc.).
18
+ * 3. Cross-reference findings to flag drift — e.g. Coolify app fqdn
19
+ * doesn't match DNS, manifest bucket name doesn't exist live,
20
+ * gh-pages workflow committed but Pages isn't enabled, CORS on the
21
+ * live bucket differs from what the manifest records.
22
+ * 4. Render — grouped tree (human) or `--json` for parsing.
23
+ *
24
+ * Everything is read-only. No mutations. Safe to run anywhere.
25
+ */
26
+ import { existsSync, readFileSync, readdirSync, statSync, writeFileSync } from "node:fs";
27
+ import { join, resolve } from "node:path";
28
+ import { confirm, input } from "@inquirer/prompts";
29
+ import chalk from "chalk";
30
+ import { getCoolifyConfig, getDnsConfig, getGlitchtipConfig, getOpenpanelConfig, getResendConfig, getS3Config, getStripeConfig, } from "./config.js";
31
+ import { locateEnvKeysFile, locateEnvProductionFile } from "./deploy/keys.js";
32
+ import { MANIFEST_FILENAME, MANIFEST_VERSION, readManifest, } from "./scaffold/manifest.js";
33
+ import { CloudflareApi } from "./utils/cloudflare-api.js";
34
+ import { CoolifyApi } from "./utils/coolify-api.js";
35
+ import { exec, execOk } from "./utils/exec.js";
36
+ import { SECRET_KEYS, getSecret } from "./utils/secrets.js";
37
+ import { getCliVersion } from "./utils/version.js";
38
/**
 * CLI entry point for `hatchkit inventory`.
 *
 * Collects the inventory report, renders it (human tree or `--json`),
 * and — when no manifest exists yet and identity was fully inferred —
 * optionally persists a minimal `.hatchkit.json` so later commands can
 * pick up where inventory left off.
 *
 * @param cwd  directory to inventory (resolved to absolute internally)
 * @param opts { input?, json?, yes?, save?, noSave? } CLI flags
 */
export async function runInventory(cwd, opts = {}) {
    const report = await collectInventory(cwd, {
        input: opts.input,
        interactive: !opts.json && !opts.yes,
        autoAccept: opts.yes ?? false,
    });
    if (opts.json) {
        // Sets serialize as `{}` by default — surface them as arrays so
        // JSON consumers can actually read the detected signals.
        console.log(JSON.stringify(report, (_k, v) => (v instanceof Set ? Array.from(v).sort() : v), 2));
        return;
    }
    console.log(renderInventoryHuman(report));
    // Persist inferred identity as a minimal `.hatchkit.json` unless
    // suppressed. Skip when there's already a manifest (adopt owns it
    // — we don't overwrite), when the inputs aren't sufficient (name +
    // domain are required by the schema), or when `--no-save` was passed.
    if (opts.noSave)
        return;
    if (report.local.manifestPresent)
        return;
    if (!report.inferred.name || !report.inferred.domain)
        return;
    const absCwd = resolve(cwd);
    const writeIt = () => {
        writeMinimalManifest(absCwd, report.inferred, report.local);
        console.log(chalk.dim(` Wrote minimal ${MANIFEST_FILENAME}. Run \`hatchkit adopt --resume\` to wire features + deploy config.`));
    };
    // `--save` forces the write without asking.
    if (opts.save) {
        writeIt();
        return;
    }
    // Only ask in interactive mode (TTY + not --yes).
    if (!process.stdin.isTTY || opts.yes)
        return;
    const ok = await confirm({
        message: `Save inferred identity as a minimal ${MANIFEST_FILENAME}? (Run \`hatchkit adopt --resume\` afterwards to flesh out features + deploy config.)`,
        default: true,
    });
    if (ok)
        writeIt();
}
80
/**
 * Build the full inventory report for `cwd`: local inference, identity
 * resolution (with optional prompts), parallel provider scans, drift
 * detection, and a roll-up summary. Read-only — no mutations anywhere.
 *
 * @param cwd  project directory
 * @param opts { input?, interactive?, autoAccept? }
 * @returns the report object consumed by the renderers and `--json`
 */
export async function collectInventory(cwd, opts = {}) {
    const absCwd = resolve(cwd);
    const local = inferLocal(absCwd);
    // Resolve git remote unconditionally — both for repo inference and
    // for the repo-vs-Coolify-source drift check. Cheap (~one subprocess
    // call) and the result is the same whether or not we prompt.
    const git = await resolveGitRemote(local);
    local.gitRemote = git.remote;
    local.gitDefaultBranch = git.defaultBranch;
    local.hasGitHubRemote = git.hasGitHubRemote;
    // Identity inference — order: explicit input > manifest > package > git/CNAME.
    const { input: inferred, sources } = inferIdentity(local, opts.input ?? {});
    if (git.repo && !inferred.repo) {
        inferred.repo = git.repo;
        sources.repo = "git-remote";
    }
    let identity = inferred;
    if (opts.interactive) {
        identity = await promptForGaps(local, inferred, sources, !!opts.autoAccept);
    }
    // Per-provider expectation flags — drives which `missing` findings
    // surface as red ✗ vs dim ·. A library with no Coolify-deploy signals
    // gets dim · for "no Coolify app named foo" (expected absence), but a
    // project whose .env.development declares STRIPE_* gets red ✗ if no
    // matching webhook exists (genuine missing).
    const expectations = computeExpectations(local, identity);
    // Provider scans — every one is best-effort and returns its own
    // findings + skip reason. Running them in parallel keeps wall-time
    // close to the slowest single round-trip.
    const scanResults = await Promise.all([
        scanCoolify(identity, expectations.coolify),
        scanDns(identity, expectations.dns),
        scanR2(identity, local.manifest, expectations.r2),
        scanS3Other(identity),
        scanGitHub(identity, { github: expectations.github, githubPages: expectations.githubPages }),
        scanResend(identity, expectations.resend),
        scanGlitchtip(identity, expectations.glitchtip),
        scanOpenpanel(identity, expectations.openpanel),
        scanStripe(identity, expectations.stripe),
    ]);
    const findings = [];
    const skipped = [];
    for (const r of scanResults) {
        findings.push(...r.findings);
        skipped.push(...r.skipped);
    }
    // Drift checks build on already-collected findings + a few extra
    // targeted lookups. They produce more findings (status: "drift" or
    // sometimes "missing") which we append to the same list.
    const driftFindings = await detectDrift(identity, local, scanResults);
    findings.push(...driftFindings);
    const drifts = findings.filter((f) => f.status === "drift");
    const present = findings.filter((f) => f.status === "present").length;
    const missingAll = findings.filter((f) => f.status === "missing");
    const expectedMissing = missingAll.filter((f) => f.expected).length;
    return {
        cliVersion: getCliVersion(),
        cwd: absCwd,
        inferred: identity,
        sources,
        local,
        findings,
        drifts,
        skipped,
        summary: {
            present,
            drift: drifts.length,
            missing: missingAll.length,
            expectedMissing,
            skipped: skipped.length,
        },
    };
}
153
+ // ---------------------------------------------------------------------------
154
+ // Local inference (no network, no prompts)
155
+ // ---------------------------------------------------------------------------
156
/**
 * Gather every locally-inferable signal about the project rooted at
 * `cwd` — pure filesystem reads, no network, no prompts. The returned
 * object feeds identity inference, expectation computation, and drift
 * checks.
 *
 * NOTE(review): `hasGitHubRemote` is initialized false here and filled
 * in by the caller after `resolveGitRemote` — confirm all callers do.
 */
function inferLocal(cwd) {
    const manifestPresent = existsSync(join(cwd, MANIFEST_FILENAME));
    let manifest;
    if (manifestPresent) {
        try {
            manifest = readManifest(cwd) ?? undefined;
        }
        catch {
            // Malformed manifest — leave undefined; inferIdentity falls
            // through to other signals.
        }
    }
    let packageName;
    let packageDescription;
    try {
        const pkg = JSON.parse(readFileSync(join(cwd, "package.json"), "utf-8"));
        if (typeof pkg.name === "string")
            // Strip any npm scope — "@org/app" → "app".
            packageName = pkg.name.replace(/^@[^/]+\//, "");
        if (typeof pkg.description === "string")
            packageDescription = pkg.description.trim();
    }
    catch {
        // No package.json — fine.
    }
    // Conventional monorepo locations, checked in priority order.
    const serverDir = firstExistingDir(cwd, [
        "packages/server",
        "apps/server",
        "apps/api",
        "apps/backend",
        "server",
        "backend",
        "api",
        "src/server",
        "services/server",
    ]);
    const clientDir = firstExistingDir(cwd, [
        "packages/client",
        "packages/web",
        "packages/frontend",
        "apps/web",
        "apps/client",
        "apps/frontend",
        "client",
        "frontend",
        "web",
        "src/client",
    ]);
    const composeCandidates = [
        "compose.yaml",
        "compose.yml",
        "docker-compose.yaml",
        "docker-compose.yml",
    ];
    const composePath = composeCandidates.map((n) => join(cwd, n)).find((p) => existsSync(p));
    const hasDockerfile = existsSync(join(cwd, "Dockerfile"));
    // Workflow detection. We classify a workflow as "pages" when its body
    // mentions `actions/deploy-pages` (the standard GitHub Pages action)
    // or `peaceiris/actions-gh-pages` (the popular community alternative).
    // We classify as "deploy" when it looks like hatchkit's deploy.yml
    // (mentions `COOLIFY_` env or webhook). Reading the body is the only
    // reliable signal — filenames vary too much.
    //
    // Workflows + CNAME files live at the *repo* root by convention, not
    // wherever the user invoked us. Walk up to the git root so inventory
    // from a subdir (e.g. `apps/web/`) still picks them up.
    const repoRoot = findGitRoot(cwd) ?? cwd;
    const workflowsDir = join(repoRoot, ".github", "workflows");
    let ghPagesWorkflowPath;
    let deployWorkflowPath;
    if (existsSync(workflowsDir)) {
        try {
            for (const f of readdirSync(workflowsDir)) {
                if (!/\.ya?ml$/i.test(f))
                    continue;
                const full = join(workflowsDir, f);
                let body;
                try {
                    body = readFileSync(full, "utf-8");
                }
                catch {
                    continue;
                }
                // First match wins for each classification.
                if (!ghPagesWorkflowPath &&
                    /actions\/deploy-pages|peaceiris\/actions-gh-pages/i.test(body)) {
                    ghPagesWorkflowPath = full;
                }
                if (!deployWorkflowPath && /COOLIFY_(WEBHOOK|TOKEN)|coolify\.io/i.test(body)) {
                    deployWorkflowPath = full;
                }
            }
        }
        catch {
            // Unreadable workflows dir — skip.
        }
    }
    // CNAME file — GitHub Pages writes this at the publish-root to bind
    // a custom domain. Locations cover the common static-site layouts:
    // repo root (Jekyll), `docs/` (Pages-from-docs setup), `static/` and
    // `docs/static/` (Docusaurus), `public/` and `docs/public/` (Vite /
    // Next.js docs starters), `site/` and `www/` for the loose
    // conventions, and `website/static/` for older Docusaurus.
    let cnameFile;
    for (const rel of [
        "CNAME",
        "docs/CNAME",
        "docs/static/CNAME",
        "docs/public/CNAME",
        "site/CNAME",
        "www/CNAME",
        "website/static/CNAME",
        "static/CNAME",
        "public/CNAME",
    ]) {
        const p = join(repoRoot, rel);
        if (existsSync(p)) {
            try {
                const content = readFileSync(p, "utf-8").trim();
                if (content) {
                    cnameFile = { path: p, content };
                    break;
                }
            }
            catch {
                // Unreadable — skip.
            }
        }
    }
    // dotenvx state — same probes as `hatchkit adopt`.
    const envProdPath = locateEnvProductionFile(cwd);
    let dotenvxEncrypted = false;
    if (envProdPath && existsSync(envProdPath)) {
        try {
            // The dotenvx public-key header appears near the top of an
            // encrypted file — 2000 bytes is plenty.
            const head = readFileSync(envProdPath, "utf-8").slice(0, 2000);
            dotenvxEncrypted = /DOTENV_PUBLIC_KEY_PRODUCTION/.test(head);
        }
        catch {
            // Unreadable — leave false.
        }
    }
    const envKeysPresent = !!locateEnvKeysFile(cwd);
    // Provider expectation signals — read plaintext .env.* files and
    // every package.json under the project for hints about which
    // providers this project actually depends on. Used to mark `missing`
    // findings as `expected: true` so the renderer only shows red ✗ when
    // local state declares the resource should exist.
    const envSignals = collectEnvSignals(cwd, serverDir, clientDir);
    const packageDeps = collectPackageDeps(cwd, serverDir, clientDir);
    // `.git` lives at the repo root — in a worktree it's a file pointing
    // at the main repo, in a normal clone it's a directory. Either form
    // is fine for existsSync. Walk up from cwd so running `hatchkit
    // inventory` from a subdir (e.g. `apps/web/`) still picks up the
    // repo root for git lookups.
    const gitRoot = findGitRoot(cwd);
    return {
        cwd,
        isGitRepo: !!gitRoot,
        hasGitHubRemote: false, // resolved below
        packageName,
        packageDescription,
        manifestPresent,
        manifest,
        serverDir,
        clientDir,
        hasDockerfile,
        composePath,
        ghPagesWorkflowPath,
        deployWorkflowPath,
        cnameFile,
        dotenvxEncrypted,
        envKeysPresent,
        envSignals,
        packageDeps,
    };
}
330
+ // ---------------------------------------------------------------------------
331
+ // Expectation-signal collectors (used to mark findings as `expected`)
332
+ // ---------------------------------------------------------------------------
333
/**
 * Walk plaintext .env.example / .env.development / .env files at the
 * project root plus any detected server/client dirs and return the set
 * of recognized provider signals ("RESEND", "STRIPE", …). Encrypted
 * .env.production is intentionally NOT decrypted — declarative
 * templates only.
 */
function collectEnvSignals(cwd, serverDir, clientDir) {
    const detected = new Set();
    const candidates = [".env.example", ".env.development", ".env"];
    const searchDirs = [cwd, serverDir, clientDir].filter((d) => !!d);
    // Recognized prefix → signal name. Keep this list tight —
    // over-broad matches lead to spurious "expected" flags.
    const patterns = [
        { re: /^\s*RESEND_/m, signal: "RESEND" },
        { re: /^\s*GLITCHTIP_DSN|^\s*PUBLIC_GLITCHTIP_DSN/m, signal: "GLITCHTIP" },
        { re: /^\s*SENTRY_DSN|^\s*PUBLIC_SENTRY_DSN/m, signal: "SENTRY" },
        { re: /^\s*OPENPANEL_|^\s*PUBLIC_OPENPANEL_/m, signal: "OPENPANEL" },
        { re: /^\s*STRIPE_/m, signal: "STRIPE" },
        { re: /^\s*R2_/m, signal: "R2" },
        { re: /^\s*S3_|^\s*AWS_(ACCESS_KEY_ID|SECRET_ACCESS_KEY|REGION)/m, signal: "S3" },
    ];
    for (const dir of searchDirs) {
        for (const filename of candidates) {
            const path = join(dir, filename);
            if (!existsSync(path))
                continue;
            let raw;
            try {
                raw = readFileSync(path, "utf-8");
            }
            catch {
                continue;
            }
            // Drop commented-out lines — `# RESEND_API_KEY=...` shouldn't
            // count as a signal that the project uses Resend.
            const uncommented = raw
                .split("\n")
                .filter((line) => !/^\s*#/.test(line))
                .join("\n");
            for (const { re, signal } of patterns) {
                if (re.test(uncommented))
                    detected.add(signal);
            }
        }
    }
    return detected;
}
378
/**
 * Read every package.json under the project (root + server/client dirs)
 * and return the union of declared dependency names (deps + devDeps +
 * peerDeps). Cheap heuristic — we don't follow workspace globs, just
 * probe the conventional monorepo locations.
 */
function collectPackageDeps(cwd, serverDir, clientDir) {
    const names = new Set();
    for (const dir of [cwd, serverDir, clientDir].filter((d) => !!d)) {
        const pkgPath = join(dir, "package.json");
        if (!existsSync(pkgPath))
            continue;
        let pkg;
        try {
            pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
        }
        catch {
            // Unparseable package.json — skip.
            continue;
        }
        const blocks = [pkg.dependencies, pkg.devDependencies, pkg.peerDependencies];
        for (const block of blocks) {
            if (!block)
                continue;
            Object.keys(block).forEach((name) => names.add(name));
        }
    }
    return names;
}
404
+ // ---------------------------------------------------------------------------
405
+ // Minimal `.hatchkit.json` writer
406
+ // ---------------------------------------------------------------------------
407
+ //
408
+ // When inventory saves inferred identity it writes a minimal manifest
409
+ // that follows the full schema (so every other hatchkit command — adopt,
410
+ // update, sync, keys — can read it) but with conservative defaults for
411
+ // fields inventory can't reliably infer. Run `hatchkit adopt --resume`
412
+ // afterwards to flesh out features, surfaces, ports, etc. — adopt's
413
+ // stepper reads this manifest as its starting state.
414
/**
 * Persist inventory's inferred identity as a minimal, schema-conformant
 * `.hatchkit.json`. Fields inventory can't reliably infer get
 * conservative defaults; `hatchkit adopt --resume` reads this manifest
 * as its starting state and fleshes out features, surfaces, ports, etc.
 *
 * @throws when `identity` lacks a name or domain (schema requires both)
 * @returns the manifest object that was written
 */
export function writeMinimalManifest(cwd, identity, local) {
    if (!identity.name || !identity.domain) {
        throw new Error("Can't write .hatchkit.json without an inferred name and domain. " +
            "Pass --name / --domain or run inventory interactively to provide them.");
    }
    // Surfaces from local layout — both dirs → "both", just one → that
    // one, neither → fall back to "both" (most common scaffold shape).
    let surfaces = "both";
    if (local.serverDir && !local.clientDir) {
        surfaces = "server-only";
    }
    else if (local.clientDir && !local.serverDir) {
        surfaces = "client-only";
    }
    const manifest = {
        version: MANIFEST_VERSION,
        cliVersion: getCliVersion(),
        scaffoldedAt: new Date().toISOString(),
        name: identity.name,
        domain: identity.domain,
        features: [],
        mlServices: [],
        // "none" is the conservative default — even if local deps signal R2
        // usage, we don't claim ownership of buckets we haven't provisioned.
        // `hatchkit adopt --resume` will prompt for the real value.
        s3Provider: "none",
        deployTarget: "existing",
        surfaces,
        // Conventional defaults — `hatchkit adopt --resume` lets the user
        // override if the project actually uses different ports.
        ports: { server: 3000, client: 5173 },
    };
    writeManifestFile(cwd, manifest);
    return manifest;
}
449
/** Serialize the manifest as pretty-printed JSON (trailing newline) to
 * `MANIFEST_FILENAME` inside `cwd`. */
function writeManifestFile(cwd, manifest) {
    const body = `${JSON.stringify(manifest, null, 2)}\n`;
    writeFileSync(join(cwd, MANIFEST_FILENAME), body, "utf-8");
}
452
/**
 * Walk upward from `startDir` looking for a `.git` entry (directory in
 * a normal clone, file in a worktree — existsSync accepts either).
 * Returns the containing directory, or undefined at the filesystem
 * root or after the hop cap.
 */
function findGitRoot(startDir) {
    let current = startDir;
    // Cap at 12 levels — generous for any reasonable monorepo, and a
    // hard ceiling against pathological symlink loops.
    let hops = 12;
    while (hops-- > 0) {
        if (existsSync(join(current, ".git")))
            return current;
        const parent = resolve(join(current, ".."));
        if (parent === current)
            return undefined; // hit filesystem root
        current = parent;
    }
    return undefined;
}
467
/** Return the first of `rels` (joined onto `root`) that exists and is a
 * directory, or undefined when none do. Plain files don't count. */
function firstExistingDir(root, rels) {
    for (const rel of rels) {
        const candidate = join(root, rel);
        try {
            const st = statSync(candidate);
            if (st.isDirectory())
                return candidate;
        }
        catch {
            // ENOENT — try the next candidate.
        }
    }
    return undefined;
}
480
/**
 * Derive project identity (name / domain / repo) from local signals,
 * with explicit flag overrides always winning. Returns both the values
 * and a per-field `sources` map recording provenance so the renderer
 * and interactive confirmation can show where each value came from.
 *
 * @param local    output of inferLocal()
 * @param override explicit --name/--domain/--repo style inputs
 * @returns { input, sources }
 */
function inferIdentity(local, override) {
    const sources = {};
    const out = {};
    // name: flag > manifest > package.json > basename(cwd)
    if (override.name) {
        out.name = override.name;
        sources.name = "flag";
    }
    else if (local.manifest?.name) {
        out.name = local.manifest.name;
        sources.name = "manifest";
    }
    else if (local.packageName) {
        out.name = local.packageName;
        sources.name = "package.json";
    }
    else {
        const base = local.cwd.split("/").filter(Boolean).pop();
        if (base && /^[a-z0-9][a-z0-9-]*$/i.test(base)) {
            // Last-resort guess from cwd basename. Surfaced with its own
            // source so the renderer can show low confidence — and so
            // interactive mode re-confirms before letting it drive scans.
            out.name = base;
            sources.name = "cwd-basename";
        }
    }
    // domain: flag > manifest > CNAME file. We deliberately don't derive
    // a domain from the project name — too speculative, and the matching
    // layer already tries common domain patterns against any zone we list.
    if (override.domain) {
        out.domain = override.domain;
        sources.domain = "flag";
    }
    else if (local.manifest?.domain) {
        out.domain = local.manifest.domain;
        sources.domain = "manifest";
    }
    else if (local.cnameFile?.content) {
        out.domain = local.cnameFile.content;
        sources.domain = "cname-file";
    }
    // repo: flag (else filled in by caller from git remote)
    if (override.repo) {
        out.repo = override.repo;
        sources.repo = "flag";
    }
    return { input: out, sources };
}
529
/**
 * Resolve the `origin` remote URL and default branch via the local git
 * CLI. Best-effort throughout: a missing git binary, absent remote, or
 * unset origin/HEAD each degrade to undefined fields instead of
 * throwing.
 *
 * @param local output of inferLocal() — only `isGitRepo` and `cwd` are read
 * @returns { remote?, repo?, defaultBranch?, hasGitHubRemote }
 */
async function resolveGitRemote(local) {
    if (!local.isGitRepo)
        return { hasGitHubRemote: false };
    // Run git from the repo root if we found one — works from any subdir.
    const gitCwd = findGitRoot(local.cwd) ?? local.cwd;
    let remote;
    let defaultBranch;
    try {
        const res = await exec("git", ["remote", "get-url", "origin"], {
            cwd: gitCwd,
            silent: true,
        });
        if (res.exitCode === 0) {
            const out = res.stdout.trim();
            if (out)
                remote = out;
        }
    }
    catch {
        // git missing — fine.
    }
    try {
        // origin/HEAD tracks the remote's default branch (when set).
        const res = await exec("git", ["symbolic-ref", "--short", "refs/remotes/origin/HEAD"], {
            cwd: gitCwd,
            silent: true,
        });
        if (res.exitCode === 0) {
            // Output is "origin/main" — strip the remote prefix.
            const out = res.stdout.trim().replace(/^origin\//, "");
            if (out)
                defaultBranch = out;
        }
    }
    catch {
        // Unset — fine.
    }
    const repo = repoSlugFromUrl(remote);
    return {
        remote,
        repo,
        defaultBranch,
        // Only GitHub URLs yield a slug, so a parsed repo implies GitHub.
        hasGitHubRemote: !!repo,
    };
}
573
/**
 * Extract an "owner/name" slug from a GitHub remote URL (SSH or HTTPS,
 * with or without a trailing `.git`). Non-GitHub URLs and missing
 * input yield undefined.
 */
function repoSlugFromUrl(url) {
    if (!url)
        return undefined;
    const forms = [
        /^git@github\.com:([^/]+)\/([^/]+?)(?:\.git)?$/,
        /^https?:\/\/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?(?:\/.*)?$/,
    ];
    for (const re of forms) {
        const m = url.match(re);
        if (m)
            return `${m[1]}/${m[2]}`;
    }
    return undefined;
}
584
+ // ---------------------------------------------------------------------------
585
+ // Interactive prompts (only for unknowns)
586
+ // ---------------------------------------------------------------------------
587
/**
 * Confirm or fill identity gaps interactively. Prints a summary of the
 * locally-detected signals, then for each of name/domain/repo either
 * prompts (when missing) or asks for confirmation (when inferred),
 * unless `autoAccept` suppresses all prompting.
 *
 * @param local      locally-inferred state, used only for the summary printout
 * @param inferred   identity values inferred so far (copied, not mutated)
 * @param sources    per-field provenance labels shown in the summary
 * @param autoAccept when true, keep inferred values as-is and skip optional asks
 * @returns the (possibly user-corrected) identity
 */
async function promptForGaps(local, inferred, sources, autoAccept) {
    // Print a summary of what we found before asking.
    console.log(chalk.bold("\n Detected from this directory:"));
    console.log(` ${labelRow("Project name", inferred.name, sources.name)}`);
    console.log(` ${labelRow("Domain", inferred.domain, sources.domain)}`);
    console.log(` ${labelRow("GitHub repo", inferred.repo, sources.repo)}`);
    if (local.manifestPresent)
        console.log(chalk.dim(` + ${MANIFEST_FILENAME} present`));
    if (local.dotenvxEncrypted)
        console.log(chalk.dim(" + .env.production is dotenvx-encrypted"));
    if (local.composePath)
        console.log(chalk.dim(` + compose: ${rel(local.cwd, local.composePath)}`));
    if (local.ghPagesWorkflowPath)
        console.log(chalk.dim(` + gh-pages workflow: ${rel(local.cwd, local.ghPagesWorkflowPath)}`));
    if (local.deployWorkflowPath)
        console.log(chalk.dim(` + Coolify deploy workflow: ${rel(local.cwd, local.deployWorkflowPath)}`));
    if (local.cnameFile)
        console.log(chalk.dim(` + CNAME at ${rel(local.cwd, local.cnameFile.path)} → ${local.cnameFile.content}`));
    console.log("");
    const out = { ...inferred };
    // Name: required for most lookups. Always confirm or prompt.
    if (!out.name) {
        out.name = await input({
            message: "Project / app name (matches Coolify apps, buckets, clients):",
            validate: (v) => (v.trim().length > 0 ? true : "Required"),
        });
    }
    else if (!autoAccept) {
        const ok = await confirm({
            message: `Use ${chalk.bold(out.name)} as the project name?`,
            default: true,
        });
        if (!ok) {
            out.name = await input({
                message: "Project / app name:",
                default: out.name,
                validate: (v) => (v.trim().length > 0 ? true : "Required"),
            });
        }
    }
    // Domain: optional. Skip lookups that need it if blank.
    if (!out.domain) {
        const want = autoAccept
            ? false
            : await confirm({
                message: "Want to scan for resources tied to a specific domain?",
                default: true,
            });
        if (want) {
            out.domain = await input({
                message: "Primary domain (e.g. myapp.com — empty to skip):",
            });
            // Empty answer → undefined, so downstream scans skip cleanly.
            out.domain = out.domain?.trim() || undefined;
        }
    }
    else if (!autoAccept) {
        const ok = await confirm({
            message: `Use ${chalk.bold(out.domain)} as the primary domain?`,
            default: true,
        });
        if (!ok) {
            out.domain = await input({ message: "Primary domain (empty to skip):", default: out.domain });
            out.domain = out.domain?.trim() || undefined;
        }
    }
    // Repo: optional. Skip GH-side lookups if blank.
    if (!out.repo) {
        const want = autoAccept
            ? false
            : await confirm({
                message: "Want to scan a GitHub repo (Pages, secrets, visibility)?",
                default: false,
            });
        if (want) {
            out.repo = await input({
                message: "GitHub repo slug (owner/name — empty to skip):",
                validate: (v) => !v.trim() || /^[^/\s]+\/[^/\s]+$/.test(v.trim()) ? true : "Expected owner/name format",
            });
            out.repo = out.repo?.trim() || undefined;
        }
    }
    else if (!autoAccept) {
        const ok = await confirm({
            message: `Use ${chalk.bold(out.repo)} as the GitHub repo?`,
            default: true,
        });
        if (!ok) {
            out.repo = await input({
                message: "GitHub repo slug (owner/name — empty to skip):",
                default: out.repo,
                validate: (v) => !v.trim() || /^[^/\s]+\/[^/\s]+$/.test(v.trim()) ? true : "Expected owner/name format",
            });
            out.repo = out.repo?.trim() || undefined;
        }
    }
    return out;
}
684
/** Format one line of the detection summary: padded label, status glyph,
 * value, and a dimmed provenance suffix ("← manifest", "← guess", …). */
function labelRow(label, value, source) {
    const padded = label.padEnd(14);
    if (!value) {
        return `${padded} ${chalk.dim("·")} ${chalk.dim("(not detected)")}`;
    }
    const origin = source ? chalk.dim(` ← ${source}`) : chalk.dim(" ← guess");
    return `${padded} ${chalk.green("✓")} ${chalk.bold(value)}${origin}`;
}
690
/** Render `abs` relative to `cwd` when it lives underneath it;
 * otherwise return the absolute path unchanged. */
function rel(cwd, abs) {
    const prefix = `${cwd}/`;
    return abs.startsWith(prefix) ? abs.slice(prefix.length) : abs;
}
695
/**
 * Compute per-provider expectation flags from local signals. A flag of
 * true means "this project's local state declares it should have the
 * provider's resource", so a `missing` finding renders as red ✗ rather
 * than dim · (expected absence).
 *
 * @param local    output of inferLocal() (env signals, deps, workflows)
 * @param identity resolved identity (domain/repo drive dns/github)
 * @returns flags keyed by provider scan
 */
export function computeExpectations(local, identity) {
    const env = local.envSignals;
    const deps = local.packageDeps;
    const hasManifestBuckets = (() => {
        const b = local.manifest?.s3Buckets;
        if (!b)
            return false;
        return Object.values(b).some((v) => v && typeof v === "object");
    })();
    return {
        // Coolify is the project's deploy target whenever there's a manifest
        // (every hatchkit-scaffolded project deploys there) or a deploy
        // workflow committed.
        coolify: !!local.manifest || !!local.deployWorkflowPath,
        // DNS is always relevant when we know a domain — the user wouldn't
        // have a domain unless they intended to route something to it.
        dns: !!identity.domain,
        r2: hasManifestBuckets || env.has("R2") || env.has("S3") || deps.has("@aws-sdk/client-s3"),
        github: !!identity.repo,
        // Pages is expected when a gh-pages workflow is committed or the
        // repo has a CNAME file at one of the conventional locations.
        githubPages: !!local.ghPagesWorkflowPath || !!local.cnameFile,
        resend: env.has("RESEND") || deps.has("resend"),
        // The Sentry SDK works against GlitchTip (same wire protocol), so
        // either signal counts. Same for an explicit GLITCHTIP_DSN.
        glitchtip: env.has("GLITCHTIP") ||
            env.has("SENTRY") ||
            deps.has("glitchtip") ||
            hasDepMatching(deps, /^@sentry\//),
        openpanel: env.has("OPENPANEL") || hasDepMatching(deps, /^@openpanel\//) || deps.has("openpanel"),
        stripe: env.has("STRIPE") || deps.has("stripe") || deps.has("@stripe/stripe-js"),
    };
}
728
+ function hasDepMatching(deps, re) {
729
+ for (const d of deps)
730
+ if (re.test(d))
731
+ return true;
732
+ return false;
733
+ }
734
+ async function scanCoolify(input, expected) {
735
+ const provider = "coolify";
736
+ const findings = [];
737
+ const skipped = [];
738
+ const cfg = await getCoolifyConfig();
739
+ if (!cfg) {
740
+ skipped.push({ provider, reason: "not configured (`hatchkit config add coolify`)" });
741
+ return { provider, findings, skipped };
742
+ }
743
+ if (!input.name) {
744
+ skipped.push({ provider, reason: "no project name to match against" });
745
+ return { provider, findings, skipped };
746
+ }
747
+ const api = new CoolifyApi({ url: cfg.url, token: cfg.token });
748
+ let projects = [];
749
+ let apps = [];
750
+ try {
751
+ [projects, apps] = await Promise.all([
752
+ api.listProjects().catch(() => []),
753
+ api.listApplications().catch(() => []),
754
+ ]);
755
+ }
756
+ catch (err) {
757
+ skipped.push({
758
+ provider,
759
+ reason: `Coolify request failed: ${err.message.split("\n")[0]}`,
760
+ });
761
+ return { provider, findings, skipped };
762
+ }
763
+ const wantedNames = nameAliases(input.name);
764
+ const projectMatches = projects.filter((p) => wantedNames.includes(p.name));
765
+ const appMatches = apps.filter((a) => wantedNames.includes(a.name));
766
+ for (const p of projectMatches) {
767
+ findings.push({
768
+ provider,
769
+ kind: "project",
770
+ identity: p.name,
771
+ status: "present",
772
+ detail: `Coolify project (id: ${p.id})`,
773
+ });
774
+ }
775
+ // Hydrate each app match with its full details so drift can compare
776
+ // fqdn / git_repository / server uuid. One call per match is cheap.
777
+ const hydrated = [];
778
+ for (const a of appMatches) {
779
+ try {
780
+ const full = await api.getApplication(a.uuid);
781
+ hydrated.push(full);
782
+ const fqdns = collectFqdns(full);
783
+ const detail = [
784
+ `Coolify app (${full.buildPack ?? "?"})`,
785
+ fqdns.length ? `fqdn: ${fqdns.join(", ")}` : "no fqdn",
786
+ full.gitRepository ? `repo: ${full.gitRepository}` : undefined,
787
+ ]
788
+ .filter(Boolean)
789
+ .join(" · ");
790
+ findings.push({
791
+ provider,
792
+ kind: "application",
793
+ identity: full.name,
794
+ status: "present",
795
+ detail,
796
+ });
797
+ }
798
+ catch (err) {
799
+ findings.push({
800
+ provider,
801
+ kind: "application",
802
+ identity: a.name,
803
+ status: "info",
804
+ detail: `couldn't load detail: ${err.message.split("\n")[0]}`,
805
+ });
806
+ }
807
+ }
808
+ if (projectMatches.length === 0 && appMatches.length === 0) {
809
+ findings.push({
810
+ provider,
811
+ kind: "application",
812
+ identity: input.name,
813
+ status: "missing",
814
+ expected,
815
+ detail: `no Coolify project or app named ${wantedNames.join(" / ")} (${apps.length} app(s) total)`,
816
+ });
817
+ }
818
+ return { provider, findings, skipped, raw: { hydrated } };
819
+ }
820
+ function collectFqdns(app) {
821
+ const fqdns = [];
822
+ if (app.fqdn) {
823
+ for (const part of app.fqdn.split(",")) {
824
+ const trimmed = part
825
+ .trim()
826
+ .replace(/^https?:\/\//, "")
827
+ .replace(/\/.*$/, "");
828
+ if (trimmed)
829
+ fqdns.push(trimmed);
830
+ }
831
+ }
832
+ if (app.dockerComposeDomains) {
833
+ for (const d of app.dockerComposeDomains) {
834
+ const stripped = d.domain.replace(/^https?:\/\//, "").replace(/\/.*$/, "");
835
+ if (stripped)
836
+ fqdns.push(stripped);
837
+ }
838
+ }
839
+ return Array.from(new Set(fqdns));
840
+ }
841
/** Project name aliases we'll match against remote resources.
 * Keep in sync with `hatchkit adopt`'s detectProject — same family of
 * conventions (raw, -server, -client, -web). */
function nameAliases(name) {
    const suffixes = ["", "-server", "-client", "-web", "-api"];
    return suffixes.map((suffix) => `${name}${suffix}`);
}
847
+ async function scanDns(input, expected) {
848
+ const provider = "dns";
849
+ const findings = [];
850
+ const skipped = [];
851
+ const cfg = await getDnsConfig();
852
+ if (!cfg) {
853
+ skipped.push({ provider, reason: "not configured (`hatchkit config add dns`)" });
854
+ return { provider, findings, skipped };
855
+ }
856
+ if (cfg.provider !== "cloudflare") {
857
+ skipped.push({
858
+ provider,
859
+ reason: `${cfg.provider} provider has no list API exposed (Cloudflare only for now)`,
860
+ });
861
+ return { provider, findings, skipped };
862
+ }
863
+ if (!cfg.apiToken) {
864
+ skipped.push({ provider, reason: "Cloudflare API token missing from keychain" });
865
+ return { provider, findings, skipped };
866
+ }
867
+ if (!input.domain) {
868
+ skipped.push({ provider, reason: "no domain to look up" });
869
+ return { provider, findings, skipped };
870
+ }
871
+ const cf = new CloudflareApi({ token: cfg.apiToken });
872
+ const apex = apexOf(input.domain);
873
+ let zone;
874
+ try {
875
+ zone = await cf.getZoneByName(apex);
876
+ }
877
+ catch (err) {
878
+ skipped.push({
879
+ provider,
880
+ reason: `Cloudflare zone lookup failed: ${err.message.split("\n")[0]}`,
881
+ });
882
+ return { provider, findings, skipped };
883
+ }
884
+ if (!zone) {
885
+ findings.push({
886
+ provider,
887
+ kind: "zone",
888
+ identity: apex,
889
+ status: "missing",
890
+ expected,
891
+ detail: "no Cloudflare zone for this apex",
892
+ });
893
+ return { provider, findings, skipped };
894
+ }
895
+ findings.push({
896
+ provider,
897
+ kind: "zone",
898
+ identity: zone.name,
899
+ status: "present",
900
+ detail: `zone id ${zone.id}`,
901
+ });
902
+ // Probe a curated set of relevant record names. For each we run an
903
+ // exact name+type lookup — much cheaper than listing every record in
904
+ // the zone and filtering. Misses the long tail but covers >95% of
905
+ // hatchkit-managed naming.
906
+ const probes = relevantRecordProbes(input);
907
+ const dnsRecords = [];
908
+ for (const probe of probes) {
909
+ try {
910
+ const rec = await cf.findRecord(zone.id, probe.name, probe.type);
911
+ if (rec) {
912
+ dnsRecords.push(rec);
913
+ findings.push({
914
+ provider,
915
+ kind: "dns-record",
916
+ identity: `${rec.name} ${rec.type}`,
917
+ status: "present",
918
+ detail: `${rec.content}${rec.proxied ? " (proxied)" : ""}`,
919
+ });
920
+ }
921
+ }
922
+ catch {
923
+ // Record probe failed — skip silently; the zone-level finding
924
+ // already proves auth works.
925
+ }
926
+ }
927
+ return { provider, findings, skipped, raw: { zone, dnsRecords } };
928
+ }
929
+ function apexOf(domain) {
930
+ // crude but adequate: take the last two labels.
931
+ const parts = domain.replace(/\.$/, "").split(".");
932
+ if (parts.length <= 2)
933
+ return parts.join(".");
934
+ return parts.slice(-2).join(".");
935
+ }
936
+ function relevantRecordProbes(input) {
937
+ if (!input.domain)
938
+ return [];
939
+ const apex = apexOf(input.domain);
940
+ const out = [];
941
+ const names = new Set([
942
+ input.domain,
943
+ apex,
944
+ `www.${apex}`,
945
+ `api.${apex}`,
946
+ `s3.${apex}`,
947
+ `assets.${apex}`,
948
+ `cdn.${apex}`,
949
+ `docs.${apex}`,
950
+ ]);
951
+ if (input.name)
952
+ names.add(`${input.name}.${apex}`);
953
+ for (const n of names) {
954
+ out.push({ name: n, type: "A" });
955
+ out.push({ name: n, type: "CNAME" });
956
+ }
957
+ return out;
958
+ }
959
+ async function scanR2(input, manifest, expected) {
960
+ const provider = "s3:r2";
961
+ const findings = [];
962
+ const skipped = [];
963
+ const cfg = await getS3Config("r2");
964
+ if (!cfg) {
965
+ skipped.push({ provider, reason: "R2 not configured (`hatchkit config add s3` → r2)" });
966
+ return { provider, findings, skipped };
967
+ }
968
+ const adminToken = await getSecret(SECRET_KEYS.r2AdminToken);
969
+ if (!adminToken) {
970
+ skipped.push({ provider, reason: "R2 admin token not in keychain; can't list buckets" });
971
+ return { provider, findings, skipped };
972
+ }
973
+ const accountId = manifest?.s3Buckets?.accountId ??
974
+ cfg.endpoint?.match(/https?:\/\/([0-9a-f]{32})\.r2\.cloudflarestorage\.com/i)?.[1];
975
+ if (!accountId) {
976
+ skipped.push({ provider, reason: "couldn't derive R2 account id from manifest or endpoint" });
977
+ return { provider, findings, skipped };
978
+ }
979
+ const cf = new CloudflareApi({ token: adminToken });
980
+ // Candidate bucket names: manifest entries first (authoritative —
981
+ // these are buckets hatchkit knows it created), then naming-convention
982
+ // guesses for projects without a manifest.
983
+ const candidates = new Set();
984
+ const manifestBuckets = [];
985
+ if (manifest?.s3Buckets) {
986
+ for (const [key, value] of Object.entries(manifest.s3Buckets)) {
987
+ if (!value || typeof value !== "object")
988
+ continue;
989
+ const v = value;
990
+ if (typeof v.name === "string" && v.name) {
991
+ candidates.add(v.name);
992
+ manifestBuckets.push({ name: v.name, manifestKey: key });
993
+ }
994
+ }
995
+ }
996
+ if (input.name) {
997
+ candidates.add(`${input.name}-assets`);
998
+ candidates.add(`${input.name}-state`);
999
+ candidates.add(input.name);
1000
+ }
1001
+ const live = [];
1002
+ for (const name of candidates) {
1003
+ try {
1004
+ const bucket = await cf.getR2Bucket(accountId, name);
1005
+ if (!bucket) {
1006
+ live.push({ name, bucket: null });
1007
+ continue;
1008
+ }
1009
+ const [cors, domains] = await Promise.all([
1010
+ cf.getR2BucketCors(accountId, name).catch(() => null),
1011
+ cf.listR2CustomDomains(accountId, name).catch(() => []),
1012
+ ]);
1013
+ live.push({ name, bucket, cors, customDomains: domains });
1014
+ const domainSummary = (domains ?? [])
1015
+ .filter((d) => d.enabled)
1016
+ .map((d) => d.domain)
1017
+ .join(", ");
1018
+ const corsSummary = cors?.[0]?.allowed?.origins?.length
1019
+ ? `${cors[0].allowed.origins.length} CORS origin(s)`
1020
+ : "no CORS";
1021
+ findings.push({
1022
+ provider,
1023
+ kind: "bucket",
1024
+ identity: name,
1025
+ status: "present",
1026
+ detail: [
1027
+ bucket.storage_class ? `class: ${bucket.storage_class}` : undefined,
1028
+ domainSummary ? `custom: ${domainSummary}` : undefined,
1029
+ corsSummary,
1030
+ ]
1031
+ .filter(Boolean)
1032
+ .join(" · "),
1033
+ });
1034
+ }
1035
+ catch (err) {
1036
+ // Auth probably broken — bubble as skip rather than per-bucket fail.
1037
+ skipped.push({
1038
+ provider,
1039
+ reason: `R2 lookup for "${name}" failed: ${err.message.split("\n")[0]}`,
1040
+ });
1041
+ return { provider, findings, skipped, raw: { accountId, live, manifestBuckets } };
1042
+ }
1043
+ }
1044
+ // If we looked but nothing matched, leave a breadcrumb. Without
1045
+ // this, scanR2 returns empty findings and the user wonders whether
1046
+ // we even tried.
1047
+ if (findings.length === 0 && candidates.size > 0) {
1048
+ findings.push({
1049
+ provider,
1050
+ kind: "bucket",
1051
+ identity: input.name ?? "(candidates)",
1052
+ status: "missing",
1053
+ expected,
1054
+ detail: `no R2 bucket matches ${Array.from(candidates).join(" / ")} (account ${accountId.slice(0, 6)}…)`,
1055
+ });
1056
+ }
1057
+ return { provider, findings, skipped, raw: { accountId, live, manifestBuckets } };
1058
+ }
1059
/**
 * Hetzner Object Storage + AWS S3: presence-only report. Neither has a
 * "list all buckets for this access key" call exposed in the existing
 * client, so we only surface that credentials are configured and where the
 * endpoint points. A full impl would need an `@aws-sdk/client-s3`
 * `ListBuckets` call, which is already a dep — but adding that here doubles
 * the surface area; ship without for now and revisit if anyone asks.
 */
async function scanS3Other(_input) {
    const provider = "s3";
    const findings = [];
    const skipped = [];
    for (const vendor of ["hetzner", "aws"]) {
        const cfg = await getS3Config(vendor);
        if (!cfg) {
            skipped.push({ provider: `s3:${vendor}`, reason: "not configured" });
            continue;
        }
        findings.push({
            provider: `s3:${vendor}`,
            kind: "credentials",
            identity: vendor,
            status: "info",
            detail: `endpoint: ${cfg.endpoint} — bucket inventory not implemented for ${vendor}`,
        });
    }
    return { provider, findings, skipped };
}
1086
+ async function scanGitHub(input, expects) {
1087
+ const provider = "github";
1088
+ const findings = [];
1089
+ const skipped = [];
1090
+ if (!input.repo) {
1091
+ skipped.push({ provider, reason: "no GitHub repo to look up" });
1092
+ return { provider, findings, skipped };
1093
+ }
1094
+ if (!(await execOk("gh", ["--version"]))) {
1095
+ skipped.push({ provider, reason: "`gh` CLI not installed" });
1096
+ return { provider, findings, skipped };
1097
+ }
1098
+ if (!(await execOk("gh", ["auth", "status"]))) {
1099
+ skipped.push({ provider, reason: "`gh` not authenticated (run `gh auth login`)" });
1100
+ return { provider, findings, skipped };
1101
+ }
1102
+ // Repo metadata.
1103
+ let repoInfo = {};
1104
+ try {
1105
+ const res = await exec("gh", [
1106
+ "repo",
1107
+ "view",
1108
+ input.repo,
1109
+ "--json",
1110
+ "visibility,defaultBranchRef,description,homepageUrl,isArchived",
1111
+ ], { silent: true });
1112
+ if (res.exitCode === 0) {
1113
+ repoInfo = JSON.parse(res.stdout);
1114
+ findings.push({
1115
+ provider,
1116
+ kind: "repository",
1117
+ identity: input.repo,
1118
+ status: "present",
1119
+ detail: [
1120
+ repoInfo.visibility?.toLowerCase(),
1121
+ `default: ${repoInfo.defaultBranchRef?.name ?? "?"}`,
1122
+ repoInfo.isArchived ? "archived" : undefined,
1123
+ repoInfo.homepageUrl ? `homepage: ${repoInfo.homepageUrl}` : undefined,
1124
+ ]
1125
+ .filter(Boolean)
1126
+ .join(" · "),
1127
+ });
1128
+ }
1129
+ else {
1130
+ findings.push({
1131
+ provider,
1132
+ kind: "repository",
1133
+ identity: input.repo,
1134
+ status: "missing",
1135
+ expected: expects.github,
1136
+ detail: res.stderr.trim().split("\n")[0],
1137
+ });
1138
+ return { provider, findings, skipped, raw: { repoInfo } };
1139
+ }
1140
+ }
1141
+ catch (err) {
1142
+ findings.push({
1143
+ provider,
1144
+ kind: "repository",
1145
+ identity: input.repo,
1146
+ status: "info",
1147
+ detail: `gh repo view failed: ${err.message.split("\n")[0]}`,
1148
+ });
1149
+ }
1150
+ let pages = null;
1151
+ try {
1152
+ const res = await exec("gh", ["api", `repos/${input.repo}/pages`], { silent: true });
1153
+ if (res.exitCode === 0) {
1154
+ pages = JSON.parse(res.stdout);
1155
+ findings.push({
1156
+ provider: "github-pages",
1157
+ kind: "page-site",
1158
+ identity: input.repo,
1159
+ status: "present",
1160
+ detail: [
1161
+ pages?.status,
1162
+ pages?.cname ? `cname: ${pages.cname}` : "no custom domain",
1163
+ pages?.html_url,
1164
+ ]
1165
+ .filter(Boolean)
1166
+ .join(" · "),
1167
+ });
1168
+ }
1169
+ else if (/HTTP 404/.test(res.stderr)) {
1170
+ findings.push({
1171
+ provider: "github-pages",
1172
+ kind: "page-site",
1173
+ identity: input.repo,
1174
+ status: "missing",
1175
+ expected: expects.githubPages,
1176
+ detail: "Pages is not enabled on this repo",
1177
+ });
1178
+ }
1179
+ else {
1180
+ findings.push({
1181
+ provider: "github-pages",
1182
+ kind: "page-site",
1183
+ identity: input.repo,
1184
+ status: "info",
1185
+ detail: `gh api repos/<repo>/pages failed: ${res.stderr.trim().split("\n")[0]}`,
1186
+ });
1187
+ }
1188
+ }
1189
+ catch (err) {
1190
+ findings.push({
1191
+ provider: "github-pages",
1192
+ kind: "page-site",
1193
+ identity: input.repo,
1194
+ status: "info",
1195
+ detail: `pages probe failed: ${err.message.split("\n")[0]}`,
1196
+ });
1197
+ }
1198
+ // Repo secrets — surface only the ones hatchkit cares about, by name.
1199
+ const relevantSecrets = [
1200
+ "DOTENV_PRIVATE_KEY_PRODUCTION",
1201
+ "COOLIFY_API_URL",
1202
+ "COOLIFY_API_TOKEN",
1203
+ "COOLIFY_APP_UUID",
1204
+ ];
1205
+ try {
1206
+ const res = await exec("gh", ["secret", "list", "--repo", input.repo, "--json", "name,updatedAt"], { silent: true });
1207
+ if (res.exitCode === 0) {
1208
+ const all = JSON.parse(res.stdout);
1209
+ const haves = new Set(all.map((s) => s.name));
1210
+ for (const want of relevantSecrets) {
1211
+ if (haves.has(want)) {
1212
+ findings.push({
1213
+ provider,
1214
+ kind: "secret",
1215
+ identity: want,
1216
+ status: "present",
1217
+ detail: "set on repo",
1218
+ });
1219
+ }
1220
+ }
1221
+ const extras = all.length;
1222
+ findings.push({
1223
+ provider,
1224
+ kind: "secret-summary",
1225
+ identity: input.repo,
1226
+ status: "info",
1227
+ detail: `${extras} secret(s) total on repo`,
1228
+ });
1229
+ }
1230
+ }
1231
+ catch {
1232
+ // Non-fatal — secret-listing requires admin scope on the gh token.
1233
+ }
1234
+ return { provider, findings, skipped, raw: { repoInfo, pages } };
1235
+ }
1236
+ async function scanResend(input, expected) {
1237
+ const provider = "resend";
1238
+ const findings = [];
1239
+ const skipped = [];
1240
+ const cfg = await getResendConfig();
1241
+ if (!cfg) {
1242
+ skipped.push({ provider, reason: "not configured" });
1243
+ return { provider, findings, skipped };
1244
+ }
1245
+ if (!input.domain) {
1246
+ skipped.push({ provider, reason: "no domain to match against verified Resend domains" });
1247
+ return { provider, findings, skipped };
1248
+ }
1249
+ try {
1250
+ const res = await fetch("https://api.resend.com/domains", {
1251
+ headers: { Authorization: `Bearer ${cfg.apiKey}` },
1252
+ });
1253
+ if (!res.ok)
1254
+ throw new Error(`HTTP ${res.status}`);
1255
+ const body = (await res.json());
1256
+ const apex = apexOf(input.domain);
1257
+ const matches = (body.data ?? []).filter((d) => typeof d.name === "string" && (d.name === input.domain || d.name === apex));
1258
+ if (matches.length === 0) {
1259
+ findings.push({
1260
+ provider,
1261
+ kind: "verified-domain",
1262
+ identity: input.domain,
1263
+ status: "missing",
1264
+ expected,
1265
+ detail: `no Resend domain entry for ${input.domain} (${(body.data ?? []).length} domain(s) total)`,
1266
+ });
1267
+ }
1268
+ else {
1269
+ for (const m of matches) {
1270
+ findings.push({
1271
+ provider,
1272
+ kind: "verified-domain",
1273
+ identity: m.name ?? input.domain,
1274
+ status: "present",
1275
+ detail: `status: ${m.status ?? "?"}`,
1276
+ });
1277
+ }
1278
+ }
1279
+ }
1280
+ catch (err) {
1281
+ skipped.push({
1282
+ provider,
1283
+ reason: `Resend lookup failed: ${err.message.split("\n")[0]}`,
1284
+ });
1285
+ }
1286
+ return { provider, findings, skipped };
1287
+ }
1288
+ async function scanGlitchtip(input, expected) {
1289
+ const provider = "glitchtip";
1290
+ const findings = [];
1291
+ const skipped = [];
1292
+ const cfg = await getGlitchtipConfig();
1293
+ if (!cfg) {
1294
+ skipped.push({ provider, reason: "not configured" });
1295
+ return { provider, findings, skipped };
1296
+ }
1297
+ if (!input.name) {
1298
+ skipped.push({ provider, reason: "no project name to match against GlitchTip projects" });
1299
+ return { provider, findings, skipped };
1300
+ }
1301
+ try {
1302
+ const res = await fetch(`${cfg.url.replace(/\/$/, "")}/api/0/organizations/${cfg.organizationSlug}/projects/`, { headers: { Authorization: `Bearer ${cfg.token}` } });
1303
+ if (!res.ok)
1304
+ throw new Error(`HTTP ${res.status}`);
1305
+ const body = (await res.json());
1306
+ const wanted = nameAliases(input.name);
1307
+ const matches = body.filter((p) => (typeof p.name === "string" && wanted.includes(p.name)) ||
1308
+ (typeof p.slug === "string" && wanted.includes(p.slug)));
1309
+ if (matches.length === 0) {
1310
+ findings.push({
1311
+ provider,
1312
+ kind: "project",
1313
+ identity: input.name,
1314
+ status: "missing",
1315
+ expected,
1316
+ detail: `no GlitchTip project matching ${wanted.join(" / ")} (${body.length} total in org)`,
1317
+ });
1318
+ }
1319
+ else {
1320
+ for (const p of matches) {
1321
+ findings.push({
1322
+ provider,
1323
+ kind: "project",
1324
+ identity: p.slug ?? p.name ?? input.name,
1325
+ status: "present",
1326
+ detail: p.platform ? `platform: ${p.platform}` : undefined,
1327
+ });
1328
+ }
1329
+ }
1330
+ }
1331
+ catch (err) {
1332
+ skipped.push({
1333
+ provider,
1334
+ reason: `GlitchTip lookup failed: ${err.message.split("\n")[0]}`,
1335
+ });
1336
+ }
1337
+ return { provider, findings, skipped };
1338
+ }
1339
+ async function scanOpenpanel(input, expected) {
1340
+ const provider = "openpanel";
1341
+ const findings = [];
1342
+ const skipped = [];
1343
+ const cfg = await getOpenpanelConfig();
1344
+ if (!cfg) {
1345
+ skipped.push({ provider, reason: "not configured" });
1346
+ return { provider, findings, skipped };
1347
+ }
1348
+ if (!input.name) {
1349
+ skipped.push({ provider, reason: "no project name to match against OpenPanel projects" });
1350
+ return { provider, findings, skipped };
1351
+ }
1352
+ try {
1353
+ const base = (cfg.apiUrl ?? cfg.url).replace(/\/$/, "");
1354
+ const res = await fetch(`${base}/manage/projects`, {
1355
+ headers: {
1356
+ "openpanel-client-id": cfg.rootClientId,
1357
+ "openpanel-client-secret": cfg.rootClientSecret,
1358
+ },
1359
+ });
1360
+ if (!res.ok)
1361
+ throw new Error(`HTTP ${res.status}`);
1362
+ // OpenPanel's manage API sometimes returns a bare array, sometimes
1363
+ // `{ data: [...] }`. Accept either shape.
1364
+ const raw = (await res.json());
1365
+ const projects = Array.isArray(raw)
1366
+ ? raw
1367
+ : (raw.data ?? []);
1368
+ const wanted = nameAliases(input.name);
1369
+ const matches = projects.filter((p) => (typeof p.name === "string" && wanted.includes(p.name)) ||
1370
+ (typeof p.id === "string" && wanted.includes(p.id)));
1371
+ if (matches.length === 0) {
1372
+ findings.push({
1373
+ provider,
1374
+ kind: "project",
1375
+ identity: input.name,
1376
+ status: "missing",
1377
+ expected,
1378
+ detail: `no OpenPanel project matching ${wanted.join(" / ")} (${projects.length} total)`,
1379
+ });
1380
+ }
1381
+ else {
1382
+ for (const p of matches) {
1383
+ findings.push({
1384
+ provider,
1385
+ kind: "project",
1386
+ identity: p.name ?? p.id ?? input.name,
1387
+ status: "present",
1388
+ });
1389
+ }
1390
+ }
1391
+ }
1392
+ catch (err) {
1393
+ skipped.push({
1394
+ provider,
1395
+ reason: `OpenPanel lookup failed: ${err.message.split("\n")[0]}`,
1396
+ });
1397
+ }
1398
+ return { provider, findings, skipped };
1399
+ }
1400
+ async function scanStripe(input, expected) {
1401
+ const provider = "stripe";
1402
+ const findings = [];
1403
+ const skipped = [];
1404
+ const cfg = await getStripeConfig();
1405
+ if (!cfg) {
1406
+ skipped.push({ provider, reason: "not configured" });
1407
+ return { provider, findings, skipped };
1408
+ }
1409
+ if (!input.domain) {
1410
+ skipped.push({
1411
+ provider,
1412
+ reason: "no domain to match against Stripe webhook endpoints",
1413
+ });
1414
+ return { provider, findings, skipped };
1415
+ }
1416
+ // Probe each mode that has a stored master key.
1417
+ for (const mode of ["test", "live"]) {
1418
+ const key = mode === "test" ? cfg.testSecretKey : cfg.liveSecretKey;
1419
+ if (!key)
1420
+ continue;
1421
+ try {
1422
+ const res = await fetch("https://api.stripe.com/v1/webhook_endpoints?limit=100", {
1423
+ headers: { Authorization: `Bearer ${key}` },
1424
+ });
1425
+ if (!res.ok)
1426
+ throw new Error(`HTTP ${res.status}`);
1427
+ const body = (await res.json());
1428
+ const matches = (body.data ?? []).filter((w) => typeof w.url === "string" && w.url.includes(input.domain ?? ""));
1429
+ if (matches.length === 0) {
1430
+ findings.push({
1431
+ provider,
1432
+ kind: "webhook-endpoint",
1433
+ identity: `${mode} mode`,
1434
+ status: "missing",
1435
+ expected,
1436
+ detail: `no webhook endpoint with URL containing ${input.domain} (${(body.data ?? []).length} endpoint(s) in ${mode} mode)`,
1437
+ });
1438
+ }
1439
+ else {
1440
+ for (const w of matches) {
1441
+ findings.push({
1442
+ provider,
1443
+ kind: "webhook-endpoint",
1444
+ identity: `${mode}:${w.id ?? "?"}`,
1445
+ status: "present",
1446
+ detail: `${w.url} (${w.status ?? "?"})`,
1447
+ });
1448
+ }
1449
+ }
1450
+ }
1451
+ catch (err) {
1452
+ skipped.push({
1453
+ provider,
1454
+ reason: `Stripe ${mode}-mode lookup failed: ${err.message.split("\n")[0]}`,
1455
+ });
1456
+ }
1457
+ }
1458
+ return { provider, findings, skipped };
1459
+ }
1460
+ // ---------------------------------------------------------------------------
1461
+ // Drift detection (cross-references between scan results + local state)
1462
+ // ---------------------------------------------------------------------------
1463
/**
 * Cross-reference the per-provider scan results against each other and
 * against local state, emitting "drift" findings wherever two sources of
 * truth disagree (checks D1–D6 below). Only D1 makes an extra network call
 * (best-effort Coolify server-IP lookup); everything else reuses the `raw`
 * payloads the scanners already collected.
 *
 * @param input       inferred identity ({ name, domain, repo }).
 * @param local       local scan (manifest, gitRemote, workflow paths, cwd, …).
 * @param scanResults provider scan results; looked up by provider key.
 * @returns drift findings (possibly empty array).
 */
async function detectDrift(input, local, scanResults) {
    const out = [];
    const byProvider = new Map(scanResults.map((r) => [r.provider, r]));
    const coolify = byProvider.get("coolify");
    const dns = byProvider.get("dns");
    const r2 = byProvider.get("s3:r2");
    const github = byProvider.get("github");
    // D1: Coolify app fqdn vs DNS A record content (when both are
    // present). We resolve via the Cloudflare zone records we already
    // fetched in scanDns — no extra network hop.
    if (coolify?.raw && dns?.raw && input.domain) {
        const hydrated = (coolify.raw.hydrated ?? []);
        const dnsRecords = (dns.raw.dnsRecords ?? []);
        for (const app of hydrated) {
            const fqdns = collectFqdns(app);
            // Only consider fqdns on the project's domain (or its apex).
            const matchedFqdn = fqdns.find((f) => f === input.domain || f.endsWith(`.${apexOf(input.domain ?? "")}`));
            if (!matchedFqdn)
                continue;
            const aRecord = dnsRecords.find((r) => r.type === "A" && r.name === matchedFqdn);
            if (!aRecord) {
                out.push({
                    provider: "drift",
                    kind: "coolify-dns",
                    identity: `${app.name} → ${matchedFqdn}`,
                    status: "drift",
                    drift: [
                        `Coolify app "${app.name}" serves ${matchedFqdn} but no A record exists in Cloudflare for that name`,
                    ],
                });
                continue;
            }
            // Best-effort: compare against the Coolify server's public IP
            // when we can pull it. The server uuid lives on the application.
            if (app.serverUuid) {
                try {
                    const cfgC = await getCoolifyConfig();
                    if (cfgC) {
                        const api = new CoolifyApi({ url: cfgC.url, token: cfgC.token });
                        const domains = await api.getServerDomains(app.serverUuid).catch(() => []);
                        const ips = Array.from(new Set(domains.map((d) => d.ip).filter((ip) => !!ip)));
                        // Proxied records point at Cloudflare edge IPs, so an
                        // IP mismatch is only meaningful when not proxied.
                        if (ips.length > 0 && !ips.includes(aRecord.content) && !aRecord.proxied) {
                            out.push({
                                provider: "drift",
                                kind: "coolify-dns",
                                identity: `${app.name} → ${matchedFqdn}`,
                                status: "drift",
                                drift: [
                                    `Cloudflare A record points to ${aRecord.content}`,
                                    `Coolify server IP(s): ${ips.join(", ")}`,
                                    `(record is not proxied — direct IP mismatch will black-hole traffic)`,
                                ],
                            });
                        }
                    }
                }
                catch {
                    // Couldn't resolve server IP — skip silently.
                }
            }
        }
    }
    // D2: Coolify app git_repository vs local git remote — same project
    // name on different repos is a common gotcha during renames.
    if (coolify?.raw && local.gitRemote) {
        const hydrated = (coolify.raw.hydrated ?? []);
        const localSlug = repoSlugFromUrl(local.gitRemote);
        for (const app of hydrated) {
            if (!app.gitRepository)
                continue;
            // Fall back to stripping the github.com prefix / .git suffix when
            // repoSlugFromUrl can't parse the remote's URL form.
            const remoteSlug = repoSlugFromUrl(app.gitRepository) ??
                app.gitRepository.replace(/^https?:\/\/github\.com\//, "").replace(/\.git$/, "");
            if (localSlug && remoteSlug && localSlug.toLowerCase() !== remoteSlug.toLowerCase()) {
                out.push({
                    provider: "drift",
                    kind: "coolify-source",
                    identity: app.name,
                    status: "drift",
                    drift: [
                        `Coolify app deploys from: ${app.gitRepository}`,
                        `Local git remote: ${local.gitRemote}`,
                    ],
                });
            }
        }
    }
    // D3: Manifest-listed buckets that don't actually exist live.
    if (r2?.raw) {
        const manifestBuckets = (r2.raw.manifestBuckets ?? []);
        const live = (r2.raw.live ?? []);
        for (const mb of manifestBuckets) {
            const hit = live.find((l) => l.name === mb.name);
            // `bucket: null` means scanR2 probed the name and found nothing.
            if (!hit || !hit.bucket) {
                out.push({
                    provider: "drift",
                    kind: "bucket",
                    identity: mb.name,
                    status: "drift",
                    drift: [
                        `Manifest s3Buckets.${mb.manifestKey} = "${mb.name}"`,
                        `Live R2: no bucket with that name`,
                        `Fix: \`hatchkit provision s3\` to reconcile, or remove the entry from .hatchkit.json`,
                    ],
                });
            }
        }
    }
    // D4: R2 CORS — manifest-recorded origins vs live policy. Mirrors
    // `doctor.checkProjectS3CorsState` but inventory runs against the
    // buckets it scanned instead of re-walking the manifest.
    if (r2?.raw && local.manifest?.s3Buckets) {
        const live = (r2.raw.live ?? []);
        const assets = local.manifest.s3Buckets.assets;
        if (assets &&
            typeof assets === "object" &&
            "cors" in assets &&
            assets.cors &&
            !assets.cors.skipped) {
            // Sorted copies so comparison is order-insensitive.
            const recorded = (assets.cors.origins ?? []).slice().sort();
            const hit = live.find((l) => l.name === assets.name);
            const liveOrigins = (hit?.cors?.[0]?.allowed?.origins ?? []).slice().sort();
            const same = recorded.length === liveOrigins.length && recorded.every((o, i) => o === liveOrigins[i]);
            if (!same && recorded.length > 0) {
                out.push({
                    provider: "drift",
                    kind: "bucket-cors",
                    identity: assets.name,
                    status: "drift",
                    drift: [
                        `Manifest origins: ${recorded.join(", ") || "(empty)"}`,
                        `Live origins: ${liveOrigins.join(", ") || "(empty)"}`,
                        `Fix: \`hatchkit provision s3\` to reconcile`,
                    ],
                });
            }
        }
    }
    // D5: gh-pages workflow on disk but Pages isn't enabled — or vice
    // versa (Pages enabled but no workflow committed).
    if (github?.raw) {
        const pages = github.raw.pages;
        if (local.ghPagesWorkflowPath && !pages) {
            out.push({
                provider: "drift",
                kind: "github-pages-state",
                identity: input.repo ?? "(repo)",
                status: "drift",
                drift: [
                    `Local workflow exists: ${rel(local.cwd, local.ghPagesWorkflowPath)}`,
                    `GitHub Pages: not enabled`,
                    `Fix: \`hatchkit gh-pages\` or enable Pages in repo Settings`,
                ],
            });
        }
        if (!local.ghPagesWorkflowPath && pages && pages.status) {
            out.push({
                provider: "drift",
                kind: "github-pages-state",
                identity: input.repo ?? "(repo)",
                status: "drift",
                drift: [
                    `GitHub Pages enabled (status: ${pages.status})`,
                    `No Pages-deploying workflow in .github/workflows`,
                    `Fix: commit a Pages-deploying workflow or disable Pages in repo Settings`,
                ],
            });
        }
        // D5b: CNAME file vs Pages custom domain. If both exist and disagree,
        // one will silently win on next deploy — usually painful.
        if (local.cnameFile && pages?.cname && local.cnameFile.content !== pages.cname) {
            out.push({
                provider: "drift",
                kind: "github-pages-cname",
                identity: input.repo ?? "(repo)",
                status: "drift",
                drift: [
                    `CNAME file: ${local.cnameFile.content} (at ${rel(local.cwd, local.cnameFile.path)})`,
                    `Pages setting: ${pages.cname}`,
                ],
            });
        }
    }
    // D6: dotenvx in use locally but no DOTENV_PRIVATE_KEY_PRODUCTION
    // secret on the GitHub repo. The deploy workflow will need that
    // to decrypt at runtime.
    if (github?.raw && local.dotenvxEncrypted) {
        const repoSecrets = github.findings.filter((f) => f.kind === "secret");
        const hasKey = repoSecrets.some((s) => s.identity === "DOTENV_PRIVATE_KEY_PRODUCTION");
        if (!hasKey && input.name) {
            out.push({
                provider: "drift",
                kind: "missing-secret",
                identity: "DOTENV_PRIVATE_KEY_PRODUCTION",
                status: "drift",
                drift: [
                    ".env.production is dotenvx-encrypted locally",
                    `GitHub repo ${input.repo} has no DOTENV_PRIVATE_KEY_PRODUCTION secret`,
                    `Fix: \`hatchkit keys push ${input.name} --target=gh --repo ${input.repo}\``,
                ],
            });
        }
    }
    return out;
}
1666
+ // ---------------------------------------------------------------------------
1667
+ // Renderer (human)
1668
+ // ---------------------------------------------------------------------------
1669
/**
 * Render an inventory report for terminal display.
 *
 * Section order is deliberate: identity header first, then drifts
 * (the actionable items), then per-provider detail, skipped
 * providers, and finally the compact one-page summary.
 */
export function renderInventoryHuman(report) {
    const out = [];
    out.push(chalk.bold(" hatchkit inventory"));
    out.push(chalk.dim(` cwd: ${report.cwd}`));
    out.push("");
    out.push(chalk.bold(" Identity:"));
    // One row per identity field; a dim "·" marks a value we'll prompt for.
    const row = (label, value, source) => ` ${label} ${value ? chalk.bold(value) : chalk.dim("·")}${sourceTag(source)}`;
    out.push(row("name", report.inferred.name, report.sources.name));
    out.push(row("domain", report.inferred.domain, report.sources.domain));
    out.push(row("repo", report.inferred.repo, report.sources.repo));
    out.push("");
    // Bucket findings per provider so each provider renders as one section.
    const byProvider = new Map();
    for (const finding of report.findings) {
        const bucket = byProvider.get(finding.provider);
        if (bucket) {
            bucket.push(finding);
        }
        else {
            byProvider.set(finding.provider, [finding]);
        }
    }
    // Drifts lead — they're the actionable part of the report.
    if (report.drifts.length > 0) {
        out.push(chalk.bold.yellow(" ⚠ Drift detected:"));
        for (const d of report.drifts) {
            out.push(` ${chalk.yellow("⚠")} ${chalk.bold(d.identity)} ${chalk.dim(`(${d.kind})`)}`);
            for (const detail of d.drift ?? []) {
                out.push(` ${chalk.dim("→")} ${detail}`);
            }
        }
        out.push("");
    }
    // Per-provider sections; the "drift" pseudo-provider was shown above.
    for (const [providerKey, findings] of byProvider) {
        if (providerKey === "drift") {
            continue;
        }
        out.push(chalk.bold(` ${providerKey}`));
        for (const f of findings) {
            const suffix = f.detail ? chalk.dim(` — ${f.detail}`) : "";
            out.push(` ${findingIcon(f)} ${f.identity} ${chalk.dim(`(${f.kind})`)}${suffix}`);
        }
        out.push("");
    }
    if (report.skipped.length > 0) {
        out.push(chalk.dim(" Skipped:"));
        for (const s of report.skipped) {
            out.push(chalk.dim(` · ${s.provider}: ${s.reason}`));
        }
        out.push("");
    }
    // Close with the one-page roll-up — answers "what does this project
    // have, and what needs attention?" at a glance.
    out.push(renderInventorySummary(report));
    return out.join("\n");
}
1727
/** Dim annotation showing where an identity value came from;
 * an absent source means the CLI will prompt for it later. */
function sourceTag(s) {
    return s ? chalk.dim(` ← ${s}`) : chalk.dim(" (will prompt)");
}
1732
/** Status icon for a single finding. Red ✗ is reserved for `missing`
 * findings the project actually expects (declared in manifest, env,
 * package deps, workflows). An unexpected absence — e.g. "no Coolify
 * app named foo" for a library that doesn't deploy — is dim · instead. */
function findingIcon(f) {
    switch (f.status) {
        case "present":
            return chalk.green("✓");
        case "drift":
            return chalk.yellow("⚠");
        case "missing":
            return f.expected ? chalk.red("✗") : chalk.dim("·");
        default:
            return chalk.dim("·");
    }
}
1745
/**
 * Compact per-provider roll-up appended at the end of the report.
 *
 * Drift findings live under the "drift" pseudo-provider in
 * `report.findings`; here they're re-attributed to the provider they
 * describe so each provider's summary line reflects its full state.
 */
function renderInventorySummary(report) {
    const out = [];
    out.push(chalk.dim(` ${"─".repeat(58)}`));
    out.push(chalk.bold(" Summary"));
    out.push("");
    // Group by *effective* provider (drift kinds mapped back to source).
    const byProvider = new Map();
    for (const f of report.findings) {
        const key = f.provider === "drift" ? driftToProvider(f.kind) : f.provider;
        const bucket = byProvider.get(key);
        if (bucket) {
            bucket.push(f);
        }
        else {
            byProvider.set(key, [f]);
        }
    }
    const rows = [...byProvider].map(([key, findings]) => rollUpProvider(key, findings));
    // Skipped providers get dim rows so the summary lists every provider
    // hatchkit knows about, not just the ones that returned findings.
    const listed = new Set(byProvider.keys());
    for (const s of report.skipped) {
        if (listed.has(s.provider)) {
            continue;
        }
        listed.add(s.provider);
        rows.push({
            label: providerLabel(s.provider),
            icon: chalk.dim("·"),
            text: chalk.dim(s.reason),
        });
    }
    const width = Math.max(...rows.map((r) => r.label.length), 10);
    for (const r of rows) {
        out.push(` ${r.label.padEnd(width + 2)} ${r.icon} ${r.text}`);
    }
    // Bottom-line takeaway — counts only `expected` missing (the
    // actionable subset); unexpected absences aren't problems to fix.
    out.push("");
    const { drift, expectedMissing: missing, present } = report.summary;
    if (drift > 0 || missing > 0) {
        const parts = [];
        if (drift > 0) {
            parts.push(chalk.yellow(`⚠ ${drift} drift${drift === 1 ? "" : "s"} to reconcile`));
        }
        if (missing > 0) {
            parts.push(chalk.red(`✗ ${missing} expected resource${missing === 1 ? "" : "s"} missing`));
        }
        out.push(` ${parts.join(chalk.dim(" · "))}`);
    }
    else if (present > 0) {
        out.push(` ${chalk.green("✓ All clear")} — ${present} resource${present === 1 ? "" : "s"} tracked, nothing out of sync.`);
    }
    else {
        out.push(chalk.dim(" Nothing matched — try `--name`, `--domain`, or `--repo` to narrow."));
    }
    out.push("");
    return out.join("\n");
}
1810
/** Drift findings live under provider "drift" in `report.findings`,
 * but conceptually they belong to the provider they describe. Pin
 * each drift `kind` to its source provider for the summary roll-up. */
function driftToProvider(driftKind) {
    if (driftKind.startsWith("github-pages")) {
        return "github-pages";
    }
    if (driftKind.startsWith("coolify")) {
        return "coolify";
    }
    switch (driftKind) {
        case "bucket":
        case "bucket-cors":
            return "s3:r2";
        case "missing-secret":
            return "github";
        default:
            // Unknown drift kinds stay under the pseudo-provider.
            return "drift";
    }
}
1824
/**
 * Build one summary row ({ label, icon, text }) for a provider's
 * findings. Priority: any drift → ⚠, else any present → ✓, else
 * missing → ✗ (expected) or dim · (not declared), else an
 * info-only dim row.
 */
function rollUpProvider(providerKey, findings) {
    const label = providerLabel(providerKey);
    const drifted = findings.filter((f) => f.status === "drift");
    const here = findings.filter((f) => f.status === "present");
    const gone = findings.filter((f) => f.status === "missing");
    if (drifted.length > 0) {
        const plural = drifted.length === 1 ? "" : "s";
        return {
            label,
            icon: chalk.yellow("⚠"),
            text: chalk.yellow(`${drifted.length} drift${plural} — see above`),
        };
    }
    if (here.length > 0) {
        return {
            label,
            icon: chalk.green("✓"),
            text: summarizePresent(providerKey, here, gone),
        };
    }
    if (gone.length > 0) {
        // Red ✗ only when the project locally declares this resource
        // should exist. Otherwise dim · with a softer "no match" label —
        // a CLI library shouldn't get a red mark for "no Coolify app".
        const anyExpected = gone.some((f) => f.expected);
        if (anyExpected) {
            return {
                label,
                icon: chalk.red("✗"),
                text: chalk.red(summarizeMissing(providerKey)),
            };
        }
        return {
            label,
            icon: chalk.dim("·"),
            text: chalk.dim("no match (not declared by this project)"),
        };
    }
    // Only `info`-level findings → no actionable state to surface.
    return {
        label,
        icon: chalk.dim("·"),
        text: chalk.dim(findings[0]?.identity ?? ""),
    };
}
1863
/** Compact "what's here" string for a provider that has at least one
 * present finding. Per-provider tuned to keep the line short.
 * @param {string} providerKey - effective provider key (e.g. "coolify", "dns").
 * @param {Array} present - findings with status "present".
 * @param {Array} missing - findings with status "missing"; rendered as a dim suffix count.
 * @returns {string} one-line roll-up for the summary table. */
function summarizePresent(providerKey, present, missing) {
    const partial = missing.length > 0 ? chalk.dim(` (${missing.length} missing)`) : "";
    switch (providerKey) {
        case "coolify": {
            const apps = present.filter((f) => f.kind === "application").map((f) => f.identity);
            const projects = present.filter((f) => f.kind === "project");
            const parts = [];
            if (apps.length)
                // Pluralize "app" like every other count in this file
                // ("1 app: x" / "2 apps: x, y") — was a bare "app" before.
                parts.push(`${apps.length} app${apps.length === 1 ? "" : "s"}: ${apps.join(", ")}`);
            if (projects.length)
                parts.push(`${projects.length} project${projects.length === 1 ? "" : "s"}`);
            return (parts.join(", ") || present[0].identity) + partial;
        }
        case "dns": {
            const zone = present.find((f) => f.kind === "zone");
            const records = present.filter((f) => f.kind === "dns-record");
            const base = zone
                ? `${zone.identity} (${records.length} record${records.length === 1 ? "" : "s"})`
                : `${records.length} record${records.length === 1 ? "" : "s"}`;
            return base + partial;
        }
        case "s3:r2": {
            const buckets = present.filter((f) => f.kind === "bucket").map((f) => f.identity);
            return (`${buckets.length} bucket${buckets.length === 1 ? "" : "s"}: ${buckets.join(", ")}` +
                partial);
        }
        case "github": {
            const repo = present.find((f) => f.kind === "repository");
            if (repo) {
                // Detail is "private · default: main · …" — pull just the
                // visibility (first segment) to keep the line short.
                const visibility = repo.detail?.split(" · ")[0];
                return `${repo.identity}${visibility ? chalk.dim(` (${visibility})`) : ""}`;
            }
            return present[0].identity;
        }
        case "github-pages": {
            const site = present.find((f) => f.kind === "page-site");
            if (site) {
                const cname = site.detail?.match(/cname:\s*([^\s·]+)/)?.[1];
                return cname ? `live at ${cname}` : "enabled";
            }
            return "enabled";
        }
        case "resend": {
            const domains = present.filter((f) => f.kind === "verified-domain").map((f) => f.identity);
            return domains.join(", ") + partial;
        }
        case "glitchtip":
        case "openpanel": {
            const projects = present.filter((f) => f.kind === "project").map((f) => f.identity);
            return projects.join(", ") + partial;
        }
        case "stripe": {
            const hooks = present.filter((f) => f.kind === "webhook-endpoint");
            return `${hooks.length} webhook${hooks.length === 1 ? "" : "s"}` + partial;
        }
        default:
            // Unknown provider: fall back to the first identity.
            return present[0].identity + partial;
    }
}
1926
/** Compact "what's not here" string for a provider where every
 * finding is `missing`. The detailed reason is in the per-provider
 * block above — this is just the headline. */
function summarizeMissing(providerKey) {
    const headlines = {
        coolify: "no matching app",
        dns: "no zone for this domain",
        "s3:r2": "no matching buckets",
        github: "repo not found",
        "github-pages": "Pages not enabled",
        resend: "domain not verified",
        glitchtip: "no matching project",
        openpanel: "no matching project",
        stripe: "no webhook for this domain",
    };
    // Own-property check so prototype names (e.g. "toString") still
    // fall back to the generic headline.
    return Object.hasOwn(headlines, providerKey) ? headlines[providerKey] : "not found";
}
1952
/** Human-facing display label for a provider key.
 * Unknown keys pass through unchanged. */
function providerLabel(key) {
    const labels = {
        coolify: "Coolify",
        dns: "DNS",
        "s3:r2": "R2",
        "s3:hetzner": "Hetzner S3",
        "s3:aws": "AWS S3",
        github: "GitHub",
        "github-pages": "Pages",
        resend: "Resend",
        glitchtip: "GlitchTip",
        openpanel: "OpenPanel",
        stripe: "Stripe",
    };
    // Own-property check so prototype names (e.g. "constructor")
    // pass through like any other unknown key.
    return Object.hasOwn(labels, key) ? labels[key] : key;
}
1980
+ //# sourceMappingURL=inventory.js.map