hatchkit 0.1.39 → 0.1.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. package/dist/adopt.js +6 -4
  2. package/dist/adopt.js.map +1 -1
  3. package/dist/deploy/coolify-app.d.ts +9 -9
  4. package/dist/deploy/coolify-app.d.ts.map +1 -1
  5. package/dist/deploy/coolify-app.js +14 -19
  6. package/dist/deploy/coolify-app.js.map +1 -1
  7. package/dist/deploy/coolify.d.ts.map +1 -1
  8. package/dist/deploy/coolify.js +6 -2
  9. package/dist/deploy/coolify.js.map +1 -1
  10. package/dist/deploy/keys.d.ts +7 -2
  11. package/dist/deploy/keys.d.ts.map +1 -1
  12. package/dist/deploy/keys.js +27 -7
  13. package/dist/deploy/keys.js.map +1 -1
  14. package/dist/deploy/rollback.d.ts.map +1 -1
  15. package/dist/deploy/rollback.js +80 -22
  16. package/dist/deploy/rollback.js.map +1 -1
  17. package/dist/deploy/sync.d.ts +10 -7
  18. package/dist/deploy/sync.d.ts.map +1 -1
  19. package/dist/deploy/sync.js +13 -9
  20. package/dist/deploy/sync.js.map +1 -1
  21. package/dist/index.js +77 -6
  22. package/dist/index.js.map +1 -1
  23. package/dist/inventory.d.ts +107 -0
  24. package/dist/inventory.d.ts.map +1 -0
  25. package/dist/inventory.js +1522 -0
  26. package/dist/inventory.js.map +1 -0
  27. package/dist/prompts.d.ts +0 -7
  28. package/dist/prompts.d.ts.map +1 -1
  29. package/dist/prompts.js +1 -7
  30. package/dist/prompts.js.map +1 -1
  31. package/dist/scaffold/app.js +1 -1
  32. package/dist/scaffold/app.js.map +1 -1
  33. package/dist/scaffold/infra.js +1 -1
  34. package/dist/utils/coolify-api.d.ts +11 -0
  35. package/dist/utils/coolify-api.d.ts.map +1 -1
  36. package/dist/utils/coolify-api.js +25 -0
  37. package/dist/utils/coolify-api.js.map +1 -1
  38. package/dist/utils/coolify-server-ips.d.ts +6 -12
  39. package/dist/utils/coolify-server-ips.d.ts.map +1 -1
  40. package/dist/utils/coolify-server-ips.js +26 -81
  41. package/dist/utils/coolify-server-ips.js.map +1 -1
  42. package/package.json +1 -1
@@ -0,0 +1,1522 @@
1
+ /*
2
+ * `hatchkit inventory` — read-only survey of what already exists for a
3
+ * project/site across every configured provider.
4
+ *
5
+ * Different beast from `status` and `doctor`:
6
+ * · `status` answers "which provider credentials have I stored?"
7
+ * · `doctor` answers "are those credentials still valid?"
8
+ * · `inventory` answers "given THIS project (cwd / name / domain /
9
+ * repo), what resources already exist on the providers?"
10
+ *
11
+ * The flow:
12
+ * 1. Infer identity from the current directory — manifest, package.json,
13
+ * git remote, CNAME file, etc. Asks the user only for what couldn't
14
+ * be inferred (and confirms inferred values unless --yes).
15
+ * 2. Scan every configured provider in parallel for resources matching
16
+ * that identity (Coolify app by name, R2 buckets, DNS zone records,
17
+ * GitHub Pages config, Resend domain verification, etc.).
18
+ * 3. Cross-reference findings to flag drift — e.g. Coolify app fqdn
19
+ * doesn't match DNS, manifest bucket name doesn't exist live,
20
+ * gh-pages workflow committed but Pages isn't enabled, CORS on the
21
+ * live bucket differs from what the manifest records.
22
+ * 4. Render — grouped tree (human) or `--json` for parsing.
23
+ *
24
+ * Everything is read-only. No mutations. Safe to run anywhere.
25
+ */
26
+ import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
27
+ import { join, resolve } from "node:path";
28
+ import { confirm, input } from "@inquirer/prompts";
29
+ import chalk from "chalk";
30
+ import { getCoolifyConfig, getDnsConfig, getGlitchtipConfig, getOpenpanelConfig, getResendConfig, getS3Config, getStripeConfig, } from "./config.js";
31
+ import { locateEnvKeysFile, locateEnvProductionFile } from "./deploy/keys.js";
32
+ import { MANIFEST_FILENAME, readManifest } from "./scaffold/manifest.js";
33
+ import { CloudflareApi } from "./utils/cloudflare-api.js";
34
+ import { CoolifyApi } from "./utils/coolify-api.js";
35
+ import { exec, execOk } from "./utils/exec.js";
36
+ import { SECRET_KEYS, getSecret } from "./utils/secrets.js";
37
+ import { getCliVersion } from "./utils/version.js";
38
/**
 * CLI entry point for `hatchkit inventory`: collect the report, then
 * render it as JSON (`--json`) or as the human-readable tree.
 * `--yes` auto-accepts inferred identity; JSON output is implicitly
 * non-interactive so it never prompts.
 */
export async function runInventory(cwd, opts = {}) {
    const interactive = !opts.json && !opts.yes;
    const report = await collectInventory(cwd, {
        input: opts.input,
        interactive,
        autoAccept: opts.yes ?? false,
    });
    if (opts.json) {
        console.log(JSON.stringify(report, null, 2));
    }
    else {
        console.log(renderInventoryHuman(report));
    }
}
50
/**
 * Gather the full inventory report without rendering it.
 *
 * Pipeline: infer local signals → resolve git remote → infer identity
 * (optionally confirming gaps interactively) → run all provider scans
 * in parallel → run drift checks over the scan results → summarize.
 *
 * @param {string} cwd - directory to inspect (resolved to absolute).
 * @param {object} [opts] - { input?: partial identity overrides,
 *   interactive?: prompt for/confirm gaps, autoAccept?: skip confirms }.
 * @returns {Promise<object>} report with inferred identity, provenance
 *   (`sources`), local signals, findings, drifts, skips, and counts.
 */
export async function collectInventory(cwd, opts = {}) {
    const absCwd = resolve(cwd);
    const local = inferLocal(absCwd);
    // Resolve git remote unconditionally — both for repo inference and
    // for the repo-vs-Coolify-source drift check. Cheap (~one subprocess
    // call) and the result is the same whether or not we prompt.
    const git = await resolveGitRemote(local);
    local.gitRemote = git.remote;
    local.gitDefaultBranch = git.defaultBranch;
    local.hasGitHubRemote = git.hasGitHubRemote;
    // Identity inference — order: explicit input > manifest > package > git/CNAME.
    const { input: inferred, sources } = inferIdentity(local, opts.input ?? {});
    if (git.repo && !inferred.repo) {
        inferred.repo = git.repo;
        sources.repo = "git-remote";
    }
    let identity = inferred;
    if (opts.interactive) {
        identity = await promptForGaps(local, inferred, sources, !!opts.autoAccept);
    }
    // Provider scans — every one is best-effort and returns its own
    // findings + skip reason. Running them in parallel keeps wall-time
    // close to the slowest single round-trip.
    const scanResults = await Promise.all([
        scanCoolify(identity),
        scanDns(identity),
        scanR2(identity, local.manifest),
        scanS3Other(identity),
        scanGitHub(identity),
        scanResend(identity),
        scanGlitchtip(identity),
        scanOpenpanel(identity),
        scanStripe(identity),
    ]);
    // Flatten per-provider results into single findings/skipped lists,
    // preserving scan order for stable rendering.
    const findings = [];
    const skipped = [];
    for (const r of scanResults) {
        findings.push(...r.findings);
        skipped.push(...r.skipped);
    }
    // Drift checks build on already-collected findings + a few extra
    // targeted lookups. They produce more findings (status: "drift" or
    // sometimes "missing") which we append to the same list.
    const driftFindings = await detectDrift(identity, local, scanResults);
    findings.push(...driftFindings);
    const drifts = findings.filter((f) => f.status === "drift");
    const present = findings.filter((f) => f.status === "present").length;
    const missing = findings.filter((f) => f.status === "missing").length;
    return {
        cliVersion: getCliVersion(),
        cwd: absCwd,
        inferred: identity,
        sources,
        local,
        findings,
        drifts,
        skipped,
        summary: { present, drift: drifts.length, missing, skipped: skipped.length },
    };
}
110
+ // ---------------------------------------------------------------------------
111
+ // Local inference (no network, no prompts)
112
+ // ---------------------------------------------------------------------------
113
/**
 * Survey the local directory for identity signals — no network, no
 * prompts. Probes: hatchkit manifest, package.json name/description,
 * conventional server/client dirs, compose file, Dockerfile, GitHub
 * workflows (classified by body content), CNAME files, dotenvx state,
 * and the git root.
 *
 * @param {string} cwd - absolute directory to inspect.
 * @returns {object} local-signal record; `hasGitHubRemote` is a
 *   placeholder here and is overwritten by resolveGitRemote later.
 */
function inferLocal(cwd) {
    const manifestPresent = existsSync(join(cwd, MANIFEST_FILENAME));
    let manifest;
    if (manifestPresent) {
        try {
            manifest = readManifest(cwd) ?? undefined;
        }
        catch {
            // Malformed manifest — leave undefined; inferIdentity falls
            // through to other signals.
        }
    }
    let packageName;
    let packageDescription;
    try {
        const pkg = JSON.parse(readFileSync(join(cwd, "package.json"), "utf-8"));
        // Strip any npm scope ("@org/name" → "name") — the bare name is
        // what matches remote resource naming.
        if (typeof pkg.name === "string")
            packageName = pkg.name.replace(/^@[^/]+\//, "");
        if (typeof pkg.description === "string")
            packageDescription = pkg.description.trim();
    }
    catch {
        // No package.json — fine.
    }
    // Conventional locations, most-specific (monorepo) first.
    const serverDir = firstExistingDir(cwd, [
        "packages/server",
        "apps/server",
        "apps/api",
        "apps/backend",
        "server",
        "backend",
        "api",
        "src/server",
        "services/server",
    ]);
    const clientDir = firstExistingDir(cwd, [
        "packages/client",
        "packages/web",
        "packages/frontend",
        "apps/web",
        "apps/client",
        "apps/frontend",
        "client",
        "frontend",
        "web",
        "src/client",
    ]);
    const composeCandidates = [
        "compose.yaml",
        "compose.yml",
        "docker-compose.yaml",
        "docker-compose.yml",
    ];
    const composePath = composeCandidates.map((n) => join(cwd, n)).find((p) => existsSync(p));
    const hasDockerfile = existsSync(join(cwd, "Dockerfile"));
    // Workflow detection. We classify a workflow as "pages" when its body
    // mentions `actions/deploy-pages` (the standard GitHub Pages action)
    // or `peaceiris/actions-gh-pages` (the popular community alternative).
    // We classify as "deploy" when it looks like hatchkit's deploy.yml
    // (mentions `COOLIFY_` env or webhook). Reading the body is the only
    // reliable signal — filenames vary too much.
    //
    // Workflows + CNAME files live at the *repo* root by convention, not
    // wherever the user invoked us. Walk up to the git root so inventory
    // from a subdir (e.g. `apps/web/`) still picks them up.
    const repoRoot = findGitRoot(cwd) ?? cwd;
    const workflowsDir = join(repoRoot, ".github", "workflows");
    let ghPagesWorkflowPath;
    let deployWorkflowPath;
    if (existsSync(workflowsDir)) {
        try {
            for (const f of readdirSync(workflowsDir)) {
                if (!/\.ya?ml$/i.test(f))
                    continue;
                const full = join(workflowsDir, f);
                let body;
                try {
                    body = readFileSync(full, "utf-8");
                }
                catch {
                    continue;
                }
                // First match wins for each category; a single workflow
                // can satisfy both.
                if (!ghPagesWorkflowPath &&
                    /actions\/deploy-pages|peaceiris\/actions-gh-pages/i.test(body)) {
                    ghPagesWorkflowPath = full;
                }
                if (!deployWorkflowPath && /COOLIFY_(WEBHOOK|TOKEN)|coolify\.io/i.test(body)) {
                    deployWorkflowPath = full;
                }
            }
        }
        catch {
            // Unreadable workflows dir — skip.
        }
    }
    // CNAME file — GitHub Pages writes this at the publish-root to bind
    // a custom domain. Locations cover the common static-site layouts:
    // repo root (Jekyll), `docs/` (Pages-from-docs setup), `static/` and
    // `docs/static/` (Docusaurus), `public/` and `docs/public/` (Vite /
    // Next.js docs starters), `site/` and `www/` for the loose
    // conventions, and `website/static/` for older Docusaurus.
    let cnameFile;
    for (const rel of [
        "CNAME",
        "docs/CNAME",
        "docs/static/CNAME",
        "docs/public/CNAME",
        "site/CNAME",
        "www/CNAME",
        "website/static/CNAME",
        "static/CNAME",
        "public/CNAME",
    ]) {
        const p = join(repoRoot, rel);
        if (existsSync(p)) {
            try {
                const content = readFileSync(p, "utf-8").trim();
                if (content) {
                    cnameFile = { path: p, content };
                    break;
                }
            }
            catch {
                // Unreadable — skip.
            }
        }
    }
    // dotenvx state — same probes as `hatchkit adopt`.
    const envProdPath = locateEnvProductionFile(cwd);
    let dotenvxEncrypted = false;
    if (envProdPath && existsSync(envProdPath)) {
        try {
            // Only the header matters; 2000 bytes is plenty to find the
            // dotenvx public-key marker without reading the whole file.
            const head = readFileSync(envProdPath, "utf-8").slice(0, 2000);
            dotenvxEncrypted = /DOTENV_PUBLIC_KEY_PRODUCTION/.test(head);
        }
        catch {
            // Unreadable — leave false.
        }
    }
    const envKeysPresent = !!locateEnvKeysFile(cwd);
    // `.git` lives at the repo root — in a worktree it's a file pointing
    // at the main repo, in a normal clone it's a directory. Either form
    // is fine for existsSync. Walk up from cwd so running `hatchkit
    // inventory` from a subdir (e.g. `apps/web/`) still picks up the
    // repo root for git lookups.
    const gitRoot = findGitRoot(cwd);
    return {
        cwd,
        isGitRepo: !!gitRoot,
        hasGitHubRemote: false, // resolved below
        packageName,
        packageDescription,
        manifestPresent,
        manifest,
        serverDir,
        clientDir,
        hasDockerfile,
        composePath,
        ghPagesWorkflowPath,
        deployWorkflowPath,
        cnameFile,
        dotenvxEncrypted,
        envKeysPresent,
    };
}
278
/**
 * Walk up from `startDir` to the nearest directory containing `.git`
 * (a directory in a normal clone, a file in a worktree — either
 * satisfies existsSync).
 *
 * Capped at 12 levels — generous for any reasonable monorepo, and a
 * hard ceiling against pathological symlink loops.
 *
 * @param {string} startDir - directory to start the walk from.
 * @returns {string|undefined} repo root, or undefined when none found.
 */
function findGitRoot(startDir) {
    let current = startDir;
    for (let depth = 0; depth < 12; depth++) {
        if (existsSync(join(current, ".git")))
            return current;
        const parent = resolve(join(current, ".."));
        if (parent === current)
            return undefined; // hit the filesystem root
        current = parent;
    }
    return undefined;
}
293
/**
 * Return the first of `rels` (joined onto `root`) that exists and is a
 * directory, or undefined when none do.
 *
 * @param {string} root - base directory.
 * @param {string[]} rels - relative candidate paths, in priority order.
 * @returns {string|undefined} absolute path of the first match.
 */
function firstExistingDir(root, rels) {
    for (const rel of rels) {
        const candidate = join(root, rel);
        try {
            if (statSync(candidate).isDirectory())
                return candidate;
        }
        catch {
            // ENOENT — try the next candidate.
        }
    }
    return undefined;
}
306
/**
 * Derive the project identity (name / domain / repo) from local signals
 * and explicit overrides, recording the provenance of each value in
 * `sources` so the renderer and prompts can show confidence.
 *
 * Precedence:
 *   name:   flag > manifest > package.json > cwd basename (last resort)
 *   domain: flag > manifest > CNAME file (never guessed from the name —
 *           too speculative; the matching layer already tries common
 *           domain patterns against any zone we list)
 *   repo:   flag only here — the git remote is resolved asynchronously
 *           by the caller (resolveGitRemote) before prompting.
 *
 * @returns {{input: object, sources: object}} inferred values + provenance.
 */
function inferIdentity(local, override) {
    const out = {};
    const sources = {};
    if (override.name) {
        out.name = override.name;
        sources.name = "flag";
    }
    else if (local.manifest?.name) {
        out.name = local.manifest.name;
        sources.name = "manifest";
    }
    else if (local.packageName) {
        out.name = local.packageName;
        sources.name = "package.json";
    }
    else {
        // Last-resort guess: the directory basename, accepted only when
        // it looks like a plausible slug. Tagged with its own source so
        // the renderer can show low confidence — and so interactive mode
        // re-confirms before letting it drive scans.
        const basename = local.cwd.split("/").filter(Boolean).pop();
        if (basename && /^[a-z0-9][a-z0-9-]*$/i.test(basename)) {
            out.name = basename;
            sources.name = "cwd-basename";
        }
    }
    if (override.domain) {
        out.domain = override.domain;
        sources.domain = "flag";
    }
    else if (local.manifest?.domain) {
        out.domain = local.manifest.domain;
        sources.domain = "manifest";
    }
    else if (local.cnameFile?.content) {
        out.domain = local.cnameFile.content;
        sources.domain = "cname-file";
    }
    if (override.repo) {
        out.repo = override.repo;
        sources.repo = "flag";
    }
    // No repo flag: leave unset — the caller fills it from the git
    // remote (resolved async) before prompting.
    return { input: out, sources };
}
359
/**
 * Resolve the origin remote URL, the derived owner/name slug, and the
 * remote default branch for the repo containing `local.cwd`.
 *
 * Best-effort: a missing git binary, absent remote, or unset
 * origin/HEAD ref simply leaves the corresponding field undefined —
 * nothing here throws.
 */
async function resolveGitRemote(local) {
    if (!local.isGitRepo)
        return { hasGitHubRemote: false };
    // Run git from the repo root if we found one — works from any subdir.
    const gitCwd = findGitRoot(local.cwd) ?? local.cwd;
    // Run one git command; return trimmed stdout on success, undefined
    // on any failure (non-zero exit, empty output, git not installed).
    const runGit = async (args) => {
        try {
            const res = await exec("git", args, { cwd: gitCwd, silent: true });
            if (res.exitCode === 0) {
                const text = res.stdout.trim();
                if (text)
                    return text;
            }
        }
        catch {
            // git missing or command failed — treat as "no answer".
        }
        return undefined;
    };
    const remote = await runGit(["remote", "get-url", "origin"]);
    // symbolic-ref prints "origin/main" — strip the remote prefix.
    const headRef = await runGit(["symbolic-ref", "--short", "refs/remotes/origin/HEAD"]);
    const defaultBranch = headRef?.replace(/^origin\//, "") || undefined;
    const repo = repoSlugFromUrl(remote);
    return {
        remote,
        repo,
        defaultBranch,
        hasGitHubRemote: !!repo,
    };
}
403
/**
 * Extract an "owner/name" GitHub slug from a git remote URL.
 *
 * Recognized forms:
 *   · git@github.com:owner/name(.git)             (scp-style SSH)
 *   · ssh://git@github.com/owner/name(.git)(/)    (protocol-style SSH)
 *   · http(s)://github.com/owner/name(.git)(/…)   (git+https:// accepted)
 *
 * @param {string|undefined} url - remote URL, possibly undefined.
 * @returns {string|undefined} "owner/name", or undefined for non-GitHub
 *   or unparseable URLs.
 */
function repoSlugFromUrl(url) {
    if (!url)
        return undefined;
    const ssh = url.match(/^git@github\.com:([^/]+)\/([^/]+?)(?:\.git)?$/);
    if (ssh)
        return `${ssh[1]}/${ssh[2]}`;
    // ssh:// protocol form — some tooling and credential helpers rewrite
    // scp-style remotes into this shape.
    const sshProto = url.match(/^ssh:\/\/git@github\.com\/([^/]+)\/([^/]+?)(?:\.git)?\/?$/);
    if (sshProto)
        return `${sshProto[1]}/${sshProto[2]}`;
    // Optional "git+" scheme prefix (npm-style repository URLs).
    const https = url.match(/^(?:git\+)?https?:\/\/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?(?:\/.*)?$/);
    if (https)
        return `${https[1]}/${https[2]}`;
    return undefined;
}
414
+ // ---------------------------------------------------------------------------
415
+ // Interactive prompts (only for unknowns)
416
+ // ---------------------------------------------------------------------------
417
/**
 * Interactive phase: show what was inferred, then confirm each value or
 * prompt to fill the gaps. Name is required (always confirmed or asked);
 * domain and repo are optional and may be left blank to skip the scans
 * that need them. With `autoAccept` (--yes) inferred values are taken
 * as-is and missing optional values are silently skipped.
 *
 * @returns {Promise<object>} the finalized identity input.
 */
async function promptForGaps(local, inferred, sources, autoAccept) {
    // Print a summary of what we found before asking.
    console.log(chalk.bold("\n  Detected from this directory:"));
    console.log(`  ${labelRow("Project name", inferred.name, sources.name)}`);
    console.log(`  ${labelRow("Domain", inferred.domain, sources.domain)}`);
    console.log(`  ${labelRow("GitHub repo", inferred.repo, sources.repo)}`);
    if (local.manifestPresent)
        console.log(chalk.dim(`  + ${MANIFEST_FILENAME} present`));
    if (local.dotenvxEncrypted)
        console.log(chalk.dim("  + .env.production is dotenvx-encrypted"));
    if (local.composePath)
        console.log(chalk.dim(`  + compose: ${rel(local.cwd, local.composePath)}`));
    if (local.ghPagesWorkflowPath)
        console.log(chalk.dim(`  + gh-pages workflow: ${rel(local.cwd, local.ghPagesWorkflowPath)}`));
    if (local.deployWorkflowPath)
        console.log(chalk.dim(`  + Coolify deploy workflow: ${rel(local.cwd, local.deployWorkflowPath)}`));
    if (local.cnameFile)
        console.log(chalk.dim(`  + CNAME at ${rel(local.cwd, local.cnameFile.path)} → ${local.cnameFile.content}`));
    console.log("");
    const out = { ...inferred };
    // Name: required for most lookups. Always confirm or prompt.
    if (!out.name) {
        out.name = await input({
            message: "Project / app name (matches Coolify apps, buckets, clients):",
            validate: (v) => (v.trim().length > 0 ? true : "Required"),
        });
    }
    else if (!autoAccept) {
        const ok = await confirm({
            message: `Use ${chalk.bold(out.name)} as the project name?`,
            default: true,
        });
        if (!ok) {
            out.name = await input({
                message: "Project / app name:",
                default: out.name,
                validate: (v) => (v.trim().length > 0 ? true : "Required"),
            });
        }
    }
    // Domain: optional. Skip lookups that need it if blank.
    if (!out.domain) {
        const want = autoAccept
            ? false
            : await confirm({
                message: "Want to scan for resources tied to a specific domain?",
                default: true,
            });
        if (want) {
            out.domain = await input({
                message: "Primary domain (e.g. myapp.com — empty to skip):",
            });
            // Normalize blank answers to undefined so scans skip cleanly.
            out.domain = out.domain?.trim() || undefined;
        }
    }
    else if (!autoAccept) {
        const ok = await confirm({
            message: `Use ${chalk.bold(out.domain)} as the primary domain?`,
            default: true,
        });
        if (!ok) {
            out.domain = await input({ message: "Primary domain (empty to skip):", default: out.domain });
            out.domain = out.domain?.trim() || undefined;
        }
    }
    // Repo: optional. Skip GH-side lookups if blank.
    if (!out.repo) {
        const want = autoAccept
            ? false
            : await confirm({
                message: "Want to scan a GitHub repo (Pages, secrets, visibility)?",
                default: false,
            });
        if (want) {
            out.repo = await input({
                message: "GitHub repo slug (owner/name — empty to skip):",
                validate: (v) => !v.trim() || /^[^/\s]+\/[^/\s]+$/.test(v.trim()) ? true : "Expected owner/name format",
            });
            out.repo = out.repo?.trim() || undefined;
        }
    }
    else if (!autoAccept) {
        const ok = await confirm({
            message: `Use ${chalk.bold(out.repo)} as the GitHub repo?`,
            default: true,
        });
        if (!ok) {
            out.repo = await input({
                message: "GitHub repo slug (owner/name — empty to skip):",
                default: out.repo,
                validate: (v) => !v.trim() || /^[^/\s]+\/[^/\s]+$/.test(v.trim()) ? true : "Expected owner/name format",
            });
            out.repo = out.repo?.trim() || undefined;
        }
    }
    return out;
}
514
/**
 * Format one detection-summary row: padded label, ✓/· marker, bold
 * value, and a dimmed provenance tag ("← manifest", "← guess", …).
 */
function labelRow(label, value, source) {
    const name = label.padEnd(14);
    if (!value)
        return `${name} ${chalk.dim("·")} ${chalk.dim("(not detected)")}`;
    const origin = chalk.dim(source ? ` ← ${source}` : " ← guess");
    return `${name} ${chalk.green("✓")} ${chalk.bold(value)}${origin}`;
}
520
/** Render `abs` relative to `cwd` for display; paths outside `cwd` pass
 * through unchanged. */
function rel(cwd, abs) {
    const prefix = `${cwd}/`;
    return abs.startsWith(prefix) ? abs.slice(prefix.length) : abs;
}
525
/**
 * Probe Coolify for projects and applications whose name matches the
 * identity (including the -server/-client/-web/-api alias family).
 * Matched apps are hydrated with their full payload so the drift
 * checker can compare fqdn / git repository / server later.
 *
 * Never throws: missing config, missing name, or a failed request all
 * become skip entries.
 *
 * @returns {Promise<object>} { provider, findings, skipped, raw? } —
 *   raw.hydrated holds the full application objects.
 */
async function scanCoolify(input) {
    const provider = "coolify";
    const findings = [];
    const skipped = [];
    const cfg = await getCoolifyConfig();
    if (!cfg) {
        skipped.push({ provider, reason: "not configured (`hatchkit config add coolify`)" });
        return { provider, findings, skipped };
    }
    if (!input.name) {
        skipped.push({ provider, reason: "no project name to match against" });
        return { provider, findings, skipped };
    }
    const api = new CoolifyApi({ url: cfg.url, token: cfg.token });
    let projects = [];
    let apps = [];
    try {
        // Per-call .catch keeps one failing list from sinking the other.
        [projects, apps] = await Promise.all([
            api.listProjects().catch(() => []),
            api.listApplications().catch(() => []),
        ]);
    }
    catch (err) {
        skipped.push({
            provider,
            reason: `Coolify request failed: ${err.message.split("\n")[0]}`,
        });
        return { provider, findings, skipped };
    }
    const wantedNames = nameAliases(input.name);
    const projectMatches = projects.filter((p) => wantedNames.includes(p.name));
    const appMatches = apps.filter((a) => wantedNames.includes(a.name));
    for (const p of projectMatches) {
        findings.push({
            provider,
            kind: "project",
            identity: p.name,
            status: "present",
            detail: `Coolify project (id: ${p.id})`,
        });
    }
    // Hydrate each app match with its full details so drift can compare
    // fqdn / git_repository / server uuid. One call per match is cheap.
    const hydrated = [];
    for (const a of appMatches) {
        try {
            const full = await api.getApplication(a.uuid);
            hydrated.push(full);
            const fqdns = collectFqdns(full);
            const detail = [
                `Coolify app (${full.buildPack ?? "?"})`,
                fqdns.length ? `fqdn: ${fqdns.join(", ")}` : "no fqdn",
                full.gitRepository ? `repo: ${full.gitRepository}` : undefined,
            ]
                .filter(Boolean)
                .join(" · ");
            findings.push({
                provider,
                kind: "application",
                identity: full.name,
                status: "present",
                detail,
            });
        }
        catch (err) {
            // Detail fetch failed but the app exists — surface as info
            // rather than dropping the match.
            findings.push({
                provider,
                kind: "application",
                identity: a.name,
                status: "info",
                detail: `couldn't load detail: ${err.message.split("\n")[0]}`,
            });
        }
    }
    if (projectMatches.length === 0 && appMatches.length === 0) {
        findings.push({
            provider,
            kind: "application",
            identity: input.name,
            status: "missing",
            detail: `no Coolify project or app named ${wantedNames.join(" / ")} (${apps.length} app(s) total)`,
        });
    }
    return { provider, findings, skipped, raw: { hydrated } };
}
610
/**
 * Collect the bare hostnames a Coolify app serves: the comma-separated
 * `fqdn` field plus docker-compose domain entries, with scheme and path
 * stripped, de-duplicated in first-seen order.
 */
function collectFqdns(app) {
    const hosts = [];
    const bareHost = (s) => s.replace(/^https?:\/\//, "").replace(/\/.*$/, "");
    if (app.fqdn) {
        for (const piece of app.fqdn.split(",")) {
            const host = bareHost(piece.trim());
            if (host)
                hosts.push(host);
        }
    }
    if (app.dockerComposeDomains) {
        for (const entry of app.dockerComposeDomains) {
            const host = bareHost(entry.domain);
            if (host)
                hosts.push(host);
        }
    }
    return [...new Set(hosts)];
}
631
/** Project name aliases we'll match against remote resources.
 * Keep in sync with `hatchkit adopt`'s detectProject — same family of
 * conventions (raw, -server, -client, -web). */
function nameAliases(name) {
    const suffixes = ["", "-server", "-client", "-web", "-api"];
    return suffixes.map((suffix) => `${name}${suffix}`);
}
637
/**
 * Probe Cloudflare DNS for the identity's domain: locate the zone for
 * the apex, then run exact name+type lookups for a curated set of
 * likely record names (see relevantRecordProbes).
 *
 * Skips (rather than fails) when DNS is unconfigured, the provider is
 * not Cloudflare, the token is absent, no domain is known, or the zone
 * lookup errors.
 *
 * @returns {Promise<object>} { provider, findings, skipped, raw? } —
 *   raw carries the zone and matched records for the drift checker.
 */
async function scanDns(input) {
    const provider = "dns";
    const findings = [];
    const skipped = [];
    const cfg = await getDnsConfig();
    if (!cfg) {
        skipped.push({ provider, reason: "not configured (`hatchkit config add dns`)" });
        return { provider, findings, skipped };
    }
    if (cfg.provider !== "cloudflare") {
        skipped.push({
            provider,
            reason: `${cfg.provider} provider has no list API exposed (Cloudflare only for now)`,
        });
        return { provider, findings, skipped };
    }
    if (!cfg.apiToken) {
        skipped.push({ provider, reason: "Cloudflare API token missing from keychain" });
        return { provider, findings, skipped };
    }
    if (!input.domain) {
        skipped.push({ provider, reason: "no domain to look up" });
        return { provider, findings, skipped };
    }
    const cf = new CloudflareApi({ token: cfg.apiToken });
    const apex = apexOf(input.domain);
    let zone;
    try {
        zone = await cf.getZoneByName(apex);
    }
    catch (err) {
        skipped.push({
            provider,
            reason: `Cloudflare zone lookup failed: ${err.message.split("\n")[0]}`,
        });
        return { provider, findings, skipped };
    }
    if (!zone) {
        // Auth worked but the zone isn't there — that's a "missing"
        // finding, not a skip.
        findings.push({
            provider,
            kind: "zone",
            identity: apex,
            status: "missing",
            detail: "no Cloudflare zone for this apex",
        });
        return { provider, findings, skipped };
    }
    findings.push({
        provider,
        kind: "zone",
        identity: zone.name,
        status: "present",
        detail: `zone id ${zone.id}`,
    });
    // Probe a curated set of relevant record names. For each we run an
    // exact name+type lookup — much cheaper than listing every record in
    // the zone and filtering. Misses the long tail but covers >95% of
    // hatchkit-managed naming.
    const probes = relevantRecordProbes(input);
    const dnsRecords = [];
    for (const probe of probes) {
        try {
            const rec = await cf.findRecord(zone.id, probe.name, probe.type);
            if (rec) {
                dnsRecords.push(rec);
                findings.push({
                    provider,
                    kind: "dns-record",
                    identity: `${rec.name} ${rec.type}`,
                    status: "present",
                    detail: `${rec.content}${rec.proxied ? " (proxied)" : ""}`,
                });
            }
        }
        catch {
            // Record probe failed — skip silently; the zone-level finding
            // already proves auth works.
        }
    }
    return { provider, findings, skipped, raw: { zone, dnsRecords } };
}
718
/**
 * Take the apex (registrable) domain as the last two labels, after
 * dropping a trailing dot. Crude but adequate — no public-suffix
 * handling, so multi-part TLDs like "co.uk" collapse incorrectly.
 */
function apexOf(domain) {
    const labels = domain.replace(/\.$/, "").split(".");
    return labels.length > 2 ? labels.slice(-2).join(".") : labels.join(".");
}
725
/**
 * Curated DNS names worth probing for this identity: the domain itself,
 * the apex, common service subdomains, and `<name>.<apex>` when a
 * project name is known. Each candidate is probed as both A and CNAME.
 *
 * @returns {Array<{name: string, type: string}>} probe list; empty when
 *   no domain is known.
 */
function relevantRecordProbes(input) {
    if (!input.domain)
        return [];
    const apex = apexOf(input.domain);
    // Set dedupes the case where input.domain already equals one of the
    // conventional names.
    const hosts = new Set([
        input.domain,
        apex,
        `www.${apex}`,
        `api.${apex}`,
        `s3.${apex}`,
        `assets.${apex}`,
        `cdn.${apex}`,
        `docs.${apex}`,
    ]);
    if (input.name)
        hosts.add(`${input.name}.${apex}`);
    const probes = [];
    for (const host of hosts) {
        probes.push({ name: host, type: "A" }, { name: host, type: "CNAME" });
    }
    return probes;
}
748
/**
 * Probe Cloudflare R2 for buckets tied to this project. Candidate names
 * come from the manifest (authoritative) plus naming-convention guesses
 * (`<name>-assets`, `<name>-state`, `<name>`). For each live bucket we
 * also fetch CORS rules and custom domains for the drift checker.
 *
 * Skips when R2 is unconfigured, the admin token is missing, or the
 * account id can't be derived from the manifest / endpoint URL.
 *
 * @returns {Promise<object>} { provider, findings, skipped, raw? } —
 *   raw holds accountId, per-candidate live state, and manifest buckets.
 */
async function scanR2(input, manifest) {
    const provider = "s3:r2";
    const findings = [];
    const skipped = [];
    const cfg = await getS3Config("r2");
    if (!cfg) {
        skipped.push({ provider, reason: "R2 not configured (`hatchkit config add s3` → r2)" });
        return { provider, findings, skipped };
    }
    const adminToken = await getSecret(SECRET_KEYS.r2AdminToken);
    if (!adminToken) {
        skipped.push({ provider, reason: "R2 admin token not in keychain; can't list buckets" });
        return { provider, findings, skipped };
    }
    // Account id: manifest first, else the 32-hex-char subdomain of an
    // R2 storage endpoint URL.
    const accountId = manifest?.s3Buckets?.accountId ??
        cfg.endpoint?.match(/https?:\/\/([0-9a-f]{32})\.r2\.cloudflarestorage\.com/i)?.[1];
    if (!accountId) {
        skipped.push({ provider, reason: "couldn't derive R2 account id from manifest or endpoint" });
        return { provider, findings, skipped };
    }
    const cf = new CloudflareApi({ token: adminToken });
    // Candidate bucket names: manifest entries first (authoritative —
    // these are buckets hatchkit knows it created), then naming-convention
    // guesses for projects without a manifest.
    const candidates = new Set();
    const manifestBuckets = [];
    if (manifest?.s3Buckets) {
        for (const [key, value] of Object.entries(manifest.s3Buckets)) {
            // Skip scalar entries (e.g. the accountId field itself).
            if (!value || typeof value !== "object")
                continue;
            const v = value;
            if (typeof v.name === "string" && v.name) {
                candidates.add(v.name);
                manifestBuckets.push({ name: v.name, manifestKey: key });
            }
        }
    }
    if (input.name) {
        candidates.add(`${input.name}-assets`);
        candidates.add(`${input.name}-state`);
        candidates.add(input.name);
    }
    const live = [];
    for (const name of candidates) {
        try {
            const bucket = await cf.getR2Bucket(accountId, name);
            if (!bucket) {
                // Recorded as bucket: null so drift can flag manifest
                // buckets that no longer exist.
                live.push({ name, bucket: null });
                continue;
            }
            const [cors, domains] = await Promise.all([
                cf.getR2BucketCors(accountId, name).catch(() => null),
                cf.listR2CustomDomains(accountId, name).catch(() => []),
            ]);
            live.push({ name, bucket, cors, customDomains: domains });
            const domainSummary = (domains ?? [])
                .filter((d) => d.enabled)
                .map((d) => d.domain)
                .join(", ");
            const corsSummary = cors?.[0]?.allowed?.origins?.length
                ? `${cors[0].allowed.origins.length} CORS origin(s)`
                : "no CORS";
            findings.push({
                provider,
                kind: "bucket",
                identity: name,
                status: "present",
                detail: [
                    bucket.storage_class ? `class: ${bucket.storage_class}` : undefined,
                    domainSummary ? `custom: ${domainSummary}` : undefined,
                    corsSummary,
                ]
                    .filter(Boolean)
                    .join(" · "),
            });
        }
        catch (err) {
            // Auth probably broken — bubble as skip rather than per-bucket fail.
            // NOTE: this early return also abandons the remaining
            // candidates, on the assumption that they would fail the
            // same way.
            skipped.push({
                provider,
                reason: `R2 lookup for "${name}" failed: ${err.message.split("\n")[0]}`,
            });
            return { provider, findings, skipped, raw: { accountId, live, manifestBuckets } };
        }
    }
    // If we looked but nothing matched, leave a breadcrumb. Without
    // this, scanR2 returns empty findings and the user wonders whether
    // we even tried.
    if (findings.length === 0 && candidates.size > 0) {
        findings.push({
            provider,
            kind: "bucket",
            identity: input.name ?? "(candidates)",
            status: "missing",
            detail: `no R2 bucket matches ${Array.from(candidates).join(" / ")} (account ${accountId.slice(0, 6)}…)`,
        });
    }
    return { provider, findings, skipped, raw: { accountId, live, manifestBuckets } };
}
847
async function scanS3Other(_input) {
    // Hetzner Object Storage and AWS S3 expose no "list every bucket for
    // this access key" call through the existing client, so this scanner
    // only reports whether credentials are configured per vendor — enough
    // to tell the user where else to look. A complete inventory would need
    // `@aws-sdk/client-s3` `ListBuckets` (already a dependency), but wiring
    // that in doubles the surface area; deliberately deferred until asked.
    const provider = "s3";
    const findings = [];
    const skipped = [];
    const vendors = ["hetzner", "aws"];
    for (const vendor of vendors) {
        const providerKey = `s3:${vendor}`;
        const cfg = await getS3Config(vendor);
        if (!cfg) {
            skipped.push({ provider: providerKey, reason: "not configured" });
            continue;
        }
        findings.push({
            provider: providerKey,
            kind: "credentials",
            identity: vendor,
            status: "info",
            detail: `endpoint: ${cfg.endpoint} — bucket inventory not implemented for ${vendor}`,
        });
    }
    return { provider, findings, skipped };
}
874
/**
 * Inventory GitHub state for the inferred repo via the `gh` CLI:
 *  1. repository metadata (`gh repo view`),
 *  2. GitHub Pages status (`gh api repos/<repo>/pages`),
 *  3. presence of the repo secrets hatchkit manages (`gh secret list`).
 *
 * Missing prerequisites (no repo inferred, `gh` not installed, `gh` not
 * authenticated) produce `skipped` entries rather than errors.
 *
 * @param {object} input - inferred project identity; only `input.repo`
 *   (an "owner/name" slug) is read here.
 * @returns {Promise<object>} `{ provider, findings, skipped, raw }` where
 *   `raw` carries the parsed `repoInfo` and `pages` payloads for drift
 *   detection downstream.
 */
async function scanGitHub(input) {
    const provider = "github";
    const findings = [];
    const skipped = [];
    // Guard clauses: each prerequisite failure short-circuits with a skip.
    if (!input.repo) {
        skipped.push({ provider, reason: "no GitHub repo to look up" });
        return { provider, findings, skipped };
    }
    if (!(await execOk("gh", ["--version"]))) {
        skipped.push({ provider, reason: "`gh` CLI not installed" });
        return { provider, findings, skipped };
    }
    if (!(await execOk("gh", ["auth", "status"]))) {
        skipped.push({ provider, reason: "`gh` not authenticated (run `gh auth login`)" });
        return { provider, findings, skipped };
    }
    // Repo metadata. Kept in scope (not just the try block) so it can be
    // returned in `raw` even when later probes run.
    let repoInfo = {};
    try {
        const res = await exec("gh", [
            "repo",
            "view",
            input.repo,
            "--json",
            "visibility,defaultBranchRef,description,homepageUrl,isArchived",
        ], { silent: true });
        if (res.exitCode === 0) {
            repoInfo = JSON.parse(res.stdout);
            findings.push({
                provider,
                kind: "repository",
                identity: input.repo,
                status: "present",
                // Compact one-line summary; undefined entries are dropped
                // by the filter(Boolean) below.
                detail: [
                    repoInfo.visibility?.toLowerCase(),
                    `default: ${repoInfo.defaultBranchRef?.name ?? "?"}`,
                    repoInfo.isArchived ? "archived" : undefined,
                    repoInfo.homepageUrl ? `homepage: ${repoInfo.homepageUrl}` : undefined,
                ]
                    .filter(Boolean)
                    .join(" · "),
            });
        }
        else {
            // Non-zero exit: treat as "repo not found / not visible" and
            // bail out early — the Pages and secrets probes below would
            // just produce noise against a missing repo.
            findings.push({
                provider,
                kind: "repository",
                identity: input.repo,
                status: "missing",
                detail: res.stderr.trim().split("\n")[0],
            });
            return { provider, findings, skipped, raw: { repoInfo } };
        }
    }
    catch (err) {
        // Unexpected failure (exec itself threw, or stdout wasn't JSON).
        // Recorded as "info" and we fall through to the remaining probes.
        findings.push({
            provider,
            kind: "repository",
            identity: input.repo,
            status: "info",
            detail: `gh repo view failed: ${err.message.split("\n")[0]}`,
        });
    }
    // GitHub Pages state. `pages` stays null unless the API returns a
    // payload; drift detection reads it from `raw` later.
    let pages = null;
    try {
        const res = await exec("gh", ["api", `repos/${input.repo}/pages`], { silent: true });
        if (res.exitCode === 0) {
            pages = JSON.parse(res.stdout);
            findings.push({
                provider: "github-pages",
                kind: "page-site",
                identity: input.repo,
                status: "present",
                detail: [
                    pages?.status,
                    pages?.cname ? `cname: ${pages.cname}` : "no custom domain",
                    pages?.html_url,
                ]
                    .filter(Boolean)
                    .join(" · "),
            });
        }
        else if (/HTTP 404/.test(res.stderr)) {
            // The Pages endpoint 404s when Pages is simply disabled —
            // that's an expected state, not an error.
            findings.push({
                provider: "github-pages",
                kind: "page-site",
                identity: input.repo,
                status: "missing",
                detail: "Pages is not enabled on this repo",
            });
        }
        else {
            findings.push({
                provider: "github-pages",
                kind: "page-site",
                identity: input.repo,
                status: "info",
                detail: `gh api repos/<repo>/pages failed: ${res.stderr.trim().split("\n")[0]}`,
            });
        }
    }
    catch (err) {
        findings.push({
            provider: "github-pages",
            kind: "page-site",
            identity: input.repo,
            status: "info",
            detail: `pages probe failed: ${err.message.split("\n")[0]}`,
        });
    }
    // Repo secrets — surface only the ones hatchkit cares about, by name.
    // (Secret *values* are never readable via the API; presence is all we
    // can report.)
    const relevantSecrets = [
        "DOTENV_PRIVATE_KEY_PRODUCTION",
        "COOLIFY_API_URL",
        "COOLIFY_API_TOKEN",
        "COOLIFY_APP_UUID",
    ];
    try {
        // NOTE(review): `gh secret list --json` requires a reasonably
        // recent gh release — confirm the minimum supported gh version.
        const res = await exec("gh", ["secret", "list", "--repo", input.repo, "--json", "name,updatedAt"], { silent: true });
        if (res.exitCode === 0) {
            const all = JSON.parse(res.stdout);
            const haves = new Set(all.map((s) => s.name));
            for (const want of relevantSecrets) {
                if (haves.has(want)) {
                    findings.push({
                        provider,
                        kind: "secret",
                        identity: want,
                        status: "present",
                        detail: "set on repo",
                    });
                }
            }
            // Total count (including secrets hatchkit doesn't manage) as a
            // single informational summary line.
            const extras = all.length;
            findings.push({
                provider,
                kind: "secret-summary",
                identity: input.repo,
                status: "info",
                detail: `${extras} secret(s) total on repo`,
            });
        }
    }
    catch {
        // Non-fatal — secret-listing requires admin scope on the gh token.
    }
    return { provider, findings, skipped, raw: { repoInfo, pages } };
}
1022
/**
 * Check whether the inferred domain (exact or apex) appears among the
 * domains registered on the configured Resend account.
 */
async function scanResend(input) {
    const provider = "resend";
    const findings = [];
    const skipped = [];
    const cfg = await getResendConfig();
    if (!cfg) {
        skipped.push({ provider, reason: "not configured" });
        return { provider, findings, skipped };
    }
    if (!input.domain) {
        skipped.push({ provider, reason: "no domain to match against verified Resend domains" });
        return { provider, findings, skipped };
    }
    try {
        const response = await fetch("https://api.resend.com/domains", {
            headers: { Authorization: `Bearer ${cfg.apiKey}` },
        });
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}`);
        }
        const payload = (await response.json());
        const entries = payload.data ?? [];
        const apex = apexOf(input.domain);
        // A Resend entry counts as a match on the exact domain or its apex.
        const matches = entries.filter((entry) => typeof entry.name === "string" && (entry.name === input.domain || entry.name === apex));
        if (matches.length > 0) {
            findings.push(...matches.map((m) => ({
                provider,
                kind: "verified-domain",
                identity: m.name ?? input.domain,
                status: "present",
                detail: `status: ${m.status ?? "?"}`,
            })));
        }
        else {
            findings.push({
                provider,
                kind: "verified-domain",
                identity: input.domain,
                status: "missing",
                detail: `no Resend domain entry for ${input.domain} (${entries.length} domain(s) total)`,
            });
        }
    }
    catch (err) {
        skipped.push({
            provider,
            reason: `Resend lookup failed: ${err.message.split("\n")[0]}`,
        });
    }
    return { provider, findings, skipped };
}
1073
/**
 * Look for a GlitchTip project whose name or slug matches one of the
 * aliases derived from the inferred project name.
 */
async function scanGlitchtip(input) {
    const provider = "glitchtip";
    const findings = [];
    const skipped = [];
    const cfg = await getGlitchtipConfig();
    if (!cfg) {
        skipped.push({ provider, reason: "not configured" });
        return { provider, findings, skipped };
    }
    if (!input.name) {
        skipped.push({ provider, reason: "no project name to match against GlitchTip projects" });
        return { provider, findings, skipped };
    }
    try {
        const endpoint = `${cfg.url.replace(/\/$/, "")}/api/0/organizations/${cfg.organizationSlug}/projects/`;
        const response = await fetch(endpoint, { headers: { Authorization: `Bearer ${cfg.token}` } });
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}`);
        }
        const projects = (await response.json());
        const aliases = nameAliases(input.name);
        const matchesAlias = (project) => (typeof project.name === "string" && aliases.includes(project.name)) ||
            (typeof project.slug === "string" && aliases.includes(project.slug));
        const matches = projects.filter(matchesAlias);
        if (matches.length > 0) {
            for (const project of matches) {
                findings.push({
                    provider,
                    kind: "project",
                    identity: project.slug ?? project.name ?? input.name,
                    status: "present",
                    detail: project.platform ? `platform: ${project.platform}` : undefined,
                });
            }
        }
        else {
            findings.push({
                provider,
                kind: "project",
                identity: input.name,
                status: "missing",
                detail: `no GlitchTip project matching ${aliases.join(" / ")} (${projects.length} total in org)`,
            });
        }
    }
    catch (err) {
        skipped.push({
            provider,
            reason: `GlitchTip lookup failed: ${err.message.split("\n")[0]}`,
        });
    }
    return { provider, findings, skipped };
}
1123
/**
 * Look for an OpenPanel project whose name or id matches one of the
 * aliases derived from the inferred project name.
 */
async function scanOpenpanel(input) {
    const provider = "openpanel";
    const findings = [];
    const skipped = [];
    const cfg = await getOpenpanelConfig();
    if (!cfg) {
        skipped.push({ provider, reason: "not configured" });
        return { provider, findings, skipped };
    }
    if (!input.name) {
        skipped.push({ provider, reason: "no project name to match against OpenPanel projects" });
        return { provider, findings, skipped };
    }
    try {
        const base = (cfg.apiUrl ?? cfg.url).replace(/\/$/, "");
        const response = await fetch(`${base}/manage/projects`, {
            headers: {
                "openpanel-client-id": cfg.rootClientId,
                "openpanel-client-secret": cfg.rootClientSecret,
            },
        });
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}`);
        }
        // The manage API sometimes returns a bare array and sometimes
        // `{ data: [...] }` — normalize both shapes to an array.
        const payload = (await response.json());
        const projects = Array.isArray(payload) ? payload : (payload.data ?? []);
        const aliases = nameAliases(input.name);
        const matchesAlias = (project) => (typeof project.name === "string" && aliases.includes(project.name)) ||
            (typeof project.id === "string" && aliases.includes(project.id));
        const matches = projects.filter(matchesAlias);
        if (matches.length > 0) {
            for (const project of matches) {
                findings.push({
                    provider,
                    kind: "project",
                    identity: project.name ?? project.id ?? input.name,
                    status: "present",
                });
            }
        }
        else {
            findings.push({
                provider,
                kind: "project",
                identity: input.name,
                status: "missing",
                detail: `no OpenPanel project matching ${aliases.join(" / ")} (${projects.length} total)`,
            });
        }
    }
    catch (err) {
        skipped.push({
            provider,
            reason: `OpenPanel lookup failed: ${err.message.split("\n")[0]}`,
        });
    }
    return { provider, findings, skipped };
}
1183
/**
 * Look for Stripe webhook endpoints whose URL contains the inferred
 * domain, in each mode (test/live) that has a stored secret key.
 *
 * Fix: the previous implementation fetched only the first page
 * (`?limit=100`) of webhook endpoints; accounts with more than 100
 * endpoints could silently miss matches. Stripe list endpoints are
 * cursor-paginated (`has_more` + `starting_after`), so we now walk all
 * pages before matching.
 *
 * @param {object} input - inferred identity; only `input.domain` is read.
 * @returns {Promise<object>} { provider, findings, skipped }
 */
async function scanStripe(input) {
    const provider = "stripe";
    const findings = [];
    const skipped = [];
    const cfg = await getStripeConfig();
    if (!cfg) {
        skipped.push({ provider, reason: "not configured" });
        return { provider, findings, skipped };
    }
    if (!input.domain) {
        skipped.push({
            provider,
            reason: "no domain to match against Stripe webhook endpoints",
        });
        return { provider, findings, skipped };
    }
    // Probe each mode that has a stored master key.
    for (const mode of ["test", "live"]) {
        const key = mode === "test" ? cfg.testSecretKey : cfg.liveSecretKey;
        if (!key)
            continue;
        try {
            // Collect every endpoint across Stripe's cursor pagination.
            const endpoints = [];
            let startingAfter;
            for (;;) {
                const url = new URL("https://api.stripe.com/v1/webhook_endpoints");
                url.searchParams.set("limit", "100");
                if (startingAfter)
                    url.searchParams.set("starting_after", startingAfter);
                const res = await fetch(url, {
                    headers: { Authorization: `Bearer ${key}` },
                });
                if (!res.ok)
                    throw new Error(`HTTP ${res.status}`);
                const body = (await res.json());
                const page = body.data ?? [];
                endpoints.push(...page);
                // Stop when Stripe says there's no further page (or the
                // page had no usable cursor id).
                const lastId = page.at(-1)?.id;
                if (!body.has_more || !lastId)
                    break;
                startingAfter = lastId;
            }
            const matches = endpoints.filter((w) => typeof w.url === "string" && w.url.includes(input.domain ?? ""));
            if (matches.length === 0) {
                findings.push({
                    provider,
                    kind: "webhook-endpoint",
                    identity: `${mode} mode`,
                    status: "missing",
                    detail: `no webhook endpoint with URL containing ${input.domain} (${endpoints.length} endpoint(s) in ${mode} mode)`,
                });
            }
            else {
                for (const w of matches) {
                    findings.push({
                        provider,
                        kind: "webhook-endpoint",
                        identity: `${mode}:${w.id ?? "?"}`,
                        status: "present",
                        detail: `${w.url} (${w.status ?? "?"})`,
                    });
                }
            }
        }
        catch (err) {
            // Auth/network failures are reported per-mode so one bad key
            // doesn't hide results from the other mode.
            skipped.push({
                provider,
                reason: `Stripe ${mode}-mode lookup failed: ${err.message.split("\n")[0]}`,
            });
        }
    }
    return { provider, findings, skipped };
}
1242
+ // ---------------------------------------------------------------------------
1243
+ // Drift detection (cross-references between scan results + local state)
1244
+ // ---------------------------------------------------------------------------
1245
/**
 * Cross-reference the per-provider scan results against each other and
 * against local project state, emitting synthetic "drift" findings:
 *
 *  D1  Coolify app FQDN vs Cloudflare DNS A record (and server IP).
 *  D2  Coolify app git source vs the local git remote.
 *  D3  Manifest-listed R2 buckets that don't exist live.
 *  D4  Manifest-recorded R2 CORS origins vs the live bucket policy.
 *  D5  gh-pages workflow on disk vs Pages enablement (and CNAME file).
 *  D6  dotenvx-encrypted env locally vs missing GitHub repo secret.
 *
 * @param {object} input - inferred identity ({ name, domain, repo }).
 * @param {object} local - local state; reads manifest, gitRemote,
 *   ghPagesWorkflowPath, cnameFile, dotenvxEncrypted, cwd.
 * @param {Array} scanResults - results from the scan* functions; the
 *   `raw` payloads attached by scanCoolify/scanDns/scanR2/scanGitHub
 *   are what this function correlates.
 * @returns {Promise<Array>} findings with provider "drift", status "drift".
 */
async function detectDrift(input, local, scanResults) {
    const out = [];
    // Index scan results by provider for O(1) lookups below.
    const byProvider = new Map(scanResults.map((r) => [r.provider, r]));
    const coolify = byProvider.get("coolify");
    const dns = byProvider.get("dns");
    const r2 = byProvider.get("s3:r2");
    const github = byProvider.get("github");
    // D1: Coolify app fqdn vs DNS A record content (when both are
    // present). We resolve via the Cloudflare zone records we already
    // fetched in scanDns — no extra network hop.
    if (coolify?.raw && dns?.raw && input.domain) {
        const hydrated = (coolify.raw.hydrated ?? []);
        const dnsRecords = (dns.raw.dnsRecords ?? []);
        for (const app of hydrated) {
            const fqdns = collectFqdns(app);
            // Match the exact domain or any subdomain of its apex.
            const matchedFqdn = fqdns.find((f) => f === input.domain || f.endsWith(`.${apexOf(input.domain ?? "")}`));
            if (!matchedFqdn)
                continue;
            const aRecord = dnsRecords.find((r) => r.type === "A" && r.name === matchedFqdn);
            if (!aRecord) {
                // App serves a name that has no A record at all.
                out.push({
                    provider: "drift",
                    kind: "coolify-dns",
                    identity: `${app.name} → ${matchedFqdn}`,
                    status: "drift",
                    drift: [
                        `Coolify app "${app.name}" serves ${matchedFqdn} but no A record exists in Cloudflare for that name`,
                    ],
                });
                continue;
            }
            // Best-effort: compare against the Coolify server's public IP
            // when we can pull it. The server uuid lives on the application.
            if (app.serverUuid) {
                try {
                    const cfgC = await getCoolifyConfig();
                    if (cfgC) {
                        const api = new CoolifyApi({ url: cfgC.url, token: cfgC.token });
                        // Failures here degrade to "no IPs" rather than
                        // aborting the whole drift pass.
                        const domains = await api.getServerDomains(app.serverUuid).catch(() => []);
                        const ips = Array.from(new Set(domains.map((d) => d.ip).filter((ip) => !!ip)));
                        // Only a *non-proxied* record pointing at a wrong IP is a
                        // problem — behind the Cloudflare proxy the record
                        // content isn't what clients connect to.
                        if (ips.length > 0 && !ips.includes(aRecord.content) && !aRecord.proxied) {
                            out.push({
                                provider: "drift",
                                kind: "coolify-dns",
                                identity: `${app.name} → ${matchedFqdn}`,
                                status: "drift",
                                drift: [
                                    `Cloudflare A record points to ${aRecord.content}`,
                                    `Coolify server IP(s): ${ips.join(", ")}`,
                                    `(record is not proxied — direct IP mismatch will black-hole traffic)`,
                                ],
                            });
                        }
                    }
                }
                catch {
                    // Couldn't resolve server IP — skip silently.
                }
            }
        }
    }
    // D2: Coolify app git_repository vs local git remote — same project
    // name on different repos is a common gotcha during renames.
    if (coolify?.raw && local.gitRemote) {
        const hydrated = (coolify.raw.hydrated ?? []);
        const localSlug = repoSlugFromUrl(local.gitRemote);
        for (const app of hydrated) {
            if (!app.gitRepository)
                continue;
            // Fall back to stripping the github.com prefix / .git suffix
            // when the URL doesn't parse into a slug.
            const remoteSlug = repoSlugFromUrl(app.gitRepository) ??
                app.gitRepository.replace(/^https?:\/\/github\.com\//, "").replace(/\.git$/, "");
            // Case-insensitive: GitHub slugs are case-preserving but not
            // case-sensitive.
            if (localSlug && remoteSlug && localSlug.toLowerCase() !== remoteSlug.toLowerCase()) {
                out.push({
                    provider: "drift",
                    kind: "coolify-source",
                    identity: app.name,
                    status: "drift",
                    drift: [
                        `Coolify app deploys from: ${app.gitRepository}`,
                        `Local git remote: ${local.gitRemote}`,
                    ],
                });
            }
        }
    }
    // D3: Manifest-listed buckets that don't actually exist live.
    if (r2?.raw) {
        const manifestBuckets = (r2.raw.manifestBuckets ?? []);
        const live = (r2.raw.live ?? []);
        for (const mb of manifestBuckets) {
            const hit = live.find((l) => l.name === mb.name);
            // `hit.bucket === null` means we probed the name and R2 said
            // it doesn't exist (see scanR2), so treat it like no hit.
            if (!hit || !hit.bucket) {
                out.push({
                    provider: "drift",
                    kind: "bucket",
                    identity: mb.name,
                    status: "drift",
                    drift: [
                        `Manifest s3Buckets.${mb.manifestKey} = "${mb.name}"`,
                        `Live R2: no bucket with that name`,
                        `Fix: \`hatchkit provision s3\` to reconcile, or remove the entry from .hatchkit.json`,
                    ],
                });
            }
        }
    }
    // D4: R2 CORS — manifest-recorded origins vs live policy. Mirrors
    // `doctor.checkProjectS3CorsState` but inventory runs against the
    // buckets it scanned instead of re-walking the manifest.
    if (r2?.raw && local.manifest?.s3Buckets) {
        const live = (r2.raw.live ?? []);
        const assets = local.manifest.s3Buckets.assets;
        // Only compare when the manifest entry actually recorded a CORS
        // config and it wasn't deliberately skipped.
        if (assets &&
            typeof assets === "object" &&
            "cors" in assets &&
            assets.cors &&
            !assets.cors.skipped) {
            // Copy before sorting — sort() mutates and these arrays are
            // shared with the raw scan payload.
            const recorded = (assets.cors.origins ?? []).slice().sort();
            const hit = live.find((l) => l.name === assets.name);
            const liveOrigins = (hit?.cors?.[0]?.allowed?.origins ?? []).slice().sort();
            // Order-insensitive set equality via the sorted copies.
            const same = recorded.length === liveOrigins.length && recorded.every((o, i) => o === liveOrigins[i]);
            if (!same && recorded.length > 0) {
                out.push({
                    provider: "drift",
                    kind: "bucket-cors",
                    identity: assets.name,
                    status: "drift",
                    drift: [
                        `Manifest origins: ${recorded.join(", ") || "(empty)"}`,
                        `Live origins: ${liveOrigins.join(", ") || "(empty)"}`,
                        `Fix: \`hatchkit provision s3\` to reconcile`,
                    ],
                });
            }
        }
    }
    // D5: gh-pages workflow on disk but Pages isn't enabled — or vice
    // versa (Pages enabled but no workflow committed).
    if (github?.raw) {
        const pages = github.raw.pages;
        if (local.ghPagesWorkflowPath && !pages) {
            out.push({
                provider: "drift",
                kind: "github-pages-state",
                identity: input.repo ?? "(repo)",
                status: "drift",
                drift: [
                    `Local workflow exists: ${rel(local.cwd, local.ghPagesWorkflowPath)}`,
                    `GitHub Pages: not enabled`,
                    `Fix: \`hatchkit gh-pages\` or enable Pages in repo Settings`,
                ],
            });
        }
        if (!local.ghPagesWorkflowPath && pages && pages.status) {
            out.push({
                provider: "drift",
                kind: "github-pages-state",
                identity: input.repo ?? "(repo)",
                status: "drift",
                drift: [
                    `GitHub Pages enabled (status: ${pages.status})`,
                    `No Pages-deploying workflow in .github/workflows`,
                    `Fix: commit a Pages-deploying workflow or disable Pages in repo Settings`,
                ],
            });
        }
        // D5b: CNAME file vs Pages custom domain. If both exist and disagree,
        // one will silently win on next deploy — usually painful.
        if (local.cnameFile && pages?.cname && local.cnameFile.content !== pages.cname) {
            out.push({
                provider: "drift",
                kind: "github-pages-cname",
                identity: input.repo ?? "(repo)",
                status: "drift",
                drift: [
                    `CNAME file: ${local.cnameFile.content} (at ${rel(local.cwd, local.cnameFile.path)})`,
                    `Pages setting: ${pages.cname}`,
                ],
            });
        }
    }
    // D6: dotenvx in use locally but no DOTENV_PRIVATE_KEY_PRODUCTION
    // secret on the GitHub repo. The deploy workflow will need that
    // to decrypt at runtime.
    if (github?.raw && local.dotenvxEncrypted) {
        const repoSecrets = github.findings.filter((f) => f.kind === "secret");
        const hasKey = repoSecrets.some((s) => s.identity === "DOTENV_PRIVATE_KEY_PRODUCTION");
        if (!hasKey && input.name) {
            out.push({
                provider: "drift",
                kind: "missing-secret",
                identity: "DOTENV_PRIVATE_KEY_PRODUCTION",
                status: "drift",
                drift: [
                    ".env.production is dotenvx-encrypted locally",
                    `GitHub repo ${input.repo} has no DOTENV_PRIVATE_KEY_PRODUCTION secret`,
                    `Fix: \`hatchkit keys push ${input.name} --target=gh --repo ${input.repo}\``,
                ],
            });
        }
    }
    return out;
}
1448
+ // ---------------------------------------------------------------------------
1449
+ // Renderer (human)
1450
+ // ---------------------------------------------------------------------------
1451
/**
 * Render an inventory report as a colorized, human-readable multi-line
 * string: identity header, drift section first (the actionable part),
 * then per-provider findings, skips, and a one-line summary.
 */
export function renderInventoryHuman(report) {
    const lines = [];
    lines.push(chalk.bold(" hatchkit inventory"));
    lines.push(chalk.dim(` cwd: ${report.cwd}`));
    lines.push("");
    lines.push(chalk.bold(" Identity:"));
    lines.push(` name ${report.inferred.name ? chalk.bold(report.inferred.name) : chalk.dim("·")}${sourceTag(report.sources.name)}`);
    lines.push(` domain ${report.inferred.domain ? chalk.bold(report.inferred.domain) : chalk.dim("·")}${sourceTag(report.sources.domain)}`);
    lines.push(` repo ${report.inferred.repo ? chalk.bold(report.inferred.repo) : chalk.dim("·")}${sourceTag(report.sources.repo)}`);
    lines.push("");
    // Bucket findings by provider, preserving first-seen insertion order.
    const grouped = new Map();
    for (const finding of report.findings) {
        const bucket = grouped.get(finding.provider) ?? [];
        bucket.push(finding);
        grouped.set(finding.provider, bucket);
    }
    // Drifts first — they're the actionable thing.
    if (report.drifts.length > 0) {
        lines.push(chalk.bold.yellow(" ⚠ Drift detected:"));
        for (const d of report.drifts) {
            lines.push(` ${chalk.yellow("⚠")} ${chalk.bold(d.identity)} ${chalk.dim(`(${d.kind})`)}`);
            for (const detailLine of d.drift ?? []) {
                lines.push(` ${chalk.dim("→")} ${detailLine}`);
            }
        }
        lines.push("");
    }
    // Status-to-icon lookup; anything unknown falls back to a dim dot.
    const icons = {
        present: chalk.green("✓"),
        missing: chalk.red("✗"),
        drift: chalk.yellow("⚠"),
    };
    // Per-provider sections. The "drift" pseudo-provider was already
    // rendered above, so skip it here.
    for (const [providerKey, providerFindings] of grouped) {
        if (providerKey === "drift")
            continue;
        lines.push(chalk.bold(` ${providerKey}`));
        for (const f of providerFindings) {
            const icon = icons[f.status] ?? chalk.dim("·");
            const kind = chalk.dim(`(${f.kind})`);
            const detail = f.detail ? chalk.dim(` — ${f.detail}`) : "";
            lines.push(` ${icon} ${f.identity} ${kind}${detail}`);
        }
        lines.push("");
    }
    if (report.skipped.length > 0) {
        lines.push(chalk.dim(" Skipped:"));
        for (const s of report.skipped) {
            lines.push(chalk.dim(` · ${s.provider}: ${s.reason}`));
        }
        lines.push("");
    }
    // Summary line: drift/missing only get color when non-zero.
    const driftPart = report.summary.drift > 0
        ? chalk.yellow(`${report.summary.drift} drift`)
        : chalk.dim("0 drift");
    const missingPart = report.summary.missing > 0
        ? chalk.red(`${report.summary.missing} missing`)
        : chalk.dim("0 missing");
    lines.push(` ${chalk.green(`${report.summary.present} present`)} ${driftPart} ${missingPart} ${chalk.dim(`${report.summary.skipped} skipped`)}`);
    lines.push("");
    return lines.join("\n");
}
1517
// Dim suffix naming where an inferred identity value came from; when no
// source is known the CLI will prompt for the value instead.
function sourceTag(s) {
    return s ? chalk.dim(` ← ${s}`) : chalk.dim(" (will prompt)");
}
1522
+ //# sourceMappingURL=inventory.js.map