@celilo/cli 0.3.10 → 0.3.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@celilo/cli",
3
- "version": "0.3.10",
3
+ "version": "0.3.12",
4
4
  "description": "Celilo — home lab orchestration CLI",
5
5
  "type": "module",
6
6
  "bin": {
@@ -53,7 +53,7 @@
53
53
  "dependencies": {
54
54
  "@aws-sdk/client-s3": "^3.1024.0",
55
55
  "@celilo/capabilities": "^0.1.10",
56
- "@celilo/cli-display": "^0.1.7",
56
+ "@celilo/cli-display": "^0.1.9",
57
57
  "@celilo/event-bus": "^0.1.4",
58
58
  "@clack/prompts": "^1.1.0",
59
59
  "ajv": "^8.18.0",
@@ -66,6 +66,18 @@ export const COMMANDS: CommandDef[] = [
66
66
  name: 'status',
67
67
  description: 'Show system and module status',
68
68
  },
69
+ {
70
+ name: 'doctor',
71
+ description: 'Diagnose @celilo/* version drift between the running CLI and the workspace',
72
+ flags: [
73
+ {
74
+ name: 'fix',
75
+ description:
76
+ 'Repair drift by `bun link`-ing each drifted @celilo/* package from the workspace',
77
+ takesValue: false,
78
+ },
79
+ ],
80
+ },
69
81
  {
70
82
  name: 'audit',
71
83
  description: 'Top-level alias for `system audit`',
@@ -0,0 +1,36 @@
1
import { describe, expect, test } from 'bun:test';
import { compareVersions } from './doctor';

// Unit coverage for the semver-ish comparator backing `celilo doctor`'s
// drift detection. Contract: -1 when a < b, 0 when equal, 1 when a > b.
describe('compareVersions', () => {
  test('detects ascending major/minor/patch', () => {
    // Each position (major, minor, patch) must independently order versions.
    expect(compareVersions('1.0.0', '2.0.0')).toBe(-1);
    expect(compareVersions('1.0.0', '1.1.0')).toBe(-1);
    expect(compareVersions('1.0.0', '1.0.1')).toBe(-1);
  });

  test('detects descending major/minor/patch', () => {
    // Mirror of the ascending cases: swapped arguments flip the sign.
    expect(compareVersions('2.0.0', '1.0.0')).toBe(1);
    expect(compareVersions('1.1.0', '1.0.0')).toBe(1);
    expect(compareVersions('1.0.1', '1.0.0')).toBe(1);
  });

  test('treats equal versions as equal', () => {
    expect(compareVersions('1.2.3', '1.2.3')).toBe(0);
    expect(compareVersions('0.1.9', '0.1.9')).toBe(0);
  });

  test('catches the canonical drift case (loaded < workspace)', () => {
    // The case that triggered #2 in the first place: globally-installed
    // 0.1.8 vs. workspace 0.1.9.
    expect(compareVersions('0.1.8', '0.1.9')).toBe(-1);
  });

  test('treats missing trailing segments as zeros', () => {
    // '1.0' is shorthand for '1.0.0' — absent segments pad with zero.
    expect(compareVersions('1.0', '1.0.0')).toBe(0);
    expect(compareVersions('1.0', '1.0.1')).toBe(-1);
  });

  test('strips a leading v prefix', () => {
    // Tags like 'v1.2.3' must compare equal to the bare version string.
    expect(compareVersions('v1.2.3', '1.2.3')).toBe(0);
  });
});
@@ -0,0 +1,385 @@
1
+ /**
2
+ * `celilo doctor` — diagnose @celilo/* version drift between the running CLI
3
+ * and the surrounding workspace (if any).
4
+ *
5
+ * Catches the canonical "I edited the workspace but my global celilo is
6
+ * still running an older published version" failure mode.
7
+ *
8
+ * Resolution strategy:
9
+ * - The running CLI's package.json comes from a relative import — that
10
+ * anchors us to whatever copy of `@celilo/cli` is actually executing
11
+ * (workspace TS source or globally-installed node_modules tree).
12
+ * - For each `@celilo/*` dependency, we ask the runtime where it
13
+ * resolves the package's `package.json` and read the version there.
14
+ * - If we can find a workspace root by walking up from `process.cwd()`,
15
+ * we read each `packages/*\/package.json` and flag anything where the
16
+ * loaded version is older than the workspace.
17
+ */
18
+
19
import { spawnSync } from 'node:child_process';
import { existsSync, readFileSync, readdirSync } from 'node:fs';
import { createRequire } from 'node:module';
import { dirname, join, resolve } from 'node:path';
import cliPkg from '../../../package.json' with { type: 'json' };
import type { CommandResult } from '../types';
25
+
26
// Resolution report for one `@celilo/*` dependency of the running CLI.
interface CeliloPkgInfo {
  name: string;                 // package name from the CLI's dependencies map
  declaredRange: string;        // semver range declared in package.json
  loadedVersion: string | null; // version actually resolved at runtime, null if unresolved
  loadedFrom: string | null;    // directory its package.json resolved from
  resolveError: string | null;  // error message when resolution failed
}

// A `@celilo/*` package discovered on disk in the workspace tree.
interface WorkspaceVersion {
  name: string;    // package name from its package.json
  version: string; // version its package.json declares
  path: string;    // directory containing that package.json
}

// Minimal ANSI escape palette used by the doctor report output.
const ANSI = {
  reset: '\x1b[0m',
  dim: '\x1b[2m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  red: '\x1b[31m',
};
47
+
48
+ /**
49
+ * Discover every `@celilo/*` entry in the running CLI's package.json
50
+ * dependencies and resolve where each one is actually loaded from.
51
+ */
52
+ function inspectCeliloDeps(): CeliloPkgInfo[] {
53
+ const deps: Record<string, string> = {
54
+ ...((cliPkg as { dependencies?: Record<string, string> }).dependencies ?? {}),
55
+ };
56
+ const celiloDeps = Object.entries(deps)
57
+ .filter(([name]) => name.startsWith('@celilo/'))
58
+ .sort(([a], [b]) => a.localeCompare(b));
59
+
60
+ const require = createRequire(import.meta.url);
61
+
62
+ return celiloDeps.map(([name, declaredRange]) => {
63
+ try {
64
+ const pkgJsonPath = require.resolve(`${name}/package.json`);
65
+ const pkg = JSON.parse(readFileSync(pkgJsonPath, 'utf-8')) as { version: string };
66
+ return {
67
+ name,
68
+ declaredRange,
69
+ loadedVersion: pkg.version,
70
+ loadedFrom: dirname(pkgJsonPath),
71
+ resolveError: null,
72
+ };
73
+ } catch (err) {
74
+ return {
75
+ name,
76
+ declaredRange,
77
+ loadedVersion: null,
78
+ loadedFrom: null,
79
+ resolveError: err instanceof Error ? err.message : String(err),
80
+ };
81
+ }
82
+ });
83
+ }
84
+
85
+ /**
86
+ * Walk up from `start` until we find a `package.json` whose `workspaces`
87
+ * key is non-empty. Returns the directory containing that file or null.
88
+ */
89
+ function findWorkspaceRoot(start: string): string | null {
90
+ let dir = resolve(start);
91
+ while (true) {
92
+ const candidate = join(dir, 'package.json');
93
+ if (existsSync(candidate)) {
94
+ try {
95
+ const pkg = JSON.parse(readFileSync(candidate, 'utf-8')) as {
96
+ workspaces?: string[] | { packages?: string[] };
97
+ };
98
+ const workspaces = Array.isArray(pkg.workspaces)
99
+ ? pkg.workspaces
100
+ : (pkg.workspaces?.packages ?? []);
101
+ if (workspaces.length > 0) return dir;
102
+ } catch {
103
+ /* malformed package.json — keep walking */
104
+ }
105
+ }
106
+ const parent = dirname(dir);
107
+ if (parent === dir) return null;
108
+ dir = parent;
109
+ }
110
+ }
111
+
112
+ /**
113
+ * Read every `@celilo/*` package.json in the workspace and return the
114
+ * version each one declares. Used to compare against what the running
115
+ * CLI actually loaded.
116
+ */
117
+ function collectWorkspaceVersions(workspaceRoot: string): WorkspaceVersion[] {
118
+ const out: WorkspaceVersion[] = [];
119
+ // Hard-code the two glob roots used in this monorepo to avoid pulling
120
+ // in a glob library. Both directories are scanned the same way: read
121
+ // each immediate child, look for a package.json that names a @celilo/*
122
+ // package.
123
+ for (const dir of ['packages', 'apps']) {
124
+ const root = join(workspaceRoot, dir);
125
+ if (!existsSync(root)) continue;
126
+ for (const entry of readSubdirs(root)) {
127
+ const pkgJsonPath = join(root, entry, 'package.json');
128
+ if (!existsSync(pkgJsonPath)) continue;
129
+ try {
130
+ const pkg = JSON.parse(readFileSync(pkgJsonPath, 'utf-8')) as {
131
+ name?: string;
132
+ version?: string;
133
+ };
134
+ if (pkg.name?.startsWith('@celilo/') && pkg.version) {
135
+ out.push({ name: pkg.name, version: pkg.version, path: join(root, entry) });
136
+ }
137
+ } catch {
138
+ /* skip malformed */
139
+ }
140
+ }
141
+ }
142
+ return out;
143
+ }
144
+
145
+ function readSubdirs(dir: string): string[] {
146
+ // node:fs readdirSync — keep stdlib, no extra deps.
147
+ // Hidden dirs filtered out.
148
+ try {
149
+ const fs = require('node:fs') as typeof import('node:fs');
150
+ return fs
151
+ .readdirSync(dir, { withFileTypes: true })
152
+ .filter((d) => d.isDirectory() && !d.name.startsWith('.'))
153
+ .map((d) => d.name);
154
+ } catch {
155
+ return [];
156
+ }
157
+ }
158
+
159
+ /**
160
+ * Compare two semver-ish version strings. Returns -1 if a < b, 0 if
161
+ * equal, 1 if a > b. Tolerates non-numeric prerelease tags by comparing
162
+ * them as strings after the numeric segments.
163
+ *
164
+ * Exported for unit testing.
165
+ */
166
+ export function compareVersions(a: string, b: string): number {
167
+ const split = (v: string) => v.replace(/^[v=]+/, '').split(/[.+-]/);
168
+ const aParts = split(a);
169
+ const bParts = split(b);
170
+ const len = Math.max(aParts.length, bParts.length);
171
+ for (let i = 0; i < len; i++) {
172
+ const ap = aParts[i] ?? '0';
173
+ const bp = bParts[i] ?? '0';
174
+ const an = Number(ap);
175
+ const bn = Number(bp);
176
+ if (!Number.isNaN(an) && !Number.isNaN(bn)) {
177
+ if (an !== bn) return an < bn ? -1 : 1;
178
+ } else if (ap !== bp) {
179
+ return ap < bp ? -1 : 1;
180
+ }
181
+ }
182
+ return 0;
183
+ }
184
+
185
// A dependency whose loaded copy is older than the workspace copy —
// the unit of work that `--fix` repairs via `bun link`.
interface DriftedDep {
  name: string;             // package name, e.g. '@celilo/cli-display'
  loadedVersion: string;    // version the running CLI actually resolved
  workspaceVersion: string; // newer version declared in the workspace
  workspacePath: string;    // directory of the workspace package
}
191
+
192
+ /**
193
+ * Run a command, capture stdout/stderr, return whether it succeeded.
194
+ * Used for the `bun link` calls that --fix orchestrates.
195
+ */
196
+ function runCommand(
197
+ cmd: string,
198
+ args: string[],
199
+ cwd: string,
200
+ ): { ok: boolean; stdout: string; stderr: string } {
201
+ const r = spawnSync(cmd, args, { cwd, encoding: 'utf-8' });
202
+ return {
203
+ ok: r.status === 0,
204
+ stdout: (r.stdout ?? '').trim(),
205
+ stderr: (r.stderr ?? '').trim(),
206
+ };
207
+ }
208
+
209
/**
 * Repair drift by `bun link`-ing each drifted package from the
 * workspace into the running CLI's package directory.
 *
 * Two-step bun link workflow:
 *   1. From each workspace package dir: `bun link` registers it
 *      globally under its package name.
 *   2. From the running CLI's package dir: `bun link <pkgname>`
 *      replaces the resolved copy with the symlink to the workspace.
 *
 * `bun unlink` reverses both steps if the user wants to revert.
 *
 * Only safe to run when running from a globally-installed CLI; from
 * a workspace TS-source invocation there's nothing to repair.
 *
 * @param drifted packages whose loaded version lags the workspace
 * @param cliRoot directory of the running CLI package (the link target)
 * @returns human-readable progress lines for the doctor report
 */
function applyFix(drifted: DriftedDep[], cliRoot: string): string[] {
  const lines: string[] = [];
  for (const d of drifted) {
    lines.push(` ${d.name}: linking ${d.workspaceVersion} from ${d.workspacePath}`);

    // Step 1: register the workspace package globally under its name.
    const reg = runCommand('bun', ['link'], d.workspacePath);
    if (!reg.ok) {
      // Registration failed — nothing was linked; report and move to the next package.
      lines.push(
        ` ${ANSI.red}✗${ANSI.reset} register failed: ${reg.stderr || reg.stdout || 'no output'}`,
      );
      continue;
    }

    // Step 2: point the CLI's resolved copy at the registered workspace package.
    const link = runCommand('bun', ['link', d.name], cliRoot);
    if (!link.ok) {
      lines.push(
        ` ${ANSI.red}✗${ANSI.reset} link failed: ${link.stderr || link.stdout || 'no output'}`,
      );
      continue;
    }

    lines.push(` ${ANSI.green}✔${ANSI.reset} linked`);
  }
  return lines;
}
249
+
250
/**
 * Entry point for `celilo doctor`.
 *
 * Builds a human-readable drift report comparing each `@celilo/*`
 * dependency's loaded version against the workspace detected from the
 * cwd (if any), optionally repairing drift when `--fix` is set.
 *
 * @param _args positional arguments (unused)
 * @param flags parsed CLI flags; only the boolean `fix` flag is read
 * @returns success with the report when clean (or after a --fix run);
 *          failure with a drift summary when drift or unresolved
 *          packages were found, so scripts can gate on the exit status
 */
export async function handleDoctor(
  _args: string[],
  flags: Record<string, string | boolean>,
): Promise<CommandResult> {
  const lines: string[] = [];

  const cliVersion = (cliPkg as { version: string }).version;
  const cliName = (cliPkg as { name: string }).name;
  // Where is *this* file loaded from? Anchors the "running from" line.
  // NOTE(review): `new URL(...).pathname` produces a leading-slash path on
  // Windows (e.g. /C:/...); `fileURLToPath` would be safer — confirm
  // whether Windows support matters here.
  const cliRoot = resolve(dirname(new URL(import.meta.url).pathname), '../../..');
  lines.push(`${cliName} ${cliVersion}`);
  lines.push(`${ANSI.dim}running from ${cliRoot}${ANSI.reset}`);
  lines.push('');

  // Workspace detection is cwd-relative: running the global CLI from
  // inside a checkout is exactly the drift scenario this command exists for.
  const workspaceRoot = findWorkspaceRoot(process.cwd());
  const workspaceVersions = workspaceRoot ? collectWorkspaceVersions(workspaceRoot) : [];
  const workspaceMap = new Map(workspaceVersions.map((w) => [w.name, w]));

  if (workspaceRoot) {
    lines.push(`workspace: ${workspaceRoot}`);
  } else {
    lines.push(`${ANSI.dim}no workspace detected from ${process.cwd()}${ANSI.reset}`);
  }
  lines.push('');

  const deps = inspectCeliloDeps();
  // Track whether anything is amiss so we can summarize and exit non-zero.
  let driftCount = 0;
  let unresolvedCount = 0;
  const drifted: DriftedDep[] = [];

  // Compute column widths for a clean table.
  const nameCol = Math.max(...deps.map((d) => d.name.length), 12);
  const declCol = Math.max(...deps.map((d) => d.declaredRange.length), 8);
  const loadedCol = Math.max(...deps.map((d) => (d.loadedVersion ?? '?').length), 8);

  lines.push(
    ` ${'package'.padEnd(nameCol)} ${'declares'.padEnd(declCol)} ${'loaded'.padEnd(loadedCol)} notes`,
  );
  lines.push(` ${'-'.repeat(nameCol)} ${'-'.repeat(declCol)} ${'-'.repeat(loadedCol)} -----`);

  for (const dep of deps) {
    const loaded = dep.loadedVersion ?? '?';
    const notes: string[] = [];
    let glyph = `${ANSI.green}✔${ANSI.reset}`;

    if (dep.resolveError) {
      // Could not resolve the package at all — show only the first error line.
      glyph = `${ANSI.red}✗${ANSI.reset}`;
      notes.push(`unresolved: ${dep.resolveError.split('\n')[0]}`);
      unresolvedCount++;
    } else if (dep.loadedVersion) {
      const ws = workspaceMap.get(dep.name);
      if (ws) {
        const cmp = compareVersions(dep.loadedVersion, ws.version);
        if (cmp < 0) {
          // The canonical drift case: loaded copy lags the workspace copy.
          glyph = `${ANSI.yellow}⚠${ANSI.reset}`;
          notes.push(`workspace has ${ws.version} — running CLI is behind`);
          driftCount++;
          drifted.push({
            name: dep.name,
            loadedVersion: dep.loadedVersion,
            workspaceVersion: ws.version,
            workspacePath: ws.path,
          });
        } else if (cmp > 0) {
          // Loaded copy is ahead of the workspace — informational, not drift.
          notes.push(`workspace has ${ws.version} (older — unpublished bump?)`);
        }
      }
      if (dep.loadedFrom) {
        // Shorten $HOME to ~ for readability.
        const shortPath = dep.loadedFrom.replace(process.env.HOME ?? '', '~');
        notes.push(`from ${shortPath}`);
      }
    }

    lines.push(
      `${glyph} ${dep.name.padEnd(nameCol)} ${dep.declaredRange.padEnd(declCol)} ${loaded.padEnd(loadedCol)} ${ANSI.dim}${notes.join('; ')}${ANSI.reset}`,
    );
  }

  // Workspace packages that the CLI doesn't depend on — surface them so
  // the operator sees the full set of @celilo/* in play.
  const declaredNames = new Set(deps.map((d) => d.name));
  const extras = workspaceVersions.filter((w) => !declaredNames.has(w.name));
  if (extras.length > 0) {
    lines.push('');
    lines.push(`${ANSI.dim}other workspace packages (not depended on by this CLI):${ANSI.reset}`);
    for (const w of extras) {
      lines.push(` ${w.name} ${w.version} ${ANSI.dim}${w.path}${ANSI.reset}`);
    }
  }

  lines.push('');

  const fix = flags.fix === true;

  if (fix && drifted.length > 0) {
    // --fix with drift: run the bun-link repair. The overall result stays
    // success even when individual links fail — the per-package lines
    // emitted by applyFix carry the detail.
    lines.push(`Repairing ${drifted.length} drifted package(s) with \`bun link\`:`);
    lines.push(...applyFix(drifted, cliRoot));
    lines.push('');
    lines.push(
      `${ANSI.dim}Re-run \`celilo doctor\` to verify; \`bun unlink\` from each workspace dir reverses.${ANSI.reset}`,
    );
    return {
      success: true,
      message: lines.join('\n'),
      rawOutput: true,
    };
  }

  if (fix && drifted.length === 0) {
    lines.push(`${ANSI.dim}--fix: nothing to repair.${ANSI.reset}`);
  }

  if (driftCount > 0 || unresolvedCount > 0) {
    // Drift or resolution failures detected without --fix: fail so
    // callers/scripts can gate on the result; the full report rides in
    // `details`.
    const summary: string[] = [];
    if (driftCount > 0) summary.push(`${driftCount} package(s) behind workspace`);
    if (unresolvedCount > 0) summary.push(`${unresolvedCount} unresolved`);
    if (drifted.length > 0) {
      lines.push(
        `${ANSI.dim}Run \`celilo doctor --fix\` to bun-link drifted packages from the workspace.${ANSI.reset}`,
      );
    }
    return {
      success: false,
      error: `Drift detected: ${summary.join(', ')}`,
      details: lines.join('\n'),
    };
  }

  lines.push(`${ANSI.green}OK${ANSI.reset} — no drift detected`);
  return {
    success: true,
    message: lines.join('\n'),
    rawOutput: true,
  };
}
@@ -133,7 +133,43 @@ export async function handleModuleGenerate(
133
133
  const moduleSecretsMissing = await validateModuleSecrets(moduleId, db);
134
134
 
135
135
  if (moduleSecretsMissing.length > 0) {
136
- // Always try to interview for missing secrets
136
+ // interviewForMissingSecrets fires `secret.required.*` bus events for
137
+ // user_provided secrets and waits forever for a responder. In a
138
+ // non-interactive context (no TTY, no piped responder), that's a
139
+ // silent hang. Probe the bus first; if nothing answers, fail fast
140
+ // with an actionable error instead of stalling indefinitely.
141
+ //
142
+ // Auto-generated secrets (manifest `generate:` field or schema
143
+ // source: 'generated') don't go through the bus, so missing-but-
144
+ // auto-generatable doesn't need a responder. Filter those out
145
+ // before deciding whether to probe.
146
+ if (!process.stdin.isTTY) {
147
+ const { getSecretMetadata } = await import('../../services/secret-schema-loader');
148
+ const promptable: typeof moduleSecretsMissing = [];
149
+ for (const s of moduleSecretsMissing) {
150
+ if (s.generate) continue; // manifest-declared auto-generate
151
+ const meta = await getSecretMetadata(moduleId, s.name, db);
152
+ if (meta?.source === 'generated') continue; // schema-declared auto-generate
153
+ promptable.push(s);
154
+ }
155
+
156
+ if (promptable.length > 0) {
157
+ const { probeForResponder } = await import('../../services/responder-probe');
158
+ const { getEventBusPath } = await import('../../config/paths');
159
+ const responderAvailable = await probeForResponder(getEventBusPath());
160
+ if (!responderAvailable) {
161
+ const names = promptable.map((s) => s.name).join(', ');
162
+ const setCommands = promptable
163
+ .map((s) => ` celilo module secret set ${moduleId} ${s.name} <value>`)
164
+ .join('\n');
165
+ return {
166
+ success: false,
167
+ error: `Missing required secret(s): ${names}\n\nNo responder is running and stdin isn't a TTY, so module generate can't prompt for them. Either:\n 1. Run interactively (in a terminal)\n 2. Pre-set the secrets:\n${setCommands}\n 3. Run a responder in another shell:\n celilo events respond --values values.json`,
168
+ };
169
+ }
170
+ }
171
+ }
172
+
137
173
  // Auto-generated secrets work in non-interactive mode
138
174
  const result = await interviewForMissingSecrets(moduleId, moduleSecretsMissing, db);
139
175
  if (!result.success) {
@@ -28,20 +28,22 @@ export async function getCompletions(words: string[], current: number): Promise<
28
28
  // currentIndex === 0 means we're completing the first word (the command)
29
29
  if (currentIndex === 0) {
30
30
  const commands = [
31
+ 'audit',
31
32
  'backup',
32
33
  'capability',
34
+ 'completion',
35
+ 'doctor',
36
+ 'events',
33
37
  'help',
34
38
  'hook',
35
- 'package',
39
+ 'ipam',
40
+ 'machine',
36
41
  'module',
42
+ 'package',
37
43
  'service',
44
+ 'status',
38
45
  'storage',
39
- 'machine',
40
46
  'system',
41
- 'ipam',
42
- 'completion',
43
- 'status',
44
- 'audit',
45
47
  'version',
46
48
  ];
47
49
  return filterSuggestions(commands, args[0] || '');
@@ -66,6 +68,28 @@ export async function getCompletions(words: string[], current: number): Promise<
66
68
  return filterSuggestions(capabilityNames, args[2] || '');
67
69
  }
68
70
 
71
+ // Events subcommands — keep this list in sync with command-registry.ts.
72
+ if (command === 'events' && currentIndex === 1) {
73
+ const subcommands = [
74
+ 'status',
75
+ 'tail',
76
+ 'list-subscribers',
77
+ 'list-pending',
78
+ 'drain',
79
+ 'run',
80
+ 'emit',
81
+ 'ack',
82
+ 'fail',
83
+ 'repair',
84
+ 'resume',
85
+ 'respond',
86
+ 'install-daemon',
87
+ 'uninstall-daemon',
88
+ 'show-daemon',
89
+ ];
90
+ return filterSuggestions(subcommands, args[1] || '');
91
+ }
92
+
69
93
  // Hook subcommands
70
94
  if (command === 'hook' && currentIndex === 1) {
71
95
  const subcommands = ['run'];
package/src/cli/index.ts CHANGED
@@ -10,6 +10,7 @@ import { COMMANDS, type CommandDef } from './command-registry';
10
10
  import { handleCapabilityInfo } from './commands/capability-info';
11
11
  import { handleCapabilityList } from './commands/capability-list';
12
12
  import { handleCompletion } from './commands/completion';
13
+ import { handleDoctor } from './commands/doctor';
13
14
  import {
14
15
  handleEventsAck,
15
16
  handleEventsDrain,
@@ -147,7 +148,9 @@ Usage:
147
148
 
148
149
  Commands:
149
150
  status Show system and module status
151
+ doctor Diagnose @celilo/* version drift between the running CLI and the workspace
150
152
  audit Top-level alias for 'system audit'
153
+ events SQLite event-bus operations (status, tail, run dispatcher, etc.)
151
154
  capability View registered module capabilities
152
155
  package Create distributable .netapp packages from module source
153
156
  module Manage modules (import, list, configure, build, generate)
@@ -900,6 +903,11 @@ export async function runCli(argv: string[]): Promise<CommandResult> {
900
903
  return handleStatus();
901
904
  }
902
905
 
906
+ // Handle doctor command
907
+ if (parsed.command === 'doctor') {
908
+ return handleDoctor(parsed.args, parsed.flags);
909
+ }
910
+
903
911
  // Top-level alias: `celilo audit` → `celilo system audit`
904
912
  if (parsed.command === 'audit') {
905
913
  return handleSystemAudit(parsed.args, parsed.flags);
@@ -429,62 +429,100 @@ async function installScriptDependencies(
429
429
  export async function importModule(options: ModuleImportOptions): Promise<ModuleImportResult> {
430
430
  const { sourcePath, targetBasePath = getDefaultTargetBase(), db = getDb(), flags = {} } = options;
431
431
 
432
- let actualSourcePath = sourcePath;
432
+ // Directory imports route through the packager: build a temporary .netapp
433
+ // and re-enter through the package path. This makes the .netapp pipeline
434
+ // the single import path — no second flow that drifts. The packager is
435
+ // also the only place a manifest build runs, with CELILO_MODULE_SOURCE_DIR
436
+ // pointing at the unstaged source so sibling-package paths in a monorepo
437
+ // resolve (e.g. celilo-registry's `cd $CELILO_MODULE_SOURCE_DIR/../../packages/registry-server`).
438
+ // After this point the module is detached from the source tree, so a
439
+ // deploy-time rebuild can't work — bake the artifacts in here, once.
440
+ if (!sourcePath.endsWith('.netapp')) {
441
+ const dirError = validateModuleDirectory(sourcePath);
442
+ if (dirError) return { success: false, error: dirError };
443
+
444
+ const manifestResult = await readModuleManifest(sourcePath);
445
+ if (!manifestResult.success) return { success: false, error: manifestResult.error };
446
+
447
+ if (moduleExists(manifestResult.manifest.id, db)) {
448
+ return {
449
+ success: false,
450
+ error: `Module '${manifestResult.manifest.id}' already exists. Use update or remove it first.`,
451
+ };
452
+ }
453
+
454
+ const { mkdtempSync, rmSync } = await import('node:fs');
455
+ const { tmpdir } = await import('node:os');
456
+ const tempPkgDir = mkdtempSync(join(tmpdir(), 'celilo-import-pkg-'));
457
+ const tempPkgPath = join(tempPkgDir, `${manifestResult.manifest.id}.netapp`);
458
+
459
+ try {
460
+ const { buildModule } = await import('./packaging/build');
461
+ const buildResult = await buildModule({ sourceDir: sourcePath, outputPath: tempPkgPath });
462
+ if (!buildResult.success) {
463
+ return {
464
+ success: false,
465
+ error: buildResult.error || 'Failed to package module for import',
466
+ };
467
+ }
468
+ return await importModule({ ...options, sourcePath: tempPkgPath });
469
+ } finally {
470
+ rmSync(tempPkgDir, { recursive: true, force: true });
471
+ }
472
+ }
473
+
433
474
  let tempDir: string | null = null;
434
475
  let checksums: Record<string, string> | null = null;
435
476
  let signature: string | null = null;
436
477
 
437
478
  try {
438
- // Check if source is a .netapp package
439
- if (sourcePath.endsWith('.netapp')) {
440
- // Extract and verify package
441
- const extractResult = await extractPackage(sourcePath);
442
- if (!extractResult.success || !extractResult.tempDir) {
443
- return { success: false, error: extractResult.error || 'Failed to extract package' };
444
- }
479
+ // sourcePath always ends with .netapp at this point — directory inputs
480
+ // were re-routed through the packager above and recursed back in here.
481
+ const extractResult = await extractPackage(sourcePath);
482
+ if (!extractResult.success || !extractResult.tempDir) {
483
+ return { success: false, error: extractResult.error || 'Failed to extract package' };
484
+ }
445
485
 
446
- tempDir = extractResult.tempDir;
447
-
448
- // Verify package integrity (unless --skip-verify)
449
- const skipVerify = flags['skip-verify'] === true;
450
- if (skipVerify) {
451
- log.warn('Skipping package signature verification (--skip-verify)');
452
- } else {
453
- const verifyResult = await verifyPackageIntegrity(tempDir);
454
- if (!verifyResult.success) {
455
- await cleanupTempDir(tempDir);
456
- return {
457
- success: false,
458
- error: verifyResult.error || 'Package integrity verification failed',
459
- };
460
- }
461
- }
486
+ tempDir = extractResult.tempDir;
462
487
 
463
- // Read checksums and signature for database storage (both optional with --skip-verify)
464
- try {
465
- const checksumsJson = await readFile(join(tempDir, 'checksums.json'), 'utf-8');
466
- const ChecksumsFileSchema = z.object({
467
- files: z.record(z.string(), z.string()),
468
- });
469
- const checksumsData = parseJsonWithValidation(
470
- checksumsJson,
471
- ChecksumsFileSchema,
472
- 'package checksums.json',
473
- );
474
- checksums = checksumsData.files;
475
- } catch (err) {
476
- if (!skipVerify) throw err;
477
- }
478
- try {
479
- signature = await readFile(join(tempDir, 'signature.sig'), 'utf-8');
480
- } catch (err) {
481
- if (!skipVerify) throw err;
488
+ // Verify package integrity (unless --skip-verify)
489
+ const skipVerify = flags['skip-verify'] === true;
490
+ if (skipVerify) {
491
+ log.warn('Skipping package signature verification (--skip-verify)');
492
+ } else {
493
+ const verifyResult = await verifyPackageIntegrity(tempDir);
494
+ if (!verifyResult.success) {
495
+ await cleanupTempDir(tempDir);
496
+ return {
497
+ success: false,
498
+ error: verifyResult.error || 'Package integrity verification failed',
499
+ };
482
500
  }
501
+ }
483
502
 
484
- // Use extracted directory as source
485
- actualSourcePath = tempDir;
503
+ // Read checksums and signature for database storage (both optional with --skip-verify)
504
+ try {
505
+ const checksumsJson = await readFile(join(tempDir, 'checksums.json'), 'utf-8');
506
+ const ChecksumsFileSchema = z.object({
507
+ files: z.record(z.string(), z.string()),
508
+ });
509
+ const checksumsData = parseJsonWithValidation(
510
+ checksumsJson,
511
+ ChecksumsFileSchema,
512
+ 'package checksums.json',
513
+ );
514
+ checksums = checksumsData.files;
515
+ } catch (err) {
516
+ if (!skipVerify) throw err;
517
+ }
518
+ try {
519
+ signature = await readFile(join(tempDir, 'signature.sig'), 'utf-8');
520
+ } catch (err) {
521
+ if (!skipVerify) throw err;
486
522
  }
487
523
 
524
+ const actualSourcePath = tempDir;
525
+
488
526
  // Policy: Validate directory structure
489
527
  const dirError = validateModuleDirectory(actualSourcePath);
490
528
  if (dirError) {
@@ -639,29 +677,14 @@ export async function importModule(options: ModuleImportOptions): Promise<Module
639
677
  );
640
678
  }
641
679
 
642
- // Execution: Store integrity data
643
- // For .netapp packages: store checksums + signature
644
- // For directory imports: calculate and store checksums (no signature)
680
+ // Execution: Store integrity data from the package's checksums + signature.
681
+ // Directory imports go through the packager too (see top of importModule),
682
+ // so by the time we reach this point we always have these.
645
683
  try {
646
- let finalChecksums: Record<string, string>;
647
- let finalSignature: string | null = null;
648
-
649
- if (checksums && signature) {
650
- // From .netapp package - already verified
651
- finalChecksums = checksums;
652
- finalSignature = signature.trim();
653
- } else {
654
- // From directory - calculate checksums now
655
- const { computeChecksums } = await import('./packaging/build');
656
- const checksumsData = await computeChecksums(targetPath);
657
- finalChecksums = checksumsData.files;
658
- // No signature for directory imports
659
- }
660
-
661
684
  const integrityData: NewModuleIntegrity = {
662
685
  moduleId: manifest.id,
663
- checksums: finalChecksums,
664
- signature: finalSignature,
686
+ checksums: checksums ?? {},
687
+ signature: signature?.trim() ?? null,
665
688
  };
666
689
  db.insert(moduleIntegrity).values(integrityData).run();
667
690
  } catch (error) {
@@ -669,21 +692,22 @@ export async function importModule(options: ModuleImportOptions): Promise<Module
669
692
  console.warn('Warning: Failed to store module integrity data', error);
670
693
  }
671
694
 
672
- // For .netapp packages with build artifacts: record a successful build
673
- // so that `module generate` doesn't require a rebuild
674
- if (sourcePath.endsWith('.netapp') && manifest.build?.artifacts) {
675
- const { existsSync: fileExists } = await import('node:fs');
676
- const artifactPaths = manifest.build.artifacts
695
+ // Record a successful build entry so deploy-time validation sees the
696
+ // artifacts in place and skips rebuild. The packager runs the manifest
697
+ // build into the staged tree before tar, so any declared artifacts are
698
+ // already on disk under targetPath at this point.
699
+ if (manifest.build?.artifacts) {
700
+ const recordedArtifactPaths = manifest.build.artifacts
677
701
  .map((a: string) => join(targetPath, a))
678
- .filter((p: string) => fileExists(p));
702
+ .filter((p: string) => existsSync(p));
679
703
 
680
- if (artifactPaths.length > 0) {
704
+ if (recordedArtifactPaths.length > 0) {
681
705
  const { moduleBuilds } = await import('../db/schema');
682
706
  db.insert(moduleBuilds)
683
707
  .values({
684
708
  moduleId: manifest.id,
685
709
  version: manifest.version,
686
- artifacts: artifactPaths,
710
+ artifacts: recordedArtifactPaths,
687
711
  status: 'success',
688
712
  buildLog: 'Pre-built artifacts from .netapp package',
689
713
  })
@@ -4,6 +4,8 @@
4
4
  * Validates module readiness and auto-prepares (generate/build) if needed
5
5
  */
6
6
 
7
+ import { existsSync } from 'node:fs';
8
+ import { join } from 'node:path';
7
9
  import { compareConsumerToProvider } from '@celilo/capabilities';
8
10
  import { and, eq } from 'drizzle-orm';
9
11
  import type { DbClient } from '../db/client';
@@ -123,10 +125,22 @@ export async function validateAndPrepareDeployment(
123
125
  // Auto-build if required and not built
124
126
  if (manifest.build) {
125
127
  const buildStatus = await getModuleBuildStatus(moduleId, db);
128
+ // Cross-check the manifest's declared artifacts against the actual
129
+ // filesystem in addition to whatever's in the build record. A "success"
130
+ // record with an empty artifact list (e.g. from a build that exited 0
131
+ // but produced nothing, or a synthetic record from a partial .netapp
132
+ // import) would otherwise pass `verifyArtifactsExist([])` trivially and
133
+ // let the deploy run on without binaries — exactly how Ansible ends up
134
+ // failing on a missing `src:` file.
135
+ const declaredArtifacts = manifest.build.artifacts ?? [];
136
+ const declaredArtifactsOk = declaredArtifacts.every((rel) =>
137
+ existsSync(join(module.sourcePath, rel)),
138
+ );
126
139
  const needsBuild =
127
140
  !buildStatus ||
128
141
  buildStatus.status !== 'success' ||
129
- !verifyArtifactsExist(buildStatus.artifacts);
142
+ !verifyArtifactsExist(buildStatus.artifacts) ||
143
+ (declaredArtifacts.length > 0 && !declaredArtifactsOk);
130
144
 
131
145
  if (needsBuild) {
132
146
  const buildResult = await buildModuleFromSource(moduleId, db);
@@ -207,6 +207,44 @@ describe('Module Build Service', () => {
207
207
  expect(buildRecord?.status).toBe('success');
208
208
  expect(buildRecord?.environment).toBe('system'); // No flake.nix
209
209
  }, 10000); // 10 second timeout for ansible execution
210
+
211
+ // Regression: a build.command that references $CELILO_MODULE_SOURCE_DIR
212
+ // (the variable celilo-registry's manifest uses to find sibling packages
213
+ // in the monorepo) must work the same at deploy time as at packaging
214
+ // time. Before the fix, executeBuildCommand didn't set the env var, so
215
+ // any module that imported from a local directory and relied on it
216
+ // would silently fail before producing artifacts.
217
+ test('build.command receives CELILO_MODULE_SOURCE_DIR pointing at modulePath', async () => {
218
+ await mkdir(TEST_MODULE_DIR, { recursive: true });
219
+
220
+ db.insert(modules)
221
+ .values({
222
+ id: 'env-test',
223
+ name: 'Env Test',
224
+ version: '1.0.0',
225
+ sourcePath: TEST_MODULE_DIR,
226
+ manifestData: {
227
+ id: 'env-test',
228
+ name: 'Env Test',
229
+ version: '1.0.0',
230
+ build: {
231
+ // Write the env var's value to a file. The build runs with
232
+ // cwd=modulePath, so a bare relative `built.marker` always
233
+ // lands there — but we want to prove the env var itself was
234
+ // set, so we fail explicitly if it's empty.
235
+ command:
236
+ 'test -n "$CELILO_MODULE_SOURCE_DIR" && echo "$CELILO_MODULE_SOURCE_DIR" > built.marker',
237
+ artifacts: ['built.marker'],
238
+ },
239
+ },
240
+ })
241
+ .run();
242
+
243
+ const result = await buildModuleFromSource('env-test', db);
244
+
245
+ expect(result.success).toBe(true);
246
+ expect(existsSync(`${TEST_MODULE_DIR}/built.marker`)).toBe(true);
247
+ }, 10000);
210
248
  });
211
249
 
212
250
  describe('getModuleBuildStatus', () => {
@@ -104,6 +104,11 @@ async function executeBuildCommand(
104
104
  command,
105
105
  args: [],
106
106
  cwd: modulePath,
107
+ // Match packaging/build.ts so deploy-time builds work the same as
108
+ // package-time builds. celilo-registry's manifest.build.command uses
109
+ // $CELILO_MODULE_SOURCE_DIR to find sibling packages in the monorepo;
110
+ // without this, that cd resolves to "/" and the build silently fails.
111
+ env: { CELILO_MODULE_SOURCE_DIR: modulePath },
107
112
  });
108
113
 
109
114
  return { success: result.success, output: result.output, error: result.error };
@@ -143,6 +148,7 @@ async function executeBuildScript(
143
148
  command,
144
149
  args,
145
150
  cwd: modulePath,
151
+ env: { CELILO_MODULE_SOURCE_DIR: modulePath },
146
152
  });
147
153
 
148
154
  return { success: result.success, output: result.output, error: result.error };
@@ -35,6 +35,7 @@ import { executeTerraform, parseTerraformOutputs } from './deploy-terraform';
35
35
  import { validateAndPrepareDeployment } from './deploy-validation';
36
36
  import { resolveInfrastructureVariables } from './infrastructure-variable-resolver';
37
37
  import { findMachineForModule } from './machine-pool';
38
+ import { checkProxmoxReachable, formatProxmoxUnreachableError } from './proxmox-preflight';
38
39
  import { deleteTemporarySshKey, writeTemporarySshKey } from './ssh-key-manager';
39
40
  import { buildTerraformEnvForService } from './terraform-env';
40
41
 
@@ -814,6 +815,20 @@ async function deployModuleImpl(
814
815
  };
815
816
  }
816
817
  terraformEnvVars = await buildTerraformEnvForService(plan.infrastructure.serviceId);
818
+
819
+ // Fail fast if the proxmox API host is unreachable (e.g. VPN
820
+ // down). The terraform provider would otherwise stall on a
821
+ // SYN_SENT connect for ~60s with no visible feedback.
822
+ if (service.providerName === 'proxmox' && terraformEnvVars.TF_VAR_proxmox_api_url) {
823
+ const probe = await checkProxmoxReachable(terraformEnvVars.TF_VAR_proxmox_api_url);
824
+ if (!probe.reachable) {
825
+ return {
826
+ success: false,
827
+ error: formatProxmoxUnreachableError(probe),
828
+ phases,
829
+ };
830
+ }
831
+ }
817
832
  }
818
833
 
819
834
  const terraformResult = await executeTerraform(generatedPath, phases, terraformEnvVars, {
@@ -276,6 +276,20 @@ export function startProgrammaticResponder(
276
276
  answered.push({ type: event.type, key: lookupKey });
277
277
  });
278
278
 
279
+ // Liveness probe: a non-interactive caller (e.g. `module generate`
280
+ // with no TTY) emits `responder.probe` to detect whether any
281
+ // responder is listening before calling busInterview (which waits
282
+ // forever). We reply with our kind so the caller's probe completes
283
+ // and the deploy/generate proceeds.
284
+ const probeWatch = bus.watch('responder.probe', async (event) => {
285
+ if (event.replyFor !== null) return;
286
+ bus.emitRaw(
287
+ `${event.type}.reply`,
288
+ { kind: 'programmatic', emittedBy: me },
289
+ { replyFor: event.id, emittedBy: me },
290
+ );
291
+ });
292
+
279
293
  return {
280
294
  lastActivityAt: () => lastActivityAt,
281
295
  eventCount: () => answered.length + missed.length,
@@ -288,6 +302,7 @@ export function startProgrammaticResponder(
288
302
  configWatch.close();
289
303
  secretWatch.close();
290
304
  ensureWatch.close();
305
+ probeWatch.close();
291
306
  bus.close();
292
307
  },
293
308
  };
@@ -0,0 +1,63 @@
1
+ import { afterEach, describe, expect, test } from 'bun:test';
2
+ import { type AddressInfo, type Server, createServer } from 'node:net';
3
+ import { checkProxmoxReachable, formatProxmoxUnreachableError } from './proxmox-preflight';
4
+
5
describe('checkProxmoxReachable', () => {
  // Shared TCP server for the happy-path test; torn down after each
  // test so a leaked listener can't bleed into the next case.
  let server: Server | null = null;

  afterEach(async () => {
    if (server) {
      // server.close only fires its callback once pending sockets drain;
      // wrap it in a promise so teardown actually waits for that.
      await new Promise<void>((resolve) => server?.close(() => resolve()));
      server = null;
    }
  });

  test('returns reachable=true when the host accepts the connection', async () => {
    server = createServer();
    // Listen on port 0 so the OS assigns a free ephemeral port — avoids
    // collisions with anything else running on the test machine.
    await new Promise<void>((resolve) => server?.listen(0, '127.0.0.1', () => resolve()));
    const port = (server.address() as AddressInfo).port;

    const result = await checkProxmoxReachable(`https://127.0.0.1:${port}/api2/json`, 1000);
    expect(result.reachable).toBe(true);
    expect(result.host).toBe('127.0.0.1');
    expect(result.port).toBe(port);
    expect(result.error).toBeUndefined();
  });

  test('returns reachable=false on connection refused', async () => {
    // Port 1 is reserved and reliably refuses on loopback.
    // NOTE(review): on a host that silently drops instead of refusing,
    // this would surface as a timeout rather than ECONNREFUSED — the
    // assertions below (unreachable + error defined) still hold either way.
    const result = await checkProxmoxReachable('https://127.0.0.1:1/api2/json', 1000);
    expect(result.reachable).toBe(false);
    expect(result.host).toBe('127.0.0.1');
    expect(result.port).toBe(1);
    expect(result.error).toBeDefined();
  });

  test('returns reachable=false with timeout error on stalled connect', async () => {
    // 192.0.2.1 is in TEST-NET-1 (RFC 5737) — guaranteed unroutable.
    // SYN_SENT will hang until our timeout fires.
    const result = await checkProxmoxReachable('https://192.0.2.1:8006/api2/json', 200);
    expect(result.reachable).toBe(false);
    expect(result.error).toMatch(/timed out/i);
  }, 5000);

  test('returns error for an invalid URL', async () => {
    // Unparseable input short-circuits before any socket work; the
    // probe reports it via the error field instead of throwing.
    const result = await checkProxmoxReachable('not-a-url', 100);
    expect(result.reachable).toBe(false);
    expect(result.error).toMatch(/invalid/i);
  });
});
50
+
51
describe('formatProxmoxUnreachableError', () => {
  // The message is user-facing remediation text; pin the three pieces a
  // stuck operator needs: the endpoint, the likely cause (VPN), and the
  // underlying socket error detail.
  test('mentions VPN and the host:port in the message', () => {
    const message = formatProxmoxUnreachableError({
      reachable: false,
      host: '10.0.30.102',
      port: 8006,
      error: 'Connection refused',
    });
    expect(message).toContain('10.0.30.102:8006');
    expect(message).toContain('VPN');
    expect(message).toContain('Connection refused');
  });
});
@@ -0,0 +1,100 @@
1
+ /**
2
+ * Proxmox preflight reachability check.
3
+ *
4
+ * The Proxmox Terraform provider, when given an unreachable api_url,
5
+ * sits in TCP `SYN_SENT` until the kernel-level connect timeout fires
6
+ * — typically 30–75s on macOS. During that window the user sees a
7
+ * frozen progress display and no clue what's wrong (common cause: VPN
8
+ * is down). We probe the api_url's host:port with a short timeout
9
+ * before invoking terraform so we can fail fast with an actionable
10
+ * message instead of letting the provider hang.
11
+ *
12
+ * Scoped intentionally to the proxmox container-service deploy path —
13
+ * other terraform invocations (DigitalOcean, system audit) talk to
14
+ * different endpoints with their own failure modes.
15
+ */
16
+
17
+ import { Socket } from 'node:net';
18
+
19
+ export interface ProxmoxReachabilityResult {
20
+ reachable: boolean;
21
+ host: string;
22
+ port: number;
23
+ /** Populated when `reachable` is false. */
24
+ error?: string;
25
+ }
26
+
27
+ /**
28
+ * Probe `host:port` with a TCP connect attempt. Resolves on connect,
29
+ * timeout, or socket error — never rejects. Caller decides what to do
30
+ * with the result.
31
+ */
32
+ export async function checkProxmoxReachable(
33
+ apiUrl: string,
34
+ timeoutMs = 3000,
35
+ ): Promise<ProxmoxReachabilityResult> {
36
+ let host: string;
37
+ let port: number;
38
+ try {
39
+ const url = new URL(apiUrl);
40
+ host = url.hostname;
41
+ port = url.port ? Number(url.port) : url.protocol === 'https:' ? 443 : 80;
42
+ } catch {
43
+ return {
44
+ reachable: false,
45
+ host: apiUrl,
46
+ port: 0,
47
+ error: `Invalid Proxmox API URL: ${apiUrl}`,
48
+ };
49
+ }
50
+
51
+ return await new Promise<ProxmoxReachabilityResult>((resolve) => {
52
+ const socket = new Socket();
53
+ let settled = false;
54
+
55
+ const finish = (result: ProxmoxReachabilityResult) => {
56
+ if (settled) return;
57
+ settled = true;
58
+ socket.destroy();
59
+ resolve(result);
60
+ };
61
+
62
+ socket.setTimeout(timeoutMs);
63
+ socket.once('connect', () => finish({ reachable: true, host, port }));
64
+ socket.once('timeout', () =>
65
+ finish({
66
+ reachable: false,
67
+ host,
68
+ port,
69
+ error: `Connection to ${host}:${port} timed out after ${timeoutMs}ms`,
70
+ }),
71
+ );
72
+ socket.once('error', (err) =>
73
+ finish({
74
+ reachable: false,
75
+ host,
76
+ port,
77
+ error: err.message,
78
+ }),
79
+ );
80
+
81
+ socket.connect(port, host);
82
+ });
83
+ }
84
+
85
+ /**
86
+ * Format an unreachable result into a multi-line error message
87
+ * suitable for display in the deploy progress panel.
88
+ */
89
+ export function formatProxmoxUnreachableError(result: ProxmoxReachabilityResult): string {
90
+ return [
91
+ `Proxmox unreachable at ${result.host}:${result.port}`,
92
+ '',
93
+ 'Check:',
94
+ ' - VPN connection (if Proxmox is on a private network)',
95
+ ' - Is the Proxmox host running and reachable from this machine?',
96
+ ` - Try: nc -zv ${result.host} ${result.port}`,
97
+ '',
98
+ `(${result.error ?? 'no further detail'})`,
99
+ ].join('\n');
100
+ }
@@ -0,0 +1,45 @@
1
+ /**
2
+ * Responder liveness probe.
3
+ *
4
+ * busInterview waits forever for a reply (timeoutMs: 0 by design). For
5
+ * non-interactive callers (no TTY, no piped responder) that means a
6
+ * silent hang. The probe gives those callers a way to fail fast: emit
7
+ * a short-timeout query on the bus, return whether any responder was
8
+ * listening.
9
+ *
10
+ * Responders register a `responder.probe` watch and reply with their
11
+ * kind. As long as one is running on the same bus DB, the probe sees
12
+ * a reply and returns true.
13
+ */
14
+
15
+ import { defineEvents, openBus } from '@celilo/event-bus';
16
+
17
// Empty schema map: the probe sends/receives only the raw
// `responder.probe` event, so no typed events are registered.
const NO_SCHEMAS = defineEvents({});

/** Bus event type that live responders watch and reply to. */
export const RESPONDER_PROBE_EVENT = 'responder.probe' as const;

/** Payload a responder sends back when it answers a probe. */
export interface ResponderProbeReply {
  // Which flavor of responder answered the probe.
  kind: 'terminal' | 'programmatic' | 'daemon';
  // Bus identity of the responder that replied.
  emittedBy: string;
}
25
+
26
+ /**
27
+ * Probe the bus for a live responder.
28
+ *
29
+ * @param busDbPath sqlite path the bus and any responder share
30
+ * @param timeoutMs how long to wait for a reply (default 1500ms)
31
+ * @returns true if at least one responder replied within the window
32
+ */
33
+ export async function probeForResponder(busDbPath: string, timeoutMs = 1500): Promise<boolean> {
34
+ const bus = openBus({ dbPath: busDbPath, events: NO_SCHEMAS });
35
+ try {
36
+ const replies = await bus.query(RESPONDER_PROBE_EVENT as never, {} as never, {
37
+ timeoutMs,
38
+ pollIntervalMs: 100,
39
+ expect: 'first',
40
+ });
41
+ return replies.length > 0;
42
+ } finally {
43
+ bus.close();
44
+ }
45
+ }
@@ -283,11 +283,24 @@ export function startTerminalResponder(): TerminalResponderHandle {
283
283
  });
284
284
  });
285
285
 
286
+ // Liveness probe: lets a non-interactive caller in another shell
287
+ // (e.g. `module generate`) detect that a terminal-responder is
288
+ // running here and that calling busInterview is safe.
289
+ const probeWatch = bus.watch('responder.probe', async (event) => {
290
+ if (event.replyFor !== null) return;
291
+ bus.emitRaw(
292
+ `${event.type}.reply`,
293
+ { kind: 'terminal', emittedBy: me },
294
+ { replyFor: event.id, emittedBy: me },
295
+ );
296
+ });
297
+
286
298
  return {
287
299
  close() {
288
300
  configWatch.close();
289
301
  secretWatch.close();
290
302
  ensureWatch.close();
303
+ probeWatch.close();
291
304
  bus.close();
292
305
  },
293
306
  };