@inceptionstack/roundhouse 0.5.22 → 0.5.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,328 @@
1
+ /**
2
+ * session-repair.ts — File-level session repair for corrupted pi-ai session files.
3
+ *
4
+ * Pi-ai persists sessions as JSONL at ~/.roundhouse/sessions/<thread>/<id>.jsonl.
5
+ * Each line is a `FileEntry` in a tree (parentId links). Message entries wrap
6
+ * pi-ai `Message` objects (role: user | assistant | toolResult).
7
+ *
8
+ * Corruption scenarios (mid-session):
9
+ * - Tool execution aborted → toolCall entry written, toolResult never lands
10
+ * - Process crash between tool completion and result persist
11
+ * - Manual Ctrl-C mid-tool
12
+ *
13
+ * On next resume, pi-ai loads these entries → sends history to the model →
14
+ * model rejects with "toolUse without toolResult" (Bedrock/Anthropic 400).
15
+ *
16
+ * This module detects and repairs orphaned tool pairs at the file level,
17
+ * preserving the parentId tree by re-parenting children of dropped entries.
18
+ *
19
+ * Delegates tool-pairing logic to message-validator.ts.
20
+ */
21
+
22
import { readFileSync, writeFileSync, renameSync, existsSync, copyFileSync, unlinkSync } from 'node:fs';
import { dirname, basename, join } from 'node:path';
import { validateToolPairing } from './message-validator.js';
import type { Message, ToolCall, AssistantMessage, ToolResultMessage } from '@earendil-works/pi-ai';
26
+
27
/**
 * Minimal structural type for a pi-ai session file entry (we only touch
 * message entries). The index signature lets unknown fields round-trip
 * through JSON untouched.
 */
interface SessionFileEntry {
  /** Entry kind, e.g. "message" — only "message" entries are inspected here. */
  type: string;
  /** Unique entry id; entries without one are never dropped or edited. */
  id?: string;
  /** Parent entry id forming the session tree; null/absent at the root. */
  parentId?: string | null;
  /** Present on type === "message" entries; wraps a pi-ai Message. */
  message?: Message;
  // other fields preserved as-is
  [key: string]: unknown;
}
36
+
37
/** Result of repairSessionFile — describes what was changed, if anything. */
export interface SessionRepairReport {
  /** False means the file was already valid and was left untouched. */
  repaired: boolean;
  /** File-entry ids removed outright from the session tree. */
  droppedEntryIds: string[];
  /** Orphaned toolCall ids (their entries dropped, or blocks stripped). */
  droppedToolCallIds: string[];
  /** toolCallIds of orphaned toolResult messages that were dropped. */
  droppedToolResultIds: string[];
  /** Path of the pre-repair backup; only set when a repair actually ran. */
  backupPath?: string;
  /** Entry count of the original (pre-repair) file. */
  totalEntries: number;
}
45
+
46
+ /** Parse a .jsonl session file. Tolerant of trailing blank lines. Throws on malformed JSON. */
47
+ export function parseSessionFile(path: string): SessionFileEntry[] {
48
+ const raw = readFileSync(path, 'utf8');
49
+ const lines = raw.split('\n');
50
+ const entries: SessionFileEntry[] = [];
51
+ for (let i = 0; i < lines.length; i++) {
52
+ const line = lines[i];
53
+ if (!line.trim()) continue;
54
+ try {
55
+ entries.push(JSON.parse(line) as SessionFileEntry);
56
+ } catch (err) {
57
+ throw new Error(`Session file parse error at line ${i + 1}: ${(err as Error).message}`);
58
+ }
59
+ }
60
+ return entries;
61
+ }
62
+
63
+ /**
64
+ * Extract `Message[]` from file entries in the order they appear.
65
+ * Only includes entries of type "message" (skips session header, model_change, etc).
66
+ */
67
+ function extractMessages(entries: SessionFileEntry[]): { messages: Message[]; entryIndex: number[] } {
68
+ const messages: Message[] = [];
69
+ const entryIndex: number[] = []; // parallel array: messages[i] came from entries[entryIndex[i]]
70
+ for (let i = 0; i < entries.length; i++) {
71
+ const e = entries[i];
72
+ if (e.type === 'message' && e.message) {
73
+ messages.push(e.message);
74
+ entryIndex.push(i);
75
+ }
76
+ }
77
+ return { messages, entryIndex };
78
+ }
79
+
80
+ /**
81
+ * Re-parent children of dropped entries to preserve tree validity.
82
+ * If entry X is dropped and entry Y has parentId=X, set Y.parentId = X.parentId.
83
+ */
84
+ function reparentDroppedEntries(
85
+ entries: SessionFileEntry[],
86
+ droppedEntryIds: Set<string>
87
+ ): SessionFileEntry[] {
88
+ // Build a map: droppedId → nearest non-dropped ancestor (walk up the tree)
89
+ const entryById = new Map<string, SessionFileEntry>();
90
+ for (const e of entries) {
91
+ if (e.id) entryById.set(e.id, e);
92
+ }
93
+
94
+ const remap = new Map<string, string | null>();
95
+ const resolveAncestor = (id: string, visited: Set<string> = new Set()): string | null => {
96
+ if (remap.has(id)) return remap.get(id)!;
97
+ if (!droppedEntryIds.has(id)) return id;
98
+ if (visited.has(id)) {
99
+ // Cycle in parentId chain (self-parent or loop) — bail with null rather than
100
+ // blow the stack. Should never happen in a well-formed session file.
101
+ remap.set(id, null);
102
+ return null;
103
+ }
104
+ visited.add(id);
105
+ const e = entryById.get(id);
106
+ const parent = e?.parentId ?? null;
107
+ const resolved = parent === null ? null : resolveAncestor(parent, visited);
108
+ remap.set(id, resolved);
109
+ return resolved;
110
+ };
111
+
112
+ const kept: SessionFileEntry[] = [];
113
+ for (const e of entries) {
114
+ if (e.id && droppedEntryIds.has(e.id)) continue;
115
+ if (e.parentId && droppedEntryIds.has(e.parentId)) {
116
+ kept.push({ ...e, parentId: resolveAncestor(e.parentId) });
117
+ } else {
118
+ kept.push(e);
119
+ }
120
+ }
121
+ return kept;
122
+ }
123
+
124
+ /**
125
+ * Compute the set of entry IDs to drop based on orphaned tool IDs.
126
+ *
127
+ * - Orphaned toolResult message → drop the whole entry
128
+ * - Orphaned toolCall inside an assistant message → drop the entry only if the
129
+ * toolCall was the *only* content block (otherwise keep the entry with the
130
+ * block stripped; handled separately in applyEntryEdits)
131
+ */
132
+ function findEntriesToDrop(
133
+ entries: SessionFileEntry[],
134
+ orphanedToolCallIds: Set<string>,
135
+ orphanedToolResultIds: Set<string>
136
+ ): { entriesToDrop: Set<string>; entriesToEdit: Map<string, string[]> } {
137
+ const entriesToDrop = new Set<string>();
138
+ const entriesToEdit = new Map<string, string[]>(); // entryId → toolCallIds to strip
139
+
140
+ for (const e of entries) {
141
+ if (e.type !== 'message' || !e.message || !e.id) continue;
142
+ const msg = e.message;
143
+
144
+ if (msg.role === 'toolResult') {
145
+ const tr = msg as ToolResultMessage;
146
+ if (orphanedToolResultIds.has(tr.toolCallId)) {
147
+ entriesToDrop.add(e.id);
148
+ }
149
+ continue;
150
+ }
151
+
152
+ if (msg.role === 'assistant') {
153
+ const am = msg as AssistantMessage;
154
+ const orphanCallIds: string[] = [];
155
+ let hasNonOrphanContent = false;
156
+ for (const block of am.content) {
157
+ if ((block as ToolCall).type === 'toolCall') {
158
+ const callId = (block as ToolCall).id;
159
+ if (orphanedToolCallIds.has(callId)) {
160
+ orphanCallIds.push(callId);
161
+ } else {
162
+ hasNonOrphanContent = true;
163
+ }
164
+ } else {
165
+ hasNonOrphanContent = true;
166
+ }
167
+ }
168
+ if (orphanCallIds.length === 0) continue;
169
+ if (hasNonOrphanContent) {
170
+ entriesToEdit.set(e.id, orphanCallIds);
171
+ } else {
172
+ entriesToDrop.add(e.id);
173
+ }
174
+ }
175
+ }
176
+
177
+ return { entriesToDrop, entriesToEdit };
178
+ }
179
+
180
+ /** Apply in-place edits to assistant entries: strip orphaned toolCall blocks. */
181
+ function applyEntryEdits(
182
+ entries: SessionFileEntry[],
183
+ entriesToEdit: Map<string, string[]>
184
+ ): SessionFileEntry[] {
185
+ if (entriesToEdit.size === 0) return entries;
186
+ return entries.map(e => {
187
+ if (!e.id || !entriesToEdit.has(e.id)) return e;
188
+ const orphanIds = new Set(entriesToEdit.get(e.id)!);
189
+ const msg = e.message as AssistantMessage;
190
+ const cleanedContent = msg.content.filter(block => {
191
+ if ((block as ToolCall).type === 'toolCall') {
192
+ return !orphanIds.has((block as ToolCall).id);
193
+ }
194
+ return true;
195
+ });
196
+ return { ...e, message: { ...msg, content: cleanedContent } };
197
+ });
198
+ }
199
+
200
+ /** Atomic write: tmp file + rename. Preserves partial-failure safety. */
201
+ function atomicWrite(path: string, content: string): void {
202
+ const tmp = `${path}.tmp-${process.pid}-${Date.now()}`;
203
+ writeFileSync(tmp, content, { encoding: 'utf8' });
204
+ renameSync(tmp, path);
205
+ }
206
+
207
+ /** Back up the original file before mutation. Returns the backup path. */
208
+ function backupFile(path: string): string {
209
+ const ts = Date.now();
210
+ const backupPath = join(dirname(path), `${basename(path)}.bak-${ts}`);
211
+ copyFileSync(path, backupPath);
212
+ return backupPath;
213
+ }
214
+
215
+ /**
216
+ * Validate a session file for orphaned tool pairs without modifying it.
217
+ * Useful for pre-flight checks and tests.
218
+ */
219
+ export function inspectSessionFile(path: string): {
220
+ hasOrphans: boolean;
221
+ orphanedToolCallIds: string[];
222
+ orphanedToolResultIds: string[];
223
+ totalEntries: number;
224
+ totalMessages: number;
225
+ } {
226
+ const entries = parseSessionFile(path);
227
+ const { messages } = extractMessages(entries);
228
+ const validation = validateToolPairing(messages);
229
+ return {
230
+ hasOrphans: !validation.isValid,
231
+ orphanedToolCallIds: validation.orphanedToolCallIds,
232
+ orphanedToolResultIds: validation.orphanedToolResultIds,
233
+ totalEntries: entries.length,
234
+ totalMessages: messages.length,
235
+ };
236
+ }
237
+
238
+ /**
239
+ * Repair a corrupted session file in place. Creates a .bak-<ts> backup first.
240
+ *
241
+ * Safety:
242
+ * - Backup always written before mutation
243
+ * - Atomic tmp+rename for the repaired file
244
+ * - No-op if no orphans detected (returns repaired: false)
245
+ * - Preserves session tree by re-parenting children of dropped entries
246
+ *
247
+ * @returns report describing what was repaired
248
+ */
249
+ export function repairSessionFile(path: string): SessionRepairReport {
250
+ if (!existsSync(path)) {
251
+ throw new Error(`Session file not found: ${path}`);
252
+ }
253
+
254
+ const entries = parseSessionFile(path);
255
+ const { messages } = extractMessages(entries);
256
+ const validation = validateToolPairing(messages);
257
+
258
+ if (validation.isValid) {
259
+ return {
260
+ repaired: false,
261
+ droppedEntryIds: [],
262
+ droppedToolCallIds: [],
263
+ droppedToolResultIds: [],
264
+ totalEntries: entries.length,
265
+ };
266
+ }
267
+
268
+ const orphanedCalls = new Set(validation.orphanedToolCallIds);
269
+ const orphanedResults = new Set(validation.orphanedToolResultIds);
270
+
271
+ const { entriesToDrop, entriesToEdit } = findEntriesToDrop(entries, orphanedCalls, orphanedResults);
272
+ const edited = applyEntryEdits(entries, entriesToEdit);
273
+ const kept = reparentDroppedEntries(edited, entriesToDrop);
274
+
275
+ const backupPath = backupFile(path);
276
+ const newContent = kept.map(e => JSON.stringify(e)).join('\n') + '\n';
277
+ atomicWrite(path, newContent);
278
+
279
+ return {
280
+ repaired: true,
281
+ droppedEntryIds: Array.from(entriesToDrop),
282
+ droppedToolCallIds: validation.orphanedToolCallIds,
283
+ droppedToolResultIds: validation.orphanedToolResultIds,
284
+ backupPath,
285
+ totalEntries: entries.length,
286
+ };
287
+ }
288
+
289
+ /**
290
+ * Detect whether an error from pi-ai / the model provider indicates a
291
+ * tool-pairing mismatch that can be recovered by session repair.
292
+ *
293
+ * Matches Bedrock Converse and Anthropic error shapes. Intentionally narrow —
294
+ * we don't want to repair on unrelated 400s.
295
+ */
296
+ export function isToolPairingError(err: unknown): boolean {
297
+ if (!err) return false;
298
+ const msg = (err as { message?: string }).message ?? String(err);
299
+ const name = (err as { name?: string }).name ?? '';
300
+
301
+ // Bedrock Converse: "messages.N: `tool_use` ids were found without `tool_result` blocks..."
302
+ // Anthropic direct: similar phrasing
303
+ const patterns = [
304
+ /tool_use.*without.*tool_result/i,
305
+ /tool_result.*without.*tool_use/i,
306
+ /toolUse.*without.*toolResult/i,
307
+ /unmatched.*tool.?use/i,
308
+ /orphan.*tool/i,
309
+ ];
310
+
311
+ if (patterns.some(p => p.test(msg))) return true;
312
+
313
+ // Bedrock ValidationException may carry the pairing text in nested fields
314
+ // (e.g. err.cause.message, $metadata). Only stringify-search when the error
315
+ // *looks* like a Bedrock validation error — avoid noisy matches on unrelated
316
+ // messages that happen to contain '400'.
317
+ const httpStatus =
318
+ (err as { $metadata?: { httpStatusCode?: number } }).$metadata?.httpStatusCode;
319
+ if (name === 'ValidationException' || httpStatus === 400) {
320
+ try {
321
+ const full = JSON.stringify(err);
322
+ if (patterns.some(p => p.test(full))) return true;
323
+ } catch {
324
+ /* circular structure — give up */
325
+ }
326
+ }
327
+ return false;
328
+ }
@@ -12,8 +12,21 @@ import { validateSchedule } from "../cron/schedule";
12
12
  import { validateTemplate } from "../cron/template";
13
13
  import { parseDuration } from "../cron/durations";
14
14
  import type { CronJobConfig, CronSchedule } from "../cron/types";
15
- import { DEFAULT_TIMEOUT_MS, DEFAULT_TIMEZONE, VALID_NOTIFY_ON, DEFAULT_RUNS_LIMIT } from "../cron/constants";
15
+ import { DEFAULT_TIMEZONE, VALID_NOTIFY_ON, DEFAULT_RUNS_LIMIT } from "../cron/constants";
16
16
  import { formatSchedule, formatRunCounts, formatJobSummary, formatJobDetail, formatRunLine, runStatusIcon, jobEnabledIcon } from "../cron/format";
17
+ import { sendIpc } from "../ipc/client";
18
+
19
/**
 * Send a notification about a cron management action via IPC to the gateway.
 *
 * Best-effort by design: the CLI must keep working standalone, so any IPC
 * failure (gateway not running, socket gone) is swallowed rather than
 * surfaced to the user.
 *
 * @param message - Notification text, forwarded as-is to the gateway.
 */
async function notifyCronAction(message: string): Promise<void> {
  try {
    await sendIpc({ type: "notify", text: message });
  } catch {
    // Gateway not running — CLI is standalone, skip notification
  }
}
17
30
 
18
31
  function rejectBuiltin(id: string): void {
19
32
  if (isBuiltinJob(id)) {
@@ -22,6 +35,22 @@ function rejectBuiltin(id: string): void {
22
35
  }
23
36
  }
24
37
 
38
+ /** Validate ID, reject builtins, load job or exit with error. */
39
+ async function requireJob(store: CronStore, positional: string[], usage: string): Promise<{ id: string; job: CronJobConfig }> {
40
+ const { id, job } = await loadJob(store, positional, usage);
41
+ rejectBuiltin(id);
42
+ return { id, job };
43
+ }
44
+
45
+ /** Validate ID and load job or exit with error (no builtin check — for read-only commands). */
46
+ async function loadJob(store: CronStore, positional: string[], usage: string): Promise<{ id: string; job: CronJobConfig }> {
47
+ const id = positional[1];
48
+ if (!id) { console.error(`Usage: ${usage}`); process.exit(1); }
49
+ const job = await store.getJob(id);
50
+ if (!job) { console.error(`Job not found: ${id}`); process.exit(1); }
51
+ return { id, job };
52
+ }
53
+
25
54
  function validateNotifyOn(value?: string): "always" | "success" | "failure" {
26
55
  const v = value ?? "always";
27
56
  if (!(VALID_NOTIFY_ON as readonly string[]).includes(v)) {
@@ -100,8 +129,12 @@ export async function cronAdd(store: CronStore, positional: string[], flags: Rec
100
129
  };
101
130
 
102
131
  await store.writeJob(job);
103
- console.log(`✅ Cron job "${id}" ${existing ? "updated" : "created"}.`);
132
+ const verb = existing ? "updated" : "created";
133
+ console.log(`✅ Cron job "${id}" ${verb}.`);
104
134
  if (flags.json) console.log(JSON.stringify(job, null, 2));
135
+
136
+ const schedDesc = flags.cron ? `cron: ${flags.cron}` : flags.every ? `every ${flags.every}` : flags.at ? `once at ${flags.at}` : "";
137
+ await notifyCronAction(`📋 Cron job **${verb}**: \`${id}\`\n${schedDesc}${flags.description ? " — " + flags.description : ""}`);
105
138
  }
106
139
 
107
140
  export async function cronList(store: CronStore, _positional: string[], flags: Record<string, string>): Promise<void> {
@@ -135,10 +168,7 @@ export async function cronList(store: CronStore, _positional: string[], flags: R
135
168
  }
136
169
 
137
170
  export async function cronShow(store: CronStore, positional: string[], flags: Record<string, string>): Promise<void> {
138
- const id = positional[1];
139
- if (!id) { console.error("Usage: roundhouse cron show <id>"); process.exit(1); }
140
- const job = await store.getJob(id);
141
- if (!job) { console.error(`Job not found: ${id}`); process.exit(1); }
171
+ const { id, job } = await loadJob(store, positional, "roundhouse cron show <id>");
142
172
  const state = await store.getState(id);
143
173
  const runs = await store.listRuns(id, 5);
144
174
  if (flags.json) {
@@ -149,11 +179,7 @@ export async function cronShow(store: CronStore, positional: string[], flags: Re
149
179
  }
150
180
 
151
181
  export async function cronTrigger(store: CronStore, positional: string[], _flags: Record<string, string>): Promise<void> {
152
- const id = positional[1];
153
- if (!id) { console.error("Usage: roundhouse cron trigger <id>"); process.exit(1); }
154
- rejectBuiltin(id);
155
- const job = await store.getJob(id);
156
- if (!job) { console.error(`Job not found: ${id}`); process.exit(1); }
182
+ const { id, job } = await requireJob(store, positional, "roundhouse cron trigger <id>");
157
183
  console.log(`Triggering ${id}...`);
158
184
  const runner = new CronRunner(store);
159
185
  const record = await runner.runJob(job, new Date(), "manual");
@@ -164,8 +190,7 @@ export async function cronTrigger(store: CronStore, positional: string[], _flags
164
190
  }
165
191
 
166
192
  export async function cronRuns(store: CronStore, positional: string[], flags: Record<string, string>): Promise<void> {
167
- const id = positional[1];
168
- if (!id) { console.error("Usage: roundhouse cron runs <id>"); process.exit(1); }
193
+ const { id } = await loadJob(store, positional, "roundhouse cron runs <id>");
169
194
  const runs = await store.listRuns(id, parseInt(flags.limit ?? String(DEFAULT_RUNS_LIMIT), 10));
170
195
  if (runs.length === 0) {
171
196
  console.log(`No runs for ${id}.`);
@@ -179,35 +204,27 @@ export async function cronRuns(store: CronStore, positional: string[], flags: Re
179
204
  }
180
205
 
181
206
/** `roundhouse cron pause <id>` — disable a job. Delegates to cronToggleEnabled. */
export async function cronPause(store: CronStore, positional: string[], _flags: Record<string, string>): Promise<void> {
  await cronToggleEnabled(store, positional, false);
}
192
209
 
193
210
/** `roundhouse cron resume <id>` — re-enable a job. Delegates to cronToggleEnabled. */
export async function cronResume(store: CronStore, positional: string[], _flags: Record<string, string>): Promise<void> {
  await cronToggleEnabled(store, positional, true);
}
213
+
214
/**
 * Shared pause/resume implementation: load the job (builtins rejected),
 * flip `enabled`, bump `updatedAt`, persist, then log and notify the gateway.
 */
async function cronToggleEnabled(store: CronStore, positional: string[], enabled: boolean): Promise<void> {
  // Reconstruct the command name only for the usage string shown on error.
  const cmd = enabled ? "resume" : "pause";
  const { id, job } = await requireJob(store, positional, `roundhouse cron ${cmd} <id>`);
  job.enabled = enabled;
  job.updatedAt = new Date().toISOString();
  await store.writeJob(job);
  const emoji = enabled ? "▶️" : "⏸️";
  const verb = enabled ? "resumed" : "paused";
  console.log(`${emoji} Job "${id}" ${verb}.`);
  await notifyCronAction(`${emoji} Cron job **${verb}**: \`${id}\``);
}
204
225
 
205
226
  export async function cronEdit(store: CronStore, positional: string[], flags: Record<string, string>): Promise<void> {
206
- const id = positional[1];
207
- if (!id) { console.error("Usage: roundhouse cron edit <id> [--prompt '...'] [--cron '...'] ..."); process.exit(1); }
208
- rejectBuiltin(id);
209
- const job = await store.getJob(id);
210
- if (!job) { console.error(`Job not found: ${id}`); process.exit(1); }
227
+ const { id, job } = await requireJob(store, positional, "roundhouse cron edit <id> [--prompt '...'] [--cron '...'] ...");
211
228
 
212
229
  if (flags.prompt) job.prompt = flags.prompt;
213
230
  if (flags.description) job.description = flags.description;
@@ -232,16 +249,14 @@ export async function cronEdit(store: CronStore, positional: string[], flags: Re
232
249
  job.updatedAt = new Date().toISOString();
233
250
  await store.writeJob(job);
234
251
  console.log(`✅ Job "${id}" updated.`);
252
+ await notifyCronAction(`✏️ Cron job **edited**: \`${id}\``);
235
253
  }
236
254
 
237
255
/** `roundhouse cron delete <id>` — remove a job (builtins rejected), then notify the gateway. */
export async function cronDelete(store: CronStore, positional: string[], _flags: Record<string, string>): Promise<void> {
  const { id } = await requireJob(store, positional, "roundhouse cron delete <id>");
  await store.deleteJob(id);
  console.log(`🗑️ Job "${id}" deleted.`);
  await notifyCronAction(`🗑️ Cron job **deleted**: \`${id}\``);
}
246
261
 
247
262
  export function cronHelp(): void {
@@ -0,0 +1,158 @@
1
+ /**
2
+ * gateway/command-registry.ts — Descriptor-based command registration
3
+ *
4
+ * Problem: previously, adding a new Telegram command required editing
5
+ * `gateway.ts` in 2–3 places (import, text-dispatch branch, onAction
6
+ * subscription). That's an OCP violation — gateway wasn't closed for
7
+ * modification. At 2 commands it was fine. At 5–8 it becomes noise.
8
+ *
9
+ * Solution: each command module exports a `CommandDescriptor` bundling
10
+ * its trigger tokens, an `invoke()` closure, optional inline-keyboard
11
+ * action handlers, and a `stage` hint. The gateway iterates a single
12
+ * `COMMANDS` array to wire everything. Adding a new command = one new
13
+ * descriptor + one line in the array.
14
+ *
15
+ * Pattern: Command (GoF) + Observer registration. Not a Mediator — the
16
+ * gateway is still the composition root, it just stops special-casing.
17
+ *
18
+ * Stages:
19
+ * - "pre-turn" fires before the allowlist/pairing gate inside
20
+ * handleOrAbort(). Used for abort-style commands like
21
+ * /stop that must interrupt an in-flight agent run.
22
+ * Handlers own their own allowlist check if needed.
23
+ * - "in-turn" fires inside the main message handler, after pairing,
24
+ * allowlist, and the "is this even text" guards. The
25
+ * default — most commands belong here.
26
+ *
27
+ * Action handlers (inline-keyboard callbacks) are registered unconditionally
28
+ * at startup via chat.onAction(). The gateway doesn't care which descriptor
29
+ * owns which action id.
30
+ */
31
+
32
+ import type { ChatThreadLike } from "./inline-keyboard";
33
+
34
+ /** Dispatch stages — see module doc. */
35
+ export type CommandStage = "pre-turn" | "in-turn";
36
+
37
/**
 * What the gateway passes to a descriptor's `invoke()`. Thin by design —
 * the command closure captures everything else from its own module or
 * from the gateway's `buildCommandContext()`.
 */
export interface CommandInvocation {
  /** The chat thread (subscribed). NOTE(review): `any` mirrors the untyped Chat SDK surface — consider a structural type. */
  thread: any;
  /** The raw incoming message object from the Chat SDK (also untyped — see above). */
  message: any;
  /** The already-trimmed text of the message. */
  text: string;
  /** The resolved agent thread id (post topic-override). */
  agentThreadId: string;
}
52
+
53
/**
 * Inline-keyboard callback event shape. Matches what
 * `chat.onAction(actionId, handler)` already provides today.
 */
export interface ActionInvocation {
  /** Optional callback payload (presumably the pressed button's value — verify against the Chat SDK). */
  value?: string;
  /** The thread in which the callback fired. */
  thread: ChatThreadLike;
}
61
+
62
/**
 * A single command's self-describing registration metadata.
 *
 * Design notes:
 * - `triggers` is a list so we can declare aliases like `/crons` + `/jobs`
 *   without duplicating descriptors.
 * - `acceptsArgs` controls whether we match `/cmd foo` as well as bare
 *   `/cmd` (maps to the existing `isCommandWithArgs` helper).
 * - `invoke` is the closure that does the actual work. It returns `void`
 *   but may be `async`. The gateway awaits it and then short-circuits
 *   further dispatch — so descriptors are "first match wins".
 * - `actions` wires `chat.onAction(id, …)` at startup. Keys are the
 *   ACTION_ID constants, values are handler closures. Co-locates the button
 *   protocol with the command that owns it (SRP).
 */
export interface CommandDescriptor {
  /** Command strings including the leading slash, e.g. `"/topic"`. */
  triggers: readonly string[];
  /** Dispatch stage; default `"in-turn"` — see module doc. */
  stage?: CommandStage;
  /** If true, `/cmd arg1 arg2` also matches. Default false. */
  acceptsArgs?: boolean;
  /** Do the work. Return (or resolve) when done — the gateway will skip further dispatch. */
  invoke: (inv: CommandInvocation) => Promise<void> | void;
  /** Optional inline-keyboard callback handlers keyed by action id. */
  actions?: Record<string, (inv: ActionInvocation) => Promise<void> | void>;
}
89
+
90
/**
 * Has this descriptor opted into pre-turn dispatch?
 * A tiny predicate so gateway.ts reads naturally: `if (isPreTurn(cmd)) { … }`.
 * Note: an unset `stage` is treated as "in-turn" (returns false).
 */
export function isPreTurn(cmd: CommandDescriptor): boolean {
  return cmd.stage === "pre-turn";
}
98
+
99
/**
 * Matching strategy consumed by `matchesDescriptor`.
 *
 * Matching is delegated to the caller via these two functions so this module
 * doesn't depend on the specific `isCommand` / `isCommandWithArgs`
 * implementations (keeps it pure & unit-testable). A descriptor's
 * `acceptsArgs` controls whether the args-matcher is also consulted.
 */
export interface CommandMatchers {
  /** Exact-match: `/cmd` or `/cmd@botname`, no trailing args. */
  isCommand: (text: string, cmd: string) => boolean;
  /** Args-match: `/cmd arg1` or `/cmd@botname arg1 arg2`. */
  isCommandWithArgs: (text: string, cmd: string) => boolean;
}
113
+
114
+ export function matchesDescriptor(
115
+ desc: CommandDescriptor,
116
+ text: string,
117
+ matchers: CommandMatchers,
118
+ ): boolean {
119
+ for (const trigger of desc.triggers) {
120
+ if (matchers.isCommand(text, trigger)) return true;
121
+ if (desc.acceptsArgs && matchers.isCommandWithArgs(text, trigger)) return true;
122
+ }
123
+ return false;
124
+ }
125
+
126
+ /**
127
+ * Validate that no two descriptors claim the same action id.
128
+ *
129
+ * Why: duplicate registrations at `chat.onAction(actionId, …)` produce
130
+ * silent misbehavior — last-wins on some adapters, double-fire on others.
131
+ * Failing fast at startup makes the coupling surface explicit.
132
+ *
133
+ * Throws on the first collision with both owners' trigger lists for easy
134
+ * diagnosis. Returns the set of (actionId, handler) pairs in registration
135
+ * order so the caller can iterate without re-walking descriptors.
136
+ */
137
+ export function collectAndValidateActions(
138
+ descriptors: readonly CommandDescriptor[],
139
+ ): Array<{ actionId: string; handler: NonNullable<CommandDescriptor["actions"]>[string]; ownerTriggers: readonly string[] }> {
140
+ const result: Array<{ actionId: string; handler: any; ownerTriggers: readonly string[] }> = [];
141
+ const ownerByAction = new Map<string, readonly string[]>();
142
+
143
+ for (const desc of descriptors) {
144
+ if (!desc.actions) continue;
145
+ for (const [actionId, handler] of Object.entries(desc.actions)) {
146
+ const prior = ownerByAction.get(actionId);
147
+ if (prior) {
148
+ throw new Error(
149
+ `[command-registry] duplicate action id '${actionId}': claimed by ` +
150
+ `[${prior.join(",")}] and [${desc.triggers.join(",")}]. Action IDs must be unique.`,
151
+ );
152
+ }
153
+ ownerByAction.set(actionId, desc.triggers);
154
+ result.push({ actionId, handler, ownerTriggers: desc.triggers });
155
+ }
156
+ }
157
+ return result;
158
+ }