bloby-bot 0.32.1 → 0.33.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bloby-bot",
3
- "version": "0.32.1",
3
+ "version": "0.33.0",
4
4
  "releaseNotes": [
5
5
  "1. # voice note (PTT bubble)",
6
6
  "2. # audio file + caption",
@@ -1,73 +1,591 @@
1
1
  /**
2
- * Codex (OpenAI app-server) harness — STUB for Phase 1.
2
+ * Codex (OpenAI app-server) harness — Phase 2.
3
3
  *
4
- * Phase 1 introduces the harness split with zero behavior change for Claude
5
- * users. Codex routing is wired up but every method emits a clear "not
6
- * implemented yet" error so we can spot any accidental route during testing.
4
+ * One long-lived `codex app-server` subprocess per conversation. We talk to
5
+ * it via JSON-RPC 2.0 over stdio — bidirectional: client → server requests,
6
+ * server → client notifications. Tokens come from `~/.codex/auth.json`
7
+ * (codex reads them itself; we don't pass anything).
7
8
  *
8
- * Phase 2 will spawn `codex app-server` over stdio JSON-RPC, manage threads,
9
- * and translate item/turn notifications into the shared `bot:*` event vocab.
9
+ * Lifecycle per live conversation:
10
+ * spawn → initialize → initialized → thread/start → turn/start (per
11
+ * user message; turn/steer to inject mid-turn) → turn/completed → idle
12
+ * → endConversation → turn/interrupt (if needed) → kill subprocess
13
+ *
14
+ * Lifecycle per one-shot query: same as above, but the subprocess is killed
15
+ * as soon as `turn/completed` arrives.
16
+ *
17
+ * Model strings accept either bare ids (`gpt-5.5`) or `<id>:<effort>` where
18
+ * effort is one of low|medium|high|xhigh — the suffix is split off and
19
+ * passed as `effort` on `turn/start`.
20
+ *
21
+ * Notes on parity with Claude harness:
22
+ * - System prompt → `baseInstructions` on `thread/start`
23
+ * - Sub-agents → not implemented (Codex has Skills, different model)
24
+ * - MCP servers → not wired yet (Codex has its own MCP layer)
25
+ * - Mid-turn input uses `turn/steer` (better than Claude's queue)
10
26
  */
11
27
 
28
+ import { spawn, type ChildProcessWithoutNullStreams } from 'child_process';
29
+ import readline from 'readline';
30
+ import fs from 'fs';
31
+ import path from 'path';
12
32
  import { log } from '../../shared/logger.js';
13
- import type { OnAgentMessage, RecentMessage, AgentAttachment } from './types.js';
33
+ import { WORKSPACE_DIR } from '../../shared/paths.js';
14
34
  import type { SavedFile } from '../file-saver.js';
35
+ import { getCodexAccessToken } from '../../worker/codex-auth.js';
36
+ import { assembleSystemPrompt } from '../../worker/prompts/prompt-assembler.js';
37
+ import type { OnAgentMessage, RecentMessage, AgentAttachment } from './types.js';
38
+ export type { RecentMessage, AgentAttachment };
39
+
40
+ /* ── Constants ─────────────────────────────────────────────────────────── */
41
+
42
+ const CLIENT_INFO = { name: 'bloby', title: 'Bloby', version: '1' };
43
+ const REQUEST_TIMEOUT_MS = 60_000;
44
+ const VALID_EFFORTS = new Set(['low', 'medium', 'high', 'xhigh']);
45
+
46
+ /* ── Prompt-assembly helpers (duplicated from claude.ts to keep that file
47
+ * untouched per the project rule) ───────────────────────────────────── */
48
+
49
+ function readMemoryFile(filename: string): string {
50
+ try {
51
+ const content = fs.readFileSync(path.join(WORKSPACE_DIR, filename), 'utf-8').trim();
52
+ return content || '(empty)';
53
+ } catch {
54
+ return '(empty)';
55
+ }
56
+ }
57
+
58
+ function readMemoryFiles() {
59
+ return {
60
+ myself: readMemoryFile('MYSELF.md'),
61
+ myhuman: readMemoryFile('MYHUMAN.md'),
62
+ memory: readMemoryFile('MEMORY.md'),
63
+ pulse: readMemoryFile('PULSE.json'),
64
+ crons: readMemoryFile('CRONS.json'),
65
+ };
66
+ }
67
+
68
+ function formatConversationHistory(messages: RecentMessage[]): string {
69
+ if (!messages.length) return '';
70
+ return messages.map((m) => `${m.role}: ${m.content}`).join('\n\n');
71
+ }
72
+
73
+ async function assembleBaseInstructions(
74
+ names?: { botName: string; humanName: string },
75
+ recentMessages?: RecentMessage[],
76
+ ): Promise<string> {
77
+ const memoryFiles = readMemoryFiles();
78
+ const basePrompt = await assembleSystemPrompt(names?.botName, names?.humanName);
79
+ let prompt = basePrompt;
80
+ prompt += `\n\n---\n# Your Memory Files\n\n## MYSELF.md\n${memoryFiles.myself}\n\n## MYHUMAN.md\n${memoryFiles.myhuman}\n\n## MEMORY.md\n${memoryFiles.memory}\n\n---\n# Your Config Files\n\n## PULSE.json\n${memoryFiles.pulse}\n\n## CRONS.json\n${memoryFiles.crons}`;
81
+
82
+ try {
83
+ const { loadConfig: loadCfg } = await import('../../shared/config.js');
84
+ const cfg = loadCfg();
85
+ const channels = (cfg as any).channels;
86
+ if (channels) {
87
+ prompt += `\n\n---\n# Channel Config\n\`\`\`json\n${JSON.stringify(channels, null, 2)}\n\`\`\``;
88
+ }
89
+ } catch {}
90
+
91
+ if (recentMessages?.length) {
92
+ prompt += `\n\n---\n# Recent Conversation\n${formatConversationHistory(recentMessages)}`;
93
+ }
94
+
95
+ return prompt;
96
+ }
97
+
98
+ /** Split `gpt-5.5:high` into `{ id: 'gpt-5.5', effort: 'high' }`. */
99
+ function parseModelString(model: string): { id: string; effort?: string } {
100
+ const idx = model.lastIndexOf(':');
101
+ if (idx <= 0) return { id: model };
102
+ const candidate = model.slice(idx + 1);
103
+ if (!VALID_EFFORTS.has(candidate)) return { id: model };
104
+ return { id: model.slice(0, idx), effort: candidate };
105
+ }
106
+
107
+ /* ── JSON-RPC client over stdio ────────────────────────────────────────── */
108
+
109
+ type RpcResult<T = any> = { id: number; result?: T; error?: { code?: number; message: string } };
110
+ type RpcNotification = { method: string; params?: any };
111
+
112
+ interface PendingRequest {
113
+ resolve: (value: any) => void;
114
+ reject: (err: Error) => void;
115
+ timer: NodeJS.Timeout;
116
+ }
117
+
118
+ class CodexRpc {
119
+ private proc: ChildProcessWithoutNullStreams | null = null;
120
+ private pending = new Map<number, PendingRequest>();
121
+ private nextId = 1;
122
+ private notificationHandler: (n: RpcNotification) => void = () => {};
123
+ private closeHandler: (code: number | null) => void = () => {};
124
+ private closed = false;
125
+ private stderrBuf = '';
126
+
127
+ start(): void {
128
+ this.proc = spawn('codex', ['app-server'], { stdio: ['pipe', 'pipe', 'pipe'] });
129
+ const rl = readline.createInterface({ input: this.proc.stdout });
130
+ rl.on('line', (line) => this.onLine(line));
131
+
132
+ this.proc.stderr.on('data', (chunk) => {
133
+ this.stderrBuf += chunk.toString();
134
+ // Trim if growing unbounded.
135
+ if (this.stderrBuf.length > 16_000) this.stderrBuf = this.stderrBuf.slice(-8_000);
136
+ });
137
+
138
+ this.proc.on('exit', (code) => {
139
+ if (this.closed) return;
140
+ this.closed = true;
141
+ const err = new Error(`codex app-server exited (code=${code}). Stderr tail:\n${this.stderrBuf.trim().slice(-1000)}`);
142
+ for (const p of this.pending.values()) {
143
+ clearTimeout(p.timer);
144
+ p.reject(err);
145
+ }
146
+ this.pending.clear();
147
+ this.closeHandler(code);
148
+ });
149
+
150
+ this.proc.on('error', (err) => {
151
+ if (this.closed) return;
152
+ this.closed = true;
153
+ log.warn(`[codex-rpc] spawn error: ${err.message}`);
154
+ for (const p of this.pending.values()) {
155
+ clearTimeout(p.timer);
156
+ p.reject(err);
157
+ }
158
+ this.pending.clear();
159
+ this.closeHandler(null);
160
+ });
161
+ }
162
+
163
+ onNotification(handler: (n: RpcNotification) => void): void { this.notificationHandler = handler; }
164
+ onClose(handler: (code: number | null) => void): void { this.closeHandler = handler; }
165
+
166
+ private onLine(line: string): void {
167
+ if (!line.trim()) return;
168
+ let msg: any;
169
+ try { msg = JSON.parse(line); } catch {
170
+ log.warn(`[codex-rpc] malformed JSON from server: ${line.slice(0, 200)}`);
171
+ return;
172
+ }
173
+ if (typeof msg.id === 'number') {
174
+ const pending = this.pending.get(msg.id);
175
+ if (!pending) return;
176
+ this.pending.delete(msg.id);
177
+ clearTimeout(pending.timer);
178
+ if (msg.error) pending.reject(new Error(msg.error.message || 'RPC error'));
179
+ else pending.resolve(msg.result);
180
+ return;
181
+ }
182
+ if (typeof msg.method === 'string') {
183
+ this.notificationHandler({ method: msg.method, params: msg.params });
184
+ }
185
+ }
186
+
187
+ request<T = any>(method: string, params?: any, timeoutMs = REQUEST_TIMEOUT_MS): Promise<T> {
188
+ if (this.closed || !this.proc) return Promise.reject(new Error('RPC connection closed'));
189
+ const id = this.nextId++;
190
+ return new Promise<T>((resolve, reject) => {
191
+ const timer = setTimeout(() => {
192
+ this.pending.delete(id);
193
+ reject(new Error(`codex app-server: ${method} timed out after ${timeoutMs}ms`));
194
+ }, timeoutMs);
195
+ this.pending.set(id, { resolve, reject, timer });
196
+ try {
197
+ this.proc!.stdin.write(JSON.stringify({ method, id, params }) + '\n');
198
+ } catch (err: any) {
199
+ this.pending.delete(id);
200
+ clearTimeout(timer);
201
+ reject(err);
202
+ }
203
+ });
204
+ }
205
+
206
+ notify(method: string, params?: any): void {
207
+ if (this.closed || !this.proc) return;
208
+ try {
209
+ this.proc.stdin.write(JSON.stringify({ method, params }) + '\n');
210
+ } catch (err: any) {
211
+ log.warn(`[codex-rpc] notify ${method} failed: ${err.message}`);
212
+ }
213
+ }
214
+
215
+ close(): void {
216
+ if (this.closed) return;
217
+ this.closed = true;
218
+ for (const p of this.pending.values()) {
219
+ clearTimeout(p.timer);
220
+ p.reject(new Error('RPC connection closed'));
221
+ }
222
+ this.pending.clear();
223
+ try { this.proc?.stdin.end(); } catch {}
224
+ try { this.proc?.kill('SIGTERM'); } catch {}
225
+ this.proc = null;
226
+ }
227
+ }
228
+
229
+ /* ── Per-conversation state ────────────────────────────────────────────── */
230
+
231
+ interface CodexConversation {
232
+ id: string;
233
+ rpc: CodexRpc;
234
+ threadId: string;
235
+ effort?: string;
236
+ onMessage: OnAgentMessage;
237
+ /** Currently in-flight turn id (set on `turn/started`, cleared on `turn/completed`). */
238
+ currentTurnId: string | null;
239
+ /** Streaming text accumulator for the current turn's agentMessage items. */
240
+ fullText: string;
241
+ /** Tools/items used during the current turn, for the bot:turn-complete payload. */
242
+ usedFileTools: boolean;
243
+ /**
244
+ * Queue of messages submitted via `pushMessage` that arrived while no turn
245
+ * was active *and* we hadn't yet returned from the previous turn — almost
246
+ * always empty, but covers a tight push-during-completed race.
247
+ */
248
+ pendingInputs: string[];
249
+ /** True once the harness has emitted the per-turn completion event. */
250
+ busy: boolean;
251
+ /** True for one-shot queries — the conversation ends after the first turn completes. */
252
+ oneShot: boolean;
253
+ }
254
+
255
+ const conversations = new Map<string, CodexConversation>();
256
+
257
+ /* ── Helpers ───────────────────────────────────────────────────────────── */
258
+
259
+ function buildUserInput(text: string, savedFiles?: SavedFile[]): Array<Record<string, any>> {
260
+ const input: Array<Record<string, any>> = [];
261
+
262
+ let promptText = text || '(attached files)';
263
+ if (savedFiles?.length) {
264
+ const lines = savedFiles.map((f) => `- ${f.name} -> ${f.relPath}`);
265
+ promptText += `\n\n[Attached files saved to disk]\n${lines.join('\n')}\nYou can read or reference these files using the paths above (relative to your cwd).`;
266
+ }
267
+ input.push({ type: 'text', text: promptText });
15
268
 
16
- const NOT_READY = 'OpenAI/Codex harness is not implemented yet. Switch the provider back to Anthropic, or wait for Phase 2 of the harness work.';
269
+ // Codex understands `localImage` (path on disk) — Bloby's file-saver already
270
+ // wrote attachments to disk, so we just point at the absolute path.
271
+ if (savedFiles?.length) {
272
+ for (const f of savedFiles) {
273
+ if (f.type === 'image') input.push({ type: 'localImage', path: f.absPath });
274
+ }
275
+ }
17
276
 
18
- function emitNotReady(conversationId: string, onMessage: OnAgentMessage): void {
19
- log.warn(`[codex-harness] ${NOT_READY}`);
20
- onMessage('bot:error', { conversationId, error: NOT_READY });
21
- onMessage('bot:done', { conversationId, usedFileTools: false });
277
+ return input;
278
+ }
279
+
280
+ async function startTurn(conv: CodexConversation, content: string, savedFiles?: SavedFile[]): Promise<void> {
281
+ const input = buildUserInput(content, savedFiles);
282
+ conv.busy = true;
283
+ conv.fullText = '';
284
+ conv.usedFileTools = false;
285
+ conv.onMessage('bot:typing', { conversationId: conv.id });
286
+ try {
287
+ const params: Record<string, any> = { threadId: conv.threadId, input };
288
+ if (conv.effort) params.effort = conv.effort;
289
+ await conv.rpc.request('turn/start', params);
290
+ } catch (err: any) {
291
+ conv.busy = false;
292
+ conv.onMessage('bot:error', { conversationId: conv.id, error: `turn/start failed: ${err.message}` });
293
+ }
294
+ }
295
+
296
+ async function steerOrQueue(conv: CodexConversation, content: string, savedFiles?: SavedFile[]): Promise<void> {
297
+ if (!conv.currentTurnId) {
298
+ // No active turn — start a fresh one.
299
+ await startTurn(conv, content, savedFiles);
300
+ return;
301
+ }
302
+ // Active turn — inject mid-flight.
303
+ const input = buildUserInput(content, savedFiles);
304
+ try {
305
+ await conv.rpc.request('turn/steer', {
306
+ threadId: conv.threadId,
307
+ expectedTurnId: conv.currentTurnId,
308
+ input,
309
+ });
310
+ conv.onMessage('bot:typing', { conversationId: conv.id });
311
+ } catch (err: any) {
312
+ // expectedTurnId mismatch most likely means the turn just finished —
313
+ // retry as a fresh turn.
314
+ log.warn(`[codex] turn/steer failed (${err.message}); falling back to turn/start`);
315
+ if (!conv.currentTurnId) await startTurn(conv, content, savedFiles);
316
+ else conv.pendingInputs.push(content);
317
+ }
318
+ }
319
+
320
+ function handleNotification(conv: CodexConversation, n: { method: string; params?: any }): void {
321
+ const p = n.params || {};
322
+ switch (n.method) {
323
+ case 'turn/started': {
324
+ conv.currentTurnId = p.turn?.id || null;
325
+ conv.fullText = '';
326
+ conv.usedFileTools = false;
327
+ break;
328
+ }
329
+
330
+ case 'item/agentMessage/delta': {
331
+ const delta: string = p.delta || '';
332
+ if (!delta) break;
333
+ conv.fullText += delta;
334
+ conv.onMessage('bot:token', { conversationId: conv.id, token: delta });
335
+ break;
336
+ }
337
+
338
+ case 'item/started': {
339
+ const item = p.item || {};
340
+ // Surface tool-like items so the dashboard can show activity.
341
+ switch (item.type) {
342
+ case 'commandExecution':
343
+ conv.onMessage('bot:tool', {
344
+ conversationId: conv.id,
345
+ name: 'shell',
346
+ input: { command: item.command || item.commandLine || '' },
347
+ });
348
+ break;
349
+ case 'mcpToolCall':
350
+ conv.onMessage('bot:tool', {
351
+ conversationId: conv.id,
352
+ name: item.toolName || item.name || 'mcp_tool',
353
+ input: item.arguments || item.input || {},
354
+ });
355
+ break;
356
+ case 'fileChange':
357
+ conv.usedFileTools = true;
358
+ conv.onMessage('bot:tool', {
359
+ conversationId: conv.id,
360
+ name: 'file_change',
361
+ input: { changes: (item.changes || []).map((c: any) => c.path).filter(Boolean) },
362
+ });
363
+ break;
364
+ case 'webSearch':
365
+ conv.onMessage('bot:tool', {
366
+ conversationId: conv.id,
367
+ name: 'web_search',
368
+ input: { query: item.query || '' },
369
+ });
370
+ break;
371
+ // userMessage / agentMessage / reasoning — no tool-style event.
372
+ }
373
+ break;
374
+ }
375
+
376
+ case 'item/completed': {
377
+ const item = p.item || {};
378
+ if (item.type === 'fileChange') conv.usedFileTools = true;
379
+ // If a final agentMessage arrives without preceding deltas (rare), grab it now.
380
+ if (item.type === 'agentMessage' && !conv.fullText) {
381
+ const text = (item.content || []).map((c: any) => c.text || '').join('') || item.text || '';
382
+ if (text) {
383
+ conv.fullText = text;
384
+ conv.onMessage('bot:token', { conversationId: conv.id, token: text });
385
+ }
386
+ }
387
+ break;
388
+ }
389
+
390
+ case 'turn/completed': {
391
+ const status: string = p.turn?.status || 'completed';
392
+ const turnError = p.turn?.error;
393
+
394
+ conv.currentTurnId = null;
395
+ conv.busy = false;
396
+
397
+ if (status === 'failed' || status === 'systemError') {
398
+ conv.onMessage('bot:error', {
399
+ conversationId: conv.id,
400
+ error: turnError?.message || 'Codex turn failed.',
401
+ });
402
+ } else if (conv.fullText) {
403
+ conv.onMessage('bot:response', { conversationId: conv.id, content: conv.fullText });
404
+ }
405
+
406
+ if (conv.oneShot) {
407
+ conv.onMessage('bot:done', { conversationId: conv.id, usedFileTools: conv.usedFileTools });
408
+ teardownConversation(conv.id);
409
+ } else {
410
+ conv.onMessage('bot:turn-complete', { conversationId: conv.id, usedFileTools: conv.usedFileTools });
411
+
412
+ // Drain any messages that were submitted while we were busy.
413
+ const next = conv.pendingInputs.shift();
414
+ if (next !== undefined) void startTurn(conv, next);
415
+ }
416
+ break;
417
+ }
418
+
419
+ case 'error': {
420
+ const errMsg = p.error?.message || 'Codex error notification';
421
+ conv.onMessage('bot:error', { conversationId: conv.id, error: errMsg });
422
+ break;
423
+ }
424
+
425
+ // thread/started, thread/status/changed, mcpServer/startupStatus/updated,
426
+ // remoteControl/status/changed — informational, no-op for the dashboard.
427
+ }
428
+ }
429
+
430
+ function teardownConversation(conversationId: string): void {
431
+ const conv = conversations.get(conversationId);
432
+ if (!conv) return;
433
+ conversations.delete(conversationId);
434
+ try { conv.rpc.close(); } catch {}
435
+ conv.onMessage('bot:conversation-ended', { conversationId });
436
+ }
437
+
438
+ async function spawnAndInitialize(
439
+ conversationId: string,
440
+ model: string,
441
+ onMessage: OnAgentMessage,
442
+ baseInstructions: string,
443
+ oneShot: boolean,
444
+ ): Promise<CodexConversation | null> {
445
+ // Pre-flight: confirm we have valid OAuth tokens before spending time spawning.
446
+ const token = await getCodexAccessToken();
447
+ if (!token) {
448
+ onMessage('bot:error', {
449
+ conversationId,
450
+ error: 'Codex credentials not found or expired. Re-authenticate from the dashboard.',
451
+ });
452
+ return null;
453
+ }
454
+
455
+ const { id: modelId, effort } = parseModelString(model);
456
+ const rpc = new CodexRpc();
457
+ rpc.start();
458
+
459
+ const conv: CodexConversation = {
460
+ id: conversationId,
461
+ rpc,
462
+ threadId: '',
463
+ effort,
464
+ onMessage,
465
+ currentTurnId: null,
466
+ fullText: '',
467
+ usedFileTools: false,
468
+ pendingInputs: [],
469
+ busy: false,
470
+ oneShot,
471
+ };
472
+
473
+ rpc.onNotification((n) => handleNotification(conv, n));
474
+ rpc.onClose(() => {
475
+ if (conversations.get(conversationId) === conv) {
476
+ conversations.delete(conversationId);
477
+ onMessage('bot:conversation-ended', { conversationId });
478
+ }
479
+ });
480
+
481
+ try {
482
+ log.info(`[codex] init conversation ${conversationId} (model=${modelId}${effort ? `, effort=${effort}` : ''})`);
483
+ await rpc.request('initialize', { clientInfo: CLIENT_INFO });
484
+ rpc.notify('initialized', {});
485
+ const startResult = await rpc.request<{ thread: { id: string } }>('thread/start', {
486
+ cwd: WORKSPACE_DIR,
487
+ model: modelId,
488
+ baseInstructions,
489
+ });
490
+ conv.threadId = startResult.thread.id;
491
+ conversations.set(conversationId, conv);
492
+ log.ok(`[codex] thread started ${conv.threadId}`);
493
+ return conv;
494
+ } catch (err: any) {
495
+ rpc.close();
496
+ onMessage('bot:error', { conversationId, error: `Failed to initialize Codex: ${err.message}` });
497
+ return null;
498
+ }
499
+ }
500
+
501
+ /* ── Harness implementation ────────────────────────────────────────────── */
502
+
503
+ export function hasConversation(conversationId: string): boolean {
504
+ return conversations.has(conversationId);
505
+ }
506
+
507
+ export function isConversationBusy(conversationId: string): boolean {
508
+ return conversations.get(conversationId)?.busy ?? false;
22
509
  }
23
510
 
24
511
  export async function startConversation(
25
512
  conversationId: string,
26
- _model: string,
513
+ model: string,
27
514
  onMessage: OnAgentMessage,
28
- _names?: { botName: string; humanName: string },
29
- _recentMessages?: RecentMessage[],
515
+ names?: { botName: string; humanName: string },
516
+ recentMessages?: RecentMessage[],
30
517
  ): Promise<boolean> {
31
- emitNotReady(conversationId, onMessage);
32
- return false;
518
+ if (conversations.has(conversationId)) endConversation(conversationId);
519
+ const baseInstructions = await assembleBaseInstructions(names, recentMessages);
520
+ const conv = await spawnAndInitialize(conversationId, model, onMessage, baseInstructions, false);
521
+ return !!conv;
33
522
  }
34
523
 
35
524
  export function pushMessage(
36
525
  conversationId: string,
37
- _content: string,
526
+ content: string,
38
527
  _attachments?: AgentAttachment[],
39
- _savedFiles?: SavedFile[],
528
+ savedFiles?: SavedFile[],
40
529
  ): boolean {
41
- log.warn(`[codex-harness] pushMessage(${conversationId}) — ${NOT_READY}`);
42
- return false;
530
+ const conv = conversations.get(conversationId);
531
+ if (!conv) {
532
+ log.warn(`[codex] pushMessage: no live conversation ${conversationId}`);
533
+ return false;
534
+ }
535
+ void steerOrQueue(conv, content, savedFiles);
536
+ return true;
537
+ }
538
+
539
+ export function endConversation(conversationId: string): void {
540
+ const conv = conversations.get(conversationId);
541
+ if (!conv) return;
542
+ log.info(`[codex] ending conversation ${conversationId}`);
543
+ if (conv.currentTurnId) {
544
+ void conv.rpc.request('turn/interrupt', {
545
+ threadId: conv.threadId,
546
+ turnId: conv.currentTurnId,
547
+ }).catch(() => {});
548
+ }
549
+ teardownConversation(conversationId);
550
+ }
551
+
552
+ export function endAllConversations(): void {
553
+ for (const id of Array.from(conversations.keys())) endConversation(id);
43
554
  }
44
555
 
45
- export function hasConversation(_conversationId: string): boolean {
46
- return false;
556
+ export async function stopSubAgentTask(_conversationId: string, _taskId: string): Promise<void> {
557
+ // Codex doesn't expose Claude-style sub-agent tasks. No-op for now.
47
558
  }
48
559
 
49
- export function endConversation(_conversationId: string): void { /* no-op */ }
50
- export function endAllConversations(): void { /* no-op */ }
51
- export function isConversationBusy(_conversationId: string): boolean { return false; }
52
- export async function stopSubAgentTask(_conversationId: string, _taskId: string): Promise<void> { /* no-op */ }
53
560
  export async function warmUpForLiveConversation(
54
561
  _model: string,
55
562
  _names?: { botName: string; humanName: string },
56
- ): Promise<void> { /* no-op */ }
563
+ ): Promise<void> {
564
+ // No subprocess pre-warming yet — `codex app-server` startup is fast enough
565
+ // (~hundreds of ms). Re-evaluate if it becomes noticeable on the Pi.
566
+ }
57
567
 
58
568
  export async function startBlobyAgentQuery(
59
569
  conversationId: string,
60
- _prompt: string,
61
- _model: string,
570
+ prompt: string,
571
+ model: string,
62
572
  onMessage: OnAgentMessage,
63
573
  _attachments?: AgentAttachment[],
64
- _savedFiles?: SavedFile[],
65
- _names?: { botName: string; humanName: string },
66
- _recentMessages?: RecentMessage[],
67
- _supportPrompt?: string,
574
+ savedFiles?: SavedFile[],
575
+ names?: { botName: string; humanName: string },
576
+ recentMessages?: RecentMessage[],
577
+ supportPrompt?: string,
68
578
  _maxTurns?: number,
69
579
  ): Promise<void> {
70
- emitNotReady(conversationId, onMessage);
580
+ if (conversations.has(conversationId)) endConversation(conversationId);
581
+ const baseInstructions = supportPrompt
582
+ ? supportPrompt
583
+ : await assembleBaseInstructions(names, recentMessages);
584
+ const conv = await spawnAndInitialize(conversationId, model, onMessage, baseInstructions, true);
585
+ if (!conv) return;
586
+ await startTurn(conv, prompt, savedFiles);
71
587
  }
72
588
 
73
- export function stopBlobyAgentQuery(_conversationId: string): void { /* no-op */ }
589
+ export function stopBlobyAgentQuery(conversationId: string): void {
590
+ endConversation(conversationId);
591
+ }