iris-chatbot 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/LICENSE +21 -0
  2. package/README.md +49 -0
  3. package/bin/iris.mjs +267 -0
  4. package/package.json +61 -0
  5. package/template/LICENSE +21 -0
  6. package/template/README.md +49 -0
  7. package/template/eslint.config.mjs +18 -0
  8. package/template/next.config.ts +7 -0
  9. package/template/package-lock.json +9193 -0
  10. package/template/package.json +46 -0
  11. package/template/postcss.config.mjs +7 -0
  12. package/template/public/file.svg +1 -0
  13. package/template/public/globe.svg +1 -0
  14. package/template/public/next.svg +1 -0
  15. package/template/public/vercel.svg +1 -0
  16. package/template/public/window.svg +1 -0
  17. package/template/src/app/api/chat/route.ts +2445 -0
  18. package/template/src/app/api/connections/models/route.ts +255 -0
  19. package/template/src/app/api/connections/test/route.ts +124 -0
  20. package/template/src/app/api/local-sync/route.ts +74 -0
  21. package/template/src/app/api/tool-approval/route.ts +47 -0
  22. package/template/src/app/favicon.ico +0 -0
  23. package/template/src/app/globals.css +808 -0
  24. package/template/src/app/layout.tsx +74 -0
  25. package/template/src/app/page.tsx +444 -0
  26. package/template/src/components/ChatView.tsx +1537 -0
  27. package/template/src/components/Composer.tsx +160 -0
  28. package/template/src/components/MapView.tsx +244 -0
  29. package/template/src/components/MessageCard.tsx +955 -0
  30. package/template/src/components/SearchModal.tsx +72 -0
  31. package/template/src/components/SettingsModal.tsx +1257 -0
  32. package/template/src/components/Sidebar.tsx +153 -0
  33. package/template/src/components/TopBar.tsx +164 -0
  34. package/template/src/lib/connections.ts +275 -0
  35. package/template/src/lib/data.ts +324 -0
  36. package/template/src/lib/db.ts +49 -0
  37. package/template/src/lib/hooks.ts +76 -0
  38. package/template/src/lib/local-sync.ts +192 -0
  39. package/template/src/lib/memory.ts +695 -0
  40. package/template/src/lib/model-presets.ts +251 -0
  41. package/template/src/lib/store.ts +36 -0
  42. package/template/src/lib/tooling/approvals.ts +78 -0
  43. package/template/src/lib/tooling/providers/anthropic.ts +155 -0
  44. package/template/src/lib/tooling/providers/ollama.ts +73 -0
  45. package/template/src/lib/tooling/providers/openai.ts +267 -0
  46. package/template/src/lib/tooling/providers/openai_compatible.ts +16 -0
  47. package/template/src/lib/tooling/providers/types.ts +44 -0
  48. package/template/src/lib/tooling/registry.ts +103 -0
  49. package/template/src/lib/tooling/runtime.ts +189 -0
  50. package/template/src/lib/tooling/safety.ts +165 -0
  51. package/template/src/lib/tooling/tools/apps.ts +108 -0
  52. package/template/src/lib/tooling/tools/apps_plus.ts +153 -0
  53. package/template/src/lib/tooling/tools/communication.ts +883 -0
  54. package/template/src/lib/tooling/tools/files.ts +395 -0
  55. package/template/src/lib/tooling/tools/music.ts +988 -0
  56. package/template/src/lib/tooling/tools/notes.ts +461 -0
  57. package/template/src/lib/tooling/tools/notes_plus.ts +294 -0
  58. package/template/src/lib/tooling/tools/numbers.ts +175 -0
  59. package/template/src/lib/tooling/tools/schedule.ts +579 -0
  60. package/template/src/lib/tooling/tools/system.ts +142 -0
  61. package/template/src/lib/tooling/tools/web.ts +212 -0
  62. package/template/src/lib/tooling/tools/workflow.ts +218 -0
  63. package/template/src/lib/tooling/types.ts +27 -0
  64. package/template/src/lib/types.ts +309 -0
  65. package/template/src/lib/utils.ts +108 -0
  66. package/template/tsconfig.json +34 -0
package/template/src/components/ChatView.tsx
@@ -0,0 +1,1537 @@
+ "use client";
+
+ import { ArrowDown } from "lucide-react";
+ import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+ import type {
+   ChatCitationSource,
+   ModelConnection,
+   ChatStreamChunk,
+   MemoryContextPayload,
+   MessageNode,
+   Settings,
+   Thread,
+   ToolApproval,
+   ToolEvent,
+ } from "../lib/types";
+ import {
+   appendUserAndAssistant,
+   createThreadFromMessage,
+   deleteThread as deleteThreadById,
+ } from "../lib/data";
+ import { buildPath, embedSourcesInContent, splitContentAndSources } from "../lib/utils";
+ import { db } from "../lib/db";
+ import {
+   buildMemoryContextForPrompt,
+   captureMemoriesFromUserTurn,
+   markMemoriesUsed,
+ } from "../lib/memory";
+ import { useUIStore } from "../lib/store";
+ import { toChatConnectionPayload } from "../lib/connections";
+ import MessageCard from "./MessageCard";
+ import Composer from "./Composer";
+
+ const SYSTEM_PROMPT =
+   "You are a helpful assistant. Always return valid Markdown. Use ATX headings (##, ###), bold labels, lists, and tables when useful.";
+
+ const EMPTY_PROMPTS = [
+   "What are you working on?",
+   "What should we tackle today?",
+   "What do you want to build next?",
+   "What are we solving right now?",
+   "What is on your agenda today?",
+   "Where should we start?",
+ ];
+
+ const MAX_SIBLING_THREADS_FOR_CONTEXT = 4;
+ const MAX_MESSAGES_PER_SIBLING_THREAD = 8;
+ const MAX_MESSAGE_CHARS_FOR_CONTEXT = 320;
+ const MAX_CONTEXT_CHARS_TOTAL = 8000;
+ const STREAM_TYPING_FRAME_MS = 16;
+ const STREAM_TYPING_CHARS_PER_FRAME_MIN = 1;
+ const STREAM_TYPING_CHARS_PER_FRAME_MAX = 24;
+ const STREAM_TYPING_FIRST_PAINT_CHARS = 2;
+ const STREAM_DB_FLUSH_MS = 120;
+ const VOICE_SILENCE_AUTOSEND_MS = 1300;
+
+ type SpeechRecognitionAlternativeLike = {
+   transcript?: string;
+ };
+
+ type SpeechRecognitionResultLike = {
+   isFinal?: boolean;
+   [index: number]: SpeechRecognitionAlternativeLike;
+ };
+
+ type SpeechRecognitionEventLike = {
+   resultIndex?: number;
+   results?: ArrayLike<SpeechRecognitionResultLike>;
+ };
+
+ type SpeechRecognitionErrorEventLike = {
+   error?: string;
+ };
+
+ type SpeechRecognitionLike = {
+   continuous: boolean;
+   interimResults: boolean;
+   lang: string;
+   onresult: ((event: SpeechRecognitionEventLike) => void) | null;
+   onerror: ((event: SpeechRecognitionErrorEventLike) => void) | null;
+   onend: (() => void) | null;
+   start: () => void;
+   stop: () => void;
+ };
+
+ type SpeechRecognitionCtor = new () => SpeechRecognitionLike;
+
+ function getSpeechRecognitionCtor(): SpeechRecognitionCtor | null {
+   if (typeof window === "undefined") {
+     return null;
+   }
+   const w = window as Window & {
+     SpeechRecognition?: SpeechRecognitionCtor;
+     webkitSpeechRecognition?: SpeechRecognitionCtor;
+   };
+   return w.SpeechRecognition ?? w.webkitSpeechRecognition ?? null;
+ }
+
+ function normalizeSpeechText(input: string): string {
+   return input.replace(/\s+/g, " ").trim();
+ }
+
+ function compactTextForContext(input: string, maxChars = MAX_MESSAGE_CHARS_FOR_CONTEXT) {
+   const compact = splitContentAndSources(input).content.replace(/\s+/g, " ").trim();
+   if (!compact) {
+     return "(empty)";
+   }
+   return compact.length > maxChars ? `${compact.slice(0, maxChars)}…` : compact;
+ }
+
+ function shouldIncludeSiblingThreadContext(input: string): boolean {
+   const text = input.toLowerCase();
+   return (
+     /\b(other thread|another thread|across threads|between threads|all threads|both threads)\b/.test(
+       text,
+     ) ||
+     /\b(compare|contrast|summarize)\b[\s\S]{0,40}\bthread/.test(text)
+   );
+ }
+
+ function buildSiblingThreadsContext(params: {
+   allThreads: Thread[];
+   activeThread: Thread;
+   messageMap: Map<string, MessageNode>;
+ }): string | null {
+   const siblingThreads = params.allThreads
+     .filter(
+       (thread) =>
+         thread.conversationId === params.activeThread.conversationId &&
+         thread.id !== params.activeThread.id,
+     )
+     .sort((a, b) => b.updatedAt - a.updatedAt)
+     .slice(0, MAX_SIBLING_THREADS_FOR_CONTEXT);
+
+   if (siblingThreads.length === 0) {
+     return null;
+   }
+
+   let context = "Sibling thread context from this same conversation:\n";
+
+   for (const thread of siblingThreads) {
+     const path = buildPath(thread.headMessageId, params.messageMap)
+       .filter((message) => message.role === "user" || message.role === "assistant")
+       .slice(-MAX_MESSAGES_PER_SIBLING_THREAD);
+
+     if (path.length === 0) {
+       continue;
+     }
+
+     let threadBlock = `\n[${thread.title}]\n`;
+     for (const message of path) {
+       const speaker = message.role === "assistant" ? "Assistant" : "User";
+       threadBlock += `${speaker}: ${compactTextForContext(message.content)}\n`;
+     }
+
+     if (context.length + threadBlock.length > MAX_CONTEXT_CHARS_TOTAL) {
+       break;
+     }
+     context += threadBlock;
+   }
+
+   if (context === "Sibling thread context from this same conversation:\n") {
+     return null;
+   }
+
+   return context.trim();
+ }
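
Note: for orientation, the string this helper assembles looks roughly like the sample below. The thread title and messages are invented for illustration; each line is compacted to at most 320 characters by compactTextForContext.

  Sibling thread context from this same conversation:

  [Thread 2]
  User: What if we used a single local database file for sync instead?
  Assistant: A single file would simplify sync state, at the cost of...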
+
+ function normalizeAliasMemoryText(input: string): string {
+   return input
+     .toLowerCase()
+     .replace(/[^a-z0-9]+/g, " ")
+     .replace(/\s+/g, " ")
+     .trim();
+ }
+
+ function shouldLearnMusicAlias(alias: string, target: string): boolean {
+   const normalizedAlias = normalizeAliasMemoryText(alias);
+   const normalizedTarget = normalizeAliasMemoryText(target);
+   if (!normalizedAlias || !normalizedTarget || normalizedAlias === normalizedTarget) {
+     return false;
+   }
+   if (normalizedAlias.length < 2 || normalizedAlias.length > 48) {
+     return false;
+   }
+   if (normalizedAlias.split(" ").length > 5) {
+     return false;
+   }
+   if (/\b(song|track|album|playlist|by)\b/i.test(alias)) {
+     return false;
+   }
+   return true;
+ }
+
+ function extractMessageRecipientAliasFromPrompt(prompt: string): string | null {
+   const text = prompt.replace(/\s+/g, " ").trim();
+   if (!text) {
+     return null;
+   }
+
+   const patterns = [
+     /^\s*(?:text|message)\s+(.+?)\s+(?:that\s+)?(.+)$/i,
+     /^\s*send\s+(?:a\s+)?(?:text(?:\s+message)?|message)\s+to\s+(.+?)\s+(?:that\s+)?(.+)$/i,
+     /^\s*send\s+(.+?)\s+(?:a\s+)?(?:text(?:\s+message)?|message)\s+(?:that\s+)?(.+)$/i,
+   ];
+
+   const quotedSendMatch = text.match(/^\s*send\s+["'“”](.+?)["'“”]\s+to\s+(.+?)\s*$/i);
+   if (quotedSendMatch?.[2]) {
+     return quotedSendMatch[2]
+       .trim()
+       .split(/\s*,\s*|\s+and\s+/i)[0]
+       ?.trim()
+       ?.replace(/[,:;]+$/g, "")
+       ?.replace(/^['"]|['"]$/g, "") || null;
+   }
+
+   for (const pattern of patterns) {
+     const match = text.match(pattern);
+     if (!match?.[1] || !match?.[2]) {
+       continue;
+     }
+     const alias = match[1]
+       .replace(/^to\s+/i, "")
+       .split(/\s*,\s*|\s+and\s+/i)[0]
+       ?.trim()
+       ?.replace(/[,:;]+$/g, "")
+       ?.replace(/^['"]|['"]$/g, "");
+     if (!alias) {
+       return null;
+     }
+     return alias;
+   }
+   return null;
+ }
+
+ function extractMusicAliasFromPrompt(prompt: string): string | null {
+   const match = prompt.match(/^\s*play\s+(.+?)(?:\s+at\s+\d{1,3}\s*%|\s+volume|\s*$)/i);
+   if (!match?.[1]) {
+     return null;
+   }
+   return match[1]
+     .replace(/\bvia apple music app\b/gi, "")
+     .replace(/\bon apple music\b/gi, "")
+     .trim()
+     .replace(/^['"]|['"]$/g, "")
+     .replace(/[.?!]+$/g, "");
+ }
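
Note: as a rough illustration of what these two extractors return, here are some invented prompts checked against the regexes above (these are not test fixtures from the package):

  // Hypothetical calls, outputs traced by hand from the patterns above.
  extractMessageRecipientAliasFromPrompt("text mom that I am running late"); // "mom"
  extractMessageRecipientAliasFromPrompt('send "on my way" to Alex');        // "Alex"
  extractMusicAliasFromPrompt("play my gym mix at 40%");                     // "my gym mix"
  extractMusicAliasFromPrompt("open the garage");                            // null (no "play" verb)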
+
+ function resolveMessageTargetFromResult(result: unknown): string | null {
+   if (!result || typeof result !== "object") {
+     return null;
+   }
+   const payload = result as {
+     recipients?: Array<{ name?: string | null; input?: string; handle?: string }>;
+   };
+   const first = payload.recipients?.[0];
+   if (!first) {
+     return null;
+   }
+   return first.handle?.trim() || first.name?.trim() || first.input?.trim() || null;
+ }
+
+ function resolveMusicTargetFromResult(result: unknown): string | null {
+   if (!result || typeof result !== "object") {
+     return null;
+   }
+   const payload = result as {
+     playlistName?: string | null;
+     title?: string | null;
+     artist?: string | null;
+   };
+   if (typeof payload.playlistName === "string" && payload.playlistName.trim()) {
+     return payload.playlistName.trim();
+   }
+   const title = typeof payload.title === "string" ? payload.title.trim() : "";
+   const artist = typeof payload.artist === "string" ? payload.artist.trim() : "";
+   if (!title) {
+     return null;
+   }
+   return artist ? `${title} by ${artist}` : title;
+ }
+
+ function toApprovalStatus(decision: "approve" | "deny" | "timeout") {
+   if (decision === "approve") {
+     return "approved" as const;
+   }
+   if (decision === "deny") {
+     return "denied" as const;
+   }
+   return "timeout" as const;
+ }
+
+ type MicMode = "off" | "dictation";
+ type MicState = "idle" | "listening" | "processing";
+
+ export default function ChatView({
+   thread,
+   messages,
+   toolEvents,
+   toolApprovals,
+   settings,
+   connection,
+   model,
+   threads,
+   activeThreadId,
+   onSelectThread,
+   onOpenSettings,
+ }: {
+   thread: Thread | null;
+   messages: MessageNode[];
+   toolEvents: ToolEvent[];
+   toolApprovals: ToolApproval[];
+   settings: Settings | null;
+   connection: ModelConnection | null;
+   model: string;
+   threads: Thread[];
+   activeThreadId: string | null;
+   onSelectThread: (threadId: string) => void;
+   onOpenSettings: () => void;
+ }) {
+   const [input, setInput] = useState("");
+   const [error, setError] = useState<string | null>(null);
+   const [streamingByThread, setStreamingByThread] = useState<Record<string, string>>({});
+   const [streamDraftByMessageId, setStreamDraftByMessageId] = useState<Record<string, string>>({});
+   const [approvalBusyIds, setApprovalBusyIds] = useState<Record<string, boolean>>({});
+   const [showJumpToBottom, setShowJumpToBottom] = useState(false);
+   const [micMode, setMicMode] = useState<MicMode>("off");
+   const [micState, setMicState] = useState<MicState>("idle");
+   const focusedMessageId = useUIStore((state) => state.focusedMessageId);
+   const setFocusedMessageId = useUIStore((state) => state.setFocusedMessageId);
+   const chatScrollRef = useRef<HTMLDivElement | null>(null);
+   const bottomRef = useRef<HTMLDivElement | null>(null);
+   const isUserNearBottomRef = useRef(true);
+   const shouldStickToBottomRef = useRef(true);
+   const pendingSendScrollThreadIdRef = useRef<string | null>(null);
+   const followScrollRafRef = useRef<number | null>(null);
+   const lastFollowScrollAtRef = useRef(0);
+   const smoothFollowUntilRef = useRef(0);
+   const abortControllersRef = useRef<Map<string, AbortController>>(new Map());
+   const inflightThreadIdsRef = useRef<Set<string>>(new Set());
+   const micModeRef = useRef<MicMode>("off");
+   const micStateRef = useRef<MicState>("idle");
+   const currentInputRef = useRef("");
+   const recognitionRef = useRef<SpeechRecognitionLike | null>(null);
+   const recognitionBaseInputRef = useRef("");
+   const recognitionFinalTranscriptRef = useRef("");
+   const recognitionInterimTranscriptRef = useRef("");
+   const micSilenceTimerRef = useRef<number | null>(null);
+   const handleSendRef = useRef<(overrideInput?: string) => Promise<void>>(async () => {});
+
+   const scrollToBottom = useCallback((behavior: ScrollBehavior = "smooth") => {
+     const container = chatScrollRef.current;
+     if (container) {
+       container.scrollTo({ top: container.scrollHeight, behavior });
+       return;
+     }
+     bottomRef.current?.scrollIntoView({ behavior, block: "end" });
+   }, []);
+
+   const updateBottomVisibility = useCallback(() => {
+     const container = chatScrollRef.current;
+     if (!container) {
+       setShowJumpToBottom(false);
+       isUserNearBottomRef.current = true;
+       shouldStickToBottomRef.current = true;
+       return;
+     }
+
+     const distanceFromBottom =
+       container.scrollHeight - container.scrollTop - container.clientHeight;
+     const isNearBottom = distanceFromBottom <= 48;
+     isUserNearBottomRef.current = isNearBottom;
+     shouldStickToBottomRef.current = isNearBottom;
+     setShowJumpToBottom(!isNearBottom);
+   }, []);
+
+   const maybeAutoScrollToBottom = useCallback(() => {
+     if (!shouldStickToBottomRef.current) {
+       return;
+     }
+     if (followScrollRafRef.current !== null) {
+       return;
+     }
+     followScrollRafRef.current = requestAnimationFrame(() => {
+       followScrollRafRef.current = null;
+       const container = chatScrollRef.current;
+       const now = performance.now();
+       const smoothActive = now < smoothFollowUntilRef.current;
+       if (!container) {
+         scrollToBottom(smoothActive ? "smooth" : "auto");
+         lastFollowScrollAtRef.current = now;
+         return;
+       }
+       const distanceFromBottom =
+         container.scrollHeight - container.scrollTop - container.clientHeight;
+       if (distanceFromBottom <= 2) {
+         return;
+       }
+       const elapsed = now - lastFollowScrollAtRef.current;
+       const behavior: ScrollBehavior = smoothActive || elapsed > 180 ? "smooth" : "auto";
+       container.scrollTo({ top: container.scrollHeight, behavior });
+       lastFollowScrollAtRef.current = now;
+     });
+   }, [scrollToBottom]);
+
+   const messageMap = useMemo(
+     () => new Map(messages.map((message) => [message.id, message])),
+     [messages]
+   );
+
+   const threadMessages = useMemo(() => {
+     if (!thread?.headMessageId) return [];
+     return buildPath(thread.headMessageId, messageMap);
+   }, [thread?.headMessageId, messageMap]);
+
+   const emptyPrompt = useMemo(() => {
+     if (!thread?.id) return EMPTY_PROMPTS[0];
+     let hash = 0;
+     for (let i = 0; i < thread.id.length; i += 1) {
+       hash = (hash * 31 + thread.id.charCodeAt(i)) >>> 0;
+     }
+     return EMPTY_PROMPTS[hash % EMPTY_PROMPTS.length];
+   }, [thread?.id]);
+
+   const threadsInConversation = useMemo(
+     () =>
+       threads.filter(
+         (item) => item.conversationId === thread?.conversationId
+       ),
+     [threads, thread?.conversationId]
+   );
+
+   const threadsByMessage = useMemo(() => {
+     const map = new Map<string, Thread[]>();
+     const getThreadNumber = (title: string) => {
+       const match = title.match(/Thread\s+(\d+)/i);
+       return match ? Number(match[1]) : Number.POSITIVE_INFINITY;
+     };
+     threadsInConversation.forEach((item) => {
+       if (!item.forkedFromMessageId) return;
+       const list = map.get(item.forkedFromMessageId) || [];
+       list.push(item);
+       map.set(item.forkedFromMessageId, list);
+     });
+     map.forEach((list, key) => {
+       list.sort((a, b) => {
+         const aNum = getThreadNumber(a.title);
+         const bNum = getThreadNumber(b.title);
+         if (aNum !== bNum) return aNum - bNum;
+         return a.updatedAt - b.updatedAt;
+       });
+       map.set(key, list);
+     });
+     return map;
+   }, [threadsInConversation]);
+
+   const baseThreadByMessage = useMemo(() => {
+     const result = new Map<string, string>();
+     const threadById = new Map(
+       threadsInConversation.map((item) => [item.id, item])
+     );
+
+     threadsInConversation.forEach((item) => {
+       let cursor = item.headMessageId;
+       while (cursor) {
+         const currentBaseId = result.get(cursor);
+         if (!currentBaseId) {
+           result.set(cursor, item.id);
+         } else {
+           const currentBase = threadById.get(currentBaseId);
+           const currentIsForkAtNode =
+             currentBase?.forkedFromMessageId === cursor;
+           const candidateIsBaseAtNode = item.forkedFromMessageId !== cursor;
+           if (currentIsForkAtNode && candidateIsBaseAtNode) {
+             result.set(cursor, item.id);
+           }
+         }
+
+         cursor = messageMap.get(cursor)?.parentId ?? null;
+       }
+     });
+
+     return result;
+   }, [threadsInConversation, messageMap]);
+
+   const activeStreamingMessageId = useMemo(() => {
+     if (!thread?.id) {
+       return null;
+     }
+     return streamingByThread[thread.id] ?? null;
+   }, [streamingByThread, thread?.id]);
+   const micSupported = getSpeechRecognitionCtor() !== null;
+
+   const toolEventsByMessage = useMemo(() => {
+     const map = new Map<string, ToolEvent[]>();
+     for (const event of toolEvents) {
+       const list = map.get(event.assistantMessageId) || [];
+       list.push(event);
+       map.set(event.assistantMessageId, list);
+     }
+     map.forEach((list, key) => {
+       list.sort((a, b) => a.createdAt - b.createdAt);
+       map.set(key, list);
+     });
+     return map;
+   }, [toolEvents]);
+
+   const toolApprovalsByMessage = useMemo(() => {
+     const map = new Map<string, ToolApproval[]>();
+     for (const approval of toolApprovals) {
+       const list = map.get(approval.assistantMessageId) || [];
+       list.push(approval);
+       map.set(approval.assistantMessageId, list);
+     }
+     map.forEach((list, key) => {
+       list.sort((a, b) => a.requestedAt - b.requestedAt);
+       map.set(key, list);
+     });
+     return map;
+   }, [toolApprovals]);
+
+   const clearMicSilenceTimer = useCallback(() => {
+     if (micSilenceTimerRef.current !== null) {
+       window.clearTimeout(micSilenceTimerRef.current);
+       micSilenceTimerRef.current = null;
+     }
+   }, []);
+
+   const stopMicRecognitionSession = useCallback(() => {
+     clearMicSilenceTimer();
+     const recognition = recognitionRef.current;
+     recognitionRef.current = null;
+     if (!recognition) {
+       return;
+     }
+     recognition.onresult = null;
+     recognition.onerror = null;
+     recognition.onend = null;
+     try {
+       recognition.stop();
+     } catch {
+       // best-effort stop
+     }
+   }, [clearMicSilenceTimer]);
+
+   const applyRecognitionInput = useCallback(() => {
+     const parts = [
+       normalizeSpeechText(recognitionBaseInputRef.current),
+       normalizeSpeechText(recognitionFinalTranscriptRef.current),
+       normalizeSpeechText(recognitionInterimTranscriptRef.current),
+     ].filter(Boolean);
+     setInput(parts.join(" ").trim());
+   }, []);
+
+   useEffect(() => {
+     const controllers = abortControllersRef.current;
+     const inflightThreadIds = inflightThreadIdsRef.current;
+     return () => {
+       if (followScrollRafRef.current !== null) {
+         cancelAnimationFrame(followScrollRafRef.current);
+         followScrollRafRef.current = null;
+       }
+       stopMicRecognitionSession();
+       controllers.forEach((controller) => controller.abort());
+       controllers.clear();
+       inflightThreadIds.clear();
+     };
+   }, [stopMicRecognitionSession]);
+
+   useEffect(() => {
+     currentInputRef.current = input;
+   }, [input]);
+
+   useEffect(() => {
+     micModeRef.current = micMode;
+   }, [micMode]);
+
+   useEffect(() => {
+     micStateRef.current = micState;
+   }, [micState]);
+
+   const handleBranch = useCallback(
+     async (message: MessageNode) => {
+       const resolvedThread =
+         thread || (activeThreadId ? await db.threads.get(activeThreadId) : null);
+       if (!resolvedThread) return;
+       try {
+         await createThreadFromMessage(message.id, resolvedThread.conversationId);
+       } catch (err) {
+         const message =
+           err instanceof Error ? err.message : "Unable to add thread.";
+         setError(message);
+       }
+     },
+     [thread, activeThreadId]
+   );
+
+   const handleDeleteBranch = useCallback(
+     async (threadId: string, fallbackThreadId: string | null) => {
+       await deleteThreadById(threadId);
+       if (activeThreadId === threadId) {
+         if (fallbackThreadId) {
+           onSelectThread(fallbackThreadId);
+           return;
+         }
+         const resolvedThread =
+           thread || (activeThreadId ? await db.threads.get(activeThreadId) : null);
+         if (resolvedThread) {
+           onSelectThread(resolvedThread.id);
+         }
+       }
+     },
+     [activeThreadId, onSelectThread, thread]
+   );
+
+   const handleStop = useCallback(() => {
+     const threadId = thread?.id || activeThreadId;
+     if (!threadId) {
+       return;
+     }
+     const controller = abortControllersRef.current.get(threadId);
+     controller?.abort();
+     abortControllersRef.current.delete(threadId);
+     setStreamingByThread((current) => {
+       if (!current[threadId]) {
+         return current;
+       }
+       const next = { ...current };
+       delete next[threadId];
+       return next;
+     });
+   }, [thread?.id, activeThreadId]);
+
+   const handleApprovalDecision = useCallback(
+     async (approvalId: string, decision: "approve" | "deny") => {
+       setApprovalBusyIds((current) => ({
+         ...current,
+         [approvalId]: true,
+       }));
+
+       try {
+         const response = await fetch("/api/tool-approval", {
+           method: "POST",
+           headers: { "Content-Type": "application/json" },
+           body: JSON.stringify({ approvalId, decision }),
+         });
+
+         if (!response.ok) {
+           const payload = await response.json().catch(() => ({}));
+           throw new Error(payload.error || "Failed to resolve approval.");
+         }
+
+         await db.toolApprovals.update(approvalId, {
+           status: toApprovalStatus(decision),
+           resolvedAt: Date.now(),
+         });
+       } catch (err) {
+         const message =
+           err instanceof Error ? err.message : "Unable to resolve approval.";
+         setError(message);
+       } finally {
+         setApprovalBusyIds((current) => {
+           if (!current[approvalId]) {
+             return current;
+           }
+           const next = { ...current };
+           delete next[approvalId];
+           return next;
+         });
+       }
+     },
+     []
+   );
+
+   const startMicRecognitionSession = useCallback(
+     (mode: Exclude<MicMode, "off">) => {
+       const RecognitionCtor = getSpeechRecognitionCtor();
+       if (!RecognitionCtor) {
+         setError("Voice input is not supported in this browser.");
+         setMicMode("off");
+         setMicState("idle");
+         return false;
+       }
+
+       clearMicSilenceTimer();
+       stopMicRecognitionSession();
+       recognitionBaseInputRef.current = normalizeSpeechText(currentInputRef.current);
+       recognitionFinalTranscriptRef.current = "";
+       recognitionInterimTranscriptRef.current = "";
+       setMicMode(mode);
+       setMicState("listening");
+       setError(null);
+
+       const recognition = new RecognitionCtor();
+       recognition.continuous = true;
+       recognition.interimResults = true;
+       recognition.lang = typeof navigator !== "undefined" ? navigator.language || "en-US" : "en-US";
+
+       recognition.onresult = (event) => {
+         const results = event.results ?? [];
+         const startIndex = Math.max(0, Number(event.resultIndex ?? 0));
+
+         let appendedFinal = "";
+         let interim = "";
+         for (let index = startIndex; index < results.length; index += 1) {
+           const item = results[index];
+           const transcript = normalizeSpeechText(item?.[0]?.transcript ?? "");
+           if (!transcript) {
+             continue;
+           }
+           if (item?.isFinal) {
+             appendedFinal += `${transcript} `;
+           } else {
+             interim += `${transcript} `;
+           }
+         }
+
+         if (appendedFinal.trim()) {
+           recognitionFinalTranscriptRef.current = normalizeSpeechText(
+             `${recognitionFinalTranscriptRef.current} ${appendedFinal}`,
+           );
+         }
+         recognitionInterimTranscriptRef.current = normalizeSpeechText(interim);
+         applyRecognitionInput();
+
+         if (!recognitionFinalTranscriptRef.current) {
+           return;
+         }
+
+         clearMicSilenceTimer();
+         micSilenceTimerRef.current = window.setTimeout(() => {
+           const prompt = normalizeSpeechText(
+             [recognitionBaseInputRef.current, recognitionFinalTranscriptRef.current].join(" "),
+           );
+           if (!prompt) {
+             return;
+           }
+           void handleSendRef.current(prompt);
+         }, VOICE_SILENCE_AUTOSEND_MS);
+       };
+
+       recognition.onerror = (event) => {
+         if (event.error === "no-speech" || event.error === "aborted") {
+           return;
+         }
+         if (event.error === "not-allowed" || event.error === "service-not-allowed") {
+           setError("Microphone access was denied. Allow mic access to use voice input.");
+           setMicMode("off");
+           setMicState("idle");
+           stopMicRecognitionSession();
+           return;
+         }
+         setError("Voice input failed. Please try again.");
+       };
+
+       recognition.onend = () => {
+         if (micModeRef.current !== mode || micStateRef.current !== "listening") {
+           recognitionRef.current = null;
+           return;
+         }
+         window.setTimeout(() => {
+           if (micModeRef.current !== mode || micStateRef.current !== "listening") {
+             recognitionRef.current = null;
+             return;
+           }
+           try {
+             recognition.start();
+             recognitionRef.current = recognition;
+           } catch {
+             recognitionRef.current = null;
+             setMicState("idle");
+           }
+         }, 120);
+       };
+
+       recognitionRef.current = recognition;
+       try {
+         recognition.start();
+         applyRecognitionInput();
+         return true;
+       } catch (error) {
+         const message =
+           error instanceof Error ? error.message : "Could not start voice input.";
+         setError(message);
+         setMicMode("off");
+         setMicState("idle");
+         stopMicRecognitionSession();
+         return false;
+       }
+     },
+     [
+       applyRecognitionInput,
+       clearMicSilenceTimer,
+       stopMicRecognitionSession,
+     ],
+   );
+
+   const stopMicMode = useCallback(() => {
+     clearMicSilenceTimer();
+     stopMicRecognitionSession();
+     recognitionBaseInputRef.current = normalizeSpeechText(currentInputRef.current);
+     recognitionFinalTranscriptRef.current = "";
+     recognitionInterimTranscriptRef.current = "";
+     setMicMode("off");
+     setMicState("idle");
+   }, [clearMicSilenceTimer, stopMicRecognitionSession]);
+
+   const handleMicToggle = useCallback(() => {
+     if (micModeRef.current !== "off") {
+       stopMicMode();
+       return;
+     }
+
+     if (!micSupported) {
+       setError("Voice input is not supported in this browser.");
+       return;
+     }
+
+     startMicRecognitionSession("dictation");
+   }, [micSupported, startMicRecognitionSession, stopMicMode]);
+
+   const handleSend = useCallback(async (overrideInput?: string) => {
+     const resolvedThread =
+       thread || (activeThreadId ? await db.threads.get(activeThreadId) : null);
+     if (!resolvedThread) return;
+     const trimmed = (overrideInput ?? input).trim();
+     if (!trimmed) return;
+     const sendTriggeredByMic =
+       typeof overrideInput === "string" || micModeRef.current !== "off";
+     const stopMicAfterValidationFailure = () => {
+       if (!sendTriggeredByMic) {
+         return;
+       }
+       stopMicMode();
+     };
+
+     if (!settings) {
+       setError("Settings not loaded yet.");
+       stopMicAfterValidationFailure();
+       return;
+     }
+
+     if (!connection) {
+       setError("Select a model connection before sending a message.");
+       onOpenSettings();
+       stopMicAfterValidationFailure();
+       return;
+     }
+
+     const connectionPayload = toChatConnectionPayload(connection, settings);
+     const requiresApiKey =
+       connectionPayload.kind === "builtin" &&
+       (connectionPayload.provider === "openai" ||
+         connectionPayload.provider === "anthropic" ||
+         connectionPayload.provider === "google");
+     if (requiresApiKey && !connectionPayload.apiKey) {
+       setError("Add an API key in Settings before sending a message.");
+       onOpenSettings();
+       stopMicAfterValidationFailure();
+       return;
+     }
+
+     if (!model) {
+       setError("Select a model before sending a message.");
+       stopMicAfterValidationFailure();
+       return;
+     }
+
+     if (sendTriggeredByMic) {
+       clearMicSilenceTimer();
+       stopMicRecognitionSession();
+       recognitionBaseInputRef.current = "";
+       recognitionFinalTranscriptRef.current = "";
+       recognitionInterimTranscriptRef.current = "";
+       setMicState("processing");
+     }
+
+     if (
+       streamingByThread[resolvedThread.id] ||
+       inflightThreadIdsRef.current.has(resolvedThread.id)
+     ) {
+       setError("This thread is already responding.");
+       return;
+     }
+
+     inflightThreadIdsRef.current.add(resolvedThread.id);
+     setError(null);
+     setInput("");
+     setFocusedMessageId(null);
+     pendingSendScrollThreadIdRef.current = resolvedThread.id;
+     isUserNearBottomRef.current = true;
+     shouldStickToBottomRef.current = true;
+     setShowJumpToBottom(false);
+     smoothFollowUntilRef.current = performance.now() + 1200;
+     scrollToBottom("smooth");
+
+     const { userMessage, assistantMessage } = await appendUserAndAssistant({
+       thread: resolvedThread,
+       content: trimmed,
+       provider: connection.kind === "builtin" ? connection.provider ?? connection.id : connection.id,
+       model,
+     });
+     const memoryEnabled = settings.memory?.enabled !== false;
+     const memoryAutoCaptureEnabled =
+       memoryEnabled && settings.memory?.autoCapture !== false;
+     const messageAliasCandidate = memoryEnabled
+       ? extractMessageRecipientAliasFromPrompt(trimmed)
+       : null;
+     const musicAliasCandidate = memoryEnabled
+       ? extractMusicAliasFromPrompt(trimmed)
+       : null;
+     let learnedMessageAlias = false;
+     let learnedMusicAlias = false;
+     if (memoryEnabled) {
+       void captureMemoriesFromUserTurn({
+         text: trimmed,
+         conversationId: resolvedThread.conversationId,
+         settingsMemory: settings.memory,
+       }).catch(() => {
+         // Memory capture is best-effort and must not block chat.
+       });
+     }
+
+     setStreamingByThread((current) => ({
+       ...current,
+       [resolvedThread.id]: assistantMessage.id,
+     }));
+     setStreamDraftByMessageId((current) => ({
+       ...current,
+       [assistantMessage.id]: "",
+     }));
+     requestAnimationFrame(() => scrollToBottom("smooth"));
+
+     const map = new Map(messageMap);
+     map.set(userMessage.id, userMessage);
+     map.set(assistantMessage.id, assistantMessage);
+     const history = buildPath(userMessage.id, map);
+     const payloadMessages: Array<{ role: "system" | "user" | "assistant"; content: string }> = [];
+     let memoryPayload: MemoryContextPayload | undefined;
+
+     if (memoryEnabled) {
+       try {
+         const memoryContext = await buildMemoryContextForPrompt({
+           query: trimmed,
+           conversationId: resolvedThread.conversationId,
+           enabled: true,
+           toolInfluence: settings.memory?.toolInfluence !== false,
+         });
+         if (memoryContext.systemMessage) {
+           payloadMessages.push({
+             role: "system",
+             content: memoryContext.systemMessage,
+           });
+         }
+         memoryPayload = memoryContext.payload;
+         void markMemoriesUsed(memoryContext.usedEntryIds);
+       } catch {
+         // Memory context retrieval is best-effort and must not block chat.
+       }
+     }
+
+     const siblingThreadsContext = shouldIncludeSiblingThreadContext(trimmed)
+       ? buildSiblingThreadsContext({
+           allThreads: threads,
+           activeThread: resolvedThread,
+           messageMap: map,
+         })
+       : null;
+     if (siblingThreadsContext) {
+       payloadMessages.push({
+         role: "system",
+         content:
+           "You may use sibling-thread context when the user refers to other threads or asks for summaries across threads.\n\n" +
+           siblingThreadsContext,
+       });
+     }
+
+     payloadMessages.push(
+       ...history
+         .filter((message) => message.role !== "system")
+         .map((message) => ({ role: message.role, content: message.content })),
+     );
+
+     const controller = new AbortController();
+     abortControllersRef.current.set(resolvedThread.id, controller);
+
+     try {
+       const response = await fetch("/api/chat", {
+         method: "POST",
+         headers: { "Content-Type": "application/json" },
+         body: JSON.stringify({
+           connection: connectionPayload,
+           model,
+           system: SYSTEM_PROMPT,
+           messages: payloadMessages,
+           stream: true,
+           localTools: settings.localTools,
+           memory: memoryPayload,
+           enableWebSources: settings.enableWebSources,
+           meta: {
+             threadId: resolvedThread.id,
+             conversationId: resolvedThread.conversationId,
+             assistantMessageId: assistantMessage.id,
+           },
+         }),
+         signal: controller.signal,
+       });
+
+       if (!response.ok || !response.body) {
+         const text = await response.text();
+         throw new Error(
+           text || `Failed to start streaming response (${response.status}).`
+         );
+       }
+
+       const reader = response.body.getReader();
+       const decoder = new TextDecoder();
+       let buffer = "";
+       let assembled = "";
+       let rendered = "";
+       let receivedSources: ChatCitationSource[] = [];
+       let pendingTokens = "";
+       let drainTimer: number | null = null;
+       let lastPersistAt = 0;
+       const queueDbWrite = (operation: Promise<unknown>) => {
+         void operation.catch(() => {
+           // Persisting tool metadata is best-effort and must not block response streaming.
+         });
+       };
+       const toolEventIdForCall = (callId: string) => `${assistantMessage.id}:${callId}`;
+       const upsertSources = (incoming: ChatCitationSource[]) => {
+         const next = new Map<string, ChatCitationSource>();
+         for (const source of [...receivedSources, ...incoming]) {
+           const url = typeof source.url === "string" ? source.url.trim() : "";
+           if (!url) {
+             continue;
+           }
+           const title =
+             typeof source.title === "string" && source.title.trim()
+               ? source.title.trim()
+               : undefined;
+           const existing = next.get(url);
+           if (!existing || (!existing.title && title)) {
+             next.set(url, { url, title });
+           }
+         }
+         receivedSources = [...next.values()];
+       };
+       const renderWithSources = (value: string) => embedSourcesInContent(value, receivedSources);
+       const pushRendered = () => {
+         const nextContent = renderWithSources(rendered);
+         setStreamDraftByMessageId((current) => {
+           if (current[assistantMessage.id] === nextContent) {
+             return current;
+           }
+           return {
+             ...current,
+             [assistantMessage.id]: nextContent,
+           };
+         });
+         maybeAutoScrollToBottom();
+       };
+       const persistRendered = async (force = false) => {
+         const now = Date.now();
+         if (!force && now - lastPersistAt < STREAM_DB_FLUSH_MS) {
+           return;
+         }
+         lastPersistAt = now;
+         await db.messages.update(assistantMessage.id, { content: rendered });
+       };
+       const scheduleDrain = () => {
+         if (drainTimer !== null) {
+           return;
+         }
+         drainTimer = window.setTimeout(() => {
+           drainTimer = null;
+           if (!pendingTokens) {
+             return;
+           }
+           const charsThisFrame = Math.min(
+             STREAM_TYPING_CHARS_PER_FRAME_MAX,
+             Math.max(
+               STREAM_TYPING_CHARS_PER_FRAME_MIN,
+               Math.ceil(pendingTokens.length / 4),
+             ),
+           );
+           const nextSlice = pendingTokens.slice(0, charsThisFrame);
+           pendingTokens = pendingTokens.slice(nextSlice.length);
+           rendered += nextSlice;
+           pushRendered();
+           void persistRendered(false);
+           if (pendingTokens.length > 0) {
+             scheduleDrain();
+             return;
+           }
+         }, STREAM_TYPING_FRAME_MS);
+       };
+       const enqueueToken = (value: string) => {
+         if (!value) {
+           return;
+         }
+         assembled += value;
+         pendingTokens += value;
+         if (!rendered && drainTimer === null) {
+           const firstSlice = pendingTokens.slice(0, STREAM_TYPING_FIRST_PAINT_CHARS);
+           pendingTokens = pendingTokens.slice(firstSlice.length);
+           rendered += firstSlice;
+           pushRendered();
+         }
+         scheduleDrain();
+       };
+       const waitForDrain = async () => {
+         while (pendingTokens.length > 0 || drainTimer !== null) {
+           await new Promise((resolve) => window.setTimeout(resolve, STREAM_TYPING_FRAME_MS));
+         }
+       };
+
+       while (true) {
+         const { done, value } = await reader.read();
+         if (done) break;
+         buffer += decoder.decode(value, { stream: true });
+         const parts = buffer.split("\n\n");
+         buffer = parts.pop() || "";
+
+         for (const part of parts) {
+           const line = part.trim();
+           if (!line.startsWith("data:")) continue;
+           const json = line.replace(/^data:\s*/, "");
+           if (!json) continue;
+           const chunk = JSON.parse(json) as ChatStreamChunk;
+
+           if (chunk.type === "token") {
+             enqueueToken(chunk.value ?? "");
+             continue;
+           }
+
+           if (chunk.type === "sources") {
+             upsertSources(chunk.items ?? []);
+             pushRendered();
+             continue;
+           }
+
+           if (chunk.type === "tool_call") {
+             queueDbWrite(db.toolEvents.put({
+               id: toolEventIdForCall(chunk.callId),
+               conversationId: resolvedThread.conversationId,
+               threadId: resolvedThread.id,
+               assistantMessageId: assistantMessage.id,
+               toolCallId: chunk.callId,
+               toolName: chunk.name,
+               stage: "call",
+               payloadJson: JSON.stringify(chunk.args ?? {}),
+               createdAt: Date.now(),
+             }));
+             maybeAutoScrollToBottom();
+             continue;
+           }
+
+           if (chunk.type === "tool_progress") {
+             queueDbWrite(db.toolEvents.put({
+               id: toolEventIdForCall(chunk.callId),
+               conversationId: resolvedThread.conversationId,
+               threadId: resolvedThread.id,
+               assistantMessageId: assistantMessage.id,
+               toolCallId: chunk.callId,
+               toolName: chunk.name,
+               stage: "progress",
+               message: chunk.message,
+               createdAt: Date.now(),
+             }));
+             maybeAutoScrollToBottom();
+             continue;
+           }
+
+           if (chunk.type === "tool_result") {
+             queueDbWrite(db.toolEvents.put({
+               id: toolEventIdForCall(chunk.callId),
+               conversationId: resolvedThread.conversationId,
+               threadId: resolvedThread.id,
+               assistantMessageId: assistantMessage.id,
+               toolCallId: chunk.callId,
+               toolName: chunk.name,
+               stage: "result",
+               message: chunk.ok ? "success" : "failure",
+               payloadJson: JSON.stringify(chunk.result ?? {}),
+               createdAt: Date.now(),
+             }));
+             if (chunk.ok && memoryAutoCaptureEnabled) {
+               if (!learnedMessageAlias && chunk.name === "messages_send") {
+                 const alias = messageAliasCandidate;
+                 const target = resolveMessageTargetFromResult(chunk.result);
+                 if (
+                   alias &&
+                   target &&
+                   normalizeAliasMemoryText(alias) !== normalizeAliasMemoryText(target)
+                 ) {
+                   learnedMessageAlias = true;
+                   void captureMemoriesFromUserTurn({
+                     text: `when I say ${alias} text ${target}`,
+                     conversationId: resolvedThread.conversationId,
+                     settingsMemory: settings.memory,
+                   }).catch(() => {
+                     // Alias learning is best-effort and must not block chat.
+                   });
+                 }
+               }
+
+               if (!learnedMusicAlias && chunk.name === "music_play") {
+                 const alias = musicAliasCandidate;
+                 const target = resolveMusicTargetFromResult(chunk.result);
+                 if (
+                   alias &&
+                   target &&
+                   shouldLearnMusicAlias(alias, target)
+                 ) {
+                   learnedMusicAlias = true;
+                   void captureMemoriesFromUserTurn({
+                     text: `when I say ${alias} play ${target}`,
+                     conversationId: resolvedThread.conversationId,
+                     settingsMemory: settings.memory,
+                   }).catch(() => {
+                     // Alias learning is best-effort and must not block chat.
+                   });
+                 }
+               }
+             }
+             maybeAutoScrollToBottom();
+             continue;
+           }
+
+           if (chunk.type === "approval_requested") {
+             queueDbWrite(db.toolApprovals.put({
+               id: chunk.approvalId,
+               conversationId: resolvedThread.conversationId,
+               threadId: resolvedThread.id,
+               assistantMessageId: assistantMessage.id,
+               toolCallId: chunk.callId,
+               toolName: chunk.name,
+               argsJson: JSON.stringify(chunk.args ?? {}),
+               reason: chunk.reason,
+               status: "requested",
+               requestedAt: Date.now(),
+             }));
+             maybeAutoScrollToBottom();
+             continue;
+           }
+
+           if (chunk.type === "approval_resolved") {
+             queueDbWrite((async () => {
+               const existing = await db.toolApprovals.get(chunk.approvalId);
+               if (existing) {
+                 await db.toolApprovals.update(chunk.approvalId, {
+                   status: toApprovalStatus(chunk.decision),
+                   resolvedAt: Date.now(),
+                 });
+               }
+             })());
+             maybeAutoScrollToBottom();
+             continue;
+           }
+
+           if (chunk.type === "error") {
+             if (drainTimer !== null) {
+               window.clearTimeout(drainTimer);
+               drainTimer = null;
+             }
+             pendingTokens = "";
+             assembled = `**Error:** ${chunk.error || "Unknown error"}`;
+             rendered = assembled;
+             pushRendered();
+             await db.messages.update(assistantMessage.id, { content: assembled });
+             setError(chunk.error || "Unknown error");
+             maybeAutoScrollToBottom();
+             continue;
+           }
+
+           if (chunk.type === "done") {
+             continue;
+           }
+         }
+       }
+       await waitForDrain();
+       if (assembled && rendered !== assembled) {
+         rendered = assembled;
+         pushRendered();
+       }
+       const finalizedContent = renderWithSources(assembled || rendered);
+       if (assembled || receivedSources.length > 0) {
+         await db.messages.update(assistantMessage.id, { content: finalizedContent });
+       } else {
+         await persistRendered(true);
+       }
+     } catch (err) {
+       if ((err as Error).name === "AbortError") {
+         return;
+       }
+       const message = err instanceof Error ? err.message : "Unknown error";
+       setStreamDraftByMessageId((current) => ({
+         ...current,
+         [assistantMessage.id]: `**Error:** ${message}`,
+       }));
+       await db.messages.update(assistantMessage.id, {
+         content: `**Error:** ${message}`,
+       });
+       setError(message);
+     } finally {
+       inflightThreadIdsRef.current.delete(resolvedThread.id);
+       abortControllersRef.current.delete(resolvedThread.id);
+       setStreamingByThread((current) => {
+         if (!current[resolvedThread.id]) {
+           return current;
+         }
+         const next = { ...current };
+         delete next[resolvedThread.id];
+         return next;
+       });
+       window.setTimeout(() => {
+         setStreamDraftByMessageId((current) => {
+           if (!(assistantMessage.id in current)) {
+             return current;
+           }
+           const next = { ...current };
+           delete next[assistantMessage.id];
+           return next;
+         });
+       }, 1200);
+
+       if (sendTriggeredByMic) {
+         if (micModeRef.current === "off") {
+           setMicState("idle");
+         } else {
+           const resumed = startMicRecognitionSession("dictation");
+           if (!resumed) {
+             setMicState("idle");
+           }
+         }
+       }
+     }
+   }, [
+     thread,
+     input,
+     settings,
+     connection,
+     model,
+     threads,
+     activeThreadId,
+     onOpenSettings,
+     streamingByThread,
+     scrollToBottom,
+     setFocusedMessageId,
+     maybeAutoScrollToBottom,
+     clearMicSilenceTimer,
+     stopMicRecognitionSession,
+     startMicRecognitionSession,
+     stopMicMode,
+     messageMap,
+   ]);
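
Note: handleSend above expects /api/chat to answer with server-sent events whose data: payloads decode to ChatStreamChunk values. The authoritative union lives in src/lib/types.ts; inferred only from the fields this component reads, it is shaped roughly like the sketch below (field optionality is a guess):

  // Sketch only: names come from the handlers above, not from src/lib/types.ts.
  type ChatStreamChunkSketch =
    | { type: "token"; value?: string }
    | { type: "sources"; items?: ChatCitationSource[] }
    | { type: "tool_call"; callId: string; name: string; args?: unknown }
    | { type: "tool_progress"; callId: string; name: string; message?: string }
    | { type: "tool_result"; callId: string; name: string; ok: boolean; result?: unknown }
    | {
        type: "approval_requested";
        approvalId: string;
        callId: string;
        name: string;
        args?: unknown;
        reason?: string;
      }
    | { type: "approval_resolved"; approvalId: string; decision: "approve" | "deny" | "timeout" }
    | { type: "error"; error?: string }
    | { type: "done" };

  // On the wire, each chunk arrives as one SSE block terminated by a blank line, e.g.
  // data: {"type":"token","value":"Hel"}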
+
+   useEffect(() => {
+     handleSendRef.current = handleSend;
+   }, [handleSend]);
+
+   useEffect(() => {
+     const pendingThreadId = pendingSendScrollThreadIdRef.current;
+     if (!pendingThreadId || pendingThreadId !== thread?.id) {
+       return;
+     }
+     if (threadMessages.length === 0 && !activeStreamingMessageId) {
+       return;
+     }
+     pendingSendScrollThreadIdRef.current = null;
+     requestAnimationFrame(() => {
+       scrollToBottom("smooth");
+     });
+   }, [thread?.id, threadMessages.length, activeStreamingMessageId, scrollToBottom]);
+
+   useEffect(() => {
+     if (!focusedMessageId || !threadMessages.length) return;
+     const target = document.querySelector<HTMLElement>(
+       `[data-message-id="${focusedMessageId}"]`
+     );
+     if (!target) return;
+     requestAnimationFrame(() => {
+       target.scrollIntoView({ behavior: "smooth", block: "center" });
+       setFocusedMessageId(null);
+     });
+   }, [focusedMessageId, threadMessages, setFocusedMessageId]);
+
+   useEffect(() => {
+     maybeAutoScrollToBottom();
+   }, [toolEvents.length, toolApprovals.length, maybeAutoScrollToBottom]);
+
+   useEffect(() => {
+     const rafId = requestAnimationFrame(() => {
+       updateBottomVisibility();
+     });
+     const handleResize = () => updateBottomVisibility();
+     window.addEventListener("resize", handleResize);
+     return () => {
+       cancelAnimationFrame(rafId);
+       window.removeEventListener("resize", handleResize);
+     };
+   }, [threadMessages, activeThreadId, updateBottomVisibility]);
+
+   const renderedThreadRows = useMemo(
+     () =>
+       threadMessages.map((message) => {
+         const shelfThreads = threadsByMessage.get(message.id) || [];
+         const baseThreadId = baseThreadByMessage.get(message.id) || null;
+         const messageToolEvents = toolEventsByMessage.get(message.id) || [];
+         const messageApprovals = toolApprovalsByMessage.get(message.id) || [];
+
+         return (
+           <div
+             key={message.id}
+             data-message-id={message.id}
+             className={`message-row ${message.role}`}
+           >
+             <MessageCard
+               message={
+                 streamDraftByMessageId[message.id] === undefined
+                   ? message
+                   : { ...message, content: streamDraftByMessageId[message.id] }
+               }
+               onAddThread={handleBranch}
+               threads={shelfThreads}
+               baseThreadId={baseThreadId}
+               activeThreadId={activeThreadId}
+               onSelectThread={onSelectThread}
+               onDeleteThread={handleDeleteBranch}
+               isStreaming={activeStreamingMessageId === message.id}
+               toolEvents={messageToolEvents}
+               approvals={messageApprovals}
+               onResolveApproval={handleApprovalDecision}
+               approvalBusyIds={approvalBusyIds}
+             />
+           </div>
+         );
+       }),
+     [
+       threadMessages,
+       threadsByMessage,
+       baseThreadByMessage,
+       handleBranch,
+       activeThreadId,
+       onSelectThread,
+       handleDeleteBranch,
+       activeStreamingMessageId,
+       toolEventsByMessage,
+       toolApprovalsByMessage,
+       handleApprovalDecision,
+       approvalBusyIds,
+       streamDraftByMessageId,
+     ]
+   );
+
+   return (
+     <div className="relative flex h-full flex-col">
+       <div
+         ref={chatScrollRef}
+         onScroll={updateBottomVisibility}
+         className={`chat-scroll flex-1 overflow-y-auto px-6 py-8${
+           threadMessages.length === 0 ? " empty" : ""
+         }`}
+       >
+         {threadMessages.length === 0 ? (
+           <div className="empty-hero">
+             <h1 className="empty-hero-title text-2xl font-semibold text-[var(--text-primary)]">
+               {emptyPrompt}
+             </h1>
+             <div className="empty-composer">
+               <Composer
+                 value={input}
+                 onChange={setInput}
+                 onSend={() => {
+                   void handleSend();
+                 }}
+                 onStop={handleStop}
+                 isStreaming={Boolean(activeStreamingMessageId)}
+                 onMicToggle={handleMicToggle}
+                 micState={micState}
+                 micDisabled={Boolean(activeStreamingMessageId)}
+               />
+             </div>
+           </div>
+         ) : (
+           <div className="mx-auto flex w-full max-w-3xl flex-col gap-5 pb-28">
+             {renderedThreadRows}
+           </div>
+         )}
+         <div ref={bottomRef} className="h-2" />
+       </div>
+       {threadMessages.length > 0 ? (
+         <>
+           {showJumpToBottom ? (
+             <div className="pointer-events-none absolute bottom-24 right-8 z-30">
+               <button
+                 className="pointer-events-auto inline-flex h-11 w-11 items-center justify-center rounded-full border border-[var(--border-strong)] bg-[var(--panel)] text-[var(--text-primary)] shadow-[var(--shadow)] transition hover:translate-y-[1px] hover:border-[var(--accent)]"
+                 onClick={() => {
+                   shouldStickToBottomRef.current = true;
+                   setShowJumpToBottom(false);
+                   smoothFollowUntilRef.current = performance.now() + 900;
+                   bottomRef.current?.scrollIntoView({ behavior: "smooth", block: "end" });
+                 }}
+                 aria-label="Jump to latest message"
+                 title="Jump to latest"
+               >
+                 <ArrowDown className="h-5 w-5" />
+               </button>
+             </div>
+           ) : null}
+
+           <div className="composer-bar px-6 py-4">
+             <div className="mx-auto w-full max-w-3xl">
+               {error ? (
+                 <div className="mb-3 rounded-lg border border-[var(--border)] bg-[var(--panel-2)] px-3 py-2 text-xs text-[var(--danger)]">
+                   {error}
+                 </div>
+               ) : null}
+               <Composer
+                 value={input}
+                 onChange={setInput}
+                 onSend={() => {
+                   void handleSend();
+                 }}
+                 onStop={handleStop}
+                 isStreaming={Boolean(activeStreamingMessageId)}
+                 onMicToggle={handleMicToggle}
+                 micState={micState}
+                 micDisabled={Boolean(activeStreamingMessageId)}
+               />
+             </div>
+           </div>
+         </>
+       ) : null}
+     </div>
+   );
+ }
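
Note: a minimal sketch of how this component might be mounted. Every identifier other than ChatView is assumed host state standing in for the page's own wiring, which in this template lives in src/app/page.tsx:

  // Hypothetical host markup: activeThread, allMessages, toolEvents, toolApprovals,
  // settings, activeConnection, selectedModel, allThreads, setActiveThreadId, and
  // setSettingsOpen are all assumptions, not names taken from the package.
  <ChatView
    thread={activeThread}
    messages={allMessages}
    toolEvents={toolEvents}
    toolApprovals={toolApprovals}
    settings={settings}
    connection={activeConnection}
    model={selectedModel}
    threads={allThreads}
    activeThreadId={activeThread?.id ?? null}
    onSelectThread={setActiveThreadId}
    onOpenSettings={() => setSettingsOpen(true)}
  />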