@open-mercato/ui 0.5.1-develop.3036.f02c281f23 → 0.5.1-develop.3045.b4b3320cc2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +1 -1
- package/AGENTS.md +2 -1
- package/__integration__/TC-AI-UI-003-aichat-registry.spec.tsx +204 -0
- package/dist/ai/AiAssistantLauncher.js +596 -0
- package/dist/ai/AiAssistantLauncher.js.map +7 -0
- package/dist/ai/AiChat.js +1092 -0
- package/dist/ai/AiChat.js.map +7 -0
- package/dist/ai/AiChatSessions.js +297 -0
- package/dist/ai/AiChatSessions.js.map +7 -0
- package/dist/ai/AiDock.js +347 -0
- package/dist/ai/AiDock.js.map +7 -0
- package/dist/ai/AiMessageContent.js +369 -0
- package/dist/ai/AiMessageContent.js.map +7 -0
- package/dist/ai/ChatPaneTabs.js +251 -0
- package/dist/ai/ChatPaneTabs.js.map +7 -0
- package/dist/ai/index.js +115 -0
- package/dist/ai/index.js.map +7 -0
- package/dist/ai/parts/ConfirmationCard.js +211 -0
- package/dist/ai/parts/ConfirmationCard.js.map +7 -0
- package/dist/ai/parts/FieldDiffCard.js +119 -0
- package/dist/ai/parts/FieldDiffCard.js.map +7 -0
- package/dist/ai/parts/MutationPreviewCard.js +224 -0
- package/dist/ai/parts/MutationPreviewCard.js.map +7 -0
- package/dist/ai/parts/MutationResultCard.js +240 -0
- package/dist/ai/parts/MutationResultCard.js.map +7 -0
- package/dist/ai/parts/approval-cards-map.js +15 -0
- package/dist/ai/parts/approval-cards-map.js.map +7 -0
- package/dist/ai/parts/index.js +24 -0
- package/dist/ai/parts/index.js.map +7 -0
- package/dist/ai/parts/pending-action-api.js +60 -0
- package/dist/ai/parts/pending-action-api.js.map +7 -0
- package/dist/ai/parts/types.js +1 -0
- package/dist/ai/parts/types.js.map +7 -0
- package/dist/ai/parts/useAiPendingActionPolling.js +126 -0
- package/dist/ai/parts/useAiPendingActionPolling.js.map +7 -0
- package/dist/ai/records/ActivityCard.js +83 -0
- package/dist/ai/records/ActivityCard.js.map +7 -0
- package/dist/ai/records/CompanyCard.js +81 -0
- package/dist/ai/records/CompanyCard.js.map +7 -0
- package/dist/ai/records/DealCard.js +76 -0
- package/dist/ai/records/DealCard.js.map +7 -0
- package/dist/ai/records/PersonCard.js +68 -0
- package/dist/ai/records/PersonCard.js.map +7 -0
- package/dist/ai/records/ProductCard.js +68 -0
- package/dist/ai/records/ProductCard.js.map +7 -0
- package/dist/ai/records/RecordCard.js +29 -0
- package/dist/ai/records/RecordCard.js.map +7 -0
- package/dist/ai/records/RecordCardShell.js +103 -0
- package/dist/ai/records/RecordCardShell.js.map +7 -0
- package/dist/ai/records/index.js +31 -0
- package/dist/ai/records/index.js.map +7 -0
- package/dist/ai/records/registry.js +51 -0
- package/dist/ai/records/registry.js.map +7 -0
- package/dist/ai/records/types.js +1 -0
- package/dist/ai/records/types.js.map +7 -0
- package/dist/ai/ui-part-registry.js +112 -0
- package/dist/ai/ui-part-registry.js.map +7 -0
- package/dist/ai/ui-part-slots.js +14 -0
- package/dist/ai/ui-part-slots.js.map +7 -0
- package/dist/ai/ui-parts/pending-phase3-placeholder.js +35 -0
- package/dist/ai/ui-parts/pending-phase3-placeholder.js.map +7 -0
- package/dist/ai/upload-adapter.js +256 -0
- package/dist/ai/upload-adapter.js.map +7 -0
- package/dist/ai/useAiChat.js +549 -0
- package/dist/ai/useAiChat.js.map +7 -0
- package/dist/ai/useAiChatUpload.js +127 -0
- package/dist/ai/useAiChatUpload.js.map +7 -0
- package/dist/ai/useAiShortcuts.js +43 -0
- package/dist/ai/useAiShortcuts.js.map +7 -0
- package/dist/backend/AppShell.js +8 -4
- package/dist/backend/AppShell.js.map +2 -2
- package/dist/backend/BackendChromeProvider.js +2 -0
- package/dist/backend/BackendChromeProvider.js.map +2 -2
- package/dist/backend/DataTable.js +19 -2
- package/dist/backend/DataTable.js.map +2 -2
- package/dist/backend/FilterBar.js +19 -15
- package/dist/backend/FilterBar.js.map +2 -2
- package/dist/backend/dashboard/DashboardScreen.js +31 -3
- package/dist/backend/dashboard/DashboardScreen.js.map +2 -2
- package/dist/backend/injection/spotIds.js +6 -0
- package/dist/backend/injection/spotIds.js.map +2 -2
- package/dist/backend/notifications/useNotificationEffect.js +38 -2
- package/dist/backend/notifications/useNotificationEffect.js.map +2 -2
- package/dist/index.js +1 -0
- package/dist/index.js.map +2 -2
- package/jest.config.cjs +7 -1
- package/jest.markdown-mock.tsx +7 -0
- package/package.json +10 -4
- package/src/ai/AiAssistantLauncher.tsx +805 -0
- package/src/ai/AiChat.tsx +1483 -0
- package/src/ai/AiChatSessions.tsx +429 -0
- package/src/ai/AiDock.tsx +505 -0
- package/src/ai/AiMessageContent.tsx +515 -0
- package/src/ai/ChatPaneTabs.tsx +310 -0
- package/src/ai/__tests__/AiChat.conversation.test.tsx +160 -0
- package/src/ai/__tests__/AiChat.debug.test.tsx +152 -0
- package/src/ai/__tests__/AiChat.registry.test.tsx +213 -0
- package/src/ai/__tests__/AiChat.test.tsx +257 -0
- package/src/ai/__tests__/AiDock.test.tsx +124 -0
- package/src/ai/__tests__/AiMessageContent.test.ts +111 -0
- package/src/ai/__tests__/ui-part-registry.test.ts +199 -0
- package/src/ai/__tests__/ui-part-slots.test.ts +43 -0
- package/src/ai/__tests__/upload-adapter.test.ts +213 -0
- package/src/ai/__tests__/useAiChatUpload.test.tsx +163 -0
- package/src/ai/__tests__/useAiShortcuts.test.tsx +100 -0
- package/src/ai/index.ts +125 -0
- package/src/ai/parts/ConfirmationCard.tsx +310 -0
- package/src/ai/parts/FieldDiffCard.tsx +173 -0
- package/src/ai/parts/MutationPreviewCard.tsx +302 -0
- package/src/ai/parts/MutationResultCard.tsx +360 -0
- package/src/ai/parts/__tests__/ConfirmationCard.test.tsx +169 -0
- package/src/ai/parts/__tests__/FieldDiffCard.test.tsx +74 -0
- package/src/ai/parts/__tests__/MutationPreviewCard.test.tsx +177 -0
- package/src/ai/parts/__tests__/MutationResultCard.test.tsx +127 -0
- package/src/ai/parts/__tests__/useAiPendingActionPolling.test.tsx +151 -0
- package/src/ai/parts/approval-cards-map.ts +24 -0
- package/src/ai/parts/index.ts +27 -0
- package/src/ai/parts/pending-action-api.ts +123 -0
- package/src/ai/parts/types.ts +84 -0
- package/src/ai/parts/useAiPendingActionPolling.ts +210 -0
- package/src/ai/records/ActivityCard.tsx +102 -0
- package/src/ai/records/CompanyCard.tsx +89 -0
- package/src/ai/records/DealCard.tsx +85 -0
- package/src/ai/records/PersonCard.tsx +77 -0
- package/src/ai/records/ProductCard.tsx +83 -0
- package/src/ai/records/RecordCard.tsx +37 -0
- package/src/ai/records/RecordCardShell.tsx +169 -0
- package/src/ai/records/index.ts +30 -0
- package/src/ai/records/registry.tsx +80 -0
- package/src/ai/records/types.ts +90 -0
- package/src/ai/ui-part-registry.ts +233 -0
- package/src/ai/ui-part-slots.ts +32 -0
- package/src/ai/ui-parts/pending-phase3-placeholder.tsx +50 -0
- package/src/ai/upload-adapter.ts +421 -0
- package/src/ai/useAiChat.ts +865 -0
- package/src/ai/useAiChatUpload.ts +180 -0
- package/src/ai/useAiShortcuts.ts +79 -0
- package/src/backend/AppShell.tsx +12 -5
- package/src/backend/BackendChromeProvider.tsx +2 -0
- package/src/backend/DataTable.tsx +20 -1
- package/src/backend/FilterBar.tsx +26 -13
- package/src/backend/__tests__/BackendChromeProvider.test.tsx +45 -0
- package/src/backend/dashboard/DashboardScreen.tsx +38 -3
- package/src/backend/dashboard/__tests__/DashboardScreen.test.tsx +24 -1
- package/src/backend/injection/spotIds.ts +6 -0
- package/src/backend/notifications/__tests__/useNotificationEffect.test.tsx +77 -0
- package/src/backend/notifications/useNotificationEffect.ts +47 -2
- package/src/index.ts +1 -0
|
@@ -0,0 +1,865 @@
|
|
|
1
|
+
"use client"
|
|
2
|
+
|
|
3
|
+
import * as React from 'react'
|
|
4
|
+
import { createAiAgentTransport } from '@open-mercato/ai-assistant/modules/ai_assistant/lib/agent-transport'
|
|
5
|
+
import { apiFetch } from '../backend/utils/api'
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Chat message shape used by {@link AiChat}. Kept intentionally minimal so the
|
|
9
|
+
* component stays independent of the AI SDK's evolving `UIMessage` type. The
|
|
10
|
+
* dispatcher route (`POST /api/ai_assistant/ai/chat`) accepts exactly this
|
|
11
|
+
* shape for `messages`.
|
|
12
|
+
*/
|
|
13
|
+
export interface AiChatMessageFile {
|
|
14
|
+
name: string
|
|
15
|
+
type: string
|
|
16
|
+
previewUrl?: string
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
export interface AiChatToolCallSnapshot {
|
|
20
|
+
id: string
|
|
21
|
+
toolName: string
|
|
22
|
+
state: 'pending' | 'complete' | 'error'
|
|
23
|
+
input?: unknown
|
|
24
|
+
output?: unknown
|
|
25
|
+
errorMessage?: string
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
export interface AiChatMessageUiPart {
|
|
29
|
+
componentId: string
|
|
30
|
+
payload?: unknown
|
|
31
|
+
pendingActionId?: string
|
|
32
|
+
/** Stable id used as React key when rendering. */
|
|
33
|
+
key: string
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export interface AiChatMessage {
|
|
37
|
+
id: string
|
|
38
|
+
role: 'user' | 'assistant'
|
|
39
|
+
content: string
|
|
40
|
+
files?: AiChatMessageFile[]
|
|
41
|
+
reasoning?: string
|
|
42
|
+
reasoningStreaming?: boolean
|
|
43
|
+
toolCalls?: AiChatToolCallSnapshot[]
|
|
44
|
+
/**
|
|
45
|
+
* UI parts emitted by the agent during this message's lifecycle. Today
|
|
46
|
+
* the only producer is `prepareMutation` (mutation approval flow):
|
|
47
|
+
* the dispatcher's mutation tool returns an `awaiting-confirmation`
|
|
48
|
+
* envelope, useAiChat parses it and attaches a `mutation-preview-card`
|
|
49
|
+
* part here so AiChat can render the approval card inline. Phase 3
|
|
50
|
+
* WS-C wiring — without this, the `MutationPreviewCard` registered in
|
|
51
|
+
* the UI-part registry never surfaces.
|
|
52
|
+
*/
|
|
53
|
+
uiParts?: AiChatMessageUiPart[]
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
export interface UseAiChatInput {
|
|
57
|
+
agent: string
|
|
58
|
+
apiPath?: string
|
|
59
|
+
pageContext?: Record<string, unknown>
|
|
60
|
+
attachmentIds?: string[]
|
|
61
|
+
debug?: boolean
|
|
62
|
+
initialMessages?: Array<Pick<AiChatMessage, 'role' | 'content'>>
|
|
63
|
+
onError?: (err: { code?: string; message: string }) => void
|
|
64
|
+
/**
|
|
65
|
+
* Optional stable conversation id. When provided, the same id is forwarded
|
|
66
|
+
* to the dispatcher on every turn so `prepareMutation`'s idempotency hash
|
|
67
|
+
* (Step 5.6) stays stable across mutation preview / confirm / retry cycles.
|
|
68
|
+
* When omitted, the hook mints a fresh random id once on mount and reuses
|
|
69
|
+
* it for the lifetime of the component — callers can still override via
|
|
70
|
+
* props at any time to reset the conversation.
|
|
71
|
+
*/
|
|
72
|
+
conversationId?: string
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
export interface AiChatErrorEnvelope {
|
|
76
|
+
code?: string
|
|
77
|
+
message: string
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
export interface UseAiChatResult {
|
|
81
|
+
messages: AiChatMessage[]
|
|
82
|
+
status: 'idle' | 'submitting' | 'streaming'
|
|
83
|
+
error: AiChatErrorEnvelope | null
|
|
84
|
+
lastRequestDebug: { url: string; body: unknown } | null
|
|
85
|
+
lastResponseDebug: { status: number; text: string } | null
|
|
86
|
+
/**
|
|
87
|
+
* The conversation id currently in use for this chat instance. Equal to
|
|
88
|
+
* the caller-provided `conversationId` input when one is supplied;
|
|
89
|
+
* otherwise the random id minted on mount. Stable across re-renders for a
|
|
90
|
+
* given mount (Phase 3 WS-D contract with `prepareMutation`).
|
|
91
|
+
*/
|
|
92
|
+
conversationId: string
|
|
93
|
+
sendMessage: (input: string, files?: AiChatMessageFile[]) => Promise<void>
|
|
94
|
+
cancel: () => void
|
|
95
|
+
reset: () => void
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
function makeMessageId(): string {
|
|
99
|
+
const random = Math.random().toString(36).slice(2, 10)
|
|
100
|
+
const time = Date.now().toString(36)
|
|
101
|
+
return `msg_${time}_${random}`
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
function makeConversationId(): string {
|
|
105
|
+
// Use crypto.randomUUID() when the browser exposes it (all evergreen
|
|
106
|
+
// runtimes do), otherwise fall back to a low-entropy token that is still
|
|
107
|
+
// unique enough for the idempotency-hash use case.
|
|
108
|
+
const g = globalThis as unknown as { crypto?: { randomUUID?: () => string } }
|
|
109
|
+
if (g.crypto && typeof g.crypto.randomUUID === 'function') {
|
|
110
|
+
try {
|
|
111
|
+
return g.crypto.randomUUID()
|
|
112
|
+
} catch {
|
|
113
|
+
// fall through to the random fallback
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
const rand = () => Math.random().toString(16).slice(2, 10)
|
|
117
|
+
return `conv_${Date.now().toString(16)}_${rand()}${rand()}`
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
const SESSION_STORAGE_PREFIX = 'om-ai-chat:'
|
|
121
|
+
const SESSION_STORAGE_VERSION = 1
|
|
122
|
+
|
|
123
|
+
interface PersistedAiChatSession {
|
|
124
|
+
v: number
|
|
125
|
+
conversationId: string
|
|
126
|
+
messages: AiChatMessage[]
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
function getSessionStorageKey(agent: string, conversationId?: string | null): string {
|
|
130
|
+
// When the caller pins a `conversationId` (e.g. via the AiChatSessions
|
|
131
|
+
// provider's tabs), namespace the persisted slot per session so multiple
|
|
132
|
+
// open conversations for the same agent don't overwrite each other. The
|
|
133
|
+
// legacy single-session-per-agent layout (no externally-supplied id) is
|
|
134
|
+
// kept for backward compatibility with code that still relies on it.
|
|
135
|
+
if (typeof conversationId === 'string' && conversationId.length > 0) {
|
|
136
|
+
return `${SESSION_STORAGE_PREFIX}${agent}:${conversationId}`
|
|
137
|
+
}
|
|
138
|
+
return `${SESSION_STORAGE_PREFIX}${agent}`
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
function readPersistedSession(
|
|
142
|
+
agent: string,
|
|
143
|
+
conversationId?: string | null,
|
|
144
|
+
): PersistedAiChatSession | null {
|
|
145
|
+
if (typeof window === 'undefined') return null
|
|
146
|
+
try {
|
|
147
|
+
const raw = window.localStorage.getItem(getSessionStorageKey(agent, conversationId))
|
|
148
|
+
if (!raw) return null
|
|
149
|
+
const parsed = JSON.parse(raw) as PersistedAiChatSession | null
|
|
150
|
+
if (!parsed || parsed.v !== SESSION_STORAGE_VERSION) return null
|
|
151
|
+
if (typeof parsed.conversationId !== 'string') return null
|
|
152
|
+
if (!Array.isArray(parsed.messages)) return null
|
|
153
|
+
const messages = parsed.messages.filter((entry): entry is AiChatMessage => {
|
|
154
|
+
return (
|
|
155
|
+
!!entry &&
|
|
156
|
+
typeof entry === 'object' &&
|
|
157
|
+
typeof (entry as AiChatMessage).id === 'string' &&
|
|
158
|
+
typeof (entry as AiChatMessage).content === 'string' &&
|
|
159
|
+
((entry as AiChatMessage).role === 'user' || (entry as AiChatMessage).role === 'assistant')
|
|
160
|
+
)
|
|
161
|
+
})
|
|
162
|
+
return { v: SESSION_STORAGE_VERSION, conversationId: parsed.conversationId, messages }
|
|
163
|
+
} catch {
|
|
164
|
+
return null
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
function writePersistedSession(
|
|
169
|
+
agent: string,
|
|
170
|
+
session: PersistedAiChatSession,
|
|
171
|
+
conversationId?: string | null,
|
|
172
|
+
): void {
|
|
173
|
+
if (typeof window === 'undefined') return
|
|
174
|
+
try {
|
|
175
|
+
// Strip transient blob/object preview URLs before persisting (they would
|
|
176
|
+
// not survive a reload). Self-contained `data:` URLs are kept so image
|
|
177
|
+
// previews come back unchanged after the chat is reopened — public
|
|
178
|
+
// attachment URLs are intentionally not used because the LLM provider
|
|
179
|
+
// cannot reach a localhost origin and we want a single durable shape
|
|
180
|
+
// that works for both transport and reload.
|
|
181
|
+
const messages = session.messages.map((message) => {
|
|
182
|
+
if (!message.files || message.files.length === 0) return message
|
|
183
|
+
const safeFiles = message.files.map(({ name, type, previewUrl }) => {
|
|
184
|
+
const durable =
|
|
185
|
+
typeof previewUrl === 'string' && previewUrl.startsWith('data:')
|
|
186
|
+
? previewUrl
|
|
187
|
+
: undefined
|
|
188
|
+
return durable ? { name, type, previewUrl: durable } : { name, type }
|
|
189
|
+
})
|
|
190
|
+
return { ...message, files: safeFiles }
|
|
191
|
+
})
|
|
192
|
+
window.localStorage.setItem(
|
|
193
|
+
getSessionStorageKey(agent, conversationId),
|
|
194
|
+
JSON.stringify({ ...session, messages }),
|
|
195
|
+
)
|
|
196
|
+
} catch {
|
|
197
|
+
// Quota exceeded / privacy mode — silently drop persistence.
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
function clearPersistedSession(agent: string, conversationId?: string | null): void {
|
|
202
|
+
if (typeof window === 'undefined') return
|
|
203
|
+
try {
|
|
204
|
+
window.localStorage.removeItem(getSessionStorageKey(agent, conversationId))
|
|
205
|
+
} catch {
|
|
206
|
+
// ignore
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
function getTransportEndpoint(agent: string, apiPath?: string): string {
|
|
211
|
+
// Reuse the transport factory so UI consumers share the dispatcher URL
|
|
212
|
+
// convention with server-side callers (e.g. runAiAgentText / Playwright
|
|
213
|
+
// fixtures). The factory returns a ChatTransport<UI_MESSAGE> whose internal
|
|
214
|
+
// endpoint we do not directly read — instead we reconstruct the same URL
|
|
215
|
+
// shape here so downstream error handling stays deterministic.
|
|
216
|
+
//
|
|
217
|
+
// When the AI SDK exposes a public endpoint getter (or the stream format
|
|
218
|
+
// switches from plain text to UIMessageChunk) we can call
|
|
219
|
+
// transport.sendMessages(...) directly.
|
|
220
|
+
const transport = createAiAgentTransport({ agentId: agent, endpoint: apiPath })
|
|
221
|
+
void transport
|
|
222
|
+
const base = apiPath && apiPath.length > 0 ? apiPath : '/api/ai_assistant/ai/chat'
|
|
223
|
+
const separator = base.includes('?') ? '&' : '?'
|
|
224
|
+
return `${base}${separator}agent=${encodeURIComponent(agent)}`
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
interface AssistantBuilderState {
|
|
228
|
+
text: string
|
|
229
|
+
reasoning: string
|
|
230
|
+
reasoningStreaming: boolean
|
|
231
|
+
toolCalls: AiChatToolCallSnapshot[]
|
|
232
|
+
uiParts: AiChatMessageUiPart[]
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
function createBuilder(): AssistantBuilderState {
|
|
236
|
+
return { text: '', reasoning: '', reasoningStreaming: false, toolCalls: [], uiParts: [] }
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
/**
|
|
240
|
+
* Generic extractor for UI parts emitted by tool outputs. A tool can
|
|
241
|
+
* surface inline UI to the chat by returning JSON in any of these
|
|
242
|
+
* shapes — each tool call produces zero or more UI parts:
|
|
243
|
+
*
|
|
244
|
+
* 1. The dispatcher's mutation envelope:
|
|
245
|
+
* `{ status: 'awaiting-confirmation', pendingActionId, expiresAt,
|
|
246
|
+
* agent, toolName, message }`
|
|
247
|
+
* → synthesizes a `mutation-preview-card` part (the registered
|
|
248
|
+
* card fetches the live diff via `useAiPendingActionPolling`).
|
|
249
|
+
*
|
|
250
|
+
* 2. A single explicit UI part:
|
|
251
|
+
* `{ uiPart: { componentId, payload?, pendingActionId? } }`
|
|
252
|
+
*
|
|
253
|
+
* 3. Multiple explicit UI parts:
|
|
254
|
+
* `{ uiParts: [{ componentId, payload? }, ...] }`
|
|
255
|
+
*
|
|
256
|
+
* Tool authors only need to JSON-encode an object whose `uiPart` /
|
|
257
|
+
* `uiParts` reference component ids that the host has registered on
|
|
258
|
+
* `defaultAiUiPartRegistry` (or a scoped registry passed through
|
|
259
|
+
* `<AiChat registry={...}/>`). Unknown component ids fall back to the
|
|
260
|
+
* `UnknownUiPartPlaceholder` so an unregistered id never blows up the
|
|
261
|
+
* transcript.
|
|
262
|
+
*/
|
|
263
|
+
function extractUiPartsFromOutput(
|
|
264
|
+
output: unknown,
|
|
265
|
+
toolCallId: string,
|
|
266
|
+
): AiChatMessageUiPart[] {
|
|
267
|
+
let parsed: unknown = output
|
|
268
|
+
if (typeof output === 'string') {
|
|
269
|
+
const trimmed = output.trim()
|
|
270
|
+
if (!trimmed.startsWith('{') && !trimmed.startsWith('[')) return []
|
|
271
|
+
try {
|
|
272
|
+
parsed = JSON.parse(trimmed)
|
|
273
|
+
} catch {
|
|
274
|
+
return []
|
|
275
|
+
}
|
|
276
|
+
}
|
|
277
|
+
if (!parsed || typeof parsed !== 'object') return []
|
|
278
|
+
const value = parsed as Record<string, unknown>
|
|
279
|
+
const parts: AiChatMessageUiPart[] = []
|
|
280
|
+
|
|
281
|
+
// (1) Mutation approval envelope. The dispatcher's `prepareMutation`
|
|
282
|
+
// interceptor in `agent-tools.ts` formats the result via
|
|
283
|
+
// `formatPendingActionToolResult` as
|
|
284
|
+
// { status: 'pending-confirmation', agentId, toolName, pendingActionId,
|
|
285
|
+
// expiresAt, message }
|
|
286
|
+
// (NOTE: status is `pending-confirmation` and the field is `agentId`,
|
|
287
|
+
// not `agent`). We also accept `awaiting-confirmation` / `agent` for
|
|
288
|
+
// forward compat with older / alternative dispatchers.
|
|
289
|
+
if (value.status === 'pending-confirmation' || value.status === 'awaiting-confirmation') {
|
|
290
|
+
const pendingActionId =
|
|
291
|
+
typeof value.pendingActionId === 'string' && value.pendingActionId.length > 0
|
|
292
|
+
? value.pendingActionId
|
|
293
|
+
: null
|
|
294
|
+
if (pendingActionId) {
|
|
295
|
+
const agentId =
|
|
296
|
+
typeof value.agentId === 'string'
|
|
297
|
+
? value.agentId
|
|
298
|
+
: typeof value.agent === 'string'
|
|
299
|
+
? value.agent
|
|
300
|
+
: undefined
|
|
301
|
+
parts.push({
|
|
302
|
+
componentId: 'mutation-preview-card',
|
|
303
|
+
pendingActionId,
|
|
304
|
+
payload: {
|
|
305
|
+
pendingActionId,
|
|
306
|
+
expiresAt: typeof value.expiresAt === 'string' ? value.expiresAt : undefined,
|
|
307
|
+
agentId,
|
|
308
|
+
toolName: typeof value.toolName === 'string' ? value.toolName : undefined,
|
|
309
|
+
},
|
|
310
|
+
key: `${toolCallId}:mutation-preview-card`,
|
|
311
|
+
})
|
|
312
|
+
}
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
// (2) Explicit single UI part.
|
|
316
|
+
if (value.uiPart && typeof value.uiPart === 'object') {
|
|
317
|
+
const part = value.uiPart as Record<string, unknown>
|
|
318
|
+
if (typeof part.componentId === 'string' && part.componentId.length > 0) {
|
|
319
|
+
parts.push({
|
|
320
|
+
componentId: part.componentId,
|
|
321
|
+
payload: part.payload,
|
|
322
|
+
pendingActionId:
|
|
323
|
+
typeof part.pendingActionId === 'string' ? part.pendingActionId : undefined,
|
|
324
|
+
key: `${toolCallId}:${part.componentId}`,
|
|
325
|
+
})
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
// (3) Explicit list of UI parts.
|
|
330
|
+
if (Array.isArray(value.uiParts)) {
|
|
331
|
+
value.uiParts.forEach((entry, index) => {
|
|
332
|
+
if (!entry || typeof entry !== 'object') return
|
|
333
|
+
const part = entry as Record<string, unknown>
|
|
334
|
+
if (typeof part.componentId !== 'string' || part.componentId.length === 0) return
|
|
335
|
+
parts.push({
|
|
336
|
+
componentId: part.componentId,
|
|
337
|
+
payload: part.payload,
|
|
338
|
+
pendingActionId:
|
|
339
|
+
typeof part.pendingActionId === 'string' ? part.pendingActionId : undefined,
|
|
340
|
+
key: `${toolCallId}:${index}:${part.componentId}`,
|
|
341
|
+
})
|
|
342
|
+
})
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
return parts
|
|
346
|
+
}
|
|
347
|
+
|
|
348
|
+
function updateToolCall(
|
|
349
|
+
state: AssistantBuilderState,
|
|
350
|
+
id: string,
|
|
351
|
+
patch: Partial<AiChatToolCallSnapshot> & { toolName?: string },
|
|
352
|
+
): AssistantBuilderState {
|
|
353
|
+
if (!id) return state
|
|
354
|
+
const idx = state.toolCalls.findIndex((entry) => entry.id === id)
|
|
355
|
+
if (idx === -1) {
|
|
356
|
+
const next: AiChatToolCallSnapshot = {
|
|
357
|
+
id,
|
|
358
|
+
toolName: patch.toolName ?? 'tool',
|
|
359
|
+
state: patch.state ?? 'pending',
|
|
360
|
+
input: patch.input,
|
|
361
|
+
output: patch.output,
|
|
362
|
+
errorMessage: patch.errorMessage,
|
|
363
|
+
}
|
|
364
|
+
return { ...state, toolCalls: [...state.toolCalls, next] }
|
|
365
|
+
}
|
|
366
|
+
const current = state.toolCalls[idx]
|
|
367
|
+
const merged: AiChatToolCallSnapshot = {
|
|
368
|
+
...current,
|
|
369
|
+
toolName: patch.toolName ?? current.toolName,
|
|
370
|
+
state: patch.state ?? current.state,
|
|
371
|
+
input: patch.input !== undefined ? patch.input : current.input,
|
|
372
|
+
output: patch.output !== undefined ? patch.output : current.output,
|
|
373
|
+
errorMessage: patch.errorMessage ?? current.errorMessage,
|
|
374
|
+
}
|
|
375
|
+
const nextCalls = state.toolCalls.slice()
|
|
376
|
+
nextCalls[idx] = merged
|
|
377
|
+
return { ...state, toolCalls: nextCalls }
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
function applyChunk(
|
|
381
|
+
state: AssistantBuilderState,
|
|
382
|
+
chunk: { type: string; [key: string]: unknown },
|
|
383
|
+
): AssistantBuilderState {
|
|
384
|
+
switch (chunk.type) {
|
|
385
|
+
case 'text-delta':
|
|
386
|
+
return {
|
|
387
|
+
...state,
|
|
388
|
+
text: state.text + (typeof chunk.delta === 'string' ? chunk.delta : ''),
|
|
389
|
+
}
|
|
390
|
+
case 'reasoning-start':
|
|
391
|
+
return { ...state, reasoningStreaming: true }
|
|
392
|
+
case 'reasoning-delta':
|
|
393
|
+
return {
|
|
394
|
+
...state,
|
|
395
|
+
reasoning:
|
|
396
|
+
state.reasoning + (typeof chunk.delta === 'string' ? chunk.delta : ''),
|
|
397
|
+
reasoningStreaming: true,
|
|
398
|
+
}
|
|
399
|
+
case 'reasoning-end':
|
|
400
|
+
return { ...state, reasoningStreaming: false }
|
|
401
|
+
case 'tool-input-start':
|
|
402
|
+
return updateToolCall(state, String(chunk.toolCallId ?? ''), {
|
|
403
|
+
toolName: typeof chunk.toolName === 'string' ? chunk.toolName : undefined,
|
|
404
|
+
state: 'pending',
|
|
405
|
+
})
|
|
406
|
+
case 'tool-input-available':
|
|
407
|
+
return updateToolCall(state, String(chunk.toolCallId ?? ''), {
|
|
408
|
+
toolName: typeof chunk.toolName === 'string' ? chunk.toolName : undefined,
|
|
409
|
+
input: chunk.input,
|
|
410
|
+
state: 'pending',
|
|
411
|
+
})
|
|
412
|
+
case 'tool-output-available': {
|
|
413
|
+
const toolCallId = String(chunk.toolCallId ?? '')
|
|
414
|
+
const next = updateToolCall(state, toolCallId, {
|
|
415
|
+
output: chunk.output,
|
|
416
|
+
state: 'complete',
|
|
417
|
+
})
|
|
418
|
+
// Phase 3 WS-C — surface ANY UI parts the tool output advertises:
|
|
419
|
+
// the legacy `awaiting-confirmation` mutation envelope plus the
|
|
420
|
+
// generic `{ uiPart }` / `{ uiParts: [...] }` shapes. This lets
|
|
421
|
+
// module authors define their own dynamic cards (stats panels,
|
|
422
|
+
// record summaries, charts…) without touching the dispatcher or
|
|
423
|
+
// the chat client.
|
|
424
|
+
const newParts = extractUiPartsFromOutput(chunk.output, toolCallId)
|
|
425
|
+
if (newParts.length === 0) return next
|
|
426
|
+
const seen = new Set(next.uiParts.map((entry) => entry.key))
|
|
427
|
+
const merged = [...next.uiParts]
|
|
428
|
+
for (const part of newParts) {
|
|
429
|
+
if (seen.has(part.key)) continue
|
|
430
|
+
seen.add(part.key)
|
|
431
|
+
merged.push(part)
|
|
432
|
+
}
|
|
433
|
+
if (merged.length === next.uiParts.length) return next
|
|
434
|
+
return { ...next, uiParts: merged }
|
|
435
|
+
}
|
|
436
|
+
case 'tool-output-error':
|
|
437
|
+
return updateToolCall(state, String(chunk.toolCallId ?? ''), {
|
|
438
|
+
state: 'error',
|
|
439
|
+
errorMessage:
|
|
440
|
+
typeof chunk.errorText === 'string' ? chunk.errorText : 'Tool error',
|
|
441
|
+
})
|
|
442
|
+
case 'tool-input-error':
|
|
443
|
+
return updateToolCall(state, String(chunk.toolCallId ?? ''), {
|
|
444
|
+
toolName: typeof chunk.toolName === 'string' ? chunk.toolName : undefined,
|
|
445
|
+
input: chunk.input,
|
|
446
|
+
state: 'error',
|
|
447
|
+
errorMessage:
|
|
448
|
+
typeof chunk.errorText === 'string' ? chunk.errorText : 'Tool error',
|
|
449
|
+
})
|
|
450
|
+
default:
|
|
451
|
+
return state
|
|
452
|
+
}
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
function mergeAssistantMessage(
|
|
456
|
+
current: AiChatMessage,
|
|
457
|
+
state: AssistantBuilderState,
|
|
458
|
+
): AiChatMessage {
|
|
459
|
+
return {
|
|
460
|
+
...current,
|
|
461
|
+
content: state.text,
|
|
462
|
+
reasoning: state.reasoning ? state.reasoning : undefined,
|
|
463
|
+
reasoningStreaming: state.reasoning ? state.reasoningStreaming : undefined,
|
|
464
|
+
toolCalls: state.toolCalls.length > 0 ? state.toolCalls : undefined,
|
|
465
|
+
uiParts: state.uiParts.length > 0 ? state.uiParts : undefined,
|
|
466
|
+
}
|
|
467
|
+
}
|
|
468
|
+
|
|
469
|
+
function parseSseLines(buffer: string): { events: string[]; rest: string } {
|
|
470
|
+
const events: string[] = []
|
|
471
|
+
let rest = buffer
|
|
472
|
+
for (;;) {
|
|
473
|
+
const idx = rest.indexOf('\n\n')
|
|
474
|
+
if (idx === -1) break
|
|
475
|
+
events.push(rest.slice(0, idx))
|
|
476
|
+
rest = rest.slice(idx + 2)
|
|
477
|
+
}
|
|
478
|
+
return { events, rest }
|
|
479
|
+
}
|
|
480
|
+
|
|
481
|
+
function extractDataPayload(eventBlock: string): string | null {
|
|
482
|
+
const lines = eventBlock.split('\n')
|
|
483
|
+
const dataLines: string[] = []
|
|
484
|
+
for (const line of lines) {
|
|
485
|
+
if (line.startsWith('data: ')) {
|
|
486
|
+
dataLines.push(line.slice(6))
|
|
487
|
+
} else if (line.startsWith('data:')) {
|
|
488
|
+
dataLines.push(line.slice(5))
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
if (dataLines.length === 0) return null
|
|
492
|
+
return dataLines.join('\n')
|
|
493
|
+
}
|
|
494
|
+
|
|
495
|
+
async function readErrorEnvelope(response: Response): Promise<AiChatErrorEnvelope> {
|
|
496
|
+
try {
|
|
497
|
+
const data = (await response.clone().json()) as
|
|
498
|
+
| { error?: unknown; code?: unknown; message?: unknown }
|
|
499
|
+
| null
|
|
500
|
+
if (data && typeof data === 'object') {
|
|
501
|
+
const rawMessage =
|
|
502
|
+
(typeof data.error === 'string' && data.error) ||
|
|
503
|
+
(typeof data.message === 'string' && data.message) ||
|
|
504
|
+
''
|
|
505
|
+
const rawCode = typeof data.code === 'string' ? data.code : undefined
|
|
506
|
+
if (rawMessage || rawCode) {
|
|
507
|
+
return {
|
|
508
|
+
code: rawCode,
|
|
509
|
+
message: rawMessage || 'Agent dispatch failed.',
|
|
510
|
+
}
|
|
511
|
+
}
|
|
512
|
+
}
|
|
513
|
+
} catch {
|
|
514
|
+
// Fall through to text fallback
|
|
515
|
+
}
|
|
516
|
+
const text = await response.text().catch(() => '')
|
|
517
|
+
return { message: text || `Agent dispatch failed (${response.status}).` }
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
/**
 * Headless chat-state hook for the AI assistant UI.
 *
 * Owns the full lifecycle of one chat session: message history (rehydrated
 * from localStorage when available), a stable conversationId, an abortable
 * POST to the agent dispatch endpoint, and incremental parsing of the
 * streamed response (Vercel AI UI-message SSE stream, with a plain-text
 * fallback for the legacy `toTextStreamResponse` transport).
 *
 * @param input.agent            - Agent identifier; keys the persisted-session storage slot.
 * @param input.apiPath          - Optional override for the transport endpoint.
 * @param input.pageContext      - Opaque page context forwarded in the request body.
 * @param input.attachmentIds    - Attachment ids forwarded in the request body.
 * @param input.debug            - Debug flag forwarded in the request body.
 * @param input.initialMessages  - Seed messages used only when no persisted session exists.
 * @param input.onError          - Caller error callback; exceptions it throws are swallowed.
 * @param input.conversationId   - Optional pinned conversation id (multi-tab session mode).
 * @returns Messages, status ('idle' | 'submitting' | 'streaming'), last error
 *          envelope, request/response debug snapshots, the effective
 *          conversationId, and the sendMessage/cancel/reset actions.
 */
export function useAiChat(input: UseAiChatInput): UseAiChatResult {
  const { agent, apiPath, pageContext, attachmentIds, debug, initialMessages, onError, conversationId: conversationIdInput } = input

  // Minted once on mount when the caller does not supply a conversationId.
  // The ref keeps the id stable across re-renders and is reused for every
  // turn so the Phase 3 WS-C `prepareMutation` idempotency hash stays
  // stable within the same chat. When the agent has a persisted session in
  // localStorage we re-hydrate the conversationId from it so re-opening the
  // chat continues the previous turn instead of starting fresh.
  // 'unread' is a sentinel distinguishing "not yet read" from "read, found
  // nothing" (null), so the storage read happens exactly once per mount.
  const persistedRef = React.useRef<PersistedAiChatSession | null | 'unread'>('unread')
  if (persistedRef.current === 'unread') {
    // When the caller pins a `conversationId` (multi-tab session mode) we
    // read ONLY from that per-conversation slot. Falling back to the
    // legacy agent-only slot here would make every brand-new tab inherit
    // the previous tab's messages — the "+ shows the same chat" bug — so
    // unknown conversationIds always start clean. Without a pinned id we
    // keep the legacy single-session-per-agent layout for backward
    // compatibility.
    persistedRef.current =
      typeof conversationIdInput === 'string' && conversationIdInput.length > 0
        ? readPersistedSession(agent, conversationIdInput)
        : readPersistedSession(agent)
  }
  const persisted = persistedRef.current

  // Lazily mint a conversation id once; a persisted session's id wins so the
  // resumed chat keeps its original identity.
  const mintedConversationIdRef = React.useRef<string | null>(null)
  if (mintedConversationIdRef.current === null) {
    mintedConversationIdRef.current = persisted?.conversationId ?? makeConversationId()
  }
  // A caller-pinned id always takes precedence over the minted one.
  const effectiveConversationId =
    typeof conversationIdInput === 'string' && conversationIdInput.length > 0
      ? conversationIdInput
      : mintedConversationIdRef.current

  // Initial history: persisted messages when present, otherwise the caller's
  // seed messages (each assigned a fresh id).
  const [messages, setMessages] = React.useState<AiChatMessage[]>(() => {
    if (persisted && persisted.messages.length > 0) {
      return persisted.messages
    }
    return (initialMessages ?? []).map((entry) => ({
      id: makeMessageId(),
      role: entry.role,
      content: entry.content,
    }))
  })

  // Persist messages + conversationId on every change. Skip during in-flight
  // streaming so we do not write the same growing string on every chunk —
  // the next idle tick captures the final assistant content.
  const [status, setStatusInternal] = React.useState<'idle' | 'submitting' | 'streaming'>('idle')
  React.useEffect(() => {
    if (status !== 'idle') return
    // Pinned-id mode writes to the per-conversation slot; otherwise the
    // legacy agent-only slot (persistKey === null) is used.
    const persistKey =
      typeof conversationIdInput === 'string' && conversationIdInput.length > 0
        ? conversationIdInput
        : null
    if (messages.length === 0) {
      // An empty history means the session was reset; drop the stored slot
      // instead of writing an empty session.
      clearPersistedSession(agent, persistKey)
      return
    }
    writePersistedSession(
      agent,
      {
        v: SESSION_STORAGE_VERSION,
        conversationId: effectiveConversationId,
        messages,
      },
      persistKey,
    )
  }, [agent, conversationIdInput, effectiveConversationId, messages, status])
  const setStatus = setStatusInternal
  const [error, setError] = React.useState<AiChatErrorEnvelope | null>(null)
  // Debug snapshots of the last outgoing request and last raw response,
  // surfaced to the UI for troubleshooting.
  const [lastRequestDebug, setLastRequestDebug] = React.useState<
    { url: string; body: unknown } | null
  >(null)
  const [lastResponseDebug, setLastResponseDebug] = React.useState<
    { status: number; text: string } | null
  >(null)

  // Controller for the in-flight request, if any. Kept in a ref so cancel()
  // and sendMessage() see the live value without re-creating callbacks.
  const abortRef = React.useRef<AbortController | null>(null)
  // Latest onError kept in a ref so emitError can stay dependency-free.
  const onErrorRef = React.useRef(onError)
  React.useEffect(() => {
    onErrorRef.current = onError
  }, [onError])

  const emitError = React.useCallback((envelope: AiChatErrorEnvelope) => {
    setError(envelope)
    try {
      onErrorRef.current?.(envelope)
    } catch {
      // UI layer must never throw because a caller-supplied error handler
      // misbehaved.
    }
  }, [])

  // Abort the in-flight request (if any) and return to idle.
  const cancel = React.useCallback(() => {
    if (abortRef.current) {
      abortRef.current.abort()
      abortRef.current = null
    }
    setStatus('idle')
  }, [])

  // Clear all chat state and start a brand-new conversation.
  const reset = React.useCallback(() => {
    cancel()
    setMessages([])
    setError(null)
    setLastRequestDebug(null)
    setLastResponseDebug(null)
    // NOTE(review): this clears only the legacy agent-only slot. In pinned
    // conversationId mode the per-conversation slot is cleared indirectly by
    // the persistence effect once `messages` becomes empty — confirm that
    // ordering is intended.
    clearPersistedSession(agent)
    mintedConversationIdRef.current = makeConversationId()
  }, [agent, cancel])

  /**
   * Send one user turn: append the user message plus an empty assistant
   * placeholder, POST the full history, and stream the assistant reply into
   * the placeholder. Any previous in-flight request is aborted first.
   */
  const sendMessage = React.useCallback(
    async (textInput: string, files?: AiChatMessageFile[]) => {
      const trimmed = textInput.trim()
      if (!trimmed) return
      if (abortRef.current) {
        abortRef.current.abort()
      }

      setError(null)
      const userMessage: AiChatMessage = {
        id: makeMessageId(),
        role: 'user',
        content: trimmed,
        files: files && files.length > 0 ? files : undefined,
      }
      // Empty assistant placeholder that the streaming loop fills in.
      const assistantMessage: AiChatMessage = {
        id: makeMessageId(),
        role: 'assistant',
        content: '',
      }
      const assistantId = assistantMessage.id
      // Snapshot prior messages for request payload so the dispatcher sees the
      // full turn history including the just-added user message.
      const outgoingHistory = [...messages, userMessage]
      setMessages([...outgoingHistory, assistantMessage])
      setStatus('submitting')

      const controller = new AbortController()
      abortRef.current = controller

      const url = getTransportEndpoint(agent, apiPath)
      // The placeholder assistant message is deliberately excluded from the
      // payload — only the real turn history goes to the dispatcher.
      const body = {
        messages: outgoingHistory.map((message) => ({
          role: message.role,
          content: message.content,
        })),
        pageContext,
        attachmentIds,
        debug,
        conversationId: effectiveConversationId,
      }
      setLastRequestDebug({ url, body })

      let response: Response
      try {
        response = await apiFetch(url, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            Accept: 'text/event-stream, text/plain, application/json',
          },
          body: JSON.stringify(body),
          signal: controller.signal,
        })
      } catch (requestError) {
        // User-initiated abort: exit quietly without surfacing an error.
        if ((requestError as { name?: string })?.name === 'AbortError') {
          setStatus('idle')
          abortRef.current = null
          return
        }
        const message =
          requestError instanceof Error
            ? requestError.message
            : 'Network request failed.'
        emitError({ message })
        setStatus('idle')
        abortRef.current = null
        return
      }

      if (!response.ok) {
        // Parse the server's error envelope and drop the empty assistant
        // placeholder so the error alert is the only feedback.
        const envelope = await readErrorEnvelope(response)
        setLastResponseDebug({ status: response.status, text: envelope.message })
        emitError(envelope)
        setStatus('idle')
        setMessages((current) => current.filter((entry) => entry.id !== assistantId))
        abortRef.current = null
        return
      }

      const bodyStream = response.body
      if (!bodyStream) {
        // NOTE(review): this path leaves the empty assistant placeholder in
        // the history (unlike the !response.ok path above) — confirm whether
        // it should be removed here too.
        setLastResponseDebug({ status: response.status, text: '' })
        setStatus('idle')
        abortRef.current = null
        return
      }

      // Defensive header accessor: tolerates non-standard Response-like
      // objects (e.g. test doubles) whose `headers.get` may be missing or throw.
      const headerGet = (name: string): string | null => {
        const headers = (response as { headers?: { get?: (k: string) => string | null } })
          .headers
        if (!headers || typeof headers.get !== 'function') return null
        try {
          return headers.get(name)
        } catch {
          return null
        }
      }
      // UI-message streams are identified by the Vercel AI marker header or
      // an SSE content type; anything else is treated as plain text.
      const isUiMessageStream =
        headerGet('x-vercel-ai-ui-message-stream') !== null ||
        (headerGet('content-type') ?? '').includes('event-stream')

      setStatus('streaming')
      const reader = bodyStream.getReader()
      const decoder = new TextDecoder()
      let streamedRaw = ''
      let builder = createBuilder()
      let sseBuffer = ''
      // Parse any complete SSE event blocks buffered so far and fold their
      // JSON chunks into the builder; incomplete trailing data stays buffered.
      const flushUiMessageBuffer = (extra?: string) => {
        if (extra) sseBuffer += extra
        const { events, rest } = parseSseLines(sseBuffer)
        sseBuffer = rest
        for (const block of events) {
          const data = extractDataPayload(block)
          if (!data) continue
          if (data === '[DONE]') continue
          try {
            const parsed = JSON.parse(data) as { type?: string }
            if (parsed && typeof parsed.type === 'string') {
              builder = applyChunk(builder, parsed as { type: string })
            }
          } catch {
            // Tolerate malformed events / SSE comments.
          }
        }
      }
      try {
        while (true) {
          const { value, done } = await reader.read()
          if (done) break
          if (!value) continue
          const piece = decoder.decode(value, { stream: true })
          if (!piece) continue
          streamedRaw += piece

          if (isUiMessageStream) {
            flushUiMessageBuffer(piece)
          } else {
            // Plain text fallback (legacy `toTextStreamResponse`).
            builder = { ...builder, text: streamedRaw }
          }
          // Capture the builder in a const so the updater closure reads this
          // iteration's snapshot, not a later mutation of `builder`.
          const snapshotBuilder = builder
          setMessages((current) =>
            current.map((entry) =>
              entry.id === assistantId
                ? mergeAssistantMessage(entry, snapshotBuilder)
                : entry,
            ),
          )
        }
        // Flush any multi-byte character held back by the streaming decoder.
        const tail = decoder.decode()
        if (tail) {
          streamedRaw += tail
          if (isUiMessageStream) {
            flushUiMessageBuffer(tail)
          } else {
            builder = { ...builder, text: streamedRaw }
          }
        }
        // Force-terminate a final SSE event that arrived without its trailing
        // blank line.
        if (isUiMessageStream && sseBuffer.length > 0) {
          flushUiMessageBuffer('\n\n')
        }
        builder = { ...builder, reasoningStreaming: false }
        const finalSnapshot = builder
        setMessages((current) =>
          current.map((entry) =>
            entry.id === assistantId
              ? mergeAssistantMessage(entry, finalSnapshot)
              : entry,
          ),
        )
        setLastResponseDebug({ status: response.status, text: streamedRaw })
        // A stream that produced no text, tool calls, or reasoning is treated
        // as a provider-side failure rather than a valid empty reply.
        const isEmpty =
          !builder.text.trim() && builder.toolCalls.length === 0 && !builder.reasoning
        if (isEmpty) {
          emitError({
            code: 'empty_response',
            message:
              'The AI agent returned an empty response. This usually means the LLM provider rejected the request (invalid API key, rate limit, or model error). Check your server logs for details.',
          })
          setMessages((current) => current.filter((entry) => entry.id !== assistantId))
        }
      } catch (streamError) {
        if ((streamError as { name?: string })?.name === 'AbortError') {
          // Cancelled by the user — keep whatever we have so far and exit
          // quietly.
        } else {
          const rawMessage =
            streamError instanceof Error
              ? streamError.message
              : 'Stream interrupted.'
          // LLM provider errors (auth failures, rate limits, invalid tool
          // schemas) surface as stream read errors. Include a hint so the
          // operator can check server logs for the full stack trace.
          const message = rawMessage.includes('API')
            ? rawMessage
            : `${rawMessage} — check server logs for LLM provider details.`
          emitError({ code: 'stream_error', message })
          // Remove the empty assistant placeholder so the error alert is
          // the only visible feedback.
          setMessages((current) => current.filter((entry) => entry.id !== assistantId))
        }
      } finally {
        reader.releaseLock()
        // Only clear the ref if it still points at OUR controller — a newer
        // sendMessage may have replaced it already.
        if (abortRef.current === controller) {
          abortRef.current = null
        }
        setStatus('idle')
      }
    },
    [agent, apiPath, attachmentIds, debug, effectiveConversationId, emitError, messages, pageContext],
  )

  // Abort any in-flight request when the component unmounts.
  React.useEffect(() => {
    return () => {
      if (abortRef.current) {
        abortRef.current.abort()
        abortRef.current = null
      }
    }
  }, [])

  return {
    messages,
    status,
    error,
    lastRequestDebug,
    lastResponseDebug,
    conversationId: effectiveConversationId,
    sendMessage,
    cancel,
    reset,
  }
}
|