@cloudflare/ai-chat 0.0.3 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +32 -11
- package/dist/index.js.map +1 -1
- package/package.json +4 -7
- package/src/index.ts +53 -18
- package/src/tests/chat-context.test.ts +2 -19
- package/src/tests/chat-persistence.test.ts +11 -29
- package/src/tests/client-tool-duplicate-message.test.ts +1 -5
- package/src/tests/client-tools-broadcast.test.ts +1 -19
- package/src/tests/cloudflare-test.d.ts +5 -0
- package/src/tests/non-sse-response.test.ts +186 -0
- package/src/tests/resumable-streaming.test.ts +2 -33
- package/src/tests/test-utils.ts +39 -0
- package/src/tests/worker.ts +7 -9
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,16 @@
 # @cloudflare/ai-chat
 
+## 0.0.4
+
+### Patch Changes
+
+- [#761](https://github.com/cloudflare/agents/pull/761) [`0e8fc1e`](https://github.com/cloudflare/agents/commit/0e8fc1e8cca3ad5acb51f5a0c92528c5b6beb358) Thanks [@iTrooz](https://github.com/iTrooz)! - Allow returning a non-streaming reponse from onChatMessage()
+
+- [#771](https://github.com/cloudflare/agents/pull/771) [`87dc96d`](https://github.com/cloudflare/agents/commit/87dc96d19de1d26dbb2badecbb9955a4eb8e9e2e) Thanks [@threepointone](https://github.com/threepointone)! - update dependencies
+
+- Updated dependencies [[`cf8a1e7`](https://github.com/cloudflare/agents/commit/cf8a1e7a24ecaac62c2aefca7b0fd5bf1373e8bd), [`87dc96d`](https://github.com/cloudflare/agents/commit/87dc96d19de1d26dbb2badecbb9955a4eb8e9e2e)]:
+  - agents@0.3.4
+
 ## 0.0.3
 
 ### Patch Changes
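The first entry above (#761) is what the `dist/index.js` changes below implement: `onChatMessage()` may now resolve to a plain, non-SSE `Response`, and connected clients still receive a valid AI SDK v5 text stream. A minimal sketch of an agent relying on that, assuming an AI SDK provider such as `@ai-sdk/openai` (the model wiring is illustrative, not part of this diff):

```ts
import { AIChatAgent } from "@cloudflare/ai-chat";
import { generateText, convertToModelMessages } from "ai";
import { openai } from "@ai-sdk/openai";

export class MyChatAgent extends AIChatAgent {
  async onChatMessage() {
    // Resolve the full completion up front instead of streaming it.
    const { text } = await generateText({
      model: openai("gpt-4o-mini"), // assumed provider/model, not from this diff
      messages: convertToModelMessages(this.messages)
    });
    // A non-SSE body is now accepted: the agent synthesizes
    // text-start / text-delta / text-end events for connected clients.
    return new Response(text, { headers: { "content-type": "text/plain" } });
  }
}
```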
package/dist/index.d.ts
CHANGED
@@ -193,6 +193,15 @@ declare class AIChatAgent<
     */
    private _maybeCleanupOldStreams;
    private _broadcastChatMessage;
+    /**
+     * Broadcasts a text event for non-SSE responses.
+     * This ensures plain text responses follow the AI SDK v5 stream protocol.
+     *
+     * @param streamId - The stream identifier for chunk storage
+     * @param event - The text event payload (text-start, text-delta with delta, or text-end)
+     * @param continuation - Whether this is a continuation of a previous stream
+     */
+    private _broadcastTextEvent;
    private _loadMessagesFromDb;
    onRequest(request: Request): Promise<Response>;
    private _tryCatchChat;
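The declaration is private, so the published `.d.ts` does not expose the `event` parameter's type. From the JSDoc and the call sites in `dist/index.js` below, it is presumably a small union along these lines (an illustration, not an exported type of the package):

```ts
// Assumed shape of the events _broadcastTextEvent accepts; derived from the
// JSDoc above and the dist/index.js call sites, not from a public type.
type TextStreamEvent =
  | { type: "text-start"; id: string }
  | { type: "text-delta"; id: string; delta: string }
  | { type: "text-end"; id: string };
```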
package/dist/index.js
CHANGED
@@ -402,6 +402,25 @@ var AIChatAgent = class extends Agent {
   _broadcastChatMessage(message, exclude) {
     this.broadcast(JSON.stringify(message), exclude);
   }
+  /**
+   * Broadcasts a text event for non-SSE responses.
+   * This ensures plain text responses follow the AI SDK v5 stream protocol.
+   *
+   * @param streamId - The stream identifier for chunk storage
+   * @param event - The text event payload (text-start, text-delta with delta, or text-end)
+   * @param continuation - Whether this is a continuation of a previous stream
+   */
+  _broadcastTextEvent(streamId, event, continuation) {
+    const body = JSON.stringify(event);
+    this._storeStreamChunk(streamId, body);
+    this._broadcastChatMessage({
+      body,
+      done: false,
+      id: event.id,
+      type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+      ...continuation && { continuation: true }
+    });
+  }
   _loadMessagesFromDb() {
     return (this.sql`select * from cf_ai_chat_agent_messages order by created_at` || []).map((row) => {
       try {
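Each call to the new helper does two things: it stores the serialized event via `_storeStreamChunk` (so it can be replayed on reconnection) and broadcasts it wrapped in a `CF_AGENT_USE_CHAT_RESPONSE` message. A hand-rolled sketch of consuming those frames over a raw WebSocket, purely for illustration (the `useAgentChat` hook referenced elsewhere in this package handles this in practice; the URL and ids are placeholders):

```ts
// Accumulate assistant text from the frames _broadcastTextEvent emits.
const ws = new WebSocket("wss://example.com/agents/my-chat/default");
let text = "";

ws.onmessage = (e) => {
  const msg = JSON.parse(e.data as string);
  // Chunk frames carry a serialized event in `body` and have `done: false`.
  if (typeof msg.body !== "string" || msg.done) return;
  const event = JSON.parse(msg.body);
  if (event.type === "text-delta") text += event.delta;
  if (event.type === "text-end") console.log("assistant:", text);
};
```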
@@ -755,11 +774,20 @@
         ...metadata
       } : metadata;
     }
+    const isSSE = (response.headers.get("content-type") || "").includes("text/event-stream");
+    if (!isSSE) this._broadcastTextEvent(streamId, {
+      type: "text-start",
+      id
+    }, continuation);
     let streamCompleted = false;
     try {
       while (true) {
         const { done, value } = await reader.read();
         if (done) {
+          if (!isSSE) this._broadcastTextEvent(streamId, {
+            type: "text-end",
+            id
+          }, continuation);
           this._completeStream(streamId);
           streamCompleted = true;
           this._broadcastChatMessage({
@@ -772,7 +800,7 @@
           break;
         }
         const chunk = decoder.decode(value);
-        if (
+        if (isSSE) {
           const lines = chunk.split("\n");
           for (const line of lines) if (line.startsWith("data: ") && line !== "data: [DONE]") try {
             const data = JSON.parse(line.slice(6));
@@ -1029,18 +1057,11 @@
               type: "text",
               text: chunk
             });
-
+            this._broadcastTextEvent(streamId, {
               type: "text-delta",
-              delta: chunk
-            });
-            this._storeStreamChunk(streamId, chunkBody);
-            this._broadcastChatMessage({
-              body: chunkBody,
-              done: false,
               id,
-
-
-            });
+              delta: chunk
+            }, continuation);
           }
         }
       } catch (error) {
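Taken together, the last three hunks change only the non-SSE path: the content type is checked once per response, a `text-start` is broadcast before reading, each decoded chunk becomes a `text-delta`, and a `text-end` is broadcast when the reader reports `done`. A plain text body delivered in two chunks therefore reaches clients roughly as the following event sequence (the request id and chunk boundaries are illustrative):

```ts
// Events a client would observe for a two-chunk text/plain response;
// the id and delta text are placeholders.
const observed = [
  { type: "text-start", id: "req_123" },
  { type: "text-delta", id: "req_123", delta: "Hello, " },
  { type: "text-delta", id: "req_123", delta: "world." },
  { type: "text-end", id: "req_123" }
];
```

The SSE path is untouched apart from using the hoisted `isSSE` flag, and the existing completion message (`done: true`) is still broadcast after `text-end` once the stream finishes.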
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["ctx","data: IncomingMessage","newMetadata: ProviderMetadata | undefined","message: ChatMessage | undefined","updatedMessage: ChatMessage","message: ChatMessage","activeTextParts: Record<string, TextUIPart>","activeReasoningParts: Record<string, ReasoningUIPart>","partialToolCalls: Record<\n string,\n { text: string; index: number; toolName: string; dynamic?: boolean }\n >","part","options","isToolUIPart","data: UIMessageChunk","textPart: TextUIPart","reasoningPart: ReasoningUIPart","getToolName","eventToSend: unknown","mergedMessage: ChatMessage"],"sources":["../src/index.ts"],"sourcesContent":["import type {\n UIMessage as ChatMessage,\n DynamicToolUIPart,\n JSONSchema7,\n ProviderMetadata,\n ReasoningUIPart,\n StreamTextOnFinishCallback,\n TextUIPart,\n Tool,\n ToolSet,\n ToolUIPart,\n UIMessageChunk\n} from \"ai\";\nimport { tool, jsonSchema } from \"ai\";\nimport {\n Agent,\n type AgentContext,\n type Connection,\n type ConnectionContext,\n type WSMessage\n} from \"agents\";\n\nimport {\n MessageType,\n type IncomingMessage,\n type OutgoingMessage\n} from \"./types\";\nimport { autoTransformMessages } from \"./ai-chat-v5-migration\";\nimport { nanoid } from \"nanoid\";\n\nimport { agentContext } from \"agents/internal_context\";\n\n/**\n * Schema for a client-defined tool sent from the browser.\n * These tools are executed on the client, not the server.\n *\n * Note: Uses `parameters` (JSONSchema7) rather than AI SDK's `inputSchema` (FlexibleSchema)\n * because this is the wire format. Zod schemas cannot be serialized.\n *\n * @deprecated Define tools on the server using `tool()` from \"ai\" instead.\n * For tools that need client-side execution, omit the `execute` function\n * and handle them via the `onToolCall` callback in `useAgentChat`.\n */\nexport type ClientToolSchema = {\n /** Unique name for the tool */\n name: string;\n /** Human-readable description of what the tool does */\n description?: Tool[\"description\"];\n /** JSON Schema defining the tool's input parameters */\n parameters?: JSONSchema7;\n};\n\n/**\n * Options passed to the onChatMessage handler.\n */\nexport type OnChatMessageOptions = {\n /** AbortSignal for cancelling the request */\n abortSignal?: AbortSignal;\n /**\n * Tool schemas sent from the client for dynamic tool registration.\n * These represent tools that will be executed on the client side.\n * Use `createToolsFromClientSchemas()` to convert these to AI SDK tool format.\n *\n * @deprecated Define tools on the server instead. 
Use `onToolCall` callback\n * in `useAgentChat` for client-side execution.\n */\n clientTools?: ClientToolSchema[];\n};\n\n/**\n * Converts client tool schemas to AI SDK tool format.\n *\n * These tools have no `execute` function - when the AI model calls them,\n * the tool call is sent back to the client for execution.\n *\n * @param clientTools - Array of tool schemas from the client\n * @returns Record of AI SDK tools that can be spread into your tools object\n *\n * @deprecated Define tools on the server using `tool()` from \"ai\" instead.\n * For tools that need client-side execution, omit the `execute` function\n * and handle them via the `onToolCall` callback in `useAgentChat`.\n *\n * @example\n * ```typescript\n * // Server: Define tool without execute\n * const tools = {\n * getLocation: tool({\n * description: \"Get user's location\",\n * inputSchema: z.object({})\n * // No execute = client must handle\n * })\n * };\n *\n * // Client: Handle in onToolCall\n * useAgentChat({\n * onToolCall: async ({ toolCall, addToolOutput }) => {\n * if (toolCall.toolName === 'getLocation') {\n * const pos = await navigator.geolocation.getCurrentPosition();\n * addToolOutput({ toolCallId: toolCall.toolCallId, output: pos });\n * }\n * }\n * });\n * ```\n */\nexport function createToolsFromClientSchemas(\n clientTools?: ClientToolSchema[]\n): ToolSet {\n if (!clientTools || clientTools.length === 0) {\n return {};\n }\n\n // Check for duplicate tool names\n const seenNames = new Set<string>();\n for (const t of clientTools) {\n if (seenNames.has(t.name)) {\n console.warn(\n `[createToolsFromClientSchemas] Duplicate tool name \"${t.name}\" found. Later definitions will override earlier ones.`\n );\n }\n seenNames.add(t.name);\n }\n\n return Object.fromEntries(\n clientTools.map((t) => [\n t.name,\n tool({\n description: t.description ?? \"\",\n inputSchema: jsonSchema(t.parameters ?? 
{ type: \"object\" })\n // No execute function = tool call is sent back to client\n })\n ])\n );\n}\n\n/** Number of chunks to buffer before flushing to SQLite */\nconst CHUNK_BUFFER_SIZE = 10;\n/** Maximum buffer size to prevent memory issues on rapid reconnections */\nconst CHUNK_BUFFER_MAX_SIZE = 100;\n/** Maximum age for a \"streaming\" stream before considering it stale (ms) - 5 minutes */\nconst STREAM_STALE_THRESHOLD_MS = 5 * 60 * 1000;\n/** Default cleanup interval for old streams (ms) - every 10 minutes */\nconst CLEANUP_INTERVAL_MS = 10 * 60 * 1000;\n/** Default age threshold for cleaning up completed streams (ms) - 24 hours */\nconst CLEANUP_AGE_THRESHOLD_MS = 24 * 60 * 60 * 1000;\n\nconst decoder = new TextDecoder();\n\n/**\n * Stored stream chunk for resumable streaming\n */\ntype StreamChunk = {\n id: string;\n stream_id: string;\n body: string;\n chunk_index: number;\n created_at: number;\n};\n\n/**\n * Stream metadata for tracking active streams\n */\ntype StreamMetadata = {\n id: string;\n request_id: string;\n status: \"streaming\" | \"completed\" | \"error\";\n created_at: number;\n completed_at: number | null;\n};\n\n/**\n * Extension of Agent with built-in chat capabilities\n * @template Env Environment type containing bindings\n */\nexport class AIChatAgent<\n Env extends Cloudflare.Env = Cloudflare.Env,\n State = unknown\n> extends Agent<Env, State> {\n /**\n * Map of message `id`s to `AbortController`s\n * useful to propagate request cancellation signals for any external calls made by the agent\n */\n private _chatMessageAbortControllers: Map<string, AbortController>;\n\n /**\n * Currently active stream ID for resumable streaming.\n * Stored in memory for quick access; persisted in stream_metadata table.\n * @internal Protected for testing purposes.\n */\n protected _activeStreamId: string | null = null;\n\n /**\n * Request ID associated with the active stream.\n * @internal Protected for testing purposes.\n */\n protected _activeRequestId: string | null = null;\n\n /**\n * The message currently being streamed. 
Used to apply tool results\n * before the message is persisted.\n * @internal\n */\n private _streamingMessage: ChatMessage | null = null;\n\n /**\n * Promise that resolves when the current stream completes.\n * Used to wait for message persistence before continuing after tool results.\n * @internal\n */\n private _streamCompletionPromise: Promise<void> | null = null;\n private _streamCompletionResolve: (() => void) | null = null;\n\n /**\n * Current chunk index for the active stream\n */\n private _streamChunkIndex = 0;\n\n /**\n * Buffer for stream chunks pending write to SQLite.\n * Chunks are batched and flushed when buffer reaches CHUNK_BUFFER_SIZE.\n */\n private _chunkBuffer: Array<{\n id: string;\n streamId: string;\n body: string;\n index: number;\n }> = [];\n\n /**\n * Lock to prevent concurrent flush operations\n */\n private _isFlushingChunks = false;\n\n /**\n * Timestamp of the last cleanup operation for old streams\n */\n private _lastCleanupTime = 0;\n\n /** Array of chat messages for the current conversation */\n messages: ChatMessage[];\n\n constructor(ctx: AgentContext, env: Env) {\n super(ctx, env);\n this.sql`create table if not exists cf_ai_chat_agent_messages (\n id text primary key,\n message text not null,\n created_at datetime default current_timestamp\n )`;\n\n // Create tables for automatic resumable streaming\n this.sql`create table if not exists cf_ai_chat_stream_chunks (\n id text primary key,\n stream_id text not null,\n body text not null,\n chunk_index integer not null,\n created_at integer not null\n )`;\n\n this.sql`create table if not exists cf_ai_chat_stream_metadata (\n id text primary key,\n request_id text not null,\n status text not null,\n created_at integer not null,\n completed_at integer\n )`;\n\n this.sql`create index if not exists idx_stream_chunks_stream_id \n on cf_ai_chat_stream_chunks(stream_id, chunk_index)`;\n\n // Load messages and automatically transform them to v5 format\n const rawMessages = this._loadMessagesFromDb();\n\n // Automatic migration following https://jhak.im/blog/ai-sdk-migration-handling-previously-saved-messages\n this.messages = autoTransformMessages(rawMessages);\n\n this._chatMessageAbortControllers = new Map();\n\n // Check for any active streams from a previous session\n this._restoreActiveStream();\n const _onConnect = this.onConnect.bind(this);\n this.onConnect = async (connection: Connection, ctx: ConnectionContext) => {\n // Notify client about active streams that can be resumed\n if (this._activeStreamId) {\n this._notifyStreamResuming(connection);\n }\n // Call consumer's onConnect\n return _onConnect(connection, ctx);\n };\n\n // Wrap onMessage\n const _onMessage = this.onMessage.bind(this);\n this.onMessage = async (connection: Connection, message: WSMessage) => {\n // Handle AIChatAgent's internal messages first\n if (typeof message === \"string\") {\n let data: IncomingMessage;\n try {\n data = JSON.parse(message) as IncomingMessage;\n } catch (_error) {\n // Not JSON, forward to consumer\n return _onMessage(connection, message);\n }\n\n // Handle chat request\n if (\n data.type === MessageType.CF_AGENT_USE_CHAT_REQUEST &&\n data.init.method === \"POST\"\n ) {\n const { body } = data.init;\n const parsed = JSON.parse(body as string);\n const { messages, clientTools } = parsed as {\n messages: ChatMessage[];\n clientTools?: ClientToolSchema[];\n };\n\n // Automatically transform any incoming messages\n const transformedMessages = autoTransformMessages(messages);\n\n this._broadcastChatMessage(\n {\n 
messages: transformedMessages,\n type: MessageType.CF_AGENT_CHAT_MESSAGES\n },\n [connection.id]\n );\n\n await this.persistMessages(transformedMessages, [connection.id]);\n\n this.observability?.emit(\n {\n displayMessage: \"Chat message request\",\n id: data.id,\n payload: {},\n timestamp: Date.now(),\n type: \"message:request\"\n },\n this.ctx\n );\n\n const chatMessageId = data.id;\n const abortSignal = this._getAbortSignal(chatMessageId);\n\n return this._tryCatchChat(async () => {\n // Wrap in agentContext.run() to propagate connection context to onChatMessage\n // This ensures getCurrentAgent() returns the connection inside tool execute functions\n return agentContext.run(\n { agent: this, connection, request: undefined, email: undefined },\n async () => {\n const response = await this.onChatMessage(\n async (_finishResult) => {\n this._removeAbortController(chatMessageId);\n\n this.observability?.emit(\n {\n displayMessage: \"Chat message response\",\n id: data.id,\n payload: {},\n timestamp: Date.now(),\n type: \"message:response\"\n },\n this.ctx\n );\n },\n {\n abortSignal,\n clientTools\n }\n );\n\n if (response) {\n await this._reply(data.id, response, [connection.id]);\n } else {\n console.warn(\n `[AIChatAgent] onChatMessage returned no response for chatMessageId: ${chatMessageId}`\n );\n this._broadcastChatMessage(\n {\n body: \"No response was generated by the agent.\",\n done: true,\n id: data.id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n },\n [connection.id]\n );\n }\n }\n );\n });\n }\n\n // Handle clear chat\n if (data.type === MessageType.CF_AGENT_CHAT_CLEAR) {\n this._destroyAbortControllers();\n this.sql`delete from cf_ai_chat_agent_messages`;\n this.sql`delete from cf_ai_chat_stream_chunks`;\n this.sql`delete from cf_ai_chat_stream_metadata`;\n this._activeStreamId = null;\n this._activeRequestId = null;\n this._streamChunkIndex = 0;\n this.messages = [];\n this._broadcastChatMessage(\n { type: MessageType.CF_AGENT_CHAT_CLEAR },\n [connection.id]\n );\n return;\n }\n\n // Handle message replacement\n if (data.type === MessageType.CF_AGENT_CHAT_MESSAGES) {\n const transformedMessages = autoTransformMessages(data.messages);\n await this.persistMessages(transformedMessages, [connection.id]);\n return;\n }\n\n // Handle request cancellation\n if (data.type === MessageType.CF_AGENT_CHAT_REQUEST_CANCEL) {\n this._cancelChatRequest(data.id);\n return;\n }\n\n // Handle stream resume acknowledgment\n if (data.type === MessageType.CF_AGENT_STREAM_RESUME_ACK) {\n if (\n this._activeStreamId &&\n this._activeRequestId &&\n this._activeRequestId === data.id\n ) {\n this._sendStreamChunks(\n connection,\n this._activeStreamId,\n this._activeRequestId\n );\n }\n return;\n }\n\n // Handle client-side tool result\n if (data.type === MessageType.CF_AGENT_TOOL_RESULT) {\n const { toolCallId, toolName, output, autoContinue } = data;\n\n // Apply the tool result\n this._applyToolResult(toolCallId, toolName, output).then(\n (applied) => {\n // Only auto-continue if client requested it (opt-in behavior)\n // This mimics server-executed tool behavior where the LLM\n // automatically continues after seeing tool results\n if (applied && autoContinue) {\n // Wait for the original stream to complete and message to be persisted\n // before calling onChatMessage, so this.messages includes the tool result\n const waitForStream = async () => {\n if (this._streamCompletionPromise) {\n await this._streamCompletionPromise;\n } else {\n // If no promise, wait a bit for the stream to finish\n 
await new Promise((resolve) => setTimeout(resolve, 500));\n }\n };\n\n waitForStream().then(() => {\n const continuationId = nanoid();\n const abortSignal = this._getAbortSignal(continuationId);\n\n this._tryCatchChat(async () => {\n return agentContext.run(\n {\n agent: this,\n connection,\n request: undefined,\n email: undefined\n },\n async () => {\n const response = await this.onChatMessage(\n async (_finishResult) => {\n this._removeAbortController(continuationId);\n\n this.observability?.emit(\n {\n displayMessage:\n \"Chat message response (tool continuation)\",\n id: continuationId,\n payload: {},\n timestamp: Date.now(),\n type: \"message:response\"\n },\n this.ctx\n );\n },\n {\n abortSignal\n }\n );\n\n if (response) {\n // Pass continuation flag to merge parts into last assistant message\n // Note: We pass an empty excludeBroadcastIds array because the sender\n // NEEDS to receive the continuation stream. Unlike regular chat requests\n // where aiFetch handles the response, tool continuations have no listener\n // waiting - the client relies on the broadcast.\n await this._reply(\n continuationId,\n response,\n [], // Don't exclude sender - they need the continuation\n { continuation: true }\n );\n }\n }\n );\n });\n });\n }\n }\n );\n return;\n }\n }\n\n // Forward unhandled messages to consumer's onMessage\n return _onMessage(connection, message);\n };\n }\n\n /**\n * Restore active stream state if the agent was restarted during streaming.\n * Called during construction to recover any interrupted streams.\n * Validates stream freshness to avoid sending stale resume notifications.\n * @internal Protected for testing purposes.\n */\n protected _restoreActiveStream() {\n const activeStreams = this.sql<StreamMetadata>`\n select * from cf_ai_chat_stream_metadata \n where status = 'streaming' \n order by created_at desc \n limit 1\n `;\n\n if (activeStreams && activeStreams.length > 0) {\n const stream = activeStreams[0];\n const streamAge = Date.now() - stream.created_at;\n\n // Check if stream is stale; delete to free storage\n if (streamAge > STREAM_STALE_THRESHOLD_MS) {\n this\n .sql`delete from cf_ai_chat_stream_chunks where stream_id = ${stream.id}`;\n this\n .sql`delete from cf_ai_chat_stream_metadata where id = ${stream.id}`;\n console.warn(\n `[AIChatAgent] Deleted stale stream ${stream.id} (age: ${Math.round(streamAge / 1000)}s)`\n );\n return;\n }\n\n this._activeStreamId = stream.id;\n this._activeRequestId = stream.request_id;\n\n // Get the last chunk index\n const lastChunk = this.sql<{ max_index: number }>`\n select max(chunk_index) as max_index \n from cf_ai_chat_stream_chunks \n where stream_id = ${this._activeStreamId}\n `;\n this._streamChunkIndex =\n lastChunk && lastChunk[0]?.max_index != null\n ? 
lastChunk[0].max_index + 1\n : 0;\n }\n }\n\n /**\n * Notify a connection about an active stream that can be resumed.\n * The client should respond with CF_AGENT_STREAM_RESUME_ACK to receive chunks.\n * Uses in-memory state for request ID - no extra DB lookup needed.\n * @param connection - The WebSocket connection to notify\n */\n private _notifyStreamResuming(connection: Connection) {\n if (!this._activeStreamId || !this._activeRequestId) {\n return;\n }\n\n // Notify client - they will send ACK when ready\n connection.send(\n JSON.stringify({\n type: MessageType.CF_AGENT_STREAM_RESUMING,\n id: this._activeRequestId\n })\n );\n }\n\n /**\n * Send stream chunks to a connection after receiving ACK.\n * @param connection - The WebSocket connection\n * @param streamId - The stream to replay\n * @param requestId - The original request ID\n */\n private _sendStreamChunks(\n connection: Connection,\n streamId: string,\n requestId: string\n ) {\n // Flush any pending chunks first to ensure we have the latest\n this._flushChunkBuffer();\n\n const chunks = this.sql<StreamChunk>`\n select * from cf_ai_chat_stream_chunks \n where stream_id = ${streamId} \n order by chunk_index asc\n `;\n\n // Send all stored chunks\n for (const chunk of chunks || []) {\n connection.send(\n JSON.stringify({\n body: chunk.body,\n done: false,\n id: requestId,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n })\n );\n }\n\n // If the stream is no longer active (completed), send done signal\n // We track active state in memory, no need to query DB\n if (this._activeStreamId !== streamId) {\n connection.send(\n JSON.stringify({\n body: \"\",\n done: true,\n id: requestId,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n })\n );\n }\n }\n\n /**\n * Buffer a stream chunk for batch write to SQLite.\n * @param streamId - The stream this chunk belongs to\n * @param body - The serialized chunk body\n * @internal Protected for testing purposes.\n */\n protected _storeStreamChunk(streamId: string, body: string) {\n // Force flush if buffer is at max to prevent memory issues\n if (this._chunkBuffer.length >= CHUNK_BUFFER_MAX_SIZE) {\n this._flushChunkBuffer();\n }\n\n this._chunkBuffer.push({\n id: nanoid(),\n streamId,\n body,\n index: this._streamChunkIndex\n });\n this._streamChunkIndex++;\n\n // Flush when buffer reaches threshold\n if (this._chunkBuffer.length >= CHUNK_BUFFER_SIZE) {\n this._flushChunkBuffer();\n }\n }\n\n /**\n * Flush buffered chunks to SQLite in a single batch.\n * Uses a lock to prevent concurrent flush operations.\n * @internal Protected for testing purposes.\n */\n protected _flushChunkBuffer() {\n // Prevent concurrent flushes\n if (this._isFlushingChunks || this._chunkBuffer.length === 0) {\n return;\n }\n\n this._isFlushingChunks = true;\n try {\n const chunks = this._chunkBuffer;\n this._chunkBuffer = [];\n\n // Batch insert all chunks\n const now = Date.now();\n for (const chunk of chunks) {\n this.sql`\n insert into cf_ai_chat_stream_chunks (id, stream_id, body, chunk_index, created_at)\n values (${chunk.id}, ${chunk.streamId}, ${chunk.body}, ${chunk.index}, ${now})\n `;\n }\n } finally {\n this._isFlushingChunks = false;\n }\n }\n\n /**\n * Start tracking a new stream for resumable streaming.\n * Creates metadata entry in SQLite and sets up tracking state.\n * @param requestId - The unique ID of the chat request\n * @returns The generated stream ID\n * @internal Protected for testing purposes.\n */\n protected _startStream(requestId: string): string {\n // Flush any pending chunks from previous 
streams to prevent mixing\n this._flushChunkBuffer();\n\n const streamId = nanoid();\n this._activeStreamId = streamId;\n this._activeRequestId = requestId;\n this._streamChunkIndex = 0;\n\n this.sql`\n insert into cf_ai_chat_stream_metadata (id, request_id, status, created_at)\n values (${streamId}, ${requestId}, 'streaming', ${Date.now()})\n `;\n\n return streamId;\n }\n\n /**\n * Mark a stream as completed and flush any pending chunks.\n * @param streamId - The stream to mark as completed\n * @internal Protected for testing purposes.\n */\n protected _completeStream(streamId: string) {\n // Flush any pending chunks before completing\n this._flushChunkBuffer();\n\n this.sql`\n update cf_ai_chat_stream_metadata \n set status = 'completed', completed_at = ${Date.now()} \n where id = ${streamId}\n `;\n this._activeStreamId = null;\n this._activeRequestId = null;\n this._streamChunkIndex = 0;\n\n // Periodically clean up old streams (not on every completion)\n this._maybeCleanupOldStreams();\n }\n\n /**\n * Clean up old completed streams if enough time has passed since last cleanup.\n * This prevents database growth while avoiding cleanup overhead on every stream completion.\n */\n private _maybeCleanupOldStreams() {\n const now = Date.now();\n if (now - this._lastCleanupTime < CLEANUP_INTERVAL_MS) {\n return;\n }\n this._lastCleanupTime = now;\n\n const cutoff = now - CLEANUP_AGE_THRESHOLD_MS;\n this.sql`\n delete from cf_ai_chat_stream_chunks \n where stream_id in (\n select id from cf_ai_chat_stream_metadata \n where status = 'completed' and completed_at < ${cutoff}\n )\n `;\n this.sql`\n delete from cf_ai_chat_stream_metadata \n where status = 'completed' and completed_at < ${cutoff}\n `;\n }\n\n private _broadcastChatMessage(message: OutgoingMessage, exclude?: string[]) {\n this.broadcast(JSON.stringify(message), exclude);\n }\n\n private _loadMessagesFromDb(): ChatMessage[] {\n const rows =\n this.sql`select * from cf_ai_chat_agent_messages order by created_at` ||\n [];\n return rows\n .map((row) => {\n try {\n return JSON.parse(row.message as string);\n } catch (error) {\n console.error(`Failed to parse message ${row.id}:`, error);\n return null;\n }\n })\n .filter((msg): msg is ChatMessage => msg !== null);\n }\n\n override async onRequest(request: Request): Promise<Response> {\n return this._tryCatchChat(async () => {\n const url = new URL(request.url);\n\n if (url.pathname.endsWith(\"/get-messages\")) {\n const messages = this._loadMessagesFromDb();\n return Response.json(messages);\n }\n\n return super.onRequest(request);\n });\n }\n\n private async _tryCatchChat<T>(fn: () => T | Promise<T>) {\n try {\n return await fn();\n } catch (e) {\n throw this.onError(e);\n }\n }\n\n /**\n * Handle incoming chat messages and generate a response\n * @param onFinish Callback to be called when the response is finished\n * @param options Options including abort signal and client-defined tools\n * @returns Response to send to the client or undefined\n */\n async onChatMessage(\n // biome-ignore lint/correctness/noUnusedFunctionParameters: overridden later\n onFinish: StreamTextOnFinishCallback<ToolSet>,\n // biome-ignore lint/correctness/noUnusedFunctionParameters: overridden later\n options?: OnChatMessageOptions\n ): Promise<Response | undefined> {\n throw new Error(\n \"recieved a chat message, override onChatMessage and return a Response to send to the client\"\n );\n }\n\n /**\n * Save messages on the server side\n * @param messages Chat messages to save\n */\n async saveMessages(messages: 
ChatMessage[]) {\n await this.persistMessages(messages);\n await this._tryCatchChat(async () => {\n const response = await this.onChatMessage(() => {});\n if (response) this._reply(crypto.randomUUID(), response);\n });\n }\n\n async persistMessages(\n messages: ChatMessage[],\n excludeBroadcastIds: string[] = []\n ) {\n // Merge incoming messages with existing server state to preserve tool outputs.\n // This is critical for client-side tools: the client sends messages without\n // tool outputs, but the server has them via _applyToolResult.\n const mergedMessages = this._mergeIncomingWithServerState(messages);\n\n // Persist the merged messages\n for (const message of mergedMessages) {\n // Strip OpenAI item IDs to prevent \"Duplicate item found\" errors\n // when using the OpenAI Responses API. These IDs are assigned by OpenAI\n // and if sent back in subsequent requests, cause duplicate detection.\n const sanitizedMessage = this._sanitizeMessageForPersistence(message);\n const messageToSave = this._resolveMessageForToolMerge(sanitizedMessage);\n this.sql`\n insert into cf_ai_chat_agent_messages (id, message)\n values (${messageToSave.id}, ${JSON.stringify(messageToSave)})\n on conflict(id) do update set message = excluded.message\n `;\n }\n\n // refresh in-memory messages\n const persisted = this._loadMessagesFromDb();\n this.messages = autoTransformMessages(persisted);\n this._broadcastChatMessage(\n {\n messages: mergedMessages,\n type: MessageType.CF_AGENT_CHAT_MESSAGES\n },\n excludeBroadcastIds\n );\n }\n\n /**\n * Merges incoming messages with existing server state.\n * This preserves tool outputs that the server has (via _applyToolResult)\n * but the client doesn't have yet.\n *\n * @param incomingMessages - Messages from the client\n * @returns Messages with server's tool outputs preserved\n */\n private _mergeIncomingWithServerState(\n incomingMessages: ChatMessage[]\n ): ChatMessage[] {\n // Build a map of toolCallId -> output from existing server messages\n const serverToolOutputs = new Map<string, unknown>();\n for (const msg of this.messages) {\n if (msg.role !== \"assistant\") continue;\n for (const part of msg.parts) {\n if (\n \"toolCallId\" in part &&\n \"state\" in part &&\n part.state === \"output-available\" &&\n \"output\" in part\n ) {\n serverToolOutputs.set(\n part.toolCallId as string,\n (part as { output: unknown }).output\n );\n }\n }\n }\n\n // If server has no tool outputs, return incoming messages as-is\n if (serverToolOutputs.size === 0) {\n return incomingMessages;\n }\n\n // Merge server's tool outputs into incoming messages\n return incomingMessages.map((msg) => {\n if (msg.role !== \"assistant\") return msg;\n\n let hasChanges = false;\n const updatedParts = msg.parts.map((part) => {\n // If this is a tool part in input-available state and server has the output\n if (\n \"toolCallId\" in part &&\n \"state\" in part &&\n part.state === \"input-available\" &&\n serverToolOutputs.has(part.toolCallId as string)\n ) {\n hasChanges = true;\n return {\n ...part,\n state: \"output-available\" as const,\n output: serverToolOutputs.get(part.toolCallId as string)\n };\n }\n return part;\n }) as ChatMessage[\"parts\"];\n\n return hasChanges ? { ...msg, parts: updatedParts } : msg;\n });\n }\n\n /**\n * Resolves a message for persistence, handling tool result merging.\n * If the message contains tool parts with output-available state, checks if there's\n * an existing message with the same toolCallId that should be updated instead of\n * creating a duplicate. 
This prevents the \"Duplicate item found\" error from OpenAI\n * when client-side tool results arrive in a new request.\n *\n * @param message - The message to potentially merge\n * @returns The message with the correct ID (either original or merged)\n */\n private _resolveMessageForToolMerge(message: ChatMessage): ChatMessage {\n if (message.role !== \"assistant\") {\n return message;\n }\n\n // Check if this message has tool parts with output-available state\n for (const part of message.parts) {\n if (\n \"toolCallId\" in part &&\n \"state\" in part &&\n part.state === \"output-available\"\n ) {\n const toolCallId = part.toolCallId as string;\n\n // Look for an existing message with this toolCallId in input-available state\n const existingMessage = this._findMessageByToolCallId(toolCallId);\n if (existingMessage && existingMessage.id !== message.id) {\n // Found a match - merge by using the existing message's ID\n // This ensures the SQL upsert updates the existing row\n return {\n ...message,\n id: existingMessage.id\n };\n }\n }\n }\n\n return message;\n }\n\n /**\n * Finds an existing assistant message that contains a tool part with the given toolCallId.\n * Used to detect when a tool result should update an existing message rather than\n * creating a new one.\n *\n * @param toolCallId - The tool call ID to search for\n * @returns The existing message if found, undefined otherwise\n */\n private _findMessageByToolCallId(\n toolCallId: string\n ): ChatMessage | undefined {\n for (const msg of this.messages) {\n if (msg.role !== \"assistant\") continue;\n\n for (const part of msg.parts) {\n if (\"toolCallId\" in part && part.toolCallId === toolCallId) {\n return msg;\n }\n }\n }\n return undefined;\n }\n\n /**\n * Sanitizes a message for persistence by removing ephemeral provider-specific\n * data that should not be stored or sent back in subsequent requests.\n *\n * This handles two issues with the OpenAI Responses API:\n *\n * 1. **Duplicate item IDs**: The AI SDK's @ai-sdk/openai provider (v2.0.x+)\n * defaults to using OpenAI's Responses API which assigns unique itemIds\n * to each message part. When these IDs are persisted and sent back,\n * OpenAI rejects them as duplicates.\n *\n * 2. **Empty reasoning parts**: OpenAI may return reasoning parts with empty\n * text and encrypted content. 
These cause \"Non-OpenAI reasoning parts are\n * not supported\" warnings when sent back via convertToModelMessages().\n *\n * @param message - The message to sanitize\n * @returns A new message with ephemeral provider data removed\n */\n private _sanitizeMessageForPersistence(message: ChatMessage): ChatMessage {\n // First, filter out empty reasoning parts (they have no useful content)\n const filteredParts = message.parts.filter((part) => {\n if (part.type === \"reasoning\") {\n const reasoningPart = part as ReasoningUIPart;\n // Remove reasoning parts that have no text content\n // These are typically placeholders with only encrypted content\n if (!reasoningPart.text || reasoningPart.text.trim() === \"\") {\n return false;\n }\n }\n return true;\n });\n\n // Then sanitize remaining parts by stripping OpenAI-specific ephemeral data\n const sanitizedParts = filteredParts.map((part) => {\n let sanitizedPart = part;\n\n // Strip providerMetadata.openai.itemId and reasoningEncryptedContent\n if (\n \"providerMetadata\" in sanitizedPart &&\n sanitizedPart.providerMetadata &&\n typeof sanitizedPart.providerMetadata === \"object\" &&\n \"openai\" in sanitizedPart.providerMetadata\n ) {\n sanitizedPart = this._stripOpenAIMetadata(\n sanitizedPart,\n \"providerMetadata\"\n );\n }\n\n // Also check callProviderMetadata for tool parts\n if (\n \"callProviderMetadata\" in sanitizedPart &&\n sanitizedPart.callProviderMetadata &&\n typeof sanitizedPart.callProviderMetadata === \"object\" &&\n \"openai\" in sanitizedPart.callProviderMetadata\n ) {\n sanitizedPart = this._stripOpenAIMetadata(\n sanitizedPart,\n \"callProviderMetadata\"\n );\n }\n\n return sanitizedPart;\n }) as ChatMessage[\"parts\"];\n\n return { ...message, parts: sanitizedParts };\n }\n\n /**\n * Helper to strip OpenAI-specific ephemeral fields from a metadata object.\n * Removes itemId and reasoningEncryptedContent while preserving other fields.\n */\n private _stripOpenAIMetadata<T extends ChatMessage[\"parts\"][number]>(\n part: T,\n metadataKey: \"providerMetadata\" | \"callProviderMetadata\"\n ): T {\n const metadata = (part as Record<string, unknown>)[metadataKey] as {\n openai?: Record<string, unknown>;\n [key: string]: unknown;\n };\n\n if (!metadata?.openai) return part;\n\n const openaiMeta = metadata.openai;\n\n // Remove ephemeral fields: itemId and reasoningEncryptedContent\n const {\n itemId: _itemId,\n reasoningEncryptedContent: _rec,\n ...restOpenai\n } = openaiMeta;\n\n // Determine what to keep\n const hasOtherOpenaiFields = Object.keys(restOpenai).length > 0;\n const { openai: _openai, ...restMetadata } = metadata;\n\n let newMetadata: ProviderMetadata | undefined;\n if (hasOtherOpenaiFields) {\n newMetadata = {\n ...restMetadata,\n openai: restOpenai\n } as ProviderMetadata;\n } else if (Object.keys(restMetadata).length > 0) {\n newMetadata = restMetadata as ProviderMetadata;\n }\n\n // Create new part without the old metadata\n const { [metadataKey]: _oldMeta, ...restPart } = part as Record<\n string,\n unknown\n >;\n\n if (newMetadata) {\n return { ...restPart, [metadataKey]: newMetadata } as T;\n }\n return restPart as T;\n }\n\n /**\n * Applies a tool result to an existing assistant message.\n * This is used when the client sends CF_AGENT_TOOL_RESULT for client-side tools.\n * The server is the source of truth, so we update the message here and broadcast\n * the update to all clients.\n *\n * @param toolCallId - The tool call ID this result is for\n * @param toolName - The name of the tool\n * @param output - 
The output from the tool execution\n * @returns true if the result was applied, false if the message was not found\n */\n private async _applyToolResult(\n toolCallId: string,\n _toolName: string,\n output: unknown\n ): Promise<boolean> {\n // Find the message with this tool call\n // First check the currently streaming message\n let message: ChatMessage | undefined;\n\n // Check streaming message first\n if (this._streamingMessage) {\n for (const part of this._streamingMessage.parts) {\n if (\"toolCallId\" in part && part.toolCallId === toolCallId) {\n message = this._streamingMessage;\n break;\n }\n }\n }\n\n // If not found in streaming message, retry persisted messages\n if (!message) {\n for (let attempt = 0; attempt < 10; attempt++) {\n message = this._findMessageByToolCallId(toolCallId);\n if (message) break;\n // Wait 100ms before retrying\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n }\n\n if (!message) {\n // The tool result will be included when\n // the client sends the follow-up message via sendMessage().\n console.warn(\n `[AIChatAgent] _applyToolResult: Could not find message with toolCallId ${toolCallId} after retries`\n );\n return false;\n }\n\n // Check if this is the streaming message (not yet persisted)\n const isStreamingMessage = message === this._streamingMessage;\n\n // Update the tool part with the output\n let updated = false;\n if (isStreamingMessage) {\n // Update in place - the message will be persisted when streaming completes\n for (const part of message.parts) {\n if (\n \"toolCallId\" in part &&\n part.toolCallId === toolCallId &&\n \"state\" in part &&\n part.state === \"input-available\"\n ) {\n (part as { state: string; output?: unknown }).state =\n \"output-available\";\n (part as { state: string; output?: unknown }).output = output;\n updated = true;\n break;\n }\n }\n } else {\n // For persisted messages, create updated parts\n const updatedParts = message.parts.map((part) => {\n if (\n \"toolCallId\" in part &&\n part.toolCallId === toolCallId &&\n \"state\" in part &&\n part.state === \"input-available\"\n ) {\n updated = true;\n return {\n ...part,\n state: \"output-available\" as const,\n output\n };\n }\n return part;\n }) as ChatMessage[\"parts\"];\n\n if (updated) {\n // Create the updated message and strip OpenAI item IDs\n const updatedMessage: ChatMessage = this._sanitizeMessageForPersistence(\n {\n ...message,\n parts: updatedParts\n }\n );\n\n // Persist the updated message\n this.sql`\n update cf_ai_chat_agent_messages \n set message = ${JSON.stringify(updatedMessage)}\n where id = ${message.id}\n `;\n\n // Reload messages to update in-memory state\n const persisted = this._loadMessagesFromDb();\n this.messages = autoTransformMessages(persisted);\n }\n }\n\n if (!updated) {\n console.warn(\n `[AIChatAgent] _applyToolResult: Tool part with toolCallId ${toolCallId} not in input-available state`\n );\n return false;\n }\n\n // Broadcast the update to all clients (only for persisted messages)\n // For streaming messages, the update will be included when persisted\n if (!isStreamingMessage) {\n // Re-fetch the message for broadcast since we modified it\n const broadcastMessage = this._findMessageByToolCallId(toolCallId);\n if (broadcastMessage) {\n this._broadcastChatMessage({\n type: MessageType.CF_AGENT_MESSAGE_UPDATED,\n message: broadcastMessage\n });\n }\n }\n\n // Note: We don't automatically continue the conversation here.\n // The client is responsible for sending a follow-up request if needed.\n // This avoids 
re-entering onChatMessage with unexpected state.\n\n return true;\n }\n\n private async _reply(\n id: string,\n response: Response,\n excludeBroadcastIds: string[] = [],\n options: { continuation?: boolean } = {}\n ) {\n const { continuation = false } = options;\n\n return this._tryCatchChat(async () => {\n if (!response.body) {\n // Send empty response if no body\n this._broadcastChatMessage({\n body: \"\",\n done: true,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n return;\n }\n\n // Start tracking this stream for resumability\n const streamId = this._startStream(id);\n\n /* Lazy loading ai sdk, because putting it in module scope is\n * causing issues with startup time.\n * The only place it's used is in _reply, which only matters after\n * a chat message is received.\n * So it's safe to delay loading it until a chat message is received.\n */\n const { getToolName, isToolUIPart, parsePartialJson } =\n await import(\"ai\");\n\n const reader = response.body.getReader();\n\n // Parsing state adapted from:\n // https://github.com/vercel/ai/blob/main/packages/ai/src/ui-message-stream/ui-message-chunks.ts#L295\n const message: ChatMessage = {\n id: `assistant_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`, // default\n role: \"assistant\",\n parts: []\n };\n // Track the streaming message so tool results can be applied before persistence\n this._streamingMessage = message;\n // Set up completion promise for tool continuation to wait on\n this._streamCompletionPromise = new Promise((resolve) => {\n this._streamCompletionResolve = resolve;\n });\n let activeTextParts: Record<string, TextUIPart> = {};\n let activeReasoningParts: Record<string, ReasoningUIPart> = {};\n const partialToolCalls: Record<\n string,\n { text: string; index: number; toolName: string; dynamic?: boolean }\n > = {};\n\n function updateDynamicToolPart(\n options: {\n toolName: string;\n toolCallId: string;\n providerExecuted?: boolean;\n } & (\n | {\n state: \"input-streaming\";\n input: unknown;\n }\n | {\n state: \"input-available\";\n input: unknown;\n providerMetadata?: ProviderMetadata;\n }\n | {\n state: \"output-available\";\n input: unknown;\n output: unknown;\n preliminary: boolean | undefined;\n }\n | {\n state: \"output-error\";\n input: unknown;\n errorText: string;\n providerMetadata?: ProviderMetadata;\n }\n )\n ) {\n const part = message.parts.find(\n (part) =>\n part.type === \"dynamic-tool\" &&\n part.toolCallId === options.toolCallId\n ) as DynamicToolUIPart | undefined;\n\n const anyOptions = options as Record<string, unknown>;\n const anyPart = part as Record<string, unknown>;\n\n if (part != null) {\n part.state = options.state;\n anyPart.toolName = options.toolName;\n anyPart.input = anyOptions.input;\n anyPart.output = anyOptions.output;\n anyPart.errorText = anyOptions.errorText;\n anyPart.rawInput = anyOptions.rawInput ?? anyPart.rawInput;\n anyPart.preliminary = anyOptions.preliminary;\n\n if (\n anyOptions.providerMetadata != null &&\n part.state === \"input-available\"\n ) {\n part.callProviderMetadata =\n anyOptions.providerMetadata as ProviderMetadata;\n }\n } else {\n message.parts.push({\n type: \"dynamic-tool\",\n toolName: options.toolName,\n toolCallId: options.toolCallId,\n state: options.state,\n input: anyOptions.input,\n output: anyOptions.output,\n errorText: anyOptions.errorText,\n preliminary: anyOptions.preliminary,\n ...(anyOptions.providerMetadata != null\n ? 
{ callProviderMetadata: anyOptions.providerMetadata }\n : {})\n } as DynamicToolUIPart);\n }\n }\n\n function updateToolPart(\n options: {\n toolName: string;\n toolCallId: string;\n providerExecuted?: boolean;\n } & (\n | {\n state: \"input-streaming\";\n input: unknown;\n providerExecuted?: boolean;\n }\n | {\n state: \"input-available\";\n input: unknown;\n providerExecuted?: boolean;\n providerMetadata?: ProviderMetadata;\n }\n | {\n state: \"output-available\";\n input: unknown;\n output: unknown;\n providerExecuted?: boolean;\n preliminary?: boolean;\n }\n | {\n state: \"output-error\";\n input: unknown;\n rawInput?: unknown;\n errorText: string;\n providerExecuted?: boolean;\n providerMetadata?: ProviderMetadata;\n }\n )\n ) {\n const part = message.parts.find(\n (part) =>\n isToolUIPart(part) &&\n (part as ToolUIPart).toolCallId === options.toolCallId\n ) as ToolUIPart | undefined;\n\n const anyOptions = options as Record<string, unknown>;\n const anyPart = part as Record<string, unknown>;\n\n if (part != null) {\n part.state = options.state;\n anyPart.input = anyOptions.input;\n anyPart.output = anyOptions.output;\n anyPart.errorText = anyOptions.errorText;\n anyPart.rawInput = anyOptions.rawInput;\n anyPart.preliminary = anyOptions.preliminary;\n\n // once providerExecuted is set, it stays for streaming\n anyPart.providerExecuted =\n anyOptions.providerExecuted ?? part.providerExecuted;\n\n if (\n anyOptions.providerMetadata != null &&\n part.state === \"input-available\"\n ) {\n part.callProviderMetadata =\n anyOptions.providerMetadata as ProviderMetadata;\n }\n } else {\n message.parts.push({\n type: `tool-${options.toolName}`,\n toolCallId: options.toolCallId,\n state: options.state,\n input: anyOptions.input,\n output: anyOptions.output,\n rawInput: anyOptions.rawInput,\n errorText: anyOptions.errorText,\n providerExecuted: anyOptions.providerExecuted,\n preliminary: anyOptions.preliminary,\n ...(anyOptions.providerMetadata != null\n ? { callProviderMetadata: anyOptions.providerMetadata }\n : {})\n } as ToolUIPart);\n }\n }\n\n async function updateMessageMetadata(metadata: unknown) {\n if (metadata != null) {\n const mergedMetadata =\n message.metadata != null\n ? 
{ ...message.metadata, ...metadata } // TODO: do proper merging\n : metadata;\n\n message.metadata = mergedMetadata;\n }\n }\n\n let streamCompleted = false;\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n // Mark the stream as completed\n this._completeStream(streamId);\n streamCompleted = true;\n // Send final completion signal\n this._broadcastChatMessage({\n body: \"\",\n done: true,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n break;\n }\n\n const chunk = decoder.decode(value);\n\n // Determine response format based on content-type\n const contentType = response.headers.get(\"content-type\") || \"\";\n const isSSE = contentType.includes(\"text/event-stream\");\n\n // After streaming is complete, persist the complete assistant's response\n if (isSSE) {\n // Parse AI SDK v5 SSE format and extract text deltas\n const lines = chunk.split(\"\\n\");\n for (const line of lines) {\n if (line.startsWith(\"data: \") && line !== \"data: [DONE]\") {\n try {\n const data: UIMessageChunk = JSON.parse(line.slice(6)); // Remove 'data: ' prefix\n switch (data.type) {\n case \"text-start\": {\n const textPart: TextUIPart = {\n type: \"text\",\n text: \"\",\n providerMetadata: data.providerMetadata,\n state: \"streaming\"\n };\n activeTextParts[data.id] = textPart;\n message.parts.push(textPart);\n break;\n }\n\n case \"text-delta\": {\n const textPart = activeTextParts[data.id];\n textPart.text += data.delta;\n textPart.providerMetadata =\n data.providerMetadata ?? textPart.providerMetadata;\n break;\n }\n\n case \"text-end\": {\n const textPart = activeTextParts[data.id];\n textPart.state = \"done\";\n textPart.providerMetadata =\n data.providerMetadata ?? textPart.providerMetadata;\n delete activeTextParts[data.id];\n break;\n }\n\n case \"reasoning-start\": {\n const reasoningPart: ReasoningUIPart = {\n type: \"reasoning\",\n text: \"\",\n providerMetadata: data.providerMetadata,\n state: \"streaming\"\n };\n activeReasoningParts[data.id] = reasoningPart;\n message.parts.push(reasoningPart);\n break;\n }\n\n case \"reasoning-delta\": {\n const reasoningPart = activeReasoningParts[data.id];\n reasoningPart.text += data.delta;\n reasoningPart.providerMetadata =\n data.providerMetadata ?? reasoningPart.providerMetadata;\n break;\n }\n\n case \"reasoning-end\": {\n const reasoningPart = activeReasoningParts[data.id];\n reasoningPart.providerMetadata =\n data.providerMetadata ?? 
reasoningPart.providerMetadata;\n reasoningPart.state = \"done\";\n delete activeReasoningParts[data.id];\n\n break;\n }\n\n case \"file\": {\n message.parts.push({\n type: \"file\",\n mediaType: data.mediaType,\n url: data.url\n });\n\n break;\n }\n\n case \"source-url\": {\n message.parts.push({\n type: \"source-url\",\n sourceId: data.sourceId,\n url: data.url,\n title: data.title,\n providerMetadata: data.providerMetadata\n });\n\n break;\n }\n\n case \"source-document\": {\n message.parts.push({\n type: \"source-document\",\n sourceId: data.sourceId,\n mediaType: data.mediaType,\n title: data.title,\n filename: data.filename,\n providerMetadata: data.providerMetadata\n });\n\n break;\n }\n\n case \"tool-input-start\": {\n const toolInvocations =\n message.parts.filter(isToolUIPart);\n\n // add the partial tool call to the map\n partialToolCalls[data.toolCallId] = {\n text: \"\",\n toolName: data.toolName,\n index: toolInvocations.length,\n dynamic: data.dynamic\n };\n\n if (data.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-streaming\",\n input: undefined\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-streaming\",\n input: undefined\n });\n }\n\n break;\n }\n\n case \"tool-input-delta\": {\n const partialToolCall = partialToolCalls[data.toolCallId];\n\n partialToolCall.text += data.inputTextDelta;\n\n const partialArgsResult = await parsePartialJson(\n partialToolCall.text\n );\n const partialArgs = (\n partialArgsResult as { value: Record<string, unknown> }\n ).value;\n\n if (partialToolCall.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: partialToolCall.toolName,\n state: \"input-streaming\",\n input: partialArgs\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: partialToolCall.toolName,\n state: \"input-streaming\",\n input: partialArgs\n });\n }\n\n break;\n }\n\n case \"tool-input-available\": {\n if (data.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-available\",\n input: data.input,\n providerMetadata: data.providerMetadata\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-available\",\n input: data.input,\n providerExecuted: data.providerExecuted,\n providerMetadata: data.providerMetadata\n });\n }\n\n // TODO: Do we want to expose onToolCall?\n\n // invoke the onToolCall callback if it exists. 
This is blocking.\n // In the future we should make this non-blocking, which\n // requires additional state management for error handling etc.\n // Skip calling onToolCall for provider-executed tools since they are already executed\n // if (onToolCall && !data.providerExecuted) {\n // await onToolCall({\n // toolCall: data\n // });\n // }\n break;\n }\n\n case \"tool-input-error\": {\n if (data.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"output-error\",\n input: data.input,\n errorText: data.errorText,\n providerMetadata: data.providerMetadata\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"output-error\",\n input: undefined,\n rawInput: data.input,\n errorText: data.errorText,\n providerExecuted: data.providerExecuted,\n providerMetadata: data.providerMetadata\n });\n }\n\n break;\n }\n\n case \"tool-output-available\": {\n if (data.dynamic) {\n const toolInvocations = message.parts.filter(\n (part) => part.type === \"dynamic-tool\"\n ) as DynamicToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: toolInvocation.toolName,\n state: \"output-available\",\n input: toolInvocation.input,\n output: data.output,\n preliminary: data.preliminary\n });\n } else {\n const toolInvocations = message.parts.filter(\n isToolUIPart\n ) as ToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: getToolName(toolInvocation),\n state: \"output-available\",\n input: toolInvocation.input,\n output: data.output,\n providerExecuted: data.providerExecuted,\n preliminary: data.preliminary\n });\n }\n\n break;\n }\n\n case \"tool-output-error\": {\n if (data.dynamic) {\n const toolInvocations = message.parts.filter(\n (part) => part.type === \"dynamic-tool\"\n ) as DynamicToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: toolInvocation.toolName,\n state: \"output-error\",\n input: toolInvocation.input,\n errorText: data.errorText\n });\n } else {\n const toolInvocations = message.parts.filter(\n isToolUIPart\n ) as ToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: getToolName(toolInvocation),\n state: \"output-error\",\n input: toolInvocation.input,\n rawInput:\n \"rawInput\" in toolInvocation\n ? 
toolInvocation.rawInput\n : undefined,\n errorText: data.errorText\n });\n }\n\n break;\n }\n\n case \"start-step\": {\n // add a step boundary part to the message\n message.parts.push({ type: \"step-start\" });\n break;\n }\n\n case \"finish-step\": {\n // reset the current text and reasoning parts\n activeTextParts = {};\n activeReasoningParts = {};\n break;\n }\n\n case \"start\": {\n if (data.messageId != null) {\n message.id = data.messageId;\n }\n\n await updateMessageMetadata(data.messageMetadata);\n\n break;\n }\n\n case \"finish\": {\n await updateMessageMetadata(data.messageMetadata);\n break;\n }\n\n case \"message-metadata\": {\n await updateMessageMetadata(data.messageMetadata);\n break;\n }\n\n case \"error\": {\n this._broadcastChatMessage({\n error: true,\n body: data.errorText ?? JSON.stringify(data),\n done: false,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n });\n\n break;\n }\n // Do we want to handle data parts?\n }\n\n // Convert internal AI SDK stream events to valid UIMessageStreamPart format.\n // The \"finish\" event with \"finishReason\" is an internal LanguageModelV3StreamPart,\n // not a UIMessageStreamPart (which expects \"messageMetadata\" instead).\n // See: https://github.com/cloudflare/agents/issues/677\n let eventToSend: unknown = data;\n if (data.type === \"finish\" && \"finishReason\" in data) {\n const { finishReason, ...rest } = data as {\n finishReason: string;\n [key: string]: unknown;\n };\n eventToSend = {\n ...rest,\n type: \"finish\",\n messageMetadata: { finishReason }\n };\n }\n\n // Store chunk for replay on reconnection\n const chunkBody = JSON.stringify(eventToSend);\n this._storeStreamChunk(streamId, chunkBody);\n\n // Forward the converted event to the client\n this._broadcastChatMessage({\n body: chunkBody,\n done: false,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n } catch (_error) {\n // Skip malformed JSON lines silently\n }\n }\n }\n } else {\n // Handle plain text responses (e.g., from generateText)\n // Treat the entire chunk as a text delta to preserve exact formatting\n if (chunk.length > 0) {\n message.parts.push({ type: \"text\", text: chunk });\n // Synthesize a text-delta event so clients can stream-render\n const chunkBody = JSON.stringify({\n type: \"text-delta\",\n delta: chunk\n });\n // Store chunk for replay on reconnection\n this._storeStreamChunk(streamId, chunkBody);\n this._broadcastChatMessage({\n body: chunkBody,\n done: false,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n }\n }\n }\n } catch (error) {\n // Mark stream as error if not already completed\n if (!streamCompleted) {\n this._markStreamError(streamId);\n // Notify clients of the error\n this._broadcastChatMessage({\n body: error instanceof Error ? 
error.message : \"Stream error\",\n done: true,\n error: true,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n }\n throw error;\n } finally {\n reader.releaseLock();\n }\n\n if (message.parts.length > 0) {\n if (continuation) {\n // Find the last assistant message and append parts to it\n let lastAssistantIdx = -1;\n for (let i = this.messages.length - 1; i >= 0; i--) {\n if (this.messages[i].role === \"assistant\") {\n lastAssistantIdx = i;\n break;\n }\n }\n if (lastAssistantIdx >= 0) {\n const lastAssistant = this.messages[lastAssistantIdx];\n const mergedMessage: ChatMessage = {\n ...lastAssistant,\n parts: [...lastAssistant.parts, ...message.parts]\n };\n const updatedMessages = [...this.messages];\n updatedMessages[lastAssistantIdx] = mergedMessage;\n await this.persistMessages(updatedMessages, excludeBroadcastIds);\n } else {\n // No assistant message to append to, create new one\n await this.persistMessages(\n [...this.messages, message],\n excludeBroadcastIds\n );\n }\n } else {\n await this.persistMessages(\n [...this.messages, message],\n excludeBroadcastIds\n );\n }\n }\n\n // Clear the streaming message reference and resolve completion promise\n this._streamingMessage = null;\n if (this._streamCompletionResolve) {\n this._streamCompletionResolve();\n this._streamCompletionResolve = null;\n this._streamCompletionPromise = null;\n }\n });\n }\n\n /**\n * Mark a stream as errored and clean up state.\n * @param streamId - The stream to mark as errored\n * @internal Protected for testing purposes.\n */\n protected _markStreamError(streamId: string) {\n // Flush any pending chunks before marking error\n this._flushChunkBuffer();\n\n this.sql`\n update cf_ai_chat_stream_metadata \n set status = 'error', completed_at = ${Date.now()} \n where id = ${streamId}\n `;\n this._activeStreamId = null;\n this._activeRequestId = null;\n this._streamChunkIndex = 0;\n }\n\n /**\n * For the given message id, look up its associated AbortController\n * If the AbortController does not exist, create and store one in memory\n *\n * returns the AbortSignal associated with the AbortController\n */\n private _getAbortSignal(id: string): AbortSignal | undefined {\n // Defensive check, since we're coercing message types at the moment\n if (typeof id !== \"string\") {\n return undefined;\n }\n\n if (!this._chatMessageAbortControllers.has(id)) {\n this._chatMessageAbortControllers.set(id, new AbortController());\n }\n\n return this._chatMessageAbortControllers.get(id)?.signal;\n }\n\n /**\n * Remove an abort controller from the cache of pending message responses\n */\n private _removeAbortController(id: string) {\n this._chatMessageAbortControllers.delete(id);\n }\n\n /**\n * Propagate an abort signal for any requests associated with the given message id\n */\n private _cancelChatRequest(id: string) {\n if (this._chatMessageAbortControllers.has(id)) {\n const abortController = this._chatMessageAbortControllers.get(id);\n abortController?.abort();\n }\n }\n\n /**\n * Abort all pending requests and clear the cache of AbortControllers\n */\n private _destroyAbortControllers() {\n for (const controller of this._chatMessageAbortControllers.values()) {\n controller?.abort();\n }\n this._chatMessageAbortControllers.clear();\n }\n\n /**\n * When the DO is destroyed, cancel all pending requests and clean up resources\n */\n async destroy() {\n this._destroyAbortControllers();\n\n // Flush any remaining chunks before cleanup\n this._flushChunkBuffer();\n\n // 
Clean up stream tables\n this.sql`drop table if exists cf_ai_chat_stream_chunks`;\n this.sql`drop table if exists cf_ai_chat_stream_metadata`;\n\n // Clear active stream state\n this._activeStreamId = null;\n this._activeRequestId = null;\n\n await super.destroy();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwGA,SAAgB,6BACd,aACS;AACT,KAAI,CAAC,eAAe,YAAY,WAAW,EACzC,QAAO,EAAE;CAIX,MAAM,4BAAY,IAAI,KAAa;AACnC,MAAK,MAAM,KAAK,aAAa;AAC3B,MAAI,UAAU,IAAI,EAAE,KAAK,CACvB,SAAQ,KACN,uDAAuD,EAAE,KAAK,wDAC/D;AAEH,YAAU,IAAI,EAAE,KAAK;;AAGvB,QAAO,OAAO,YACZ,YAAY,KAAK,MAAM,CACrB,EAAE,MACF,KAAK;EACH,aAAa,EAAE,eAAe;EAC9B,aAAa,WAAW,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;EAE5D,CAAC,CACH,CAAC,CACH;;;AAIH,MAAM,oBAAoB;;AAE1B,MAAM,wBAAwB;;AAE9B,MAAM,4BAA4B,MAAS;;AAE3C,MAAM,sBAAsB,MAAU;;AAEtC,MAAM,2BAA2B,OAAU,KAAK;AAEhD,MAAM,UAAU,IAAI,aAAa;;;;;AA4BjC,IAAa,cAAb,cAGU,MAAkB;CAgE1B,YAAY,KAAmB,KAAU;AACvC,QAAM,KAAK,IAAI;yBArD0B;0BAMC;2BAOI;kCAOS;kCACD;2BAK5B;sBAWvB,EAAE;2BAKqB;0BAKD;AAOzB,OAAK,GAAG;;;;;AAOR,OAAK,GAAG;;;;;;;AAQR,OAAK,GAAG;;;;;;;AAQR,OAAK,GAAG;;AAOR,OAAK,WAAW,sBAHI,KAAK,qBAAqB,CAGI;AAElD,OAAK,+CAA+B,IAAI,KAAK;AAG7C,OAAK,sBAAsB;EAC3B,MAAM,aAAa,KAAK,UAAU,KAAK,KAAK;AAC5C,OAAK,YAAY,OAAO,YAAwB,UAA2B;AAEzE,OAAI,KAAK,gBACP,MAAK,sBAAsB,WAAW;AAGxC,UAAO,WAAW,YAAYA,MAAI;;EAIpC,MAAM,aAAa,KAAK,UAAU,KAAK,KAAK;AAC5C,OAAK,YAAY,OAAO,YAAwB,YAAuB;AAErE,OAAI,OAAO,YAAY,UAAU;IAC/B,IAAIC;AACJ,QAAI;AACF,YAAO,KAAK,MAAM,QAAQ;aACnB,QAAQ;AAEf,YAAO,WAAW,YAAY,QAAQ;;AAIxC,QACE,KAAK,SAAS,YAAY,6BAC1B,KAAK,KAAK,WAAW,QACrB;KACA,MAAM,EAAE,SAAS,KAAK;KAEtB,MAAM,EAAE,UAAU,gBADH,KAAK,MAAM,KAAe;KAOzC,MAAM,sBAAsB,sBAAsB,SAAS;AAE3D,UAAK,sBACH;MACE,UAAU;MACV,MAAM,YAAY;MACnB,EACD,CAAC,WAAW,GAAG,CAChB;AAED,WAAM,KAAK,gBAAgB,qBAAqB,CAAC,WAAW,GAAG,CAAC;AAEhE,UAAK,eAAe,KAClB;MACE,gBAAgB;MAChB,IAAI,KAAK;MACT,SAAS,EAAE;MACX,WAAW,KAAK,KAAK;MACrB,MAAM;MACP,EACD,KAAK,IACN;KAED,MAAM,gBAAgB,KAAK;KAC3B,MAAM,cAAc,KAAK,gBAAgB,cAAc;AAEvD,YAAO,KAAK,cAAc,YAAY;AAGpC,aAAO,aAAa,IAClB;OAAE,OAAO;OAAM;OAAY,SAAS;OAAW,OAAO;OAAW,EACjE,YAAY;OACV,MAAM,WAAW,MAAM,KAAK,cAC1B,OAAO,kBAAkB;AACvB,aAAK,uBAAuB,cAAc;AAE1C,aAAK,eAAe,KAClB;SACE,gBAAgB;SAChB,IAAI,KAAK;SACT,SAAS,EAAE;SACX,WAAW,KAAK,KAAK;SACrB,MAAM;SACP,EACD,KAAK,IACN;UAEH;QACE;QACA;QACD,CACF;AAED,WAAI,SACF,OAAM,KAAK,OAAO,KAAK,IAAI,UAAU,CAAC,WAAW,GAAG,CAAC;YAChD;AACL,gBAAQ,KACN,uEAAuE,gBACxE;AACD,aAAK,sBACH;SACE,MAAM;SACN,MAAM;SACN,IAAI,KAAK;SACT,MAAM,YAAY;SACnB,EACD,CAAC,WAAW,GAAG,CAChB;;QAGN;OACD;;AAIJ,QAAI,KAAK,SAAS,YAAY,qBAAqB;AACjD,UAAK,0BAA0B;AAC/B,UAAK,GAAG;AACR,UAAK,GAAG;AACR,UAAK,GAAG;AACR,UAAK,kBAAkB;AACvB,UAAK,mBAAmB;AACxB,UAAK,oBAAoB;AACzB,UAAK,WAAW,EAAE;AAClB,UAAK,sBACH,EAAE,MAAM,YAAY,qBAAqB,EACzC,CAAC,WAAW,GAAG,CAChB;AACD;;AAIF,QAAI,KAAK,SAAS,YAAY,wBAAwB;KACpD,MAAM,sBAAsB,sBAAsB,KAAK,SAAS;AAChE,WAAM,KAAK,gBAAgB,qBAAqB,CAAC,WAAW,GAAG,CAAC;AAChE;;AAIF,QAAI,KAAK,SAAS,YAAY,8BAA8B;AAC1D,UAAK,mBAAmB,KAAK,GAAG;AAChC;;AAIF,QAAI,KAAK,SAAS,YAAY,4BAA4B;AACxD,SACE,KAAK,mBACL,KAAK,oBACL,KAAK,qBAAqB,KAAK,GAE/B,MAAK,kBACH,YACA,KAAK,iBACL,KAAK,iBACN;AAEH;;AAIF,QAAI,KAAK,SAAS,YAAY,sBAAsB;KAClD,MAAM,EAAE,YAAY,UAAU,QAAQ,iBAAiB;AAGvD,UAAK,iBAAiB,YAAY,UAAU,OAAO,CAAC,MACjD,YAAY;AAIX,UAAI,WAAW,cAAc;OAG3B,MAAM,gBAAgB,YAAY;AAChC,YAAI,KAAK,yBACP,OAAM,KAAK;YAGX,OAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAI5D,sBAAe,CAAC,WAAW;QACzB,MAAM,iBAAiB,QAAQ;QAC/B,MAAM,cAAc,KAAK,gBAAgB,eAAe;AAExD,aAAK,cAAc,YAAY;AAC7B,gBAAO,aAAa,IAClB;UACE,OAAO;UACP;UACA,SAAS;UACT,OAAO;UACR,EACD,YAAY;UACV,MAAM,WAAW,MAAM,KAAK,cAC1B,OAAO,kBAAkB;AACvB,gBAAK,uBAAuB,eAAe;AAE3C,gBAAK,eAAe,KAClB;YACE,gBACE;YACF,IAAI;YACJ,SAAS,EAAE;YACX,WAAW,KAAK,KAAK;YACrB,MAAM;YA
CP,EACD,KAAK,IACN;aAEH,EACE,aACD,CACF;AAED,cAAI,SAMF,OAAM,KAAK,OACT,gBACA,UACA,EAAE,EACF,EAAE,cAAc,MAAM,CACvB;WAGN;UACD;SACF;;OAGP;AACD;;;AAKJ,UAAO,WAAW,YAAY,QAAQ;;;;;;;;;CAU1C,AAAU,uBAAuB;EAC/B,MAAM,gBAAgB,KAAK,GAAmB;;;;;;AAO9C,MAAI,iBAAiB,cAAc,SAAS,GAAG;GAC7C,MAAM,SAAS,cAAc;GAC7B,MAAM,YAAY,KAAK,KAAK,GAAG,OAAO;AAGtC,OAAI,YAAY,2BAA2B;AACzC,SACG,GAAG,0DAA0D,OAAO;AACvE,SACG,GAAG,qDAAqD,OAAO;AAClE,YAAQ,KACN,sCAAsC,OAAO,GAAG,SAAS,KAAK,MAAM,YAAY,IAAK,CAAC,IACvF;AACD;;AAGF,QAAK,kBAAkB,OAAO;AAC9B,QAAK,mBAAmB,OAAO;GAG/B,MAAM,YAAY,KAAK,GAA0B;;;4BAG3B,KAAK,gBAAgB;;AAE3C,QAAK,oBACH,aAAa,UAAU,IAAI,aAAa,OACpC,UAAU,GAAG,YAAY,IACzB;;;;;;;;;CAUV,AAAQ,sBAAsB,YAAwB;AACpD,MAAI,CAAC,KAAK,mBAAmB,CAAC,KAAK,iBACjC;AAIF,aAAW,KACT,KAAK,UAAU;GACb,MAAM,YAAY;GAClB,IAAI,KAAK;GACV,CAAC,CACH;;;;;;;;CASH,AAAQ,kBACN,YACA,UACA,WACA;AAEA,OAAK,mBAAmB;EAExB,MAAM,SAAS,KAAK,GAAgB;;0BAEd,SAAS;;;AAK/B,OAAK,MAAM,SAAS,UAAU,EAAE,CAC9B,YAAW,KACT,KAAK,UAAU;GACb,MAAM,MAAM;GACZ,MAAM;GACN,IAAI;GACJ,MAAM,YAAY;GACnB,CAAC,CACH;AAKH,MAAI,KAAK,oBAAoB,SAC3B,YAAW,KACT,KAAK,UAAU;GACb,MAAM;GACN,MAAM;GACN,IAAI;GACJ,MAAM,YAAY;GACnB,CAAC,CACH;;;;;;;;CAUL,AAAU,kBAAkB,UAAkB,MAAc;AAE1D,MAAI,KAAK,aAAa,UAAU,sBAC9B,MAAK,mBAAmB;AAG1B,OAAK,aAAa,KAAK;GACrB,IAAI,QAAQ;GACZ;GACA;GACA,OAAO,KAAK;GACb,CAAC;AACF,OAAK;AAGL,MAAI,KAAK,aAAa,UAAU,kBAC9B,MAAK,mBAAmB;;;;;;;CAS5B,AAAU,oBAAoB;AAE5B,MAAI,KAAK,qBAAqB,KAAK,aAAa,WAAW,EACzD;AAGF,OAAK,oBAAoB;AACzB,MAAI;GACF,MAAM,SAAS,KAAK;AACpB,QAAK,eAAe,EAAE;GAGtB,MAAM,MAAM,KAAK,KAAK;AACtB,QAAK,MAAM,SAAS,OAClB,MAAK,GAAG;;oBAEI,MAAM,GAAG,IAAI,MAAM,SAAS,IAAI,MAAM,KAAK,IAAI,MAAM,MAAM,IAAI,IAAI;;YAGzE;AACR,QAAK,oBAAoB;;;;;;;;;;CAW7B,AAAU,aAAa,WAA2B;AAEhD,OAAK,mBAAmB;EAExB,MAAM,WAAW,QAAQ;AACzB,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AACxB,OAAK,oBAAoB;AAEzB,OAAK,GAAG;;gBAEI,SAAS,IAAI,UAAU,iBAAiB,KAAK,KAAK,CAAC;;AAG/D,SAAO;;;;;;;CAQT,AAAU,gBAAgB,UAAkB;AAE1C,OAAK,mBAAmB;AAExB,OAAK,GAAG;;iDAEqC,KAAK,KAAK,CAAC;mBACzC,SAAS;;AAExB,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AACxB,OAAK,oBAAoB;AAGzB,OAAK,yBAAyB;;;;;;CAOhC,AAAQ,0BAA0B;EAChC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,mBAAmB,oBAChC;AAEF,OAAK,mBAAmB;EAExB,MAAM,SAAS,MAAM;AACrB,OAAK,GAAG;;;;wDAI4C,OAAO;;;AAG3D,OAAK,GAAG;;sDAE0C,OAAO;;;CAI3D,AAAQ,sBAAsB,SAA0B,SAAoB;AAC1E,OAAK,UAAU,KAAK,UAAU,QAAQ,EAAE,QAAQ;;CAGlD,AAAQ,sBAAqC;AAI3C,UAFE,KAAK,GAAG,iEACR,EAAE,EAED,KAAK,QAAQ;AACZ,OAAI;AACF,WAAO,KAAK,MAAM,IAAI,QAAkB;YACjC,OAAO;AACd,YAAQ,MAAM,2BAA2B,IAAI,GAAG,IAAI,MAAM;AAC1D,WAAO;;IAET,CACD,QAAQ,QAA4B,QAAQ,KAAK;;CAGtD,MAAe,UAAU,SAAqC;AAC5D,SAAO,KAAK,cAAc,YAAY;AAGpC,OAFY,IAAI,IAAI,QAAQ,IAAI,CAExB,SAAS,SAAS,gBAAgB,EAAE;IAC1C,MAAM,WAAW,KAAK,qBAAqB;AAC3C,WAAO,SAAS,KAAK,SAAS;;AAGhC,UAAO,MAAM,UAAU,QAAQ;IAC/B;;CAGJ,MAAc,cAAiB,IAA0B;AACvD,MAAI;AACF,UAAO,MAAM,IAAI;WACV,GAAG;AACV,SAAM,KAAK,QAAQ,EAAE;;;;;;;;;CAUzB,MAAM,cAEJ,UAEA,SAC+B;AAC/B,QAAM,IAAI,MACR,8FACD;;;;;;CAOH,MAAM,aAAa,UAAyB;AAC1C,QAAM,KAAK,gBAAgB,SAAS;AACpC,QAAM,KAAK,cAAc,YAAY;GACnC,MAAM,WAAW,MAAM,KAAK,oBAAoB,GAAG;AACnD,OAAI,SAAU,MAAK,OAAO,OAAO,YAAY,EAAE,SAAS;IACxD;;CAGJ,MAAM,gBACJ,UACA,sBAAgC,EAAE,EAClC;EAIA,MAAM,iBAAiB,KAAK,8BAA8B,SAAS;AAGnE,OAAK,MAAM,WAAW,gBAAgB;GAIpC,MAAM,mBAAmB,KAAK,+BAA+B,QAAQ;GACrE,MAAM,gBAAgB,KAAK,4BAA4B,iBAAiB;AACxE,QAAK,GAAG;;kBAEI,cAAc,GAAG,IAAI,KAAK,UAAU,cAAc,CAAC;;;;AAOjE,OAAK,WAAW,sBADE,KAAK,qBAAqB,CACI;AAChD,OAAK,sBACH;GACE,UAAU;GACV,MAAM,YAAY;GACnB,EACD,oBACD;;;;;;;;;;CAWH,AAAQ,8BACN,kBACe;EAEf,MAAM,oCAAoB,IAAI,KAAsB;AACpD,OAAK,MAAM,OAAO,KAAK,UAAU;AAC/B,OAAI,IAAI,SAAS,YAAa;AAC9B,QAAK,MAAM,QAAQ,IAAI,MACrB,KACE,gBAAgB,QAChB,WAAW,QACX,KAAK,UAAU,sBACf,YAAY,KAEZ,mBAAkB,IAChB,KAAK,YACJ,KAA6B,OAC/B;;AAMP,MAAI,kBAAkB,SAAS,EAC7B,QAAO;AAIT,S
AAO,iBAAiB,KAAK,QAAQ;AACnC,OAAI,IAAI,SAAS,YAAa,QAAO;GAErC,IAAI,aAAa;GACjB,MAAM,eAAe,IAAI,MAAM,KAAK,SAAS;AAE3C,QACE,gBAAgB,QAChB,WAAW,QACX,KAAK,UAAU,qBACf,kBAAkB,IAAI,KAAK,WAAqB,EAChD;AACA,kBAAa;AACb,YAAO;MACL,GAAG;MACH,OAAO;MACP,QAAQ,kBAAkB,IAAI,KAAK,WAAqB;MACzD;;AAEH,WAAO;KACP;AAEF,UAAO,aAAa;IAAE,GAAG;IAAK,OAAO;IAAc,GAAG;IACtD;;;;;;;;;;;;CAaJ,AAAQ,4BAA4B,SAAmC;AACrE,MAAI,QAAQ,SAAS,YACnB,QAAO;AAIT,OAAK,MAAM,QAAQ,QAAQ,MACzB,KACE,gBAAgB,QAChB,WAAW,QACX,KAAK,UAAU,oBACf;GACA,MAAM,aAAa,KAAK;GAGxB,MAAM,kBAAkB,KAAK,yBAAyB,WAAW;AACjE,OAAI,mBAAmB,gBAAgB,OAAO,QAAQ,GAGpD,QAAO;IACL,GAAG;IACH,IAAI,gBAAgB;IACrB;;AAKP,SAAO;;;;;;;;;;CAWT,AAAQ,yBACN,YACyB;AACzB,OAAK,MAAM,OAAO,KAAK,UAAU;AAC/B,OAAI,IAAI,SAAS,YAAa;AAE9B,QAAK,MAAM,QAAQ,IAAI,MACrB,KAAI,gBAAgB,QAAQ,KAAK,eAAe,WAC9C,QAAO;;;;;;;;;;;;;;;;;;;;;CAyBf,AAAQ,+BAA+B,SAAmC;EAexE,MAAM,iBAbgB,QAAQ,MAAM,QAAQ,SAAS;AACnD,OAAI,KAAK,SAAS,aAAa;IAC7B,MAAM,gBAAgB;AAGtB,QAAI,CAAC,cAAc,QAAQ,cAAc,KAAK,MAAM,KAAK,GACvD,QAAO;;AAGX,UAAO;IACP,CAGmC,KAAK,SAAS;GACjD,IAAI,gBAAgB;AAGpB,OACE,sBAAsB,iBACtB,cAAc,oBACd,OAAO,cAAc,qBAAqB,YAC1C,YAAY,cAAc,iBAE1B,iBAAgB,KAAK,qBACnB,eACA,mBACD;AAIH,OACE,0BAA0B,iBAC1B,cAAc,wBACd,OAAO,cAAc,yBAAyB,YAC9C,YAAY,cAAc,qBAE1B,iBAAgB,KAAK,qBACnB,eACA,uBACD;AAGH,UAAO;IACP;AAEF,SAAO;GAAE,GAAG;GAAS,OAAO;GAAgB;;;;;;CAO9C,AAAQ,qBACN,MACA,aACG;EACH,MAAM,WAAY,KAAiC;AAKnD,MAAI,CAAC,UAAU,OAAQ,QAAO;EAK9B,MAAM,EACJ,QAAQ,SACR,2BAA2B,MAC3B,GAAG,eANc,SAAS;EAU5B,MAAM,uBAAuB,OAAO,KAAK,WAAW,CAAC,SAAS;EAC9D,MAAM,EAAE,QAAQ,SAAS,GAAG,iBAAiB;EAE7C,IAAIC;AACJ,MAAI,qBACF,eAAc;GACZ,GAAG;GACH,QAAQ;GACT;WACQ,OAAO,KAAK,aAAa,CAAC,SAAS,EAC5C,eAAc;EAIhB,MAAM,GAAG,cAAc,UAAU,GAAG,aAAa;AAKjD,MAAI,YACF,QAAO;GAAE,GAAG;IAAW,cAAc;GAAa;AAEpD,SAAO;;;;;;;;;;;;;CAcT,MAAc,iBACZ,YACA,WACA,QACkB;EAGlB,IAAIC;AAGJ,MAAI,KAAK,mBACP;QAAK,MAAM,QAAQ,KAAK,kBAAkB,MACxC,KAAI,gBAAgB,QAAQ,KAAK,eAAe,YAAY;AAC1D,cAAU,KAAK;AACf;;;AAMN,MAAI,CAAC,QACH,MAAK,IAAI,UAAU,GAAG,UAAU,IAAI,WAAW;AAC7C,aAAU,KAAK,yBAAyB,WAAW;AACnD,OAAI,QAAS;AAEb,SAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAI5D,MAAI,CAAC,SAAS;AAGZ,WAAQ,KACN,0EAA0E,WAAW,gBACtF;AACD,UAAO;;EAIT,MAAM,qBAAqB,YAAY,KAAK;EAG5C,IAAI,UAAU;AACd,MAAI,oBAEF;QAAK,MAAM,QAAQ,QAAQ,MACzB,KACE,gBAAgB,QAChB,KAAK,eAAe,cACpB,WAAW,QACX,KAAK,UAAU,mBACf;AACA,IAAC,KAA6C,QAC5C;AACF,IAAC,KAA6C,SAAS;AACvD,cAAU;AACV;;SAGC;GAEL,MAAM,eAAe,QAAQ,MAAM,KAAK,SAAS;AAC/C,QACE,gBAAgB,QAChB,KAAK,eAAe,cACpB,WAAW,QACX,KAAK,UAAU,mBACf;AACA,eAAU;AACV,YAAO;MACL,GAAG;MACH,OAAO;MACP;MACD;;AAEH,WAAO;KACP;AAEF,OAAI,SAAS;IAEX,MAAMC,iBAA8B,KAAK,+BACvC;KACE,GAAG;KACH,OAAO;KACR,CACF;AAGD,SAAK,GAAG;;0BAEU,KAAK,UAAU,eAAe,CAAC;uBAClC,QAAQ,GAAG;;AAK1B,SAAK,WAAW,sBADE,KAAK,qBAAqB,CACI;;;AAIpD,MAAI,CAAC,SAAS;AACZ,WAAQ,KACN,6DAA6D,WAAW,+BACzE;AACD,UAAO;;AAKT,MAAI,CAAC,oBAAoB;GAEvB,MAAM,mBAAmB,KAAK,yBAAyB,WAAW;AAClE,OAAI,iBACF,MAAK,sBAAsB;IACzB,MAAM,YAAY;IAClB,SAAS;IACV,CAAC;;AAQN,SAAO;;CAGT,MAAc,OACZ,IACA,UACA,sBAAgC,EAAE,EAClC,UAAsC,EAAE,EACxC;EACA,MAAM,EAAE,eAAe,UAAU;AAEjC,SAAO,KAAK,cAAc,YAAY;AACpC,OAAI,CAAC,SAAS,MAAM;AAElB,SAAK,sBAAsB;KACzB,MAAM;KACN,MAAM;KACN;KACA,MAAM,YAAY;KAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;KAC3C,CAAC;AACF;;GAIF,MAAM,WAAW,KAAK,aAAa,GAAG;GAQtC,MAAM,EAAE,4BAAa,8BAAc,qBACjC,MAAM,OAAO;GAEf,MAAM,SAAS,SAAS,KAAK,WAAW;GAIxC,MAAMC,UAAuB;IAC3B,IAAI,aAAa,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,GAAG;IACtE,MAAM;IACN,OAAO,EAAE;IACV;AAED,QAAK,oBAAoB;AAEzB,QAAK,2BAA2B,IAAI,SAAS,YAAY;AACvD,SAAK,2BAA2B;KAChC;GACF,IAAIC,kBAA8C,EAAE;GACpD,IAAIC,uBAAwD,EAAE;GAC9D,MAAMC,mBAGF,EAAE;GAEN,SAAS,sBACP,WA2BA;IACA,MAAM,OAAO,QAAQ,MAAM,MACxB,WACCC,OAAK,SAAS,kBACdA,OAAK,eAAeC,UAAQ,WAC/B;IAED,MAAM,aAAaA;IACnB,
MAAM,UAAU;AAEhB,QAAI,QAAQ,MAAM;AAChB,UAAK,QAAQA,UAAQ;AACrB,aAAQ,WAAWA,UAAQ;AAC3B,aAAQ,QAAQ,WAAW;AAC3B,aAAQ,SAAS,WAAW;AAC5B,aAAQ,YAAY,WAAW;AAC/B,aAAQ,WAAW,WAAW,YAAY,QAAQ;AAClD,aAAQ,cAAc,WAAW;AAEjC,SACE,WAAW,oBAAoB,QAC/B,KAAK,UAAU,kBAEf,MAAK,uBACH,WAAW;UAGf,SAAQ,MAAM,KAAK;KACjB,MAAM;KACN,UAAUA,UAAQ;KAClB,YAAYA,UAAQ;KACpB,OAAOA,UAAQ;KACf,OAAO,WAAW;KAClB,QAAQ,WAAW;KACnB,WAAW,WAAW;KACtB,aAAa,WAAW;KACxB,GAAI,WAAW,oBAAoB,OAC/B,EAAE,sBAAsB,WAAW,kBAAkB,GACrD,EAAE;KACP,CAAsB;;GAI3B,SAAS,eACP,WAgCA;IACA,MAAM,OAAO,QAAQ,MAAM,MACxB,WACCC,eAAaF,OAAK,IACjBA,OAAoB,eAAeC,UAAQ,WAC/C;IAED,MAAM,aAAaA;IACnB,MAAM,UAAU;AAEhB,QAAI,QAAQ,MAAM;AAChB,UAAK,QAAQA,UAAQ;AACrB,aAAQ,QAAQ,WAAW;AAC3B,aAAQ,SAAS,WAAW;AAC5B,aAAQ,YAAY,WAAW;AAC/B,aAAQ,WAAW,WAAW;AAC9B,aAAQ,cAAc,WAAW;AAGjC,aAAQ,mBACN,WAAW,oBAAoB,KAAK;AAEtC,SACE,WAAW,oBAAoB,QAC/B,KAAK,UAAU,kBAEf,MAAK,uBACH,WAAW;UAGf,SAAQ,MAAM,KAAK;KACjB,MAAM,QAAQA,UAAQ;KACtB,YAAYA,UAAQ;KACpB,OAAOA,UAAQ;KACf,OAAO,WAAW;KAClB,QAAQ,WAAW;KACnB,UAAU,WAAW;KACrB,WAAW,WAAW;KACtB,kBAAkB,WAAW;KAC7B,aAAa,WAAW;KACxB,GAAI,WAAW,oBAAoB,OAC/B,EAAE,sBAAsB,WAAW,kBAAkB,GACrD,EAAE;KACP,CAAe;;GAIpB,eAAe,sBAAsB,UAAmB;AACtD,QAAI,YAAY,KAMd,SAAQ,WAJN,QAAQ,YAAY,OAChB;KAAE,GAAG,QAAQ;KAAU,GAAG;KAAU,GACpC;;GAMV,IAAI,kBAAkB;AACtB,OAAI;AACF,WAAO,MAAM;KACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,SAAI,MAAM;AAER,WAAK,gBAAgB,SAAS;AAC9B,wBAAkB;AAElB,WAAK,sBAAsB;OACzB,MAAM;OACN,MAAM;OACN;OACA,MAAM,YAAY;OAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;OAC3C,CAAC;AACF;;KAGF,MAAM,QAAQ,QAAQ,OAAO,MAAM;AAOnC,UAJoB,SAAS,QAAQ,IAAI,eAAe,IAAI,IAClC,SAAS,oBAAoB,EAG5C;MAET,MAAM,QAAQ,MAAM,MAAM,KAAK;AAC/B,WAAK,MAAM,QAAQ,MACjB,KAAI,KAAK,WAAW,SAAS,IAAI,SAAS,eACxC,KAAI;OACF,MAAME,OAAuB,KAAK,MAAM,KAAK,MAAM,EAAE,CAAC;AACtD,eAAQ,KAAK,MAAb;QACE,KAAK,cAAc;SACjB,MAAMC,WAAuB;UAC3B,MAAM;UACN,MAAM;UACN,kBAAkB,KAAK;UACvB,OAAO;UACR;AACD,yBAAgB,KAAK,MAAM;AAC3B,iBAAQ,MAAM,KAAK,SAAS;AAC5B;;QAGF,KAAK,cAAc;SACjB,MAAM,WAAW,gBAAgB,KAAK;AACtC,kBAAS,QAAQ,KAAK;AACtB,kBAAS,mBACP,KAAK,oBAAoB,SAAS;AACpC;;QAGF,KAAK,YAAY;SACf,MAAM,WAAW,gBAAgB,KAAK;AACtC,kBAAS,QAAQ;AACjB,kBAAS,mBACP,KAAK,oBAAoB,SAAS;AACpC,gBAAO,gBAAgB,KAAK;AAC5B;;QAGF,KAAK,mBAAmB;SACtB,MAAMC,gBAAiC;UACrC,MAAM;UACN,MAAM;UACN,kBAAkB,KAAK;UACvB,OAAO;UACR;AACD,8BAAqB,KAAK,MAAM;AAChC,iBAAQ,MAAM,KAAK,cAAc;AACjC;;QAGF,KAAK,mBAAmB;SACtB,MAAM,gBAAgB,qBAAqB,KAAK;AAChD,uBAAc,QAAQ,KAAK;AAC3B,uBAAc,mBACZ,KAAK,oBAAoB,cAAc;AACzC;;QAGF,KAAK,iBAAiB;SACpB,MAAM,gBAAgB,qBAAqB,KAAK;AAChD,uBAAc,mBACZ,KAAK,oBAAoB,cAAc;AACzC,uBAAc,QAAQ;AACtB,gBAAO,qBAAqB,KAAK;AAEjC;;QAGF,KAAK;AACH,iBAAQ,MAAM,KAAK;UACjB,MAAM;UACN,WAAW,KAAK;UAChB,KAAK,KAAK;UACX,CAAC;AAEF;QAGF,KAAK;AACH,iBAAQ,MAAM,KAAK;UACjB,MAAM;UACN,UAAU,KAAK;UACf,KAAK,KAAK;UACV,OAAO,KAAK;UACZ,kBAAkB,KAAK;UACxB,CAAC;AAEF;QAGF,KAAK;AACH,iBAAQ,MAAM,KAAK;UACjB,MAAM;UACN,UAAU,KAAK;UACf,WAAW,KAAK;UAChB,OAAO,KAAK;UACZ,UAAU,KAAK;UACf,kBAAkB,KAAK;UACxB,CAAC;AAEF;QAGF,KAAK,oBAAoB;SACvB,MAAM,kBACJ,QAAQ,MAAM,OAAOH,eAAa;AAGpC,0BAAiB,KAAK,cAAc;UAClC,MAAM;UACN,UAAU,KAAK;UACf,OAAO,gBAAgB;UACvB,SAAS,KAAK;UACf;AAED,aAAI,KAAK,QACP,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO;UACR,CAAC;aAEF,gBAAe;UACb,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO;UACR,CAAC;AAGJ;;QAGF,KAAK,oBAAoB;SACvB,MAAM,kBAAkB,iBAAiB,KAAK;AAE9C,yBAAgB,QAAQ,KAAK;SAK7B,MAAM,eAHoB,MAAM,iBAC9B,gBAAgB,KACjB,EAGC;AAEF,aAAI,gBAAgB,QAClB,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,gBAAgB;UAC1B,OAAO;UACP,OAAO;UACR,CAAC;aAEF,gBAAe;UACb,YAAY,KAAK;UACjB,UAAU,gBAAgB;UAC1B,OAAO;UACP,OAAO;UACR,CAAC;AAGJ;;QAGF,KAAK;AACH,aAAI,KAAK,QACP,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO,KAAK;UACZ,kBAAkB,KAAK;UACxB,CAAC;aAEF,gBAAe;UACb,YAAY,KA
AK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO,KAAK;UACZ,kBAAkB,KAAK;UACvB,kBAAkB,KAAK;UACxB,CAAC;AAcJ;QAGF,KAAK;AACH,aAAI,KAAK,QACP,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO,KAAK;UACZ,WAAW,KAAK;UAChB,kBAAkB,KAAK;UACxB,CAAC;aAEF,gBAAe;UACb,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO;UACP,UAAU,KAAK;UACf,WAAW,KAAK;UAChB,kBAAkB,KAAK;UACvB,kBAAkB,KAAK;UACxB,CAAC;AAGJ;QAGF,KAAK;AACH,aAAI,KAAK,SAAS;UAKhB,MAAM,iBAJkB,QAAQ,MAAM,QACnC,SAAS,KAAK,SAAS,eACzB,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,gCAAsB;WACpB,YAAY,KAAK;WACjB,UAAU,eAAe;WACzB,OAAO;WACP,OAAO,eAAe;WACtB,QAAQ,KAAK;WACb,aAAa,KAAK;WACnB,CAAC;gBACG;UAKL,MAAM,iBAJkB,QAAQ,MAAM,OACpCA,eACD,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,yBAAe;WACb,YAAY,KAAK;WACjB,UAAUI,cAAY,eAAe;WACrC,OAAO;WACP,OAAO,eAAe;WACtB,QAAQ,KAAK;WACb,kBAAkB,KAAK;WACvB,aAAa,KAAK;WACnB,CAAC;;AAGJ;QAGF,KAAK;AACH,aAAI,KAAK,SAAS;UAKhB,MAAM,iBAJkB,QAAQ,MAAM,QACnC,SAAS,KAAK,SAAS,eACzB,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,gCAAsB;WACpB,YAAY,KAAK;WACjB,UAAU,eAAe;WACzB,OAAO;WACP,OAAO,eAAe;WACtB,WAAW,KAAK;WACjB,CAAC;gBACG;UAKL,MAAM,iBAJkB,QAAQ,MAAM,OACpCJ,eACD,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAC9C,yBAAe;WACb,YAAY,KAAK;WACjB,UAAUI,cAAY,eAAe;WACrC,OAAO;WACP,OAAO,eAAe;WACtB,UACE,cAAc,iBACV,eAAe,WACf;WACN,WAAW,KAAK;WACjB,CAAC;;AAGJ;QAGF,KAAK;AAEH,iBAAQ,MAAM,KAAK,EAAE,MAAM,cAAc,CAAC;AAC1C;QAGF,KAAK;AAEH,2BAAkB,EAAE;AACpB,gCAAuB,EAAE;AACzB;QAGF,KAAK;AACH,aAAI,KAAK,aAAa,KACpB,SAAQ,KAAK,KAAK;AAGpB,eAAM,sBAAsB,KAAK,gBAAgB;AAEjD;QAGF,KAAK;AACH,eAAM,sBAAsB,KAAK,gBAAgB;AACjD;QAGF,KAAK;AACH,eAAM,sBAAsB,KAAK,gBAAgB;AACjD;QAGF,KAAK;AACH,cAAK,sBAAsB;UACzB,OAAO;UACP,MAAM,KAAK,aAAa,KAAK,UAAU,KAAK;UAC5C,MAAM;UACN;UACA,MAAM,YAAY;UACnB,CAAC;AAEF;;OASJ,IAAIC,cAAuB;AAC3B,WAAI,KAAK,SAAS,YAAY,kBAAkB,MAAM;QACpD,MAAM,EAAE,cAAc,GAAG,SAAS;AAIlC,sBAAc;SACZ,GAAG;SACH,MAAM;SACN,iBAAiB,EAAE,cAAc;SAClC;;OAIH,MAAM,YAAY,KAAK,UAAU,YAAY;AAC7C,YAAK,kBAAkB,UAAU,UAAU;AAG3C,YAAK,sBAAsB;QACzB,MAAM;QACN,MAAM;QACN;QACA,MAAM,YAAY;QAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;QAC3C,CAAC;eACK,QAAQ;gBAQjB,MAAM,SAAS,GAAG;AACpB,cAAQ,MAAM,KAAK;OAAE,MAAM;OAAQ,MAAM;OAAO,CAAC;MAEjD,MAAM,YAAY,KAAK,UAAU;OAC/B,MAAM;OACN,OAAO;OACR,CAAC;AAEF,WAAK,kBAAkB,UAAU,UAAU;AAC3C,WAAK,sBAAsB;OACzB,MAAM;OACN,MAAM;OACN;OACA,MAAM,YAAY;OAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;OAC3C,CAAC;;;YAID,OAAO;AAEd,QAAI,CAAC,iBAAiB;AACpB,UAAK,iBAAiB,SAAS;AAE/B,UAAK,sBAAsB;MACzB,MAAM,iBAAiB,QAAQ,MAAM,UAAU;MAC/C,MAAM;MACN,OAAO;MACP;MACA,MAAM,YAAY;MAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;MAC3C,CAAC;;AAEJ,UAAM;aACE;AACR,WAAO,aAAa;;AAGtB,OAAI,QAAQ,MAAM,SAAS,EACzB,KAAI,cAAc;IAEhB,IAAI,mBAAmB;AACvB,SAAK,IAAI,IAAI,KAAK,SAAS,SAAS,GAAG,KAAK,GAAG,IAC7C,KAAI,KAAK,SAAS,GAAG,SAAS,aAAa;AACzC,wBAAmB;AACnB;;AAGJ,QAAI,oBAAoB,GAAG;KACzB,MAAM,gBAAgB,KAAK,SAAS;KACpC,MAAMC,gBAA6B;MACjC,GAAG;MACH,OAAO,CAAC,GAAG,cAAc,OAAO,GAAG,QAAQ,MAAM;MAClD;KACD,MAAM,kBAAkB,CAAC,GAAG,KAAK,SAAS;AAC1C,qBAAgB,oBAAoB;AACpC,WAAM,KAAK,gBAAgB,iBAAiB,oBAAoB;UAGhE,OAAM,KAAK,gBACT,CAAC,GAAG,KAAK,UAAU,QAAQ,EAC3B,oBACD;SAGH,OAAM,KAAK,gBACT,CAAC,GAAG,KAAK,UAAU,QAAQ,EAC3B,oBACD;AAKL,QAAK,oBAAoB;AACzB,OAAI,KAAK,0BAA0B;AACjC,SAAK,0BAA0B;AAC/B,SAAK,2BAA2B;AAChC,SAAK,2BAA2B;;IAElC;;;;;;;CAQJ,AAAU,iBAAiB,UAAkB;AAE3C,OAAK,mBAAmB;AAExB,OAAK,GAAG;;6CAEiC,KAAK,KAAK,CAAC;mBACrC,SAAS;;AAExB,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AACxB,OAAK,oBAAoB;;;;;;;;CAS3B,AAAQ,gBAAgB,IAAqC;AAE3D,MAAI,OAAO,OAAO,SAChB;AAGF,MAAI,CAAC,KAAK,6BAA6B,IAAI,GAAG,CAC5C,MAAK,6BAA6B,IAAI,IAAI,IAAI,iBAAiB,
CAAC;AAGlE,SAAO,KAAK,6BAA6B,IAAI,GAAG,EAAE;;;;;CAMpD,AAAQ,uBAAuB,IAAY;AACzC,OAAK,6BAA6B,OAAO,GAAG;;;;;CAM9C,AAAQ,mBAAmB,IAAY;AACrC,MAAI,KAAK,6BAA6B,IAAI,GAAG,CAE3C,CADwB,KAAK,6BAA6B,IAAI,GAAG,EAChD,OAAO;;;;;CAO5B,AAAQ,2BAA2B;AACjC,OAAK,MAAM,cAAc,KAAK,6BAA6B,QAAQ,CACjE,aAAY,OAAO;AAErB,OAAK,6BAA6B,OAAO;;;;;CAM3C,MAAM,UAAU;AACd,OAAK,0BAA0B;AAG/B,OAAK,mBAAmB;AAGxB,OAAK,GAAG;AACR,OAAK,GAAG;AAGR,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AAExB,QAAM,MAAM,SAAS"}
|
|
1
|
+
{"version":3,"file":"index.js","names":["ctx","data: IncomingMessage","newMetadata: ProviderMetadata | undefined","message: ChatMessage | undefined","updatedMessage: ChatMessage","message: ChatMessage","activeTextParts: Record<string, TextUIPart>","activeReasoningParts: Record<string, ReasoningUIPart>","partialToolCalls: Record<\n string,\n { text: string; index: number; toolName: string; dynamic?: boolean }\n >","part","options","isToolUIPart","data: UIMessageChunk","textPart: TextUIPart","reasoningPart: ReasoningUIPart","getToolName","eventToSend: unknown","mergedMessage: ChatMessage"],"sources":["../src/index.ts"],"sourcesContent":["import type {\n UIMessage as ChatMessage,\n DynamicToolUIPart,\n JSONSchema7,\n ProviderMetadata,\n ReasoningUIPart,\n StreamTextOnFinishCallback,\n TextUIPart,\n Tool,\n ToolSet,\n ToolUIPart,\n UIMessageChunk\n} from \"ai\";\nimport { tool, jsonSchema } from \"ai\";\nimport {\n Agent,\n type AgentContext,\n type Connection,\n type ConnectionContext,\n type WSMessage\n} from \"agents\";\n\nimport {\n MessageType,\n type IncomingMessage,\n type OutgoingMessage\n} from \"./types\";\nimport { autoTransformMessages } from \"./ai-chat-v5-migration\";\nimport { nanoid } from \"nanoid\";\n\nimport { agentContext } from \"agents/internal_context\";\n\n/**\n * Schema for a client-defined tool sent from the browser.\n * These tools are executed on the client, not the server.\n *\n * Note: Uses `parameters` (JSONSchema7) rather than AI SDK's `inputSchema` (FlexibleSchema)\n * because this is the wire format. Zod schemas cannot be serialized.\n *\n * @deprecated Define tools on the server using `tool()` from \"ai\" instead.\n * For tools that need client-side execution, omit the `execute` function\n * and handle them via the `onToolCall` callback in `useAgentChat`.\n */\nexport type ClientToolSchema = {\n /** Unique name for the tool */\n name: string;\n /** Human-readable description of what the tool does */\n description?: Tool[\"description\"];\n /** JSON Schema defining the tool's input parameters */\n parameters?: JSONSchema7;\n};\n\n/**\n * Options passed to the onChatMessage handler.\n */\nexport type OnChatMessageOptions = {\n /** AbortSignal for cancelling the request */\n abortSignal?: AbortSignal;\n /**\n * Tool schemas sent from the client for dynamic tool registration.\n * These represent tools that will be executed on the client side.\n * Use `createToolsFromClientSchemas()` to convert these to AI SDK tool format.\n *\n * @deprecated Define tools on the server instead. 
Use `onToolCall` callback\n * in `useAgentChat` for client-side execution.\n */\n clientTools?: ClientToolSchema[];\n};\n\n/**\n * Converts client tool schemas to AI SDK tool format.\n *\n * These tools have no `execute` function - when the AI model calls them,\n * the tool call is sent back to the client for execution.\n *\n * @param clientTools - Array of tool schemas from the client\n * @returns Record of AI SDK tools that can be spread into your tools object\n *\n * @deprecated Define tools on the server using `tool()` from \"ai\" instead.\n * For tools that need client-side execution, omit the `execute` function\n * and handle them via the `onToolCall` callback in `useAgentChat`.\n *\n * @example\n * ```typescript\n * // Server: Define tool without execute\n * const tools = {\n * getLocation: tool({\n * description: \"Get user's location\",\n * inputSchema: z.object({})\n * // No execute = client must handle\n * })\n * };\n *\n * // Client: Handle in onToolCall\n * useAgentChat({\n * onToolCall: async ({ toolCall, addToolOutput }) => {\n * if (toolCall.toolName === 'getLocation') {\n * const pos = await navigator.geolocation.getCurrentPosition();\n * addToolOutput({ toolCallId: toolCall.toolCallId, output: pos });\n * }\n * }\n * });\n * ```\n */\nexport function createToolsFromClientSchemas(\n clientTools?: ClientToolSchema[]\n): ToolSet {\n if (!clientTools || clientTools.length === 0) {\n return {};\n }\n\n // Check for duplicate tool names\n const seenNames = new Set<string>();\n for (const t of clientTools) {\n if (seenNames.has(t.name)) {\n console.warn(\n `[createToolsFromClientSchemas] Duplicate tool name \"${t.name}\" found. Later definitions will override earlier ones.`\n );\n }\n seenNames.add(t.name);\n }\n\n return Object.fromEntries(\n clientTools.map((t) => [\n t.name,\n tool({\n description: t.description ?? \"\",\n inputSchema: jsonSchema(t.parameters ?? 
{ type: \"object\" })\n // No execute function = tool call is sent back to client\n })\n ])\n );\n}\n\n/** Number of chunks to buffer before flushing to SQLite */\nconst CHUNK_BUFFER_SIZE = 10;\n/** Maximum buffer size to prevent memory issues on rapid reconnections */\nconst CHUNK_BUFFER_MAX_SIZE = 100;\n/** Maximum age for a \"streaming\" stream before considering it stale (ms) - 5 minutes */\nconst STREAM_STALE_THRESHOLD_MS = 5 * 60 * 1000;\n/** Default cleanup interval for old streams (ms) - every 10 minutes */\nconst CLEANUP_INTERVAL_MS = 10 * 60 * 1000;\n/** Default age threshold for cleaning up completed streams (ms) - 24 hours */\nconst CLEANUP_AGE_THRESHOLD_MS = 24 * 60 * 60 * 1000;\n\nconst decoder = new TextDecoder();\n\n/**\n * Stored stream chunk for resumable streaming\n */\ntype StreamChunk = {\n id: string;\n stream_id: string;\n body: string;\n chunk_index: number;\n created_at: number;\n};\n\n/**\n * Stream metadata for tracking active streams\n */\ntype StreamMetadata = {\n id: string;\n request_id: string;\n status: \"streaming\" | \"completed\" | \"error\";\n created_at: number;\n completed_at: number | null;\n};\n\n/**\n * Extension of Agent with built-in chat capabilities\n * @template Env Environment type containing bindings\n */\nexport class AIChatAgent<\n Env extends Cloudflare.Env = Cloudflare.Env,\n State = unknown\n> extends Agent<Env, State> {\n /**\n * Map of message `id`s to `AbortController`s\n * useful to propagate request cancellation signals for any external calls made by the agent\n */\n private _chatMessageAbortControllers: Map<string, AbortController>;\n\n /**\n * Currently active stream ID for resumable streaming.\n * Stored in memory for quick access; persisted in stream_metadata table.\n * @internal Protected for testing purposes.\n */\n protected _activeStreamId: string | null = null;\n\n /**\n * Request ID associated with the active stream.\n * @internal Protected for testing purposes.\n */\n protected _activeRequestId: string | null = null;\n\n /**\n * The message currently being streamed. 
Used to apply tool results\n * before the message is persisted.\n * @internal\n */\n private _streamingMessage: ChatMessage | null = null;\n\n /**\n * Promise that resolves when the current stream completes.\n * Used to wait for message persistence before continuing after tool results.\n * @internal\n */\n private _streamCompletionPromise: Promise<void> | null = null;\n private _streamCompletionResolve: (() => void) | null = null;\n\n /**\n * Current chunk index for the active stream\n */\n private _streamChunkIndex = 0;\n\n /**\n * Buffer for stream chunks pending write to SQLite.\n * Chunks are batched and flushed when buffer reaches CHUNK_BUFFER_SIZE.\n */\n private _chunkBuffer: Array<{\n id: string;\n streamId: string;\n body: string;\n index: number;\n }> = [];\n\n /**\n * Lock to prevent concurrent flush operations\n */\n private _isFlushingChunks = false;\n\n /**\n * Timestamp of the last cleanup operation for old streams\n */\n private _lastCleanupTime = 0;\n\n /** Array of chat messages for the current conversation */\n messages: ChatMessage[];\n\n constructor(ctx: AgentContext, env: Env) {\n super(ctx, env);\n this.sql`create table if not exists cf_ai_chat_agent_messages (\n id text primary key,\n message text not null,\n created_at datetime default current_timestamp\n )`;\n\n // Create tables for automatic resumable streaming\n this.sql`create table if not exists cf_ai_chat_stream_chunks (\n id text primary key,\n stream_id text not null,\n body text not null,\n chunk_index integer not null,\n created_at integer not null\n )`;\n\n this.sql`create table if not exists cf_ai_chat_stream_metadata (\n id text primary key,\n request_id text not null,\n status text not null,\n created_at integer not null,\n completed_at integer\n )`;\n\n this.sql`create index if not exists idx_stream_chunks_stream_id \n on cf_ai_chat_stream_chunks(stream_id, chunk_index)`;\n\n // Load messages and automatically transform them to v5 format\n const rawMessages = this._loadMessagesFromDb();\n\n // Automatic migration following https://jhak.im/blog/ai-sdk-migration-handling-previously-saved-messages\n this.messages = autoTransformMessages(rawMessages);\n\n this._chatMessageAbortControllers = new Map();\n\n // Check for any active streams from a previous session\n this._restoreActiveStream();\n const _onConnect = this.onConnect.bind(this);\n this.onConnect = async (connection: Connection, ctx: ConnectionContext) => {\n // Notify client about active streams that can be resumed\n if (this._activeStreamId) {\n this._notifyStreamResuming(connection);\n }\n // Call consumer's onConnect\n return _onConnect(connection, ctx);\n };\n\n // Wrap onMessage\n const _onMessage = this.onMessage.bind(this);\n this.onMessage = async (connection: Connection, message: WSMessage) => {\n // Handle AIChatAgent's internal messages first\n if (typeof message === \"string\") {\n let data: IncomingMessage;\n try {\n data = JSON.parse(message) as IncomingMessage;\n } catch (_error) {\n // Not JSON, forward to consumer\n return _onMessage(connection, message);\n }\n\n // Handle chat request\n if (\n data.type === MessageType.CF_AGENT_USE_CHAT_REQUEST &&\n data.init.method === \"POST\"\n ) {\n const { body } = data.init;\n const parsed = JSON.parse(body as string);\n const { messages, clientTools } = parsed as {\n messages: ChatMessage[];\n clientTools?: ClientToolSchema[];\n };\n\n // Automatically transform any incoming messages\n const transformedMessages = autoTransformMessages(messages);\n\n this._broadcastChatMessage(\n {\n 
messages: transformedMessages,\n type: MessageType.CF_AGENT_CHAT_MESSAGES\n },\n [connection.id]\n );\n\n await this.persistMessages(transformedMessages, [connection.id]);\n\n this.observability?.emit(\n {\n displayMessage: \"Chat message request\",\n id: data.id,\n payload: {},\n timestamp: Date.now(),\n type: \"message:request\"\n },\n this.ctx\n );\n\n const chatMessageId = data.id;\n const abortSignal = this._getAbortSignal(chatMessageId);\n\n return this._tryCatchChat(async () => {\n // Wrap in agentContext.run() to propagate connection context to onChatMessage\n // This ensures getCurrentAgent() returns the connection inside tool execute functions\n return agentContext.run(\n { agent: this, connection, request: undefined, email: undefined },\n async () => {\n const response = await this.onChatMessage(\n async (_finishResult) => {\n this._removeAbortController(chatMessageId);\n\n this.observability?.emit(\n {\n displayMessage: \"Chat message response\",\n id: data.id,\n payload: {},\n timestamp: Date.now(),\n type: \"message:response\"\n },\n this.ctx\n );\n },\n {\n abortSignal,\n clientTools\n }\n );\n\n if (response) {\n await this._reply(data.id, response, [connection.id]);\n } else {\n console.warn(\n `[AIChatAgent] onChatMessage returned no response for chatMessageId: ${chatMessageId}`\n );\n this._broadcastChatMessage(\n {\n body: \"No response was generated by the agent.\",\n done: true,\n id: data.id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n },\n [connection.id]\n );\n }\n }\n );\n });\n }\n\n // Handle clear chat\n if (data.type === MessageType.CF_AGENT_CHAT_CLEAR) {\n this._destroyAbortControllers();\n this.sql`delete from cf_ai_chat_agent_messages`;\n this.sql`delete from cf_ai_chat_stream_chunks`;\n this.sql`delete from cf_ai_chat_stream_metadata`;\n this._activeStreamId = null;\n this._activeRequestId = null;\n this._streamChunkIndex = 0;\n this.messages = [];\n this._broadcastChatMessage(\n { type: MessageType.CF_AGENT_CHAT_CLEAR },\n [connection.id]\n );\n return;\n }\n\n // Handle message replacement\n if (data.type === MessageType.CF_AGENT_CHAT_MESSAGES) {\n const transformedMessages = autoTransformMessages(data.messages);\n await this.persistMessages(transformedMessages, [connection.id]);\n return;\n }\n\n // Handle request cancellation\n if (data.type === MessageType.CF_AGENT_CHAT_REQUEST_CANCEL) {\n this._cancelChatRequest(data.id);\n return;\n }\n\n // Handle stream resume acknowledgment\n if (data.type === MessageType.CF_AGENT_STREAM_RESUME_ACK) {\n if (\n this._activeStreamId &&\n this._activeRequestId &&\n this._activeRequestId === data.id\n ) {\n this._sendStreamChunks(\n connection,\n this._activeStreamId,\n this._activeRequestId\n );\n }\n return;\n }\n\n // Handle client-side tool result\n if (data.type === MessageType.CF_AGENT_TOOL_RESULT) {\n const { toolCallId, toolName, output, autoContinue } = data;\n\n // Apply the tool result\n this._applyToolResult(toolCallId, toolName, output).then(\n (applied) => {\n // Only auto-continue if client requested it (opt-in behavior)\n // This mimics server-executed tool behavior where the LLM\n // automatically continues after seeing tool results\n if (applied && autoContinue) {\n // Wait for the original stream to complete and message to be persisted\n // before calling onChatMessage, so this.messages includes the tool result\n const waitForStream = async () => {\n if (this._streamCompletionPromise) {\n await this._streamCompletionPromise;\n } else {\n // If no promise, wait a bit for the stream to finish\n 
await new Promise((resolve) => setTimeout(resolve, 500));\n }\n };\n\n waitForStream().then(() => {\n const continuationId = nanoid();\n const abortSignal = this._getAbortSignal(continuationId);\n\n this._tryCatchChat(async () => {\n return agentContext.run(\n {\n agent: this,\n connection,\n request: undefined,\n email: undefined\n },\n async () => {\n const response = await this.onChatMessage(\n async (_finishResult) => {\n this._removeAbortController(continuationId);\n\n this.observability?.emit(\n {\n displayMessage:\n \"Chat message response (tool continuation)\",\n id: continuationId,\n payload: {},\n timestamp: Date.now(),\n type: \"message:response\"\n },\n this.ctx\n );\n },\n {\n abortSignal\n }\n );\n\n if (response) {\n // Pass continuation flag to merge parts into last assistant message\n // Note: We pass an empty excludeBroadcastIds array because the sender\n // NEEDS to receive the continuation stream. Unlike regular chat requests\n // where aiFetch handles the response, tool continuations have no listener\n // waiting - the client relies on the broadcast.\n await this._reply(\n continuationId,\n response,\n [], // Don't exclude sender - they need the continuation\n { continuation: true }\n );\n }\n }\n );\n });\n });\n }\n }\n );\n return;\n }\n }\n\n // Forward unhandled messages to consumer's onMessage\n return _onMessage(connection, message);\n };\n }\n\n /**\n * Restore active stream state if the agent was restarted during streaming.\n * Called during construction to recover any interrupted streams.\n * Validates stream freshness to avoid sending stale resume notifications.\n * @internal Protected for testing purposes.\n */\n protected _restoreActiveStream() {\n const activeStreams = this.sql<StreamMetadata>`\n select * from cf_ai_chat_stream_metadata \n where status = 'streaming' \n order by created_at desc \n limit 1\n `;\n\n if (activeStreams && activeStreams.length > 0) {\n const stream = activeStreams[0];\n const streamAge = Date.now() - stream.created_at;\n\n // Check if stream is stale; delete to free storage\n if (streamAge > STREAM_STALE_THRESHOLD_MS) {\n this\n .sql`delete from cf_ai_chat_stream_chunks where stream_id = ${stream.id}`;\n this\n .sql`delete from cf_ai_chat_stream_metadata where id = ${stream.id}`;\n console.warn(\n `[AIChatAgent] Deleted stale stream ${stream.id} (age: ${Math.round(streamAge / 1000)}s)`\n );\n return;\n }\n\n this._activeStreamId = stream.id;\n this._activeRequestId = stream.request_id;\n\n // Get the last chunk index\n const lastChunk = this.sql<{ max_index: number }>`\n select max(chunk_index) as max_index \n from cf_ai_chat_stream_chunks \n where stream_id = ${this._activeStreamId}\n `;\n this._streamChunkIndex =\n lastChunk && lastChunk[0]?.max_index != null\n ? 
lastChunk[0].max_index + 1\n : 0;\n }\n }\n\n /**\n * Notify a connection about an active stream that can be resumed.\n * The client should respond with CF_AGENT_STREAM_RESUME_ACK to receive chunks.\n * Uses in-memory state for request ID - no extra DB lookup needed.\n * @param connection - The WebSocket connection to notify\n */\n private _notifyStreamResuming(connection: Connection) {\n if (!this._activeStreamId || !this._activeRequestId) {\n return;\n }\n\n // Notify client - they will send ACK when ready\n connection.send(\n JSON.stringify({\n type: MessageType.CF_AGENT_STREAM_RESUMING,\n id: this._activeRequestId\n })\n );\n }\n\n /**\n * Send stream chunks to a connection after receiving ACK.\n * @param connection - The WebSocket connection\n * @param streamId - The stream to replay\n * @param requestId - The original request ID\n */\n private _sendStreamChunks(\n connection: Connection,\n streamId: string,\n requestId: string\n ) {\n // Flush any pending chunks first to ensure we have the latest\n this._flushChunkBuffer();\n\n const chunks = this.sql<StreamChunk>`\n select * from cf_ai_chat_stream_chunks \n where stream_id = ${streamId} \n order by chunk_index asc\n `;\n\n // Send all stored chunks\n for (const chunk of chunks || []) {\n connection.send(\n JSON.stringify({\n body: chunk.body,\n done: false,\n id: requestId,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n })\n );\n }\n\n // If the stream is no longer active (completed), send done signal\n // We track active state in memory, no need to query DB\n if (this._activeStreamId !== streamId) {\n connection.send(\n JSON.stringify({\n body: \"\",\n done: true,\n id: requestId,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n })\n );\n }\n }\n\n /**\n * Buffer a stream chunk for batch write to SQLite.\n * @param streamId - The stream this chunk belongs to\n * @param body - The serialized chunk body\n * @internal Protected for testing purposes.\n */\n protected _storeStreamChunk(streamId: string, body: string) {\n // Force flush if buffer is at max to prevent memory issues\n if (this._chunkBuffer.length >= CHUNK_BUFFER_MAX_SIZE) {\n this._flushChunkBuffer();\n }\n\n this._chunkBuffer.push({\n id: nanoid(),\n streamId,\n body,\n index: this._streamChunkIndex\n });\n this._streamChunkIndex++;\n\n // Flush when buffer reaches threshold\n if (this._chunkBuffer.length >= CHUNK_BUFFER_SIZE) {\n this._flushChunkBuffer();\n }\n }\n\n /**\n * Flush buffered chunks to SQLite in a single batch.\n * Uses a lock to prevent concurrent flush operations.\n * @internal Protected for testing purposes.\n */\n protected _flushChunkBuffer() {\n // Prevent concurrent flushes\n if (this._isFlushingChunks || this._chunkBuffer.length === 0) {\n return;\n }\n\n this._isFlushingChunks = true;\n try {\n const chunks = this._chunkBuffer;\n this._chunkBuffer = [];\n\n // Batch insert all chunks\n const now = Date.now();\n for (const chunk of chunks) {\n this.sql`\n insert into cf_ai_chat_stream_chunks (id, stream_id, body, chunk_index, created_at)\n values (${chunk.id}, ${chunk.streamId}, ${chunk.body}, ${chunk.index}, ${now})\n `;\n }\n } finally {\n this._isFlushingChunks = false;\n }\n }\n\n /**\n * Start tracking a new stream for resumable streaming.\n * Creates metadata entry in SQLite and sets up tracking state.\n * @param requestId - The unique ID of the chat request\n * @returns The generated stream ID\n * @internal Protected for testing purposes.\n */\n protected _startStream(requestId: string): string {\n // Flush any pending chunks from previous 
streams to prevent mixing\n this._flushChunkBuffer();\n\n const streamId = nanoid();\n this._activeStreamId = streamId;\n this._activeRequestId = requestId;\n this._streamChunkIndex = 0;\n\n this.sql`\n insert into cf_ai_chat_stream_metadata (id, request_id, status, created_at)\n values (${streamId}, ${requestId}, 'streaming', ${Date.now()})\n `;\n\n return streamId;\n }\n\n /**\n * Mark a stream as completed and flush any pending chunks.\n * @param streamId - The stream to mark as completed\n * @internal Protected for testing purposes.\n */\n protected _completeStream(streamId: string) {\n // Flush any pending chunks before completing\n this._flushChunkBuffer();\n\n this.sql`\n update cf_ai_chat_stream_metadata \n set status = 'completed', completed_at = ${Date.now()} \n where id = ${streamId}\n `;\n this._activeStreamId = null;\n this._activeRequestId = null;\n this._streamChunkIndex = 0;\n\n // Periodically clean up old streams (not on every completion)\n this._maybeCleanupOldStreams();\n }\n\n /**\n * Clean up old completed streams if enough time has passed since last cleanup.\n * This prevents database growth while avoiding cleanup overhead on every stream completion.\n */\n private _maybeCleanupOldStreams() {\n const now = Date.now();\n if (now - this._lastCleanupTime < CLEANUP_INTERVAL_MS) {\n return;\n }\n this._lastCleanupTime = now;\n\n const cutoff = now - CLEANUP_AGE_THRESHOLD_MS;\n this.sql`\n delete from cf_ai_chat_stream_chunks \n where stream_id in (\n select id from cf_ai_chat_stream_metadata \n where status = 'completed' and completed_at < ${cutoff}\n )\n `;\n this.sql`\n delete from cf_ai_chat_stream_metadata \n where status = 'completed' and completed_at < ${cutoff}\n `;\n }\n\n private _broadcastChatMessage(message: OutgoingMessage, exclude?: string[]) {\n this.broadcast(JSON.stringify(message), exclude);\n }\n\n /**\n * Broadcasts a text event for non-SSE responses.\n * This ensures plain text responses follow the AI SDK v5 stream protocol.\n *\n * @param streamId - The stream identifier for chunk storage\n * @param event - The text event payload (text-start, text-delta with delta, or text-end)\n * @param continuation - Whether this is a continuation of a previous stream\n */\n private _broadcastTextEvent(\n streamId: string,\n event:\n | { type: \"text-start\"; id: string }\n | { type: \"text-delta\"; id: string; delta: string }\n | { type: \"text-end\"; id: string },\n continuation: boolean\n ) {\n const body = JSON.stringify(event);\n this._storeStreamChunk(streamId, body);\n this._broadcastChatMessage({\n body,\n done: false,\n id: event.id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n }\n\n private _loadMessagesFromDb(): ChatMessage[] {\n const rows =\n this.sql`select * from cf_ai_chat_agent_messages order by created_at` ||\n [];\n return rows\n .map((row) => {\n try {\n return JSON.parse(row.message as string);\n } catch (error) {\n console.error(`Failed to parse message ${row.id}:`, error);\n return null;\n }\n })\n .filter((msg): msg is ChatMessage => msg !== null);\n }\n\n override async onRequest(request: Request): Promise<Response> {\n return this._tryCatchChat(async () => {\n const url = new URL(request.url);\n\n if (url.pathname.endsWith(\"/get-messages\")) {\n const messages = this._loadMessagesFromDb();\n return Response.json(messages);\n }\n\n return super.onRequest(request);\n });\n }\n\n private async _tryCatchChat<T>(fn: () => T | Promise<T>) {\n try {\n return await fn();\n } catch (e) {\n 
throw this.onError(e);\n }\n }\n\n /**\n * Handle incoming chat messages and generate a response\n * @param onFinish Callback to be called when the response is finished\n * @param options Options including abort signal and client-defined tools\n * @returns Response to send to the client or undefined\n */\n async onChatMessage(\n // biome-ignore lint/correctness/noUnusedFunctionParameters: overridden later\n onFinish: StreamTextOnFinishCallback<ToolSet>,\n // biome-ignore lint/correctness/noUnusedFunctionParameters: overridden later\n options?: OnChatMessageOptions\n ): Promise<Response | undefined> {\n throw new Error(\n \"recieved a chat message, override onChatMessage and return a Response to send to the client\"\n );\n }\n\n /**\n * Save messages on the server side\n * @param messages Chat messages to save\n */\n async saveMessages(messages: ChatMessage[]) {\n await this.persistMessages(messages);\n await this._tryCatchChat(async () => {\n const response = await this.onChatMessage(() => {});\n if (response) this._reply(crypto.randomUUID(), response);\n });\n }\n\n async persistMessages(\n messages: ChatMessage[],\n excludeBroadcastIds: string[] = []\n ) {\n // Merge incoming messages with existing server state to preserve tool outputs.\n // This is critical for client-side tools: the client sends messages without\n // tool outputs, but the server has them via _applyToolResult.\n const mergedMessages = this._mergeIncomingWithServerState(messages);\n\n // Persist the merged messages\n for (const message of mergedMessages) {\n // Strip OpenAI item IDs to prevent \"Duplicate item found\" errors\n // when using the OpenAI Responses API. These IDs are assigned by OpenAI\n // and if sent back in subsequent requests, cause duplicate detection.\n const sanitizedMessage = this._sanitizeMessageForPersistence(message);\n const messageToSave = this._resolveMessageForToolMerge(sanitizedMessage);\n this.sql`\n insert into cf_ai_chat_agent_messages (id, message)\n values (${messageToSave.id}, ${JSON.stringify(messageToSave)})\n on conflict(id) do update set message = excluded.message\n `;\n }\n\n // refresh in-memory messages\n const persisted = this._loadMessagesFromDb();\n this.messages = autoTransformMessages(persisted);\n this._broadcastChatMessage(\n {\n messages: mergedMessages,\n type: MessageType.CF_AGENT_CHAT_MESSAGES\n },\n excludeBroadcastIds\n );\n }\n\n /**\n * Merges incoming messages with existing server state.\n * This preserves tool outputs that the server has (via _applyToolResult)\n * but the client doesn't have yet.\n *\n * @param incomingMessages - Messages from the client\n * @returns Messages with server's tool outputs preserved\n */\n private _mergeIncomingWithServerState(\n incomingMessages: ChatMessage[]\n ): ChatMessage[] {\n // Build a map of toolCallId -> output from existing server messages\n const serverToolOutputs = new Map<string, unknown>();\n for (const msg of this.messages) {\n if (msg.role !== \"assistant\") continue;\n for (const part of msg.parts) {\n if (\n \"toolCallId\" in part &&\n \"state\" in part &&\n part.state === \"output-available\" &&\n \"output\" in part\n ) {\n serverToolOutputs.set(\n part.toolCallId as string,\n (part as { output: unknown }).output\n );\n }\n }\n }\n\n // If server has no tool outputs, return incoming messages as-is\n if (serverToolOutputs.size === 0) {\n return incomingMessages;\n }\n\n // Merge server's tool outputs into incoming messages\n return incomingMessages.map((msg) => {\n if (msg.role !== \"assistant\") return msg;\n\n 
let hasChanges = false;\n const updatedParts = msg.parts.map((part) => {\n // If this is a tool part in input-available state and server has the output\n if (\n \"toolCallId\" in part &&\n \"state\" in part &&\n part.state === \"input-available\" &&\n serverToolOutputs.has(part.toolCallId as string)\n ) {\n hasChanges = true;\n return {\n ...part,\n state: \"output-available\" as const,\n output: serverToolOutputs.get(part.toolCallId as string)\n };\n }\n return part;\n }) as ChatMessage[\"parts\"];\n\n return hasChanges ? { ...msg, parts: updatedParts } : msg;\n });\n }\n\n /**\n * Resolves a message for persistence, handling tool result merging.\n * If the message contains tool parts with output-available state, checks if there's\n * an existing message with the same toolCallId that should be updated instead of\n * creating a duplicate. This prevents the \"Duplicate item found\" error from OpenAI\n * when client-side tool results arrive in a new request.\n *\n * @param message - The message to potentially merge\n * @returns The message with the correct ID (either original or merged)\n */\n private _resolveMessageForToolMerge(message: ChatMessage): ChatMessage {\n if (message.role !== \"assistant\") {\n return message;\n }\n\n // Check if this message has tool parts with output-available state\n for (const part of message.parts) {\n if (\n \"toolCallId\" in part &&\n \"state\" in part &&\n part.state === \"output-available\"\n ) {\n const toolCallId = part.toolCallId as string;\n\n // Look for an existing message with this toolCallId in input-available state\n const existingMessage = this._findMessageByToolCallId(toolCallId);\n if (existingMessage && existingMessage.id !== message.id) {\n // Found a match - merge by using the existing message's ID\n // This ensures the SQL upsert updates the existing row\n return {\n ...message,\n id: existingMessage.id\n };\n }\n }\n }\n\n return message;\n }\n\n /**\n * Finds an existing assistant message that contains a tool part with the given toolCallId.\n * Used to detect when a tool result should update an existing message rather than\n * creating a new one.\n *\n * @param toolCallId - The tool call ID to search for\n * @returns The existing message if found, undefined otherwise\n */\n private _findMessageByToolCallId(\n toolCallId: string\n ): ChatMessage | undefined {\n for (const msg of this.messages) {\n if (msg.role !== \"assistant\") continue;\n\n for (const part of msg.parts) {\n if (\"toolCallId\" in part && part.toolCallId === toolCallId) {\n return msg;\n }\n }\n }\n return undefined;\n }\n\n /**\n * Sanitizes a message for persistence by removing ephemeral provider-specific\n * data that should not be stored or sent back in subsequent requests.\n *\n * This handles two issues with the OpenAI Responses API:\n *\n * 1. **Duplicate item IDs**: The AI SDK's @ai-sdk/openai provider (v2.0.x+)\n * defaults to using OpenAI's Responses API which assigns unique itemIds\n * to each message part. When these IDs are persisted and sent back,\n * OpenAI rejects them as duplicates.\n *\n * 2. **Empty reasoning parts**: OpenAI may return reasoning parts with empty\n * text and encrypted content. 
These cause \"Non-OpenAI reasoning parts are\n * not supported\" warnings when sent back via convertToModelMessages().\n *\n * @param message - The message to sanitize\n * @returns A new message with ephemeral provider data removed\n */\n private _sanitizeMessageForPersistence(message: ChatMessage): ChatMessage {\n // First, filter out empty reasoning parts (they have no useful content)\n const filteredParts = message.parts.filter((part) => {\n if (part.type === \"reasoning\") {\n const reasoningPart = part as ReasoningUIPart;\n // Remove reasoning parts that have no text content\n // These are typically placeholders with only encrypted content\n if (!reasoningPart.text || reasoningPart.text.trim() === \"\") {\n return false;\n }\n }\n return true;\n });\n\n // Then sanitize remaining parts by stripping OpenAI-specific ephemeral data\n const sanitizedParts = filteredParts.map((part) => {\n let sanitizedPart = part;\n\n // Strip providerMetadata.openai.itemId and reasoningEncryptedContent\n if (\n \"providerMetadata\" in sanitizedPart &&\n sanitizedPart.providerMetadata &&\n typeof sanitizedPart.providerMetadata === \"object\" &&\n \"openai\" in sanitizedPart.providerMetadata\n ) {\n sanitizedPart = this._stripOpenAIMetadata(\n sanitizedPart,\n \"providerMetadata\"\n );\n }\n\n // Also check callProviderMetadata for tool parts\n if (\n \"callProviderMetadata\" in sanitizedPart &&\n sanitizedPart.callProviderMetadata &&\n typeof sanitizedPart.callProviderMetadata === \"object\" &&\n \"openai\" in sanitizedPart.callProviderMetadata\n ) {\n sanitizedPart = this._stripOpenAIMetadata(\n sanitizedPart,\n \"callProviderMetadata\"\n );\n }\n\n return sanitizedPart;\n }) as ChatMessage[\"parts\"];\n\n return { ...message, parts: sanitizedParts };\n }\n\n /**\n * Helper to strip OpenAI-specific ephemeral fields from a metadata object.\n * Removes itemId and reasoningEncryptedContent while preserving other fields.\n */\n private _stripOpenAIMetadata<T extends ChatMessage[\"parts\"][number]>(\n part: T,\n metadataKey: \"providerMetadata\" | \"callProviderMetadata\"\n ): T {\n const metadata = (part as Record<string, unknown>)[metadataKey] as {\n openai?: Record<string, unknown>;\n [key: string]: unknown;\n };\n\n if (!metadata?.openai) return part;\n\n const openaiMeta = metadata.openai;\n\n // Remove ephemeral fields: itemId and reasoningEncryptedContent\n const {\n itemId: _itemId,\n reasoningEncryptedContent: _rec,\n ...restOpenai\n } = openaiMeta;\n\n // Determine what to keep\n const hasOtherOpenaiFields = Object.keys(restOpenai).length > 0;\n const { openai: _openai, ...restMetadata } = metadata;\n\n let newMetadata: ProviderMetadata | undefined;\n if (hasOtherOpenaiFields) {\n newMetadata = {\n ...restMetadata,\n openai: restOpenai\n } as ProviderMetadata;\n } else if (Object.keys(restMetadata).length > 0) {\n newMetadata = restMetadata as ProviderMetadata;\n }\n\n // Create new part without the old metadata\n const { [metadataKey]: _oldMeta, ...restPart } = part as Record<\n string,\n unknown\n >;\n\n if (newMetadata) {\n return { ...restPart, [metadataKey]: newMetadata } as T;\n }\n return restPart as T;\n }\n\n /**\n * Applies a tool result to an existing assistant message.\n * This is used when the client sends CF_AGENT_TOOL_RESULT for client-side tools.\n * The server is the source of truth, so we update the message here and broadcast\n * the update to all clients.\n *\n * @param toolCallId - The tool call ID this result is for\n * @param toolName - The name of the tool\n * @param output - 
The output from the tool execution\n * @returns true if the result was applied, false if the message was not found\n */\n private async _applyToolResult(\n toolCallId: string,\n _toolName: string,\n output: unknown\n ): Promise<boolean> {\n // Find the message with this tool call\n // First check the currently streaming message\n let message: ChatMessage | undefined;\n\n // Check streaming message first\n if (this._streamingMessage) {\n for (const part of this._streamingMessage.parts) {\n if (\"toolCallId\" in part && part.toolCallId === toolCallId) {\n message = this._streamingMessage;\n break;\n }\n }\n }\n\n // If not found in streaming message, retry persisted messages\n if (!message) {\n for (let attempt = 0; attempt < 10; attempt++) {\n message = this._findMessageByToolCallId(toolCallId);\n if (message) break;\n // Wait 100ms before retrying\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n }\n\n if (!message) {\n // The tool result will be included when\n // the client sends the follow-up message via sendMessage().\n console.warn(\n `[AIChatAgent] _applyToolResult: Could not find message with toolCallId ${toolCallId} after retries`\n );\n return false;\n }\n\n // Check if this is the streaming message (not yet persisted)\n const isStreamingMessage = message === this._streamingMessage;\n\n // Update the tool part with the output\n let updated = false;\n if (isStreamingMessage) {\n // Update in place - the message will be persisted when streaming completes\n for (const part of message.parts) {\n if (\n \"toolCallId\" in part &&\n part.toolCallId === toolCallId &&\n \"state\" in part &&\n part.state === \"input-available\"\n ) {\n (part as { state: string; output?: unknown }).state =\n \"output-available\";\n (part as { state: string; output?: unknown }).output = output;\n updated = true;\n break;\n }\n }\n } else {\n // For persisted messages, create updated parts\n const updatedParts = message.parts.map((part) => {\n if (\n \"toolCallId\" in part &&\n part.toolCallId === toolCallId &&\n \"state\" in part &&\n part.state === \"input-available\"\n ) {\n updated = true;\n return {\n ...part,\n state: \"output-available\" as const,\n output\n };\n }\n return part;\n }) as ChatMessage[\"parts\"];\n\n if (updated) {\n // Create the updated message and strip OpenAI item IDs\n const updatedMessage: ChatMessage = this._sanitizeMessageForPersistence(\n {\n ...message,\n parts: updatedParts\n }\n );\n\n // Persist the updated message\n this.sql`\n update cf_ai_chat_agent_messages \n set message = ${JSON.stringify(updatedMessage)}\n where id = ${message.id}\n `;\n\n // Reload messages to update in-memory state\n const persisted = this._loadMessagesFromDb();\n this.messages = autoTransformMessages(persisted);\n }\n }\n\n if (!updated) {\n console.warn(\n `[AIChatAgent] _applyToolResult: Tool part with toolCallId ${toolCallId} not in input-available state`\n );\n return false;\n }\n\n // Broadcast the update to all clients (only for persisted messages)\n // For streaming messages, the update will be included when persisted\n if (!isStreamingMessage) {\n // Re-fetch the message for broadcast since we modified it\n const broadcastMessage = this._findMessageByToolCallId(toolCallId);\n if (broadcastMessage) {\n this._broadcastChatMessage({\n type: MessageType.CF_AGENT_MESSAGE_UPDATED,\n message: broadcastMessage\n });\n }\n }\n\n // Note: We don't automatically continue the conversation here.\n // The client is responsible for sending a follow-up request if needed.\n // This avoids 
re-entering onChatMessage with unexpected state.\n\n return true;\n }\n\n private async _reply(\n id: string,\n response: Response,\n excludeBroadcastIds: string[] = [],\n options: { continuation?: boolean } = {}\n ) {\n const { continuation = false } = options;\n\n return this._tryCatchChat(async () => {\n if (!response.body) {\n // Send empty response if no body\n this._broadcastChatMessage({\n body: \"\",\n done: true,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n return;\n }\n\n // Start tracking this stream for resumability\n const streamId = this._startStream(id);\n\n /* Lazy loading ai sdk, because putting it in module scope is\n * causing issues with startup time.\n * The only place it's used is in _reply, which only matters after\n * a chat message is received.\n * So it's safe to delay loading it until a chat message is received.\n */\n const { getToolName, isToolUIPart, parsePartialJson } =\n await import(\"ai\");\n\n const reader = response.body.getReader();\n\n // Parsing state adapted from:\n // https://github.com/vercel/ai/blob/main/packages/ai/src/ui-message-stream/ui-message-chunks.ts#L295\n const message: ChatMessage = {\n id: `assistant_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`, // default\n role: \"assistant\",\n parts: []\n };\n // Track the streaming message so tool results can be applied before persistence\n this._streamingMessage = message;\n // Set up completion promise for tool continuation to wait on\n this._streamCompletionPromise = new Promise((resolve) => {\n this._streamCompletionResolve = resolve;\n });\n let activeTextParts: Record<string, TextUIPart> = {};\n let activeReasoningParts: Record<string, ReasoningUIPart> = {};\n const partialToolCalls: Record<\n string,\n { text: string; index: number; toolName: string; dynamic?: boolean }\n > = {};\n\n function updateDynamicToolPart(\n options: {\n toolName: string;\n toolCallId: string;\n providerExecuted?: boolean;\n } & (\n | {\n state: \"input-streaming\";\n input: unknown;\n }\n | {\n state: \"input-available\";\n input: unknown;\n providerMetadata?: ProviderMetadata;\n }\n | {\n state: \"output-available\";\n input: unknown;\n output: unknown;\n preliminary: boolean | undefined;\n }\n | {\n state: \"output-error\";\n input: unknown;\n errorText: string;\n providerMetadata?: ProviderMetadata;\n }\n )\n ) {\n const part = message.parts.find(\n (part) =>\n part.type === \"dynamic-tool\" &&\n part.toolCallId === options.toolCallId\n ) as DynamicToolUIPart | undefined;\n\n const anyOptions = options as Record<string, unknown>;\n const anyPart = part as Record<string, unknown>;\n\n if (part != null) {\n part.state = options.state;\n anyPart.toolName = options.toolName;\n anyPart.input = anyOptions.input;\n anyPart.output = anyOptions.output;\n anyPart.errorText = anyOptions.errorText;\n anyPart.rawInput = anyOptions.rawInput ?? anyPart.rawInput;\n anyPart.preliminary = anyOptions.preliminary;\n\n if (\n anyOptions.providerMetadata != null &&\n part.state === \"input-available\"\n ) {\n part.callProviderMetadata =\n anyOptions.providerMetadata as ProviderMetadata;\n }\n } else {\n message.parts.push({\n type: \"dynamic-tool\",\n toolName: options.toolName,\n toolCallId: options.toolCallId,\n state: options.state,\n input: anyOptions.input,\n output: anyOptions.output,\n errorText: anyOptions.errorText,\n preliminary: anyOptions.preliminary,\n ...(anyOptions.providerMetadata != null\n ? 
{ callProviderMetadata: anyOptions.providerMetadata }\n : {})\n } as DynamicToolUIPart);\n }\n }\n\n function updateToolPart(\n options: {\n toolName: string;\n toolCallId: string;\n providerExecuted?: boolean;\n } & (\n | {\n state: \"input-streaming\";\n input: unknown;\n providerExecuted?: boolean;\n }\n | {\n state: \"input-available\";\n input: unknown;\n providerExecuted?: boolean;\n providerMetadata?: ProviderMetadata;\n }\n | {\n state: \"output-available\";\n input: unknown;\n output: unknown;\n providerExecuted?: boolean;\n preliminary?: boolean;\n }\n | {\n state: \"output-error\";\n input: unknown;\n rawInput?: unknown;\n errorText: string;\n providerExecuted?: boolean;\n providerMetadata?: ProviderMetadata;\n }\n )\n ) {\n const part = message.parts.find(\n (part) =>\n isToolUIPart(part) &&\n (part as ToolUIPart).toolCallId === options.toolCallId\n ) as ToolUIPart | undefined;\n\n const anyOptions = options as Record<string, unknown>;\n const anyPart = part as Record<string, unknown>;\n\n if (part != null) {\n part.state = options.state;\n anyPart.input = anyOptions.input;\n anyPart.output = anyOptions.output;\n anyPart.errorText = anyOptions.errorText;\n anyPart.rawInput = anyOptions.rawInput;\n anyPart.preliminary = anyOptions.preliminary;\n\n // once providerExecuted is set, it stays for streaming\n anyPart.providerExecuted =\n anyOptions.providerExecuted ?? part.providerExecuted;\n\n if (\n anyOptions.providerMetadata != null &&\n part.state === \"input-available\"\n ) {\n part.callProviderMetadata =\n anyOptions.providerMetadata as ProviderMetadata;\n }\n } else {\n message.parts.push({\n type: `tool-${options.toolName}`,\n toolCallId: options.toolCallId,\n state: options.state,\n input: anyOptions.input,\n output: anyOptions.output,\n rawInput: anyOptions.rawInput,\n errorText: anyOptions.errorText,\n providerExecuted: anyOptions.providerExecuted,\n preliminary: anyOptions.preliminary,\n ...(anyOptions.providerMetadata != null\n ? { callProviderMetadata: anyOptions.providerMetadata }\n : {})\n } as ToolUIPart);\n }\n }\n\n async function updateMessageMetadata(metadata: unknown) {\n if (metadata != null) {\n const mergedMetadata =\n message.metadata != null\n ? 
{ ...message.metadata, ...metadata } // TODO: do proper merging\n : metadata;\n\n message.metadata = mergedMetadata;\n }\n }\n\n // Determine response format based on content-type\n const contentType = response.headers.get(\"content-type\") || \"\";\n const isSSE = contentType.includes(\"text/event-stream\"); // AI SDK v5 SSE format\n\n // if not AI SDK SSE format, we need to inject text-start and text-end events ourselves\n if (!isSSE) {\n this._broadcastTextEvent(\n streamId,\n { type: \"text-start\", id },\n continuation\n );\n }\n\n let streamCompleted = false;\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n if (!isSSE) {\n this._broadcastTextEvent(\n streamId,\n { type: \"text-end\", id },\n continuation\n );\n }\n\n // Mark the stream as completed\n this._completeStream(streamId);\n streamCompleted = true;\n // Send final completion signal\n this._broadcastChatMessage({\n body: \"\",\n done: true,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n break;\n }\n\n const chunk = decoder.decode(value);\n\n // After streaming is complete, persist the complete assistant's response\n if (isSSE) {\n // Parse AI SDK v5 SSE format and extract text deltas\n const lines = chunk.split(\"\\n\");\n for (const line of lines) {\n if (line.startsWith(\"data: \") && line !== \"data: [DONE]\") {\n try {\n const data: UIMessageChunk = JSON.parse(line.slice(6)); // Remove 'data: ' prefix\n switch (data.type) {\n case \"text-start\": {\n const textPart: TextUIPart = {\n type: \"text\",\n text: \"\",\n providerMetadata: data.providerMetadata,\n state: \"streaming\"\n };\n activeTextParts[data.id] = textPart;\n message.parts.push(textPart);\n break;\n }\n\n case \"text-delta\": {\n const textPart = activeTextParts[data.id];\n textPart.text += data.delta;\n textPart.providerMetadata =\n data.providerMetadata ?? textPart.providerMetadata;\n break;\n }\n\n case \"text-end\": {\n const textPart = activeTextParts[data.id];\n textPart.state = \"done\";\n textPart.providerMetadata =\n data.providerMetadata ?? textPart.providerMetadata;\n delete activeTextParts[data.id];\n break;\n }\n\n case \"reasoning-start\": {\n const reasoningPart: ReasoningUIPart = {\n type: \"reasoning\",\n text: \"\",\n providerMetadata: data.providerMetadata,\n state: \"streaming\"\n };\n activeReasoningParts[data.id] = reasoningPart;\n message.parts.push(reasoningPart);\n break;\n }\n\n case \"reasoning-delta\": {\n const reasoningPart = activeReasoningParts[data.id];\n reasoningPart.text += data.delta;\n reasoningPart.providerMetadata =\n data.providerMetadata ?? reasoningPart.providerMetadata;\n break;\n }\n\n case \"reasoning-end\": {\n const reasoningPart = activeReasoningParts[data.id];\n reasoningPart.providerMetadata =\n data.providerMetadata ?? 
reasoningPart.providerMetadata;\n reasoningPart.state = \"done\";\n delete activeReasoningParts[data.id];\n\n break;\n }\n\n case \"file\": {\n message.parts.push({\n type: \"file\",\n mediaType: data.mediaType,\n url: data.url\n });\n\n break;\n }\n\n case \"source-url\": {\n message.parts.push({\n type: \"source-url\",\n sourceId: data.sourceId,\n url: data.url,\n title: data.title,\n providerMetadata: data.providerMetadata\n });\n\n break;\n }\n\n case \"source-document\": {\n message.parts.push({\n type: \"source-document\",\n sourceId: data.sourceId,\n mediaType: data.mediaType,\n title: data.title,\n filename: data.filename,\n providerMetadata: data.providerMetadata\n });\n\n break;\n }\n\n case \"tool-input-start\": {\n const toolInvocations =\n message.parts.filter(isToolUIPart);\n\n // add the partial tool call to the map\n partialToolCalls[data.toolCallId] = {\n text: \"\",\n toolName: data.toolName,\n index: toolInvocations.length,\n dynamic: data.dynamic\n };\n\n if (data.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-streaming\",\n input: undefined\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-streaming\",\n input: undefined\n });\n }\n\n break;\n }\n\n case \"tool-input-delta\": {\n const partialToolCall = partialToolCalls[data.toolCallId];\n\n partialToolCall.text += data.inputTextDelta;\n\n const partialArgsResult = await parsePartialJson(\n partialToolCall.text\n );\n const partialArgs = (\n partialArgsResult as { value: Record<string, unknown> }\n ).value;\n\n if (partialToolCall.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: partialToolCall.toolName,\n state: \"input-streaming\",\n input: partialArgs\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: partialToolCall.toolName,\n state: \"input-streaming\",\n input: partialArgs\n });\n }\n\n break;\n }\n\n case \"tool-input-available\": {\n if (data.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-available\",\n input: data.input,\n providerMetadata: data.providerMetadata\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"input-available\",\n input: data.input,\n providerExecuted: data.providerExecuted,\n providerMetadata: data.providerMetadata\n });\n }\n\n // TODO: Do we want to expose onToolCall?\n\n // invoke the onToolCall callback if it exists. 
This is blocking.\n // In the future we should make this non-blocking, which\n // requires additional state management for error handling etc.\n // Skip calling onToolCall for provider-executed tools since they are already executed\n // if (onToolCall && !data.providerExecuted) {\n // await onToolCall({\n // toolCall: data\n // });\n // }\n break;\n }\n\n case \"tool-input-error\": {\n if (data.dynamic) {\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"output-error\",\n input: data.input,\n errorText: data.errorText,\n providerMetadata: data.providerMetadata\n });\n } else {\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: data.toolName,\n state: \"output-error\",\n input: undefined,\n rawInput: data.input,\n errorText: data.errorText,\n providerExecuted: data.providerExecuted,\n providerMetadata: data.providerMetadata\n });\n }\n\n break;\n }\n\n case \"tool-output-available\": {\n if (data.dynamic) {\n const toolInvocations = message.parts.filter(\n (part) => part.type === \"dynamic-tool\"\n ) as DynamicToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: toolInvocation.toolName,\n state: \"output-available\",\n input: toolInvocation.input,\n output: data.output,\n preliminary: data.preliminary\n });\n } else {\n const toolInvocations = message.parts.filter(\n isToolUIPart\n ) as ToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: getToolName(toolInvocation),\n state: \"output-available\",\n input: toolInvocation.input,\n output: data.output,\n providerExecuted: data.providerExecuted,\n preliminary: data.preliminary\n });\n }\n\n break;\n }\n\n case \"tool-output-error\": {\n if (data.dynamic) {\n const toolInvocations = message.parts.filter(\n (part) => part.type === \"dynamic-tool\"\n ) as DynamicToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n\n updateDynamicToolPart({\n toolCallId: data.toolCallId,\n toolName: toolInvocation.toolName,\n state: \"output-error\",\n input: toolInvocation.input,\n errorText: data.errorText\n });\n } else {\n const toolInvocations = message.parts.filter(\n isToolUIPart\n ) as ToolUIPart[];\n\n const toolInvocation = toolInvocations.find(\n (invocation) =>\n invocation.toolCallId === data.toolCallId\n );\n\n if (!toolInvocation)\n throw new Error(\"Tool invocation not found\");\n updateToolPart({\n toolCallId: data.toolCallId,\n toolName: getToolName(toolInvocation),\n state: \"output-error\",\n input: toolInvocation.input,\n rawInput:\n \"rawInput\" in toolInvocation\n ? 
toolInvocation.rawInput\n : undefined,\n errorText: data.errorText\n });\n }\n\n break;\n }\n\n case \"start-step\": {\n // add a step boundary part to the message\n message.parts.push({ type: \"step-start\" });\n break;\n }\n\n case \"finish-step\": {\n // reset the current text and reasoning parts\n activeTextParts = {};\n activeReasoningParts = {};\n break;\n }\n\n case \"start\": {\n if (data.messageId != null) {\n message.id = data.messageId;\n }\n\n await updateMessageMetadata(data.messageMetadata);\n\n break;\n }\n\n case \"finish\": {\n await updateMessageMetadata(data.messageMetadata);\n break;\n }\n\n case \"message-metadata\": {\n await updateMessageMetadata(data.messageMetadata);\n break;\n }\n\n case \"error\": {\n this._broadcastChatMessage({\n error: true,\n body: data.errorText ?? JSON.stringify(data),\n done: false,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE\n });\n\n break;\n }\n // Do we want to handle data parts?\n }\n\n // Convert internal AI SDK stream events to valid UIMessageStreamPart format.\n // The \"finish\" event with \"finishReason\" is an internal LanguageModelV3StreamPart,\n // not a UIMessageStreamPart (which expects \"messageMetadata\" instead).\n // See: https://github.com/cloudflare/agents/issues/677\n let eventToSend: unknown = data;\n if (data.type === \"finish\" && \"finishReason\" in data) {\n const { finishReason, ...rest } = data as {\n finishReason: string;\n [key: string]: unknown;\n };\n eventToSend = {\n ...rest,\n type: \"finish\",\n messageMetadata: { finishReason }\n };\n }\n\n // Store chunk for replay on reconnection\n const chunkBody = JSON.stringify(eventToSend);\n this._storeStreamChunk(streamId, chunkBody);\n\n // Forward the converted event to the client\n this._broadcastChatMessage({\n body: chunkBody,\n done: false,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n } catch (_error) {\n // Skip malformed JSON lines silently\n }\n }\n }\n } else {\n // Handle plain text responses (e.g., from generateText)\n // Treat the entire chunk as a text delta to preserve exact formatting\n if (chunk.length > 0) {\n message.parts.push({ type: \"text\", text: chunk });\n this._broadcastTextEvent(\n streamId,\n { type: \"text-delta\", id, delta: chunk },\n continuation\n );\n }\n }\n }\n } catch (error) {\n // Mark stream as error if not already completed\n if (!streamCompleted) {\n this._markStreamError(streamId);\n // Notify clients of the error\n this._broadcastChatMessage({\n body: error instanceof Error ? 
error.message : \"Stream error\",\n done: true,\n error: true,\n id,\n type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,\n ...(continuation && { continuation: true })\n });\n }\n throw error;\n } finally {\n reader.releaseLock();\n }\n\n if (message.parts.length > 0) {\n if (continuation) {\n // Find the last assistant message and append parts to it\n let lastAssistantIdx = -1;\n for (let i = this.messages.length - 1; i >= 0; i--) {\n if (this.messages[i].role === \"assistant\") {\n lastAssistantIdx = i;\n break;\n }\n }\n if (lastAssistantIdx >= 0) {\n const lastAssistant = this.messages[lastAssistantIdx];\n const mergedMessage: ChatMessage = {\n ...lastAssistant,\n parts: [...lastAssistant.parts, ...message.parts]\n };\n const updatedMessages = [...this.messages];\n updatedMessages[lastAssistantIdx] = mergedMessage;\n await this.persistMessages(updatedMessages, excludeBroadcastIds);\n } else {\n // No assistant message to append to, create new one\n await this.persistMessages(\n [...this.messages, message],\n excludeBroadcastIds\n );\n }\n } else {\n await this.persistMessages(\n [...this.messages, message],\n excludeBroadcastIds\n );\n }\n }\n\n // Clear the streaming message reference and resolve completion promise\n this._streamingMessage = null;\n if (this._streamCompletionResolve) {\n this._streamCompletionResolve();\n this._streamCompletionResolve = null;\n this._streamCompletionPromise = null;\n }\n });\n }\n\n /**\n * Mark a stream as errored and clean up state.\n * @param streamId - The stream to mark as errored\n * @internal Protected for testing purposes.\n */\n protected _markStreamError(streamId: string) {\n // Flush any pending chunks before marking error\n this._flushChunkBuffer();\n\n this.sql`\n update cf_ai_chat_stream_metadata \n set status = 'error', completed_at = ${Date.now()} \n where id = ${streamId}\n `;\n this._activeStreamId = null;\n this._activeRequestId = null;\n this._streamChunkIndex = 0;\n }\n\n /**\n * For the given message id, look up its associated AbortController\n * If the AbortController does not exist, create and store one in memory\n *\n * returns the AbortSignal associated with the AbortController\n */\n private _getAbortSignal(id: string): AbortSignal | undefined {\n // Defensive check, since we're coercing message types at the moment\n if (typeof id !== \"string\") {\n return undefined;\n }\n\n if (!this._chatMessageAbortControllers.has(id)) {\n this._chatMessageAbortControllers.set(id, new AbortController());\n }\n\n return this._chatMessageAbortControllers.get(id)?.signal;\n }\n\n /**\n * Remove an abort controller from the cache of pending message responses\n */\n private _removeAbortController(id: string) {\n this._chatMessageAbortControllers.delete(id);\n }\n\n /**\n * Propagate an abort signal for any requests associated with the given message id\n */\n private _cancelChatRequest(id: string) {\n if (this._chatMessageAbortControllers.has(id)) {\n const abortController = this._chatMessageAbortControllers.get(id);\n abortController?.abort();\n }\n }\n\n /**\n * Abort all pending requests and clear the cache of AbortControllers\n */\n private _destroyAbortControllers() {\n for (const controller of this._chatMessageAbortControllers.values()) {\n controller?.abort();\n }\n this._chatMessageAbortControllers.clear();\n }\n\n /**\n * When the DO is destroyed, cancel all pending requests and clean up resources\n */\n async destroy() {\n this._destroyAbortControllers();\n\n // Flush any remaining chunks before cleanup\n this._flushChunkBuffer();\n\n // 
Clean up stream tables\n this.sql`drop table if exists cf_ai_chat_stream_chunks`;\n this.sql`drop table if exists cf_ai_chat_stream_metadata`;\n\n // Clear active stream state\n this._activeStreamId = null;\n this._activeRequestId = null;\n\n await super.destroy();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwGA,SAAgB,6BACd,aACS;AACT,KAAI,CAAC,eAAe,YAAY,WAAW,EACzC,QAAO,EAAE;CAIX,MAAM,4BAAY,IAAI,KAAa;AACnC,MAAK,MAAM,KAAK,aAAa;AAC3B,MAAI,UAAU,IAAI,EAAE,KAAK,CACvB,SAAQ,KACN,uDAAuD,EAAE,KAAK,wDAC/D;AAEH,YAAU,IAAI,EAAE,KAAK;;AAGvB,QAAO,OAAO,YACZ,YAAY,KAAK,MAAM,CACrB,EAAE,MACF,KAAK;EACH,aAAa,EAAE,eAAe;EAC9B,aAAa,WAAW,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;EAE5D,CAAC,CACH,CAAC,CACH;;;AAIH,MAAM,oBAAoB;;AAE1B,MAAM,wBAAwB;;AAE9B,MAAM,4BAA4B,MAAS;;AAE3C,MAAM,sBAAsB,MAAU;;AAEtC,MAAM,2BAA2B,OAAU,KAAK;AAEhD,MAAM,UAAU,IAAI,aAAa;;;;;AA4BjC,IAAa,cAAb,cAGU,MAAkB;CAgE1B,YAAY,KAAmB,KAAU;AACvC,QAAM,KAAK,IAAI;yBArD0B;0BAMC;2BAOI;kCAOS;kCACD;2BAK5B;sBAWvB,EAAE;2BAKqB;0BAKD;AAOzB,OAAK,GAAG;;;;;AAOR,OAAK,GAAG;;;;;;;AAQR,OAAK,GAAG;;;;;;;AAQR,OAAK,GAAG;;AAOR,OAAK,WAAW,sBAHI,KAAK,qBAAqB,CAGI;AAElD,OAAK,+CAA+B,IAAI,KAAK;AAG7C,OAAK,sBAAsB;EAC3B,MAAM,aAAa,KAAK,UAAU,KAAK,KAAK;AAC5C,OAAK,YAAY,OAAO,YAAwB,UAA2B;AAEzE,OAAI,KAAK,gBACP,MAAK,sBAAsB,WAAW;AAGxC,UAAO,WAAW,YAAYA,MAAI;;EAIpC,MAAM,aAAa,KAAK,UAAU,KAAK,KAAK;AAC5C,OAAK,YAAY,OAAO,YAAwB,YAAuB;AAErE,OAAI,OAAO,YAAY,UAAU;IAC/B,IAAIC;AACJ,QAAI;AACF,YAAO,KAAK,MAAM,QAAQ;aACnB,QAAQ;AAEf,YAAO,WAAW,YAAY,QAAQ;;AAIxC,QACE,KAAK,SAAS,YAAY,6BAC1B,KAAK,KAAK,WAAW,QACrB;KACA,MAAM,EAAE,SAAS,KAAK;KAEtB,MAAM,EAAE,UAAU,gBADH,KAAK,MAAM,KAAe;KAOzC,MAAM,sBAAsB,sBAAsB,SAAS;AAE3D,UAAK,sBACH;MACE,UAAU;MACV,MAAM,YAAY;MACnB,EACD,CAAC,WAAW,GAAG,CAChB;AAED,WAAM,KAAK,gBAAgB,qBAAqB,CAAC,WAAW,GAAG,CAAC;AAEhE,UAAK,eAAe,KAClB;MACE,gBAAgB;MAChB,IAAI,KAAK;MACT,SAAS,EAAE;MACX,WAAW,KAAK,KAAK;MACrB,MAAM;MACP,EACD,KAAK,IACN;KAED,MAAM,gBAAgB,KAAK;KAC3B,MAAM,cAAc,KAAK,gBAAgB,cAAc;AAEvD,YAAO,KAAK,cAAc,YAAY;AAGpC,aAAO,aAAa,IAClB;OAAE,OAAO;OAAM;OAAY,SAAS;OAAW,OAAO;OAAW,EACjE,YAAY;OACV,MAAM,WAAW,MAAM,KAAK,cAC1B,OAAO,kBAAkB;AACvB,aAAK,uBAAuB,cAAc;AAE1C,aAAK,eAAe,KAClB;SACE,gBAAgB;SAChB,IAAI,KAAK;SACT,SAAS,EAAE;SACX,WAAW,KAAK,KAAK;SACrB,MAAM;SACP,EACD,KAAK,IACN;UAEH;QACE;QACA;QACD,CACF;AAED,WAAI,SACF,OAAM,KAAK,OAAO,KAAK,IAAI,UAAU,CAAC,WAAW,GAAG,CAAC;YAChD;AACL,gBAAQ,KACN,uEAAuE,gBACxE;AACD,aAAK,sBACH;SACE,MAAM;SACN,MAAM;SACN,IAAI,KAAK;SACT,MAAM,YAAY;SACnB,EACD,CAAC,WAAW,GAAG,CAChB;;QAGN;OACD;;AAIJ,QAAI,KAAK,SAAS,YAAY,qBAAqB;AACjD,UAAK,0BAA0B;AAC/B,UAAK,GAAG;AACR,UAAK,GAAG;AACR,UAAK,GAAG;AACR,UAAK,kBAAkB;AACvB,UAAK,mBAAmB;AACxB,UAAK,oBAAoB;AACzB,UAAK,WAAW,EAAE;AAClB,UAAK,sBACH,EAAE,MAAM,YAAY,qBAAqB,EACzC,CAAC,WAAW,GAAG,CAChB;AACD;;AAIF,QAAI,KAAK,SAAS,YAAY,wBAAwB;KACpD,MAAM,sBAAsB,sBAAsB,KAAK,SAAS;AAChE,WAAM,KAAK,gBAAgB,qBAAqB,CAAC,WAAW,GAAG,CAAC;AAChE;;AAIF,QAAI,KAAK,SAAS,YAAY,8BAA8B;AAC1D,UAAK,mBAAmB,KAAK,GAAG;AAChC;;AAIF,QAAI,KAAK,SAAS,YAAY,4BAA4B;AACxD,SACE,KAAK,mBACL,KAAK,oBACL,KAAK,qBAAqB,KAAK,GAE/B,MAAK,kBACH,YACA,KAAK,iBACL,KAAK,iBACN;AAEH;;AAIF,QAAI,KAAK,SAAS,YAAY,sBAAsB;KAClD,MAAM,EAAE,YAAY,UAAU,QAAQ,iBAAiB;AAGvD,UAAK,iBAAiB,YAAY,UAAU,OAAO,CAAC,MACjD,YAAY;AAIX,UAAI,WAAW,cAAc;OAG3B,MAAM,gBAAgB,YAAY;AAChC,YAAI,KAAK,yBACP,OAAM,KAAK;YAGX,OAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAI5D,sBAAe,CAAC,WAAW;QACzB,MAAM,iBAAiB,QAAQ;QAC/B,MAAM,cAAc,KAAK,gBAAgB,eAAe;AAExD,aAAK,cAAc,YAAY;AAC7B,gBAAO,aAAa,IAClB;UACE,OAAO;UACP;UACA,SAAS;UACT,OAAO;UACR,EACD,YAAY;UACV,MAAM,WAAW,MAAM,KAAK,cAC1B,OAAO,kBAAkB;AACvB,gBAAK,uBAAuB,eAAe;AAE3C,gBAAK,eAAe,KAClB;YACE,gBACE;YACF,IAAI;YACJ,SAAS,EAAE;YACX,WAAW,KAAK,KAAK;YACrB,MAAM;YA
CP,EACD,KAAK,IACN;aAEH,EACE,aACD,CACF;AAED,cAAI,SAMF,OAAM,KAAK,OACT,gBACA,UACA,EAAE,EACF,EAAE,cAAc,MAAM,CACvB;WAGN;UACD;SACF;;OAGP;AACD;;;AAKJ,UAAO,WAAW,YAAY,QAAQ;;;;;;;;;CAU1C,AAAU,uBAAuB;EAC/B,MAAM,gBAAgB,KAAK,GAAmB;;;;;;AAO9C,MAAI,iBAAiB,cAAc,SAAS,GAAG;GAC7C,MAAM,SAAS,cAAc;GAC7B,MAAM,YAAY,KAAK,KAAK,GAAG,OAAO;AAGtC,OAAI,YAAY,2BAA2B;AACzC,SACG,GAAG,0DAA0D,OAAO;AACvE,SACG,GAAG,qDAAqD,OAAO;AAClE,YAAQ,KACN,sCAAsC,OAAO,GAAG,SAAS,KAAK,MAAM,YAAY,IAAK,CAAC,IACvF;AACD;;AAGF,QAAK,kBAAkB,OAAO;AAC9B,QAAK,mBAAmB,OAAO;GAG/B,MAAM,YAAY,KAAK,GAA0B;;;4BAG3B,KAAK,gBAAgB;;AAE3C,QAAK,oBACH,aAAa,UAAU,IAAI,aAAa,OACpC,UAAU,GAAG,YAAY,IACzB;;;;;;;;;CAUV,AAAQ,sBAAsB,YAAwB;AACpD,MAAI,CAAC,KAAK,mBAAmB,CAAC,KAAK,iBACjC;AAIF,aAAW,KACT,KAAK,UAAU;GACb,MAAM,YAAY;GAClB,IAAI,KAAK;GACV,CAAC,CACH;;;;;;;;CASH,AAAQ,kBACN,YACA,UACA,WACA;AAEA,OAAK,mBAAmB;EAExB,MAAM,SAAS,KAAK,GAAgB;;0BAEd,SAAS;;;AAK/B,OAAK,MAAM,SAAS,UAAU,EAAE,CAC9B,YAAW,KACT,KAAK,UAAU;GACb,MAAM,MAAM;GACZ,MAAM;GACN,IAAI;GACJ,MAAM,YAAY;GACnB,CAAC,CACH;AAKH,MAAI,KAAK,oBAAoB,SAC3B,YAAW,KACT,KAAK,UAAU;GACb,MAAM;GACN,MAAM;GACN,IAAI;GACJ,MAAM,YAAY;GACnB,CAAC,CACH;;;;;;;;CAUL,AAAU,kBAAkB,UAAkB,MAAc;AAE1D,MAAI,KAAK,aAAa,UAAU,sBAC9B,MAAK,mBAAmB;AAG1B,OAAK,aAAa,KAAK;GACrB,IAAI,QAAQ;GACZ;GACA;GACA,OAAO,KAAK;GACb,CAAC;AACF,OAAK;AAGL,MAAI,KAAK,aAAa,UAAU,kBAC9B,MAAK,mBAAmB;;;;;;;CAS5B,AAAU,oBAAoB;AAE5B,MAAI,KAAK,qBAAqB,KAAK,aAAa,WAAW,EACzD;AAGF,OAAK,oBAAoB;AACzB,MAAI;GACF,MAAM,SAAS,KAAK;AACpB,QAAK,eAAe,EAAE;GAGtB,MAAM,MAAM,KAAK,KAAK;AACtB,QAAK,MAAM,SAAS,OAClB,MAAK,GAAG;;oBAEI,MAAM,GAAG,IAAI,MAAM,SAAS,IAAI,MAAM,KAAK,IAAI,MAAM,MAAM,IAAI,IAAI;;YAGzE;AACR,QAAK,oBAAoB;;;;;;;;;;CAW7B,AAAU,aAAa,WAA2B;AAEhD,OAAK,mBAAmB;EAExB,MAAM,WAAW,QAAQ;AACzB,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AACxB,OAAK,oBAAoB;AAEzB,OAAK,GAAG;;gBAEI,SAAS,IAAI,UAAU,iBAAiB,KAAK,KAAK,CAAC;;AAG/D,SAAO;;;;;;;CAQT,AAAU,gBAAgB,UAAkB;AAE1C,OAAK,mBAAmB;AAExB,OAAK,GAAG;;iDAEqC,KAAK,KAAK,CAAC;mBACzC,SAAS;;AAExB,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AACxB,OAAK,oBAAoB;AAGzB,OAAK,yBAAyB;;;;;;CAOhC,AAAQ,0BAA0B;EAChC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,mBAAmB,oBAChC;AAEF,OAAK,mBAAmB;EAExB,MAAM,SAAS,MAAM;AACrB,OAAK,GAAG;;;;wDAI4C,OAAO;;;AAG3D,OAAK,GAAG;;sDAE0C,OAAO;;;CAI3D,AAAQ,sBAAsB,SAA0B,SAAoB;AAC1E,OAAK,UAAU,KAAK,UAAU,QAAQ,EAAE,QAAQ;;;;;;;;;;CAWlD,AAAQ,oBACN,UACA,OAIA,cACA;EACA,MAAM,OAAO,KAAK,UAAU,MAAM;AAClC,OAAK,kBAAkB,UAAU,KAAK;AACtC,OAAK,sBAAsB;GACzB;GACA,MAAM;GACN,IAAI,MAAM;GACV,MAAM,YAAY;GAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;GAC3C,CAAC;;CAGJ,AAAQ,sBAAqC;AAI3C,UAFE,KAAK,GAAG,iEACR,EAAE,EAED,KAAK,QAAQ;AACZ,OAAI;AACF,WAAO,KAAK,MAAM,IAAI,QAAkB;YACjC,OAAO;AACd,YAAQ,MAAM,2BAA2B,IAAI,GAAG,IAAI,MAAM;AAC1D,WAAO;;IAET,CACD,QAAQ,QAA4B,QAAQ,KAAK;;CAGtD,MAAe,UAAU,SAAqC;AAC5D,SAAO,KAAK,cAAc,YAAY;AAGpC,OAFY,IAAI,IAAI,QAAQ,IAAI,CAExB,SAAS,SAAS,gBAAgB,EAAE;IAC1C,MAAM,WAAW,KAAK,qBAAqB;AAC3C,WAAO,SAAS,KAAK,SAAS;;AAGhC,UAAO,MAAM,UAAU,QAAQ;IAC/B;;CAGJ,MAAc,cAAiB,IAA0B;AACvD,MAAI;AACF,UAAO,MAAM,IAAI;WACV,GAAG;AACV,SAAM,KAAK,QAAQ,EAAE;;;;;;;;;CAUzB,MAAM,cAEJ,UAEA,SAC+B;AAC/B,QAAM,IAAI,MACR,8FACD;;;;;;CAOH,MAAM,aAAa,UAAyB;AAC1C,QAAM,KAAK,gBAAgB,SAAS;AACpC,QAAM,KAAK,cAAc,YAAY;GACnC,MAAM,WAAW,MAAM,KAAK,oBAAoB,GAAG;AACnD,OAAI,SAAU,MAAK,OAAO,OAAO,YAAY,EAAE,SAAS;IACxD;;CAGJ,MAAM,gBACJ,UACA,sBAAgC,EAAE,EAClC;EAIA,MAAM,iBAAiB,KAAK,8BAA8B,SAAS;AAGnE,OAAK,MAAM,WAAW,gBAAgB;GAIpC,MAAM,mBAAmB,KAAK,+BAA+B,QAAQ;GACrE,MAAM,gBAAgB,KAAK,4BAA4B,iBAAiB;AACxE,QAAK,GAAG;;kBAEI,cAAc,GAAG,IAAI,KAAK,UAAU,cAAc,CAAC;;;;AAOjE,OAAK,WAAW,sBADE,KAAK,qBAAqB,CACI;AAChD,OAAK,sBACH;GACE,UAAU;GACV,MAAM,YAAY;GACnB,EACD,oBACD;;;;;;;;;;CAWH,AAAQ,8BACN,kBACe;EAEf,MAAM,oCAAoB,IAAI,KAAsB;AACpD,OAAK,M
AAM,OAAO,KAAK,UAAU;AAC/B,OAAI,IAAI,SAAS,YAAa;AAC9B,QAAK,MAAM,QAAQ,IAAI,MACrB,KACE,gBAAgB,QAChB,WAAW,QACX,KAAK,UAAU,sBACf,YAAY,KAEZ,mBAAkB,IAChB,KAAK,YACJ,KAA6B,OAC/B;;AAMP,MAAI,kBAAkB,SAAS,EAC7B,QAAO;AAIT,SAAO,iBAAiB,KAAK,QAAQ;AACnC,OAAI,IAAI,SAAS,YAAa,QAAO;GAErC,IAAI,aAAa;GACjB,MAAM,eAAe,IAAI,MAAM,KAAK,SAAS;AAE3C,QACE,gBAAgB,QAChB,WAAW,QACX,KAAK,UAAU,qBACf,kBAAkB,IAAI,KAAK,WAAqB,EAChD;AACA,kBAAa;AACb,YAAO;MACL,GAAG;MACH,OAAO;MACP,QAAQ,kBAAkB,IAAI,KAAK,WAAqB;MACzD;;AAEH,WAAO;KACP;AAEF,UAAO,aAAa;IAAE,GAAG;IAAK,OAAO;IAAc,GAAG;IACtD;;;;;;;;;;;;CAaJ,AAAQ,4BAA4B,SAAmC;AACrE,MAAI,QAAQ,SAAS,YACnB,QAAO;AAIT,OAAK,MAAM,QAAQ,QAAQ,MACzB,KACE,gBAAgB,QAChB,WAAW,QACX,KAAK,UAAU,oBACf;GACA,MAAM,aAAa,KAAK;GAGxB,MAAM,kBAAkB,KAAK,yBAAyB,WAAW;AACjE,OAAI,mBAAmB,gBAAgB,OAAO,QAAQ,GAGpD,QAAO;IACL,GAAG;IACH,IAAI,gBAAgB;IACrB;;AAKP,SAAO;;;;;;;;;;CAWT,AAAQ,yBACN,YACyB;AACzB,OAAK,MAAM,OAAO,KAAK,UAAU;AAC/B,OAAI,IAAI,SAAS,YAAa;AAE9B,QAAK,MAAM,QAAQ,IAAI,MACrB,KAAI,gBAAgB,QAAQ,KAAK,eAAe,WAC9C,QAAO;;;;;;;;;;;;;;;;;;;;;CAyBf,AAAQ,+BAA+B,SAAmC;EAexE,MAAM,iBAbgB,QAAQ,MAAM,QAAQ,SAAS;AACnD,OAAI,KAAK,SAAS,aAAa;IAC7B,MAAM,gBAAgB;AAGtB,QAAI,CAAC,cAAc,QAAQ,cAAc,KAAK,MAAM,KAAK,GACvD,QAAO;;AAGX,UAAO;IACP,CAGmC,KAAK,SAAS;GACjD,IAAI,gBAAgB;AAGpB,OACE,sBAAsB,iBACtB,cAAc,oBACd,OAAO,cAAc,qBAAqB,YAC1C,YAAY,cAAc,iBAE1B,iBAAgB,KAAK,qBACnB,eACA,mBACD;AAIH,OACE,0BAA0B,iBAC1B,cAAc,wBACd,OAAO,cAAc,yBAAyB,YAC9C,YAAY,cAAc,qBAE1B,iBAAgB,KAAK,qBACnB,eACA,uBACD;AAGH,UAAO;IACP;AAEF,SAAO;GAAE,GAAG;GAAS,OAAO;GAAgB;;;;;;CAO9C,AAAQ,qBACN,MACA,aACG;EACH,MAAM,WAAY,KAAiC;AAKnD,MAAI,CAAC,UAAU,OAAQ,QAAO;EAK9B,MAAM,EACJ,QAAQ,SACR,2BAA2B,MAC3B,GAAG,eANc,SAAS;EAU5B,MAAM,uBAAuB,OAAO,KAAK,WAAW,CAAC,SAAS;EAC9D,MAAM,EAAE,QAAQ,SAAS,GAAG,iBAAiB;EAE7C,IAAIC;AACJ,MAAI,qBACF,eAAc;GACZ,GAAG;GACH,QAAQ;GACT;WACQ,OAAO,KAAK,aAAa,CAAC,SAAS,EAC5C,eAAc;EAIhB,MAAM,GAAG,cAAc,UAAU,GAAG,aAAa;AAKjD,MAAI,YACF,QAAO;GAAE,GAAG;IAAW,cAAc;GAAa;AAEpD,SAAO;;;;;;;;;;;;;CAcT,MAAc,iBACZ,YACA,WACA,QACkB;EAGlB,IAAIC;AAGJ,MAAI,KAAK,mBACP;QAAK,MAAM,QAAQ,KAAK,kBAAkB,MACxC,KAAI,gBAAgB,QAAQ,KAAK,eAAe,YAAY;AAC1D,cAAU,KAAK;AACf;;;AAMN,MAAI,CAAC,QACH,MAAK,IAAI,UAAU,GAAG,UAAU,IAAI,WAAW;AAC7C,aAAU,KAAK,yBAAyB,WAAW;AACnD,OAAI,QAAS;AAEb,SAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAI5D,MAAI,CAAC,SAAS;AAGZ,WAAQ,KACN,0EAA0E,WAAW,gBACtF;AACD,UAAO;;EAIT,MAAM,qBAAqB,YAAY,KAAK;EAG5C,IAAI,UAAU;AACd,MAAI,oBAEF;QAAK,MAAM,QAAQ,QAAQ,MACzB,KACE,gBAAgB,QAChB,KAAK,eAAe,cACpB,WAAW,QACX,KAAK,UAAU,mBACf;AACA,IAAC,KAA6C,QAC5C;AACF,IAAC,KAA6C,SAAS;AACvD,cAAU;AACV;;SAGC;GAEL,MAAM,eAAe,QAAQ,MAAM,KAAK,SAAS;AAC/C,QACE,gBAAgB,QAChB,KAAK,eAAe,cACpB,WAAW,QACX,KAAK,UAAU,mBACf;AACA,eAAU;AACV,YAAO;MACL,GAAG;MACH,OAAO;MACP;MACD;;AAEH,WAAO;KACP;AAEF,OAAI,SAAS;IAEX,MAAMC,iBAA8B,KAAK,+BACvC;KACE,GAAG;KACH,OAAO;KACR,CACF;AAGD,SAAK,GAAG;;0BAEU,KAAK,UAAU,eAAe,CAAC;uBAClC,QAAQ,GAAG;;AAK1B,SAAK,WAAW,sBADE,KAAK,qBAAqB,CACI;;;AAIpD,MAAI,CAAC,SAAS;AACZ,WAAQ,KACN,6DAA6D,WAAW,+BACzE;AACD,UAAO;;AAKT,MAAI,CAAC,oBAAoB;GAEvB,MAAM,mBAAmB,KAAK,yBAAyB,WAAW;AAClE,OAAI,iBACF,MAAK,sBAAsB;IACzB,MAAM,YAAY;IAClB,SAAS;IACV,CAAC;;AAQN,SAAO;;CAGT,MAAc,OACZ,IACA,UACA,sBAAgC,EAAE,EAClC,UAAsC,EAAE,EACxC;EACA,MAAM,EAAE,eAAe,UAAU;AAEjC,SAAO,KAAK,cAAc,YAAY;AACpC,OAAI,CAAC,SAAS,MAAM;AAElB,SAAK,sBAAsB;KACzB,MAAM;KACN,MAAM;KACN;KACA,MAAM,YAAY;KAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;KAC3C,CAAC;AACF;;GAIF,MAAM,WAAW,KAAK,aAAa,GAAG;GAQtC,MAAM,EAAE,4BAAa,8BAAc,qBACjC,MAAM,OAAO;GAEf,MAAM,SAAS,SAAS,KAAK,WAAW;GAIxC,MAAMC,UAAuB;IAC3B,IAAI,aAAa,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,GAAG;IACtE,MAAM;IACN,OAAO,EAAE;IACV;AAED,QAAK,oBAAoB;AAEzB,QAAK,2BAA2B,IAAI,SAAS,YAAY;AACvD,SA
AK,2BAA2B;KAChC;GACF,IAAIC,kBAA8C,EAAE;GACpD,IAAIC,uBAAwD,EAAE;GAC9D,MAAMC,mBAGF,EAAE;GAEN,SAAS,sBACP,WA2BA;IACA,MAAM,OAAO,QAAQ,MAAM,MACxB,WACCC,OAAK,SAAS,kBACdA,OAAK,eAAeC,UAAQ,WAC/B;IAED,MAAM,aAAaA;IACnB,MAAM,UAAU;AAEhB,QAAI,QAAQ,MAAM;AAChB,UAAK,QAAQA,UAAQ;AACrB,aAAQ,WAAWA,UAAQ;AAC3B,aAAQ,QAAQ,WAAW;AAC3B,aAAQ,SAAS,WAAW;AAC5B,aAAQ,YAAY,WAAW;AAC/B,aAAQ,WAAW,WAAW,YAAY,QAAQ;AAClD,aAAQ,cAAc,WAAW;AAEjC,SACE,WAAW,oBAAoB,QAC/B,KAAK,UAAU,kBAEf,MAAK,uBACH,WAAW;UAGf,SAAQ,MAAM,KAAK;KACjB,MAAM;KACN,UAAUA,UAAQ;KAClB,YAAYA,UAAQ;KACpB,OAAOA,UAAQ;KACf,OAAO,WAAW;KAClB,QAAQ,WAAW;KACnB,WAAW,WAAW;KACtB,aAAa,WAAW;KACxB,GAAI,WAAW,oBAAoB,OAC/B,EAAE,sBAAsB,WAAW,kBAAkB,GACrD,EAAE;KACP,CAAsB;;GAI3B,SAAS,eACP,WAgCA;IACA,MAAM,OAAO,QAAQ,MAAM,MACxB,WACCC,eAAaF,OAAK,IACjBA,OAAoB,eAAeC,UAAQ,WAC/C;IAED,MAAM,aAAaA;IACnB,MAAM,UAAU;AAEhB,QAAI,QAAQ,MAAM;AAChB,UAAK,QAAQA,UAAQ;AACrB,aAAQ,QAAQ,WAAW;AAC3B,aAAQ,SAAS,WAAW;AAC5B,aAAQ,YAAY,WAAW;AAC/B,aAAQ,WAAW,WAAW;AAC9B,aAAQ,cAAc,WAAW;AAGjC,aAAQ,mBACN,WAAW,oBAAoB,KAAK;AAEtC,SACE,WAAW,oBAAoB,QAC/B,KAAK,UAAU,kBAEf,MAAK,uBACH,WAAW;UAGf,SAAQ,MAAM,KAAK;KACjB,MAAM,QAAQA,UAAQ;KACtB,YAAYA,UAAQ;KACpB,OAAOA,UAAQ;KACf,OAAO,WAAW;KAClB,QAAQ,WAAW;KACnB,UAAU,WAAW;KACrB,WAAW,WAAW;KACtB,kBAAkB,WAAW;KAC7B,aAAa,WAAW;KACxB,GAAI,WAAW,oBAAoB,OAC/B,EAAE,sBAAsB,WAAW,kBAAkB,GACrD,EAAE;KACP,CAAe;;GAIpB,eAAe,sBAAsB,UAAmB;AACtD,QAAI,YAAY,KAMd,SAAQ,WAJN,QAAQ,YAAY,OAChB;KAAE,GAAG,QAAQ;KAAU,GAAG;KAAU,GACpC;;GAQV,MAAM,SADc,SAAS,QAAQ,IAAI,eAAe,IAAI,IAClC,SAAS,oBAAoB;AAGvD,OAAI,CAAC,MACH,MAAK,oBACH,UACA;IAAE,MAAM;IAAc;IAAI,EAC1B,aACD;GAGH,IAAI,kBAAkB;AACtB,OAAI;AACF,WAAO,MAAM;KACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,SAAI,MAAM;AACR,UAAI,CAAC,MACH,MAAK,oBACH,UACA;OAAE,MAAM;OAAY;OAAI,EACxB,aACD;AAIH,WAAK,gBAAgB,SAAS;AAC9B,wBAAkB;AAElB,WAAK,sBAAsB;OACzB,MAAM;OACN,MAAM;OACN;OACA,MAAM,YAAY;OAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;OAC3C,CAAC;AACF;;KAGF,MAAM,QAAQ,QAAQ,OAAO,MAAM;AAGnC,SAAI,OAAO;MAET,MAAM,QAAQ,MAAM,MAAM,KAAK;AAC/B,WAAK,MAAM,QAAQ,MACjB,KAAI,KAAK,WAAW,SAAS,IAAI,SAAS,eACxC,KAAI;OACF,MAAME,OAAuB,KAAK,MAAM,KAAK,MAAM,EAAE,CAAC;AACtD,eAAQ,KAAK,MAAb;QACE,KAAK,cAAc;SACjB,MAAMC,WAAuB;UAC3B,MAAM;UACN,MAAM;UACN,kBAAkB,KAAK;UACvB,OAAO;UACR;AACD,yBAAgB,KAAK,MAAM;AAC3B,iBAAQ,MAAM,KAAK,SAAS;AAC5B;;QAGF,KAAK,cAAc;SACjB,MAAM,WAAW,gBAAgB,KAAK;AACtC,kBAAS,QAAQ,KAAK;AACtB,kBAAS,mBACP,KAAK,oBAAoB,SAAS;AACpC;;QAGF,KAAK,YAAY;SACf,MAAM,WAAW,gBAAgB,KAAK;AACtC,kBAAS,QAAQ;AACjB,kBAAS,mBACP,KAAK,oBAAoB,SAAS;AACpC,gBAAO,gBAAgB,KAAK;AAC5B;;QAGF,KAAK,mBAAmB;SACtB,MAAMC,gBAAiC;UACrC,MAAM;UACN,MAAM;UACN,kBAAkB,KAAK;UACvB,OAAO;UACR;AACD,8BAAqB,KAAK,MAAM;AAChC,iBAAQ,MAAM,KAAK,cAAc;AACjC;;QAGF,KAAK,mBAAmB;SACtB,MAAM,gBAAgB,qBAAqB,KAAK;AAChD,uBAAc,QAAQ,KAAK;AAC3B,uBAAc,mBACZ,KAAK,oBAAoB,cAAc;AACzC;;QAGF,KAAK,iBAAiB;SACpB,MAAM,gBAAgB,qBAAqB,KAAK;AAChD,uBAAc,mBACZ,KAAK,oBAAoB,cAAc;AACzC,uBAAc,QAAQ;AACtB,gBAAO,qBAAqB,KAAK;AAEjC;;QAGF,KAAK;AACH,iBAAQ,MAAM,KAAK;UACjB,MAAM;UACN,WAAW,KAAK;UAChB,KAAK,KAAK;UACX,CAAC;AAEF;QAGF,KAAK;AACH,iBAAQ,MAAM,KAAK;UACjB,MAAM;UACN,UAAU,KAAK;UACf,KAAK,KAAK;UACV,OAAO,KAAK;UACZ,kBAAkB,KAAK;UACxB,CAAC;AAEF;QAGF,KAAK;AACH,iBAAQ,MAAM,KAAK;UACjB,MAAM;UACN,UAAU,KAAK;UACf,WAAW,KAAK;UAChB,OAAO,KAAK;UACZ,UAAU,KAAK;UACf,kBAAkB,KAAK;UACxB,CAAC;AAEF;QAGF,KAAK,oBAAoB;SACvB,MAAM,kBACJ,QAAQ,MAAM,OAAOH,eAAa;AAGpC,0BAAiB,KAAK,cAAc;UAClC,MAAM;UACN,UAAU,KAAK;UACf,OAAO,gBAAgB;UACvB,SAAS,KAAK;UACf;AAED,aAAI,KAAK,QACP,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO;UACR,CAAC;aAEF,gBAAe;UACb,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO;UACR,CAAC;AAGJ;;QAGF,KAAK,oBAAoB;SACvB,MAAM,kBAAkB,iBAAiB,KAAK;AAE9C,yBAAgB,QAAQ,KAAK;SAK7B,MAAM,eAH
oB,MAAM,iBAC9B,gBAAgB,KACjB,EAGC;AAEF,aAAI,gBAAgB,QAClB,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,gBAAgB;UAC1B,OAAO;UACP,OAAO;UACR,CAAC;aAEF,gBAAe;UACb,YAAY,KAAK;UACjB,UAAU,gBAAgB;UAC1B,OAAO;UACP,OAAO;UACR,CAAC;AAGJ;;QAGF,KAAK;AACH,aAAI,KAAK,QACP,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO,KAAK;UACZ,kBAAkB,KAAK;UACxB,CAAC;aAEF,gBAAe;UACb,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO,KAAK;UACZ,kBAAkB,KAAK;UACvB,kBAAkB,KAAK;UACxB,CAAC;AAcJ;QAGF,KAAK;AACH,aAAI,KAAK,QACP,uBAAsB;UACpB,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO,KAAK;UACZ,WAAW,KAAK;UAChB,kBAAkB,KAAK;UACxB,CAAC;aAEF,gBAAe;UACb,YAAY,KAAK;UACjB,UAAU,KAAK;UACf,OAAO;UACP,OAAO;UACP,UAAU,KAAK;UACf,WAAW,KAAK;UAChB,kBAAkB,KAAK;UACvB,kBAAkB,KAAK;UACxB,CAAC;AAGJ;QAGF,KAAK;AACH,aAAI,KAAK,SAAS;UAKhB,MAAM,iBAJkB,QAAQ,MAAM,QACnC,SAAS,KAAK,SAAS,eACzB,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,gCAAsB;WACpB,YAAY,KAAK;WACjB,UAAU,eAAe;WACzB,OAAO;WACP,OAAO,eAAe;WACtB,QAAQ,KAAK;WACb,aAAa,KAAK;WACnB,CAAC;gBACG;UAKL,MAAM,iBAJkB,QAAQ,MAAM,OACpCA,eACD,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,yBAAe;WACb,YAAY,KAAK;WACjB,UAAUI,cAAY,eAAe;WACrC,OAAO;WACP,OAAO,eAAe;WACtB,QAAQ,KAAK;WACb,kBAAkB,KAAK;WACvB,aAAa,KAAK;WACnB,CAAC;;AAGJ;QAGF,KAAK;AACH,aAAI,KAAK,SAAS;UAKhB,MAAM,iBAJkB,QAAQ,MAAM,QACnC,SAAS,KAAK,SAAS,eACzB,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,gCAAsB;WACpB,YAAY,KAAK;WACjB,UAAU,eAAe;WACzB,OAAO;WACP,OAAO,eAAe;WACtB,WAAW,KAAK;WACjB,CAAC;gBACG;UAKL,MAAM,iBAJkB,QAAQ,MAAM,OACpCJ,eACD,CAEsC,MACpC,eACC,WAAW,eAAe,KAAK,WAClC;AAED,cAAI,CAAC,eACH,OAAM,IAAI,MAAM,4BAA4B;AAC9C,yBAAe;WACb,YAAY,KAAK;WACjB,UAAUI,cAAY,eAAe;WACrC,OAAO;WACP,OAAO,eAAe;WACtB,UACE,cAAc,iBACV,eAAe,WACf;WACN,WAAW,KAAK;WACjB,CAAC;;AAGJ;QAGF,KAAK;AAEH,iBAAQ,MAAM,KAAK,EAAE,MAAM,cAAc,CAAC;AAC1C;QAGF,KAAK;AAEH,2BAAkB,EAAE;AACpB,gCAAuB,EAAE;AACzB;QAGF,KAAK;AACH,aAAI,KAAK,aAAa,KACpB,SAAQ,KAAK,KAAK;AAGpB,eAAM,sBAAsB,KAAK,gBAAgB;AAEjD;QAGF,KAAK;AACH,eAAM,sBAAsB,KAAK,gBAAgB;AACjD;QAGF,KAAK;AACH,eAAM,sBAAsB,KAAK,gBAAgB;AACjD;QAGF,KAAK;AACH,cAAK,sBAAsB;UACzB,OAAO;UACP,MAAM,KAAK,aAAa,KAAK,UAAU,KAAK;UAC5C,MAAM;UACN;UACA,MAAM,YAAY;UACnB,CAAC;AAEF;;OASJ,IAAIC,cAAuB;AAC3B,WAAI,KAAK,SAAS,YAAY,kBAAkB,MAAM;QACpD,MAAM,EAAE,cAAc,GAAG,SAAS;AAIlC,sBAAc;SACZ,GAAG;SACH,MAAM;SACN,iBAAiB,EAAE,cAAc;SAClC;;OAIH,MAAM,YAAY,KAAK,UAAU,YAAY;AAC7C,YAAK,kBAAkB,UAAU,UAAU;AAG3C,YAAK,sBAAsB;QACzB,MAAM;QACN,MAAM;QACN;QACA,MAAM,YAAY;QAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;QAC3C,CAAC;eACK,QAAQ;gBAQjB,MAAM,SAAS,GAAG;AACpB,cAAQ,MAAM,KAAK;OAAE,MAAM;OAAQ,MAAM;OAAO,CAAC;AACjD,WAAK,oBACH,UACA;OAAE,MAAM;OAAc;OAAI,OAAO;OAAO,EACxC,aACD;;;YAIA,OAAO;AAEd,QAAI,CAAC,iBAAiB;AACpB,UAAK,iBAAiB,SAAS;AAE/B,UAAK,sBAAsB;MACzB,MAAM,iBAAiB,QAAQ,MAAM,UAAU;MAC/C,MAAM;MACN,OAAO;MACP;MACA,MAAM,YAAY;MAClB,GAAI,gBAAgB,EAAE,cAAc,MAAM;MAC3C,CAAC;;AAEJ,UAAM;aACE;AACR,WAAO,aAAa;;AAGtB,OAAI,QAAQ,MAAM,SAAS,EACzB,KAAI,cAAc;IAEhB,IAAI,mBAAmB;AACvB,SAAK,IAAI,IAAI,KAAK,SAAS,SAAS,GAAG,KAAK,GAAG,IAC7C,KAAI,KAAK,SAAS,GAAG,SAAS,aAAa;AACzC,wBAAmB;AACnB;;AAGJ,QAAI,oBAAoB,GAAG;KACzB,MAAM,gBAAgB,KAAK,SAAS;KACpC,MAAMC,gBAA6B;MACjC,GAAG;MACH,OAAO,CAAC,GAAG,cAAc,OAAO,GAAG,QAAQ,MAAM;MAClD;KACD,MAAM,kBAAkB,CAAC,GAAG,KAAK,SAAS;AAC1C,qBAAgB,oBAAoB;AACpC,WAAM,KAAK,gBAAgB,iBAAiB,oBAAoB;UAGhE,OAAM,KAAK,gBACT,CAAC,GAAG,KAAK,UAAU,QAAQ,EAC3B,oBACD;SAGH,OAAM,KAAK,gBACT,CAAC,GAAG,KAAK,UAAU,QAAQ,EAC3B,oBACD;AAKL,QAAK,oBAAoB;AACzB,OAAI,KAAK,0BAA0B;AACjC,SAAK,0BAA0B;AAC/B,SAAK,2BAA2B;AAChC,SAAK,2BAA2B;;IAElC;;;;;;;CAQJ,AAAU,iBAAiB,UAAkB;AAE3C,OAAK,mBAAmB;AAExB,OAAK,
GAAG;;6CAEiC,KAAK,KAAK,CAAC;mBACrC,SAAS;;AAExB,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AACxB,OAAK,oBAAoB;;;;;;;;CAS3B,AAAQ,gBAAgB,IAAqC;AAE3D,MAAI,OAAO,OAAO,SAChB;AAGF,MAAI,CAAC,KAAK,6BAA6B,IAAI,GAAG,CAC5C,MAAK,6BAA6B,IAAI,IAAI,IAAI,iBAAiB,CAAC;AAGlE,SAAO,KAAK,6BAA6B,IAAI,GAAG,EAAE;;;;;CAMpD,AAAQ,uBAAuB,IAAY;AACzC,OAAK,6BAA6B,OAAO,GAAG;;;;;CAM9C,AAAQ,mBAAmB,IAAY;AACrC,MAAI,KAAK,6BAA6B,IAAI,GAAG,CAE3C,CADwB,KAAK,6BAA6B,IAAI,GAAG,EAChD,OAAO;;;;;CAO5B,AAAQ,2BAA2B;AACjC,OAAK,MAAM,cAAc,KAAK,6BAA6B,QAAQ,CACjE,aAAY,OAAO;AAErB,OAAK,6BAA6B,OAAO;;;;;CAM3C,MAAM,UAAU;AACd,OAAK,0BAA0B;AAG/B,OAAK,mBAAmB;AAGxB,OAAK,GAAG;AACR,OAAK,GAAG;AAGR,OAAK,kBAAkB;AACvB,OAAK,mBAAmB;AAExB,QAAM,MAAM,SAAS"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@cloudflare/ai-chat",
-  "version": "0.0.
+  "version": "0.0.4",
   "description": "Cloudflare Agents (x) AI SDK Chat",
   "keywords": [
     "cloudflare",
@@ -20,13 +20,13 @@
   },
   "devDependencies": {
     "@types/react": "^19.2.7",
-    "ai": "^6.0.
+    "ai": "^6.0.15",
     "react": "^19.2.3",
     "vitest-browser-react": "^1.0.1",
-    "zod": "^4.
+    "zod": "^4.3.5"
   },
   "peerDependencies": {
-    "agents": "^0.3.
+    "agents": "^0.3.4",
     "ai": "^6.0.0",
     "react": "^19.0.0",
     "zod": "^3.25.0 || ^4.0.0"
@@ -63,8 +63,5 @@
     "test": "npm run test:workers && npm run test:react",
     "test:react": "vitest -r src/react-tests",
     "test:workers": "vitest -r src/tests"
-  },
-  "dependencies": {
-    "ai": "^6.0.0"
   }
 }
package/src/index.ts
CHANGED
@@ -759,6 +759,33 @@ export class AIChatAgent<
     this.broadcast(JSON.stringify(message), exclude);
   }
 
+  /**
+   * Broadcasts a text event for non-SSE responses.
+   * This ensures plain text responses follow the AI SDK v5 stream protocol.
+   *
+   * @param streamId - The stream identifier for chunk storage
+   * @param event - The text event payload (text-start, text-delta with delta, or text-end)
+   * @param continuation - Whether this is a continuation of a previous stream
+   */
+  private _broadcastTextEvent(
+    streamId: string,
+    event:
+      | { type: "text-start"; id: string }
+      | { type: "text-delta"; id: string; delta: string }
+      | { type: "text-end"; id: string },
+    continuation: boolean
+  ) {
+    const body = JSON.stringify(event);
+    this._storeStreamChunk(streamId, body);
+    this._broadcastChatMessage({
+      body,
+      done: false,
+      id: event.id,
+      type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+      ...(continuation && { continuation: true })
+    });
+  }
+
   private _loadMessagesFromDb(): ChatMessage[] {
     const rows =
       this.sql`select * from cf_ai_chat_agent_messages order by created_at` ||
@@ -1458,11 +1485,32 @@ export class AIChatAgent<
         }
       }
 
+      // Determine response format based on content-type
+      const contentType = response.headers.get("content-type") || "";
+      const isSSE = contentType.includes("text/event-stream"); // AI SDK v5 SSE format
+
+      // if not AI SDK SSE format, we need to inject text-start and text-end events ourselves
+      if (!isSSE) {
+        this._broadcastTextEvent(
+          streamId,
+          { type: "text-start", id },
+          continuation
+        );
+      }
+
       let streamCompleted = false;
       try {
         while (true) {
           const { done, value } = await reader.read();
           if (done) {
+            if (!isSSE) {
+              this._broadcastTextEvent(
+                streamId,
+                { type: "text-end", id },
+                continuation
+              );
+            }
+
             // Mark the stream as completed
             this._completeStream(streamId);
             streamCompleted = true;
@@ -1479,10 +1527,6 @@ export class AIChatAgent<
 
           const chunk = decoder.decode(value);
 
-          // Determine response format based on content-type
-          const contentType = response.headers.get("content-type") || "";
-          const isSSE = contentType.includes("text/event-stream");
-
           // After streaming is complete, persist the complete assistant's response
           if (isSSE) {
             // Parse AI SDK v5 SSE format and extract text deltas
@@ -1892,20 +1936,11 @@ export class AIChatAgent<
             // Treat the entire chunk as a text delta to preserve exact formatting
             if (chunk.length > 0) {
               message.parts.push({ type: "text", text: chunk });
-
-
-                type: "text-delta",
-
-
-              // Store chunk for replay on reconnection
-              this._storeStreamChunk(streamId, chunkBody);
-              this._broadcastChatMessage({
-                body: chunkBody,
-                done: false,
-                id,
-                type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
-                ...(continuation && { continuation: true })
-              });
+              this._broadcastTextEvent(
+                streamId,
+                { type: "text-delta", id, delta: chunk },
+                continuation
+              );
             }
           }
         }
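For orientation, the sketch below is not part of the published diff: it shows the kind of handler this change is aimed at, an AIChatAgent subclass whose onChatMessage returns a plain, non-streaming Response. Because such a response carries no text/event-stream content type, the updated _reply path above emits the text-start / text-delta / text-end events itself. The class name PlainTextChatAgent and the zero-argument override are illustrative assumptions, not code from the package.

```ts
import { AIChatAgent } from "@cloudflare/ai-chat";

// Illustrative sketch: a chat agent that answers with a plain text Response
// instead of an AI SDK SSE stream. As of 0.0.4, _reply detects the missing
// "text/event-stream" content type and wraps the body in text-start /
// text-delta / text-end events before broadcasting it to connected clients.
export class PlainTextChatAgent extends AIChatAgent {
  async onChatMessage() {
    return new Response("Hello from chat agent!");
  }
}
```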
package/src/tests/chat-context.test.ts
CHANGED
@@ -1,25 +1,8 @@
-import {
+import { env } from "cloudflare:test";
 import { describe, it, expect } from "vitest";
-import worker, { type Env } from "./worker";
 import { MessageType } from "../types";
 import type { UIMessage as ChatMessage } from "ai";
-
-declare module "cloudflare:test" {
-  interface ProvidedEnv extends Env {}
-}
-
-async function connectChatWS(path: string) {
-  const ctx = createExecutionContext();
-  const req = new Request(`http://example.com${path}`, {
-    headers: { Upgrade: "websocket" }
-  });
-  const res = await worker.fetch(req, env, ctx);
-  expect(res.status).toBe(101);
-  const ws = res.webSocket as WebSocket;
-  expect(ws).toBeDefined();
-  ws.accept();
-  return { ws, ctx };
-}
+import { connectChatWS } from "./test-utils";
 
 describe("AIChatAgent Connection Context - Issue #711", () => {
   it("getCurrentAgent() should return connection in onChatMessage and nested async functions (tool execute)", async () => {
package/src/tests/chat-persistence.test.ts
CHANGED
@@ -1,33 +1,15 @@
 import { createExecutionContext, env } from "cloudflare:test";
 import { describe, it, expect } from "vitest";
-import worker
+import worker from "./worker";
 import { MessageType } from "../types";
 import type { UIMessage as ChatMessage } from "ai";
+import { connectChatWS } from "./test-utils";
 
-
-
-
-
-
-  output?: unknown;
-}
-
-declare module "cloudflare:test" {
-  interface ProvidedEnv extends Env {}
-}
-
-async function connectChatWS(path: string) {
-  const ctx = createExecutionContext();
-  const req = new Request(`http://example.com${path}`, {
-    headers: { Upgrade: "websocket" }
-  });
-  const res = await worker.fetch(req, env, ctx);
-  expect(res.status).toBe(101);
-  const ws = res.webSocket as WebSocket;
-  expect(ws).toBeDefined();
-  ws.accept();
-  return { ws, ctx };
-}
+// Type helper for tool call parts - extracts ToolUIPart from ChatMessage parts
+type TestToolCallPart = Extract<
+  ChatMessage["parts"][number],
+  { type: `tool-${string}` }
+>;
 
 describe("Chat Agent Persistence", () => {
   it("persists new messages incrementally without deleting existing ones", async () => {
@@ -282,7 +264,7 @@ describe("Chat Agent Persistence", () => {
       parts: [{ type: "text", text: "What time is it in London?" }]
     };
 
-    const toolCallPart:
+    const toolCallPart: TestToolCallPart = {
       type: "tool-getLocalTime",
       toolCallId: "call_456",
       state: "input-available",
@@ -305,7 +287,7 @@ describe("Chat Agent Persistence", () => {
       messagesAfterToolCall.find((m) => m.id === "assistant-1")
     ).toBeDefined();
 
-    const toolResultPart:
+    const toolResultPart: TestToolCallPart = {
      type: "tool-getLocalTime",
       toolCallId: "call_456",
       state: "output-available",
@@ -388,7 +370,7 @@ describe("Chat Agent Persistence", () => {
       parts: [{ type: "text", text: "What time is it?" }]
     };
 
-    const toolCallPart:
+    const toolCallPart: TestToolCallPart = {
       type: "tool-getLocalTime",
       toolCallId: "call_123",
       state: "input-available",
@@ -413,7 +395,7 @@ describe("Chat Agent Persistence", () => {
       assistantResponse
     ]);
 
-    const toolResultPart:
+    const toolResultPart: TestToolCallPart = {
       type: "tool-getLocalTime",
       toolCallId: "call_123",
       state: "output-available",
package/src/tests/client-tool-duplicate-message.test.ts
CHANGED
@@ -1,12 +1,8 @@
 import { createExecutionContext, env } from "cloudflare:test";
 import { describe, it, expect } from "vitest";
-import worker
+import worker from "./worker";
 import type { UIMessage as ChatMessage } from "ai";
 
-declare module "cloudflare:test" {
-  interface ProvidedEnv extends Env {}
-}
-
 describe("Client-side tool duplicate message prevention", () => {
   it("merges tool output into existing message by toolCallId", async () => {
     const room = crypto.randomUUID();
package/src/tests/client-tools-broadcast.test.ts
CHANGED
@@ -1,25 +1,7 @@
-import { createExecutionContext, env } from "cloudflare:test";
 import { describe, it, expect } from "vitest";
-import worker, { type Env } from "./worker";
 import { MessageType } from "../types";
 import type { UIMessage as ChatMessage } from "ai";
-
-declare module "cloudflare:test" {
-  interface ProvidedEnv extends Env {}
-}
-
-async function connectChatWS(path: string) {
-  const ctx = createExecutionContext();
-  const req = new Request(`http://example.com${path}`, {
-    headers: { Upgrade: "websocket" }
-  });
-  const res = await worker.fetch(req, env, ctx);
-  expect(res.status).toBe(101);
-  const ws = res.webSocket as WebSocket;
-  expect(ws).toBeDefined();
-  ws.accept();
-  return { ws, ctx };
-}
+import { connectChatWS } from "./test-utils";
 
 describe("Client Tools Broadcast", () => {
   it("should not broadcast CF_AGENT_CHAT_MESSAGES back to the originating connection after chat request", async () => {
package/src/tests/non-sse-response.test.ts
ADDED
@@ -0,0 +1,186 @@
+import { describe, it, expect } from "vitest";
+import { MessageType } from "../types";
+import type { UIMessage as ChatMessage } from "ai";
+import { connectChatWS, isUseChatResponseMessage } from "./test-utils";
+
+describe("Non-SSE Response Handling - PR #761", () => {
+  it("should send text-start, text-delta, and text-end events for plain text responses", async () => {
+    const room = crypto.randomUUID();
+    const { ws } = await connectChatWS(`/agents/test-chat-agent/${room}`);
+
+    const messages: unknown[] = [];
+    let resolvePromise: (value: boolean) => void;
+    const donePromise = new Promise<boolean>((res) => {
+      resolvePromise = res;
+    });
+
+    const timeout = setTimeout(() => resolvePromise(false), 2000);
+
+    ws.addEventListener("message", (e: MessageEvent) => {
+      try {
+        const data = JSON.parse(e.data as string);
+        messages.push(data);
+        if (
+          data.type === MessageType.CF_AGENT_USE_CHAT_RESPONSE &&
+          data.done === true
+        ) {
+          clearTimeout(timeout);
+          resolvePromise(true);
+        }
+      } catch {
+        messages.push(e.data);
+      }
+    });
+
+    // Wait for initial connection messages
+    await new Promise((r) => setTimeout(r, 50));
+
+    const userMessage: ChatMessage = {
+      id: "msg1",
+      role: "user",
+      parts: [{ type: "text", text: "Hello" }]
+    };
+
+    // Send a chat message - TestChatAgent returns plain text response
+    ws.send(
+      JSON.stringify({
+        type: MessageType.CF_AGENT_USE_CHAT_REQUEST,
+        id: "req1",
+        init: {
+          method: "POST",
+          body: JSON.stringify({ messages: [userMessage] })
+        }
+      })
+    );
+
+    const done = await donePromise;
+    expect(done).toBe(true);
+
+    // Filter to only chat response messages
+    const chatResponses = messages.filter(isUseChatResponseMessage);
+
+    // Should have at least 4 messages:
+    // 1. text-start
+    // 2. text-delta (with the actual content)
+    // 3. text-end
+    // 4. done: true (final completion signal)
+    expect(chatResponses.length).toBeGreaterThanOrEqual(4);
+
+    // Parse the bodies to check event types
+    const eventTypes = chatResponses
+      .filter((m) => m.body && m.body.length > 0)
+      .map((m) => {
+        try {
+          return JSON.parse(m.body);
+        } catch {
+          return null;
+        }
+      })
+      .filter(Boolean);
+
+    // Check for text-start event
+    const textStartEvent = eventTypes.find((e) => e.type === "text-start");
+    expect(textStartEvent).toBeDefined();
+    expect(textStartEvent.id).toBe("req1");
+
+    // Check for text-delta event with content
+    const textDeltaEvent = eventTypes.find((e) => e.type === "text-delta");
+    expect(textDeltaEvent).toBeDefined();
+    expect(textDeltaEvent.id).toBe("req1");
+    expect(textDeltaEvent.delta).toBe("Hello from chat agent!");
+
+    // Check for text-end event
+    const textEndEvent = eventTypes.find((e) => e.type === "text-end");
+    expect(textEndEvent).toBeDefined();
+    expect(textEndEvent.id).toBe("req1");
+
+    // Verify order: text-start comes before text-delta, text-delta comes before text-end
+    const startIndex = eventTypes.findIndex((e) => e.type === "text-start");
+    const deltaIndex = eventTypes.findIndex((e) => e.type === "text-delta");
+    const endIndex = eventTypes.findIndex((e) => e.type === "text-end");
+
+    expect(startIndex).toBeLessThan(deltaIndex);
+    expect(deltaIndex).toBeLessThan(endIndex);
+
+    // Check final done message
+    const doneMessage = chatResponses.find((m) => m.done === true);
+    expect(doneMessage).toBeDefined();
+
+    ws.close();
+  });
+
+  it("should use consistent id across text-start, text-delta, and text-end events", async () => {
+    const room = crypto.randomUUID();
+    const { ws } = await connectChatWS(`/agents/test-chat-agent/${room}`);
+
+    const messages: unknown[] = [];
+    let resolvePromise: (value: boolean) => void;
+    const donePromise = new Promise<boolean>((res) => {
+      resolvePromise = res;
+    });
+
+    const timeout = setTimeout(() => resolvePromise(false), 2000);
+
+    ws.addEventListener("message", (e: MessageEvent) => {
+      try {
+        const data = JSON.parse(e.data as string);
+        messages.push(data);
+        if (
+          data.type === MessageType.CF_AGENT_USE_CHAT_RESPONSE &&
+          data.done === true
+        ) {
+          clearTimeout(timeout);
+          resolvePromise(true);
+        }
+      } catch {
+        messages.push(e.data);
+      }
+    });
+
+    await new Promise((r) => setTimeout(r, 50));
+
+    const requestId = "test-request-id-123";
+    const userMessage: ChatMessage = {
+      id: "msg1",
+      role: "user",
+      parts: [{ type: "text", text: "Test" }]
+    };
+
+    ws.send(
+      JSON.stringify({
+        type: MessageType.CF_AGENT_USE_CHAT_REQUEST,
+        id: requestId,
+        init: {
+          method: "POST",
+          body: JSON.stringify({ messages: [userMessage] })
+        }
+      })
+    );
+
+    await donePromise;
+
+    const chatResponses = messages.filter(isUseChatResponseMessage);
+    const eventTypes = chatResponses
+      .filter((m) => m.body && m.body.length > 0)
+      .map((m) => {
+        try {
+          return JSON.parse(m.body);
+        } catch {
+          return null;
+        }
+      })
+      .filter(Boolean);
+
+    // All events should have the same request id
+    const textEvents = eventTypes.filter((e) =>
+      ["text-start", "text-delta", "text-end"].includes(e.type)
+    );
+
+    expect(textEvents.length).toBeGreaterThanOrEqual(3);
+    for (const event of textEvents) {
+      expect(event.id).toBe(requestId);
+    }
+
+    ws.close();
+  });
+});
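To make the assertions in these tests concrete, the sketch below approximates the frame sequence a client receives for that plain-text reply, as produced by _broadcastTextEvent plus the final completion signal in _reply. It is an illustration only; field order and any additional fields on the real messages may differ.

```ts
import { MessageType } from "../types"; // path as used in the tests

// Approximate CF_AGENT_USE_CHAT_RESPONSE frames for a plain-text reply to
// request "req1" (sketch, not an exhaustive protocol description):
const expectedFrames = [
  { type: MessageType.CF_AGENT_USE_CHAT_RESPONSE, id: "req1", done: false,
    body: JSON.stringify({ type: "text-start", id: "req1" }) },
  { type: MessageType.CF_AGENT_USE_CHAT_RESPONSE, id: "req1", done: false,
    body: JSON.stringify({ type: "text-delta", id: "req1", delta: "Hello from chat agent!" }) },
  { type: MessageType.CF_AGENT_USE_CHAT_RESPONSE, id: "req1", done: false,
    body: JSON.stringify({ type: "text-end", id: "req1" }) },
  { type: MessageType.CF_AGENT_USE_CHAT_RESPONSE, id: "req1", done: true, body: "" }
];
```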
package/src/tests/resumable-streaming.test.ts
CHANGED
@@ -1,7 +1,7 @@
-import {
+import { env } from "cloudflare:test";
 import { describe, it, expect } from "vitest";
-import worker, { type Env } from "./worker";
 import { MessageType, type OutgoingMessage } from "../types";
+import { connectChatWS, isUseChatResponseMessage } from "./test-utils";
 
 function isStreamResumingMessage(
   m: unknown
@@ -17,37 +17,6 @@ function isStreamResumingMessage(
   );
 }
 
-function isUseChatResponseMessage(
-  m: unknown
-): m is Extract<
-  OutgoingMessage,
-  { type: MessageType.CF_AGENT_USE_CHAT_RESPONSE }
-> {
-  return (
-    typeof m === "object" &&
-    m !== null &&
-    "type" in m &&
-    m.type === MessageType.CF_AGENT_USE_CHAT_RESPONSE
-  );
-}
-
-declare module "cloudflare:test" {
-  interface ProvidedEnv extends Env {}
-}
-
-async function connectChatWS(path: string) {
-  const ctx = createExecutionContext();
-  const req = new Request(`http://example.com${path}`, {
-    headers: { Upgrade: "websocket" }
-  });
-  const res = await worker.fetch(req, env, ctx);
-  expect(res.status).toBe(101);
-  const ws = res.webSocket as WebSocket;
-  expect(ws).toBeDefined();
-  ws.accept();
-  return { ws, ctx };
-}
-
 function collectMessages(ws: WebSocket): unknown[] {
   const messages: unknown[] = [];
   ws.addEventListener("message", (e: MessageEvent) => {
package/src/tests/test-utils.ts
ADDED

@@ -0,0 +1,39 @@
+import { createExecutionContext, env } from "cloudflare:test";
+import { expect } from "vitest";
+import { MessageType, type OutgoingMessage } from "../types";
+import worker from "./worker";
+
+/**
+ * Connects to the chat agent and returns the WebSocket and execution context
+ */
+export async function connectChatWS(
+  path: string
+): Promise<{ ws: WebSocket; ctx: ExecutionContext }> {
+  const ctx = createExecutionContext();
+  const req = new Request(`http://example.com${path}`, {
+    headers: { Upgrade: "websocket" }
+  });
+  const res = await worker.fetch(req, env, ctx);
+  expect(res.status).toBe(101);
+  const ws = res.webSocket as WebSocket;
+  expect(ws).toBeDefined();
+  ws.accept();
+  return { ws, ctx };
+}
+
+/**
+ * Type guard for CF_AGENT_USE_CHAT_RESPONSE messages
+ */
+export function isUseChatResponseMessage(
+  m: unknown
+): m is Extract<
+  OutgoingMessage,
+  { type: MessageType.CF_AGENT_USE_CHAT_RESPONSE }
+> {
+  return (
+    typeof m === "object" &&
+    m !== null &&
+    "type" in m &&
+    m.type === MessageType.CF_AGENT_USE_CHAT_RESPONSE
+  );
+}
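For orientation, a minimal usage sketch of these two helpers, mirroring the pattern in the tests above; the room name, request id, and message body are illustrative values, not fixtures shipped with the package:

    // Hypothetical test snippet assembled only from code visible in this diff.
    import { MessageType } from "../types";
    import { connectChatWS, isUseChatResponseMessage } from "./test-utils";

    const { ws } = await connectChatWS(`/agents/test-chat-agent/${crypto.randomUUID()}`);
    const received: unknown[] = [];
    ws.addEventListener("message", (e: MessageEvent) =>
      received.push(JSON.parse(e.data as string))
    );

    ws.send(
      JSON.stringify({
        type: MessageType.CF_AGENT_USE_CHAT_REQUEST,
        id: "req1",
        init: {
          method: "POST",
          body: JSON.stringify({
            messages: [
              { id: "msg1", role: "user", parts: [{ type: "text", text: "Hi" }] }
            ]
          })
        }
      })
    );

    // Later, keep only the chat-response frames for assertions.
    const chatResponses = received.filter(isUseChatResponseMessage);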
package/src/tests/worker.ts
CHANGED
@@ -2,13 +2,11 @@ import { AIChatAgent } from "../";
 import type { UIMessage as ChatMessage } from "ai";
 import { callable, getCurrentAgent, routeAgentRequest } from "agents";
 
-
-
-
-
-
-  output?: unknown;
-}
+// Type helper for tool call parts - extracts from ChatMessage parts
+type TestToolCallPart = Extract<
+  ChatMessage["parts"][number],
+  { type: `tool-${string}` }
+>;
 
 export type Env = {
   TestChatAgent: DurableObjectNamespace<TestChatAgent>;
@@ -99,7 +97,7 @@ export class TestChatAgent extends AIChatAgent<Env> {
 
   @callable()
   async testPersistToolCall(messageId: string, toolName: string) {
-    const toolCallPart:
+    const toolCallPart: TestToolCallPart = {
       type: `tool-${toolName}`,
       toolCallId: `call_${messageId}`,
       state: "input-available",
@@ -121,7 +119,7 @@ export class TestChatAgent extends AIChatAgent<Env> {
     toolName: string,
     output: string
   ) {
-    const toolResultPart:
+    const toolResultPart: TestToolCallPart = {
       type: `tool-${toolName}`,
      `call_${messageId}`,
       state: "output-available",
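The new TestToolCallPart alias derives the tool-part shape from the AI SDK's UIMessage parts union instead of declaring a standalone interface, so the test fixtures stay in sync with the SDK's own types. A small sketch of how such a part might be constructed, assuming the AI SDK v5 part union; the tool name and input value are made up for illustration:

    // Hypothetical example; only field names visible in the diff above are relied on.
    import type { UIMessage as ChatMessage } from "ai";

    type TestToolCallPart = Extract<
      ChatMessage["parts"][number],
      { type: `tool-${string}` }
    >;

    const part: TestToolCallPart = {
      type: "tool-getWeather", // any `tool-${string}` identifier
      toolCallId: "call_msg1",
      state: "input-available",
      input: { city: "London" } // assumed field; the hunk above ends at `state`
    };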