agents 0.0.0-7972da4 → 0.0.0-79843bd

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (83)
  1. package/README.md +378 -27
  2. package/dist/ai-chat-agent.d.ts +236 -24
  3. package/dist/ai-chat-agent.js +1118 -222
  4. package/dist/ai-chat-agent.js.map +1 -1
  5. package/dist/ai-chat-v5-migration-DguhuLKF.js +155 -0
  6. package/dist/ai-chat-v5-migration-DguhuLKF.js.map +1 -0
  7. package/dist/ai-chat-v5-migration.d.ts +155 -0
  8. package/dist/ai-chat-v5-migration.js +3 -0
  9. package/dist/ai-react.d.ts +202 -85
  10. package/dist/ai-react.js +574 -199
  11. package/dist/ai-react.js.map +1 -1
  12. package/dist/ai-types-DEtF_8Km.js +28 -0
  13. package/dist/ai-types-DEtF_8Km.js.map +1 -0
  14. package/dist/ai-types-U8lYA0o8.d.ts +127 -0
  15. package/dist/ai-types.d.ts +6 -74
  16. package/dist/ai-types.js +3 -1
  17. package/dist/cli/index.d.ts +1 -0
  18. package/dist/cli/index.js +28 -0
  19. package/dist/cli/index.js.map +1 -0
  20. package/dist/client-BZVYeBmf.d.ts +834 -0
  21. package/dist/client-ClORm6f0.d.ts +104 -0
  22. package/dist/client-DjTPRM8-.js +117 -0
  23. package/dist/client-DjTPRM8-.js.map +1 -0
  24. package/dist/client-QZa2Rq0l.js +1105 -0
  25. package/dist/client-QZa2Rq0l.js.map +1 -0
  26. package/dist/client.d.ts +12 -93
  27. package/dist/client.js +4 -11
  28. package/dist/codemode/ai.d.ts +27 -0
  29. package/dist/codemode/ai.js +152 -0
  30. package/dist/codemode/ai.js.map +1 -0
  31. package/dist/context-BkKbAa1R.js +8 -0
  32. package/dist/context-BkKbAa1R.js.map +1 -0
  33. package/dist/context-_sPQqJWv.d.ts +24 -0
  34. package/dist/context.d.ts +6 -0
  35. package/dist/context.js +3 -0
  36. package/dist/do-oauth-client-provider-B-ryFIPr.d.ts +70 -0
  37. package/dist/do-oauth-client-provider-B1fVIshX.js +155 -0
  38. package/dist/do-oauth-client-provider-B1fVIshX.js.map +1 -0
  39. package/dist/index-B6XHf8p0.d.ts +577 -0
  40. package/dist/index-CyDpAVHZ.d.ts +58 -0
  41. package/dist/index.d.ts +61 -402
  42. package/dist/index.js +8 -22
  43. package/dist/mcp/client.d.ts +4 -783
  44. package/dist/mcp/client.js +4 -9
  45. package/dist/mcp/do-oauth-client-provider.d.ts +2 -41
  46. package/dist/mcp/do-oauth-client-provider.js +3 -7
  47. package/dist/mcp/index.d.ts +200 -81
  48. package/dist/mcp/index.js +1430 -770
  49. package/dist/mcp/index.js.map +1 -1
  50. package/dist/mcp/x402.d.ts +34 -0
  51. package/dist/mcp/x402.js +198 -0
  52. package/dist/mcp/x402.js.map +1 -0
  53. package/dist/mcp-CzbSsLfc.d.ts +61 -0
  54. package/dist/observability/index.d.ts +3 -0
  55. package/dist/observability/index.js +8 -0
  56. package/dist/react-DYwejKBr.d.ts +131 -0
  57. package/dist/react.d.ts +15 -119
  58. package/dist/react.js +183 -110
  59. package/dist/react.js.map +1 -1
  60. package/dist/schedule.d.ts +89 -12
  61. package/dist/schedule.js +46 -21
  62. package/dist/schedule.js.map +1 -1
  63. package/dist/serializable-C4GLimgv.d.ts +39 -0
  64. package/dist/serializable.d.ts +7 -32
  65. package/dist/serializable.js +1 -1
  66. package/dist/src-BZDh910Z.js +1181 -0
  67. package/dist/src-BZDh910Z.js.map +1 -0
  68. package/package.json +125 -71
  69. package/dist/ai-types.js.map +0 -1
  70. package/dist/chunk-BZXOAZUX.js +0 -106
  71. package/dist/chunk-BZXOAZUX.js.map +0 -1
  72. package/dist/chunk-CFTLYEEK.js +0 -783
  73. package/dist/chunk-CFTLYEEK.js.map +0 -1
  74. package/dist/chunk-IFXSRTKF.js +0 -465
  75. package/dist/chunk-IFXSRTKF.js.map +0 -1
  76. package/dist/chunk-VCSB47AK.js +0 -116
  77. package/dist/chunk-VCSB47AK.js.map +0 -1
  78. package/dist/client.js.map +0 -1
  79. package/dist/index.js.map +0 -1
  80. package/dist/mcp/client.js.map +0 -1
  81. package/dist/mcp/do-oauth-client-provider.js.map +0 -1
  82. package/dist/serializable.js.map +0 -1
  83. package/src/index.ts +0 -1250
package/dist/ai-chat-agent.js
@@ -1,230 +1,1126 @@
- import {
- Agent
- } from "./chunk-CFTLYEEK.js";
- import "./chunk-IFXSRTKF.js";
- import "./chunk-BZXOAZUX.js";
- import "./chunk-VCSB47AK.js";
+ import { t as agentContext } from "./context-BkKbAa1R.js";
+ import { t as MessageType } from "./ai-types-DEtF_8Km.js";
+ import "./client-DjTPRM8-.js";
+ import "./client-QZa2Rq0l.js";
+ import "./do-oauth-client-provider-B1fVIshX.js";
+ import { t as Agent } from "./src-BZDh910Z.js";
+ import { r as autoTransformMessages } from "./ai-chat-v5-migration-DguhuLKF.js";
+ import { jsonSchema, tool } from "ai";
+ import { nanoid } from "nanoid";

- // src/ai-chat-agent.ts
- import { appendResponseMessages } from "ai";
- var decoder = new TextDecoder();
+ //#region src/ai-chat-agent.ts
+ /**
+ * Converts client tool schemas to AI SDK tool format.
+ *
+ * These tools have no `execute` function - when the AI model calls them,
+ * the tool call is sent back to the client for execution.
+ *
+ * @param clientTools - Array of tool schemas from the client
+ * @returns Record of AI SDK tools that can be spread into your tools object
+ */
+ function createToolsFromClientSchemas(clientTools) {
+ if (!clientTools || clientTools.length === 0) return {};
+ const seenNames = /* @__PURE__ */ new Set();
+ for (const t of clientTools) {
+ if (seenNames.has(t.name)) console.warn(`[createToolsFromClientSchemas] Duplicate tool name "${t.name}" found. Later definitions will override earlier ones.`);
+ seenNames.add(t.name);
+ }
+ return Object.fromEntries(clientTools.map((t) => [t.name, tool({
+ description: t.description ?? "",
+ inputSchema: jsonSchema(t.parameters ?? { type: "object" })
+ })]));
+ }
+ /** Number of chunks to buffer before flushing to SQLite */
+ const CHUNK_BUFFER_SIZE = 10;
+ /** Maximum buffer size to prevent memory issues on rapid reconnections */
+ const CHUNK_BUFFER_MAX_SIZE = 100;
+ /** Maximum age for a "streaming" stream before considering it stale (ms) - 5 minutes */
+ const STREAM_STALE_THRESHOLD_MS = 300 * 1e3;
+ /** Default cleanup interval for old streams (ms) - every 10 minutes */
+ const CLEANUP_INTERVAL_MS = 600 * 1e3;
+ /** Default age threshold for cleaning up completed streams (ms) - 24 hours */
+ const CLEANUP_AGE_THRESHOLD_MS = 1440 * 60 * 1e3;
+ const decoder = new TextDecoder();
+ /**
+ * Extension of Agent with built-in chat capabilities
+ * @template Env Environment type containing bindings
+ */
  var AIChatAgent = class extends Agent {
- constructor(ctx, env) {
- super(ctx, env);
- this.sql`create table if not exists cf_ai_chat_agent_messages (
+ constructor(ctx, env) {
+ super(ctx, env);
+ this._activeStreamId = null;
+ this._activeRequestId = null;
+ this._streamingMessage = null;
+ this._streamCompletionPromise = null;
+ this._streamCompletionResolve = null;
+ this._streamChunkIndex = 0;
+ this._chunkBuffer = [];
+ this._isFlushingChunks = false;
+ this._lastCleanupTime = 0;
+ this.sql`create table if not exists cf_ai_chat_agent_messages (
  id text primary key,
  message text not null,
  created_at datetime default current_timestamp
  )`;
- this.messages = (this.sql`select * from cf_ai_chat_agent_messages` || []).map((row) => {
- return JSON.parse(row.message);
- });
- this._chatMessageAbortControllers = /* @__PURE__ */ new Map();
- }
- _broadcastChatMessage(message, exclude) {
- this.broadcast(JSON.stringify(message), exclude);
- }
- async onMessage(connection, message) {
- if (typeof message === "string") {
- let data;
- try {
- data = JSON.parse(message);
- } catch (error) {
- return;
- }
- if (data.type === "cf_agent_use_chat_request" && data.init.method === "POST") {
- const {
- method,
- keepalive,
- headers,
- body,
- // we're reading this
- redirect,
- integrity,
- credentials,
- mode,
- referrer,
- referrerPolicy,
- window
- // dispatcher,
- // duplex
- } = data.init;
- const { messages } = JSON.parse(body);
- this._broadcastChatMessage(
- {
- type: "cf_agent_chat_messages",
- messages
- },
- [connection.id]
- );
- await this.persistMessages(messages, [connection.id]);
- const chatMessageId = data.id;
- const abortSignal = this._getAbortSignal(chatMessageId);
- return this._tryCatchChat(async () => {
- const response = await this.onChatMessage(
- async ({ response: response2 }) => {
- const finalMessages = appendResponseMessages({
- messages,
- responseMessages: response2.messages
- });
- await this.persistMessages(finalMessages, [connection.id]);
- this._removeAbortController(chatMessageId);
- },
- abortSignal ? { abortSignal } : void 0
- );
- if (response) {
- await this._reply(data.id, response);
- }
- });
- }
- if (data.type === "cf_agent_chat_clear") {
- this._destroyAbortControllers();
- this.sql`delete from cf_ai_chat_agent_messages`;
- this.messages = [];
- this._broadcastChatMessage(
- {
- type: "cf_agent_chat_clear"
- },
- [connection.id]
- );
- } else if (data.type === "cf_agent_chat_messages") {
- await this.persistMessages(data.messages, [connection.id]);
- } else if (data.type === "cf_agent_chat_request_cancel") {
- this._cancelChatRequest(data.id);
- }
- }
- }
- async onRequest(request) {
- return this._tryCatchChat(() => {
- const url = new URL(request.url);
- if (url.pathname.endsWith("/get-messages")) {
- const messages = (this.sql`select * from cf_ai_chat_agent_messages` || []).map((row) => {
- return JSON.parse(row.message);
- });
- return Response.json(messages);
- }
- return super.onRequest(request);
- });
- }
- async _tryCatchChat(fn) {
- try {
- return await fn();
- } catch (e) {
- throw this.onError(e);
- }
- }
- /**
- * Handle incoming chat messages and generate a response
- * @param onFinish Callback to be called when the response is finished
- * @param options.signal A signal to pass to any child requests which can be used to cancel them
- * @returns Response to send to the client or undefined
- */
- async onChatMessage(onFinish, options) {
- throw new Error(
- "recieved a chat message, override onChatMessage and return a Response to send to the client"
- );
- }
- /**
- * Save messages on the server side and trigger AI response
- * @param messages Chat messages to save
- */
- async saveMessages(messages) {
- await this.persistMessages(messages);
- const response = await this.onChatMessage(async ({ response: response2 }) => {
- const finalMessages = appendResponseMessages({
- messages,
- responseMessages: response2.messages
- });
- await this.persistMessages(finalMessages, []);
- });
- if (response) {
- for await (const chunk of response.body) {
- decoder.decode(chunk);
- }
- response.body?.cancel();
- }
- }
- async persistMessages(messages, excludeBroadcastIds = []) {
- this.sql`delete from cf_ai_chat_agent_messages`;
- for (const message of messages) {
- this.sql`insert into cf_ai_chat_agent_messages (id, message) values (${message.id},${JSON.stringify(message)})`;
- }
- this.messages = messages;
- this._broadcastChatMessage(
- {
- type: "cf_agent_chat_messages",
- messages
- },
- excludeBroadcastIds
- );
- }
- async _reply(id, response) {
- return this._tryCatchChat(async () => {
- for await (const chunk of response.body) {
- const body = decoder.decode(chunk);
- this._broadcastChatMessage({
- id,
- type: "cf_agent_use_chat_response",
- body,
- done: false
- });
- }
- this._broadcastChatMessage({
- id,
- type: "cf_agent_use_chat_response",
- body: "",
- done: true
- });
- });
- }
- /**
- * For the given message id, look up its associated AbortController
- * If the AbortController does not exist, create and store one in memory
- *
- * returns the AbortSignal associated with the AbortController
- */
- _getAbortSignal(id) {
- if (typeof id !== "string") {
- return void 0;
- }
- if (!this._chatMessageAbortControllers.has(id)) {
- this._chatMessageAbortControllers.set(id, new AbortController());
- }
- return this._chatMessageAbortControllers.get(id)?.signal;
- }
- /**
- * Remove an abort controller from the cache of pending message responses
- */
- _removeAbortController(id) {
- this._chatMessageAbortControllers.delete(id);
- }
- /**
- * Propagate an abort signal for any requests associated with the given message id
- */
- _cancelChatRequest(id) {
- if (this._chatMessageAbortControllers.has(id)) {
- const abortController = this._chatMessageAbortControllers.get(id);
- abortController?.abort();
- }
- }
- /**
- * Abort all pending requests and clear the cache of AbortControllers
- */
- _destroyAbortControllers() {
- for (const controller of this._chatMessageAbortControllers.values()) {
- controller?.abort();
- }
- this._chatMessageAbortControllers.clear();
- }
- /**
- * When the DO is destroyed, cancel all pending requests
- */
- async destroy() {
- this._destroyAbortControllers();
- await super.destroy();
- }
- };
- export {
- AIChatAgent
+ this.sql`create table if not exists cf_ai_chat_stream_chunks (
+ id text primary key,
+ stream_id text not null,
+ body text not null,
+ chunk_index integer not null,
+ created_at integer not null
+ )`;
+ this.sql`create table if not exists cf_ai_chat_stream_metadata (
+ id text primary key,
+ request_id text not null,
+ status text not null,
+ created_at integer not null,
+ completed_at integer
+ )`;
+ this.sql`create index if not exists idx_stream_chunks_stream_id
+ on cf_ai_chat_stream_chunks(stream_id, chunk_index)`;
+ this.messages = autoTransformMessages(this._loadMessagesFromDb());
+ this._chatMessageAbortControllers = /* @__PURE__ */ new Map();
+ this._restoreActiveStream();
+ const _onConnect = this.onConnect.bind(this);
+ this.onConnect = async (connection, ctx$1) => {
+ if (this._activeStreamId) this._notifyStreamResuming(connection);
+ return _onConnect(connection, ctx$1);
+ };
+ const _onMessage = this.onMessage.bind(this);
+ this.onMessage = async (connection, message) => {
+ if (typeof message === "string") {
+ let data;
+ try {
+ data = JSON.parse(message);
+ } catch (_error) {
+ return _onMessage(connection, message);
+ }
+ if (data.type === MessageType.CF_AGENT_USE_CHAT_REQUEST && data.init.method === "POST") {
+ const { body } = data.init;
+ const { messages, clientTools } = JSON.parse(body);
+ const transformedMessages = autoTransformMessages(messages);
+ this._broadcastChatMessage({
+ messages: transformedMessages,
+ type: MessageType.CF_AGENT_CHAT_MESSAGES
+ }, [connection.id]);
+ await this.persistMessages(transformedMessages, [connection.id]);
+ this.observability?.emit({
+ displayMessage: "Chat message request",
+ id: data.id,
+ payload: {},
+ timestamp: Date.now(),
+ type: "message:request"
+ }, this.ctx);
+ const chatMessageId = data.id;
+ const abortSignal = this._getAbortSignal(chatMessageId);
+ return this._tryCatchChat(async () => {
+ return agentContext.run({
+ agent: this,
+ connection,
+ request: void 0,
+ email: void 0
+ }, async () => {
+ const response = await this.onChatMessage(async (_finishResult) => {
+ this._removeAbortController(chatMessageId);
+ this.observability?.emit({
+ displayMessage: "Chat message response",
+ id: data.id,
+ payload: {},
+ timestamp: Date.now(),
+ type: "message:response"
+ }, this.ctx);
+ }, {
+ abortSignal,
+ clientTools
+ });
+ if (response) await this._reply(data.id, response, [connection.id]);
+ else {
+ console.warn(`[AIChatAgent] onChatMessage returned no response for chatMessageId: ${chatMessageId}`);
+ this._broadcastChatMessage({
+ body: "No response was generated by the agent.",
+ done: true,
+ id: data.id,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+ }, [connection.id]);
+ }
+ });
+ });
+ }
+ if (data.type === MessageType.CF_AGENT_CHAT_CLEAR) {
+ this._destroyAbortControllers();
+ this.sql`delete from cf_ai_chat_agent_messages`;
+ this.sql`delete from cf_ai_chat_stream_chunks`;
+ this.sql`delete from cf_ai_chat_stream_metadata`;
+ this._activeStreamId = null;
+ this._activeRequestId = null;
+ this._streamChunkIndex = 0;
+ this.messages = [];
+ this._broadcastChatMessage({ type: MessageType.CF_AGENT_CHAT_CLEAR }, [connection.id]);
+ return;
+ }
+ if (data.type === MessageType.CF_AGENT_CHAT_MESSAGES) {
+ const transformedMessages = autoTransformMessages(data.messages);
+ await this.persistMessages(transformedMessages, [connection.id]);
+ return;
+ }
+ if (data.type === MessageType.CF_AGENT_CHAT_REQUEST_CANCEL) {
+ this._cancelChatRequest(data.id);
+ return;
+ }
+ if (data.type === MessageType.CF_AGENT_STREAM_RESUME_ACK) {
+ if (this._activeStreamId && this._activeRequestId && this._activeRequestId === data.id) this._sendStreamChunks(connection, this._activeStreamId, this._activeRequestId);
+ return;
+ }
+ if (data.type === MessageType.CF_AGENT_TOOL_RESULT) {
+ const { toolCallId, toolName, output, autoContinue } = data;
+ this._applyToolResult(toolCallId, toolName, output).then((applied) => {
+ if (applied && autoContinue) {
+ const waitForStream = async () => {
+ if (this._streamCompletionPromise) await this._streamCompletionPromise;
+ else await new Promise((resolve) => setTimeout(resolve, 500));
+ };
+ waitForStream().then(() => {
+ const continuationId = nanoid();
+ const abortSignal = this._getAbortSignal(continuationId);
+ this._tryCatchChat(async () => {
+ return agentContext.run({
+ agent: this,
+ connection,
+ request: void 0,
+ email: void 0
+ }, async () => {
+ const response = await this.onChatMessage(async (_finishResult) => {
+ this._removeAbortController(continuationId);
+ this.observability?.emit({
+ displayMessage: "Chat message response (tool continuation)",
+ id: continuationId,
+ payload: {},
+ timestamp: Date.now(),
+ type: "message:response"
+ }, this.ctx);
+ }, { abortSignal });
+ if (response) await this._reply(continuationId, response, [], { continuation: true });
+ });
+ });
+ }
+ });
+ return;
+ }
+ }
+ return _onMessage(connection, message);
+ };
+ }
+ /**
+ * Restore active stream state if the agent was restarted during streaming.
+ * Called during construction to recover any interrupted streams.
+ * Validates stream freshness to avoid sending stale resume notifications.
+ * @internal Protected for testing purposes.
+ */
+ _restoreActiveStream() {
+ const activeStreams = this.sql`
+ select * from cf_ai_chat_stream_metadata
+ where status = 'streaming'
+ order by created_at desc
+ limit 1
+ `;
+ if (activeStreams && activeStreams.length > 0) {
+ const stream = activeStreams[0];
+ const streamAge = Date.now() - stream.created_at;
+ if (streamAge > STREAM_STALE_THRESHOLD_MS) {
+ this.sql`delete from cf_ai_chat_stream_chunks where stream_id = ${stream.id}`;
+ this.sql`delete from cf_ai_chat_stream_metadata where id = ${stream.id}`;
+ console.warn(`[AIChatAgent] Deleted stale stream ${stream.id} (age: ${Math.round(streamAge / 1e3)}s)`);
+ return;
+ }
+ this._activeStreamId = stream.id;
+ this._activeRequestId = stream.request_id;
+ const lastChunk = this.sql`
+ select max(chunk_index) as max_index
+ from cf_ai_chat_stream_chunks
+ where stream_id = ${this._activeStreamId}
+ `;
+ this._streamChunkIndex = lastChunk && lastChunk[0]?.max_index != null ? lastChunk[0].max_index + 1 : 0;
+ }
+ }
+ /**
+ * Notify a connection about an active stream that can be resumed.
+ * The client should respond with CF_AGENT_STREAM_RESUME_ACK to receive chunks.
+ * Uses in-memory state for request ID - no extra DB lookup needed.
+ * @param connection - The WebSocket connection to notify
+ */
+ _notifyStreamResuming(connection) {
+ if (!this._activeStreamId || !this._activeRequestId) return;
+ connection.send(JSON.stringify({
+ type: MessageType.CF_AGENT_STREAM_RESUMING,
+ id: this._activeRequestId
+ }));
+ }
+ /**
+ * Send stream chunks to a connection after receiving ACK.
+ * @param connection - The WebSocket connection
+ * @param streamId - The stream to replay
+ * @param requestId - The original request ID
+ */
+ _sendStreamChunks(connection, streamId, requestId) {
+ this._flushChunkBuffer();
+ const chunks = this.sql`
+ select * from cf_ai_chat_stream_chunks
+ where stream_id = ${streamId}
+ order by chunk_index asc
+ `;
+ for (const chunk of chunks || []) connection.send(JSON.stringify({
+ body: chunk.body,
+ done: false,
+ id: requestId,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+ }));
+ if (this._activeStreamId !== streamId) connection.send(JSON.stringify({
+ body: "",
+ done: true,
+ id: requestId,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+ }));
+ }
+ /**
+ * Buffer a stream chunk for batch write to SQLite.
+ * @param streamId - The stream this chunk belongs to
+ * @param body - The serialized chunk body
+ * @internal Protected for testing purposes.
+ */
+ _storeStreamChunk(streamId, body) {
+ if (this._chunkBuffer.length >= CHUNK_BUFFER_MAX_SIZE) this._flushChunkBuffer();
+ this._chunkBuffer.push({
+ id: nanoid(),
+ streamId,
+ body,
+ index: this._streamChunkIndex
+ });
+ this._streamChunkIndex++;
+ if (this._chunkBuffer.length >= CHUNK_BUFFER_SIZE) this._flushChunkBuffer();
+ }
+ /**
+ * Flush buffered chunks to SQLite in a single batch.
+ * Uses a lock to prevent concurrent flush operations.
+ * @internal Protected for testing purposes.
+ */
+ _flushChunkBuffer() {
+ if (this._isFlushingChunks || this._chunkBuffer.length === 0) return;
+ this._isFlushingChunks = true;
+ try {
+ const chunks = this._chunkBuffer;
+ this._chunkBuffer = [];
+ const now = Date.now();
+ for (const chunk of chunks) this.sql`
+ insert into cf_ai_chat_stream_chunks (id, stream_id, body, chunk_index, created_at)
+ values (${chunk.id}, ${chunk.streamId}, ${chunk.body}, ${chunk.index}, ${now})
+ `;
+ } finally {
+ this._isFlushingChunks = false;
+ }
+ }
+ /**
+ * Start tracking a new stream for resumable streaming.
+ * Creates metadata entry in SQLite and sets up tracking state.
+ * @param requestId - The unique ID of the chat request
+ * @returns The generated stream ID
+ * @internal Protected for testing purposes.
+ */
+ _startStream(requestId) {
+ this._flushChunkBuffer();
+ const streamId = nanoid();
+ this._activeStreamId = streamId;
+ this._activeRequestId = requestId;
+ this._streamChunkIndex = 0;
+ this.sql`
+ insert into cf_ai_chat_stream_metadata (id, request_id, status, created_at)
+ values (${streamId}, ${requestId}, 'streaming', ${Date.now()})
+ `;
+ return streamId;
+ }
+ /**
+ * Mark a stream as completed and flush any pending chunks.
+ * @param streamId - The stream to mark as completed
+ * @internal Protected for testing purposes.
+ */
+ _completeStream(streamId) {
+ this._flushChunkBuffer();
+ this.sql`
+ update cf_ai_chat_stream_metadata
+ set status = 'completed', completed_at = ${Date.now()}
+ where id = ${streamId}
+ `;
+ this._activeStreamId = null;
+ this._activeRequestId = null;
+ this._streamChunkIndex = 0;
+ this._maybeCleanupOldStreams();
+ }
+ /**
+ * Clean up old completed streams if enough time has passed since last cleanup.
+ * This prevents database growth while avoiding cleanup overhead on every stream completion.
+ */
+ _maybeCleanupOldStreams() {
+ const now = Date.now();
+ if (now - this._lastCleanupTime < CLEANUP_INTERVAL_MS) return;
+ this._lastCleanupTime = now;
+ const cutoff = now - CLEANUP_AGE_THRESHOLD_MS;
+ this.sql`
+ delete from cf_ai_chat_stream_chunks
+ where stream_id in (
+ select id from cf_ai_chat_stream_metadata
+ where status = 'completed' and completed_at < ${cutoff}
+ )
+ `;
+ this.sql`
+ delete from cf_ai_chat_stream_metadata
+ where status = 'completed' and completed_at < ${cutoff}
+ `;
+ }
+ _broadcastChatMessage(message, exclude) {
+ this.broadcast(JSON.stringify(message), exclude);
+ }
+ _loadMessagesFromDb() {
+ return (this.sql`select * from cf_ai_chat_agent_messages order by created_at` || []).map((row) => {
+ try {
+ return JSON.parse(row.message);
+ } catch (error) {
+ console.error(`Failed to parse message ${row.id}:`, error);
+ return null;
+ }
+ }).filter((msg) => msg !== null);
+ }
+ async onRequest(request) {
+ return this._tryCatchChat(async () => {
+ if (new URL(request.url).pathname.endsWith("/get-messages")) {
+ const messages = this._loadMessagesFromDb();
+ return Response.json(messages);
+ }
+ return super.onRequest(request);
+ });
+ }
+ async _tryCatchChat(fn) {
+ try {
+ return await fn();
+ } catch (e) {
+ throw this.onError(e);
+ }
+ }
+ /**
+ * Handle incoming chat messages and generate a response
+ * @param onFinish Callback to be called when the response is finished
+ * @param options Options including abort signal and client-defined tools
+ * @returns Response to send to the client or undefined
+ */
+ async onChatMessage(onFinish, options) {
+ throw new Error("recieved a chat message, override onChatMessage and return a Response to send to the client");
+ }
+ /**
+ * Save messages on the server side
+ * @param messages Chat messages to save
+ */
+ async saveMessages(messages) {
+ await this.persistMessages(messages);
+ await this._tryCatchChat(async () => {
+ const response = await this.onChatMessage(() => {});
+ if (response) this._reply(crypto.randomUUID(), response);
+ });
+ }
+ async persistMessages(messages, excludeBroadcastIds = []) {
+ const mergedMessages = this._mergeIncomingWithServerState(messages);
+ for (const message of mergedMessages) {
+ const sanitizedMessage = this._sanitizeMessageForPersistence(message);
+ const messageToSave = this._resolveMessageForToolMerge(sanitizedMessage);
+ this.sql`
+ insert into cf_ai_chat_agent_messages (id, message)
+ values (${messageToSave.id}, ${JSON.stringify(messageToSave)})
+ on conflict(id) do update set message = excluded.message
+ `;
+ }
+ this.messages = autoTransformMessages(this._loadMessagesFromDb());
+ this._broadcastChatMessage({
+ messages: mergedMessages,
+ type: MessageType.CF_AGENT_CHAT_MESSAGES
+ }, excludeBroadcastIds);
+ }
+ /**
+ * Merges incoming messages with existing server state.
+ * This preserves tool outputs that the server has (via _applyToolResult)
+ * but the client doesn't have yet.
+ *
+ * @param incomingMessages - Messages from the client
+ * @returns Messages with server's tool outputs preserved
+ */
+ _mergeIncomingWithServerState(incomingMessages) {
+ const serverToolOutputs = /* @__PURE__ */ new Map();
+ for (const msg of this.messages) {
+ if (msg.role !== "assistant") continue;
+ for (const part of msg.parts) if ("toolCallId" in part && "state" in part && part.state === "output-available" && "output" in part) serverToolOutputs.set(part.toolCallId, part.output);
+ }
+ if (serverToolOutputs.size === 0) return incomingMessages;
+ return incomingMessages.map((msg) => {
+ if (msg.role !== "assistant") return msg;
+ let hasChanges = false;
+ const updatedParts = msg.parts.map((part) => {
+ if ("toolCallId" in part && "state" in part && part.state === "input-available" && serverToolOutputs.has(part.toolCallId)) {
+ hasChanges = true;
+ return {
+ ...part,
+ state: "output-available",
+ output: serverToolOutputs.get(part.toolCallId)
+ };
+ }
+ return part;
+ });
+ return hasChanges ? {
+ ...msg,
+ parts: updatedParts
+ } : msg;
+ });
+ }
+ /**
+ * Resolves a message for persistence, handling tool result merging.
+ * If the message contains tool parts with output-available state, checks if there's
+ * an existing message with the same toolCallId that should be updated instead of
+ * creating a duplicate. This prevents the "Duplicate item found" error from OpenAI
+ * when client-side tool results arrive in a new request.
+ *
+ * @param message - The message to potentially merge
+ * @returns The message with the correct ID (either original or merged)
+ */
+ _resolveMessageForToolMerge(message) {
+ if (message.role !== "assistant") return message;
+ for (const part of message.parts) if ("toolCallId" in part && "state" in part && part.state === "output-available") {
+ const toolCallId = part.toolCallId;
+ const existingMessage = this._findMessageByToolCallId(toolCallId);
+ if (existingMessage && existingMessage.id !== message.id) return {
+ ...message,
+ id: existingMessage.id
+ };
+ }
+ return message;
+ }
+ /**
+ * Finds an existing assistant message that contains a tool part with the given toolCallId.
+ * Used to detect when a tool result should update an existing message rather than
+ * creating a new one.
+ *
+ * @param toolCallId - The tool call ID to search for
+ * @returns The existing message if found, undefined otherwise
+ */
+ _findMessageByToolCallId(toolCallId) {
+ for (const msg of this.messages) {
+ if (msg.role !== "assistant") continue;
+ for (const part of msg.parts) if ("toolCallId" in part && part.toolCallId === toolCallId) return msg;
+ }
+ }
+ /**
+ * Sanitizes a message for persistence by removing ephemeral provider-specific
+ * data that should not be stored or sent back in subsequent requests.
+ *
+ * This handles two issues with the OpenAI Responses API:
+ *
+ * 1. **Duplicate item IDs**: The AI SDK's @ai-sdk/openai provider (v2.0.x+)
+ * defaults to using OpenAI's Responses API which assigns unique itemIds
+ * to each message part. When these IDs are persisted and sent back,
+ * OpenAI rejects them as duplicates.
+ *
+ * 2. **Empty reasoning parts**: OpenAI may return reasoning parts with empty
+ * text and encrypted content. These cause "Non-OpenAI reasoning parts are
+ * not supported" warnings when sent back via convertToModelMessages().
+ *
+ * @param message - The message to sanitize
+ * @returns A new message with ephemeral provider data removed
+ */
+ _sanitizeMessageForPersistence(message) {
+ const sanitizedParts = message.parts.filter((part) => {
+ if (part.type === "reasoning") {
+ const reasoningPart = part;
+ if (!reasoningPart.text || reasoningPart.text.trim() === "") return false;
+ }
+ return true;
+ }).map((part) => {
+ let sanitizedPart = part;
+ if ("providerMetadata" in sanitizedPart && sanitizedPart.providerMetadata && typeof sanitizedPart.providerMetadata === "object" && "openai" in sanitizedPart.providerMetadata) sanitizedPart = this._stripOpenAIMetadata(sanitizedPart, "providerMetadata");
+ if ("callProviderMetadata" in sanitizedPart && sanitizedPart.callProviderMetadata && typeof sanitizedPart.callProviderMetadata === "object" && "openai" in sanitizedPart.callProviderMetadata) sanitizedPart = this._stripOpenAIMetadata(sanitizedPart, "callProviderMetadata");
+ return sanitizedPart;
+ });
+ return {
+ ...message,
+ parts: sanitizedParts
+ };
+ }
+ /**
+ * Helper to strip OpenAI-specific ephemeral fields from a metadata object.
+ * Removes itemId and reasoningEncryptedContent while preserving other fields.
+ */
+ _stripOpenAIMetadata(part, metadataKey) {
+ const metadata = part[metadataKey];
+ if (!metadata?.openai) return part;
+ const { itemId: _itemId, reasoningEncryptedContent: _rec, ...restOpenai } = metadata.openai;
+ const hasOtherOpenaiFields = Object.keys(restOpenai).length > 0;
+ const { openai: _openai, ...restMetadata } = metadata;
+ let newMetadata;
+ if (hasOtherOpenaiFields) newMetadata = {
+ ...restMetadata,
+ openai: restOpenai
+ };
+ else if (Object.keys(restMetadata).length > 0) newMetadata = restMetadata;
+ const { [metadataKey]: _oldMeta, ...restPart } = part;
+ if (newMetadata) return {
+ ...restPart,
+ [metadataKey]: newMetadata
+ };
+ return restPart;
+ }
+ /**
+ * Applies a tool result to an existing assistant message.
+ * This is used when the client sends CF_AGENT_TOOL_RESULT for client-side tools.
+ * The server is the source of truth, so we update the message here and broadcast
+ * the update to all clients.
+ *
+ * @param toolCallId - The tool call ID this result is for
+ * @param toolName - The name of the tool
+ * @param output - The output from the tool execution
+ * @returns true if the result was applied, false if the message was not found
+ */
+ async _applyToolResult(toolCallId, _toolName, output) {
+ let message;
+ if (this._streamingMessage) {
+ for (const part of this._streamingMessage.parts) if ("toolCallId" in part && part.toolCallId === toolCallId) {
+ message = this._streamingMessage;
+ break;
+ }
+ }
+ if (!message) for (let attempt = 0; attempt < 10; attempt++) {
+ message = this._findMessageByToolCallId(toolCallId);
+ if (message) break;
+ await new Promise((resolve) => setTimeout(resolve, 100));
+ }
+ if (!message) {
+ console.warn(`[AIChatAgent] _applyToolResult: Could not find message with toolCallId ${toolCallId} after retries`);
+ return false;
+ }
+ const isStreamingMessage = message === this._streamingMessage;
+ let updated = false;
+ if (isStreamingMessage) {
+ for (const part of message.parts) if ("toolCallId" in part && part.toolCallId === toolCallId && "state" in part && part.state === "input-available") {
+ part.state = "output-available";
+ part.output = output;
+ updated = true;
+ break;
+ }
+ } else {
+ const updatedParts = message.parts.map((part) => {
+ if ("toolCallId" in part && part.toolCallId === toolCallId && "state" in part && part.state === "input-available") {
+ updated = true;
+ return {
+ ...part,
+ state: "output-available",
+ output
+ };
+ }
+ return part;
+ });
+ if (updated) {
+ const updatedMessage = this._sanitizeMessageForPersistence({
+ ...message,
+ parts: updatedParts
+ });
+ this.sql`
+ update cf_ai_chat_agent_messages
+ set message = ${JSON.stringify(updatedMessage)}
+ where id = ${message.id}
+ `;
+ this.messages = autoTransformMessages(this._loadMessagesFromDb());
+ }
+ }
+ if (!updated) {
+ console.warn(`[AIChatAgent] _applyToolResult: Tool part with toolCallId ${toolCallId} not in input-available state`);
+ return false;
+ }
+ if (!isStreamingMessage) {
+ const broadcastMessage = this._findMessageByToolCallId(toolCallId);
+ if (broadcastMessage) this._broadcastChatMessage({
+ type: MessageType.CF_AGENT_MESSAGE_UPDATED,
+ message: broadcastMessage
+ });
+ }
+ return true;
+ }
+ async _reply(id, response, excludeBroadcastIds = [], options = {}) {
+ const { continuation = false } = options;
+ return this._tryCatchChat(async () => {
+ if (!response.body) {
+ this._broadcastChatMessage({
+ body: "",
+ done: true,
+ id,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+ ...continuation && { continuation: true }
+ });
+ return;
+ }
+ const streamId = this._startStream(id);
+ const { getToolName: getToolName$1, isToolUIPart: isToolUIPart$1, parsePartialJson } = await import("ai");
+ const reader = response.body.getReader();
+ const message = {
+ id: `assistant_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
+ role: "assistant",
+ parts: []
+ };
+ this._streamingMessage = message;
+ this._streamCompletionPromise = new Promise((resolve) => {
+ this._streamCompletionResolve = resolve;
+ });
+ let activeTextParts = {};
+ let activeReasoningParts = {};
+ const partialToolCalls = {};
+ function updateDynamicToolPart(options$1) {
+ const part = message.parts.find((part$1) => part$1.type === "dynamic-tool" && part$1.toolCallId === options$1.toolCallId);
+ const anyOptions = options$1;
+ const anyPart = part;
+ if (part != null) {
+ part.state = options$1.state;
+ anyPart.toolName = options$1.toolName;
+ anyPart.input = anyOptions.input;
+ anyPart.output = anyOptions.output;
+ anyPart.errorText = anyOptions.errorText;
+ anyPart.rawInput = anyOptions.rawInput ?? anyPart.rawInput;
+ anyPart.preliminary = anyOptions.preliminary;
+ if (anyOptions.providerMetadata != null && part.state === "input-available") part.callProviderMetadata = anyOptions.providerMetadata;
+ } else message.parts.push({
+ type: "dynamic-tool",
+ toolName: options$1.toolName,
+ toolCallId: options$1.toolCallId,
+ state: options$1.state,
+ input: anyOptions.input,
+ output: anyOptions.output,
+ errorText: anyOptions.errorText,
+ preliminary: anyOptions.preliminary,
+ ...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
+ });
+ }
+ function updateToolPart(options$1) {
+ const part = message.parts.find((part$1) => isToolUIPart$1(part$1) && part$1.toolCallId === options$1.toolCallId);
+ const anyOptions = options$1;
+ const anyPart = part;
+ if (part != null) {
+ part.state = options$1.state;
+ anyPart.input = anyOptions.input;
+ anyPart.output = anyOptions.output;
+ anyPart.errorText = anyOptions.errorText;
+ anyPart.rawInput = anyOptions.rawInput;
+ anyPart.preliminary = anyOptions.preliminary;
+ anyPart.providerExecuted = anyOptions.providerExecuted ?? part.providerExecuted;
+ if (anyOptions.providerMetadata != null && part.state === "input-available") part.callProviderMetadata = anyOptions.providerMetadata;
+ } else message.parts.push({
+ type: `tool-${options$1.toolName}`,
+ toolCallId: options$1.toolCallId,
+ state: options$1.state,
+ input: anyOptions.input,
+ output: anyOptions.output,
+ rawInput: anyOptions.rawInput,
+ errorText: anyOptions.errorText,
+ providerExecuted: anyOptions.providerExecuted,
+ preliminary: anyOptions.preliminary,
+ ...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
+ });
+ }
+ async function updateMessageMetadata(metadata) {
+ if (metadata != null) message.metadata = message.metadata != null ? {
+ ...message.metadata,
+ ...metadata
+ } : metadata;
+ }
+ let streamCompleted = false;
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ this._completeStream(streamId);
+ streamCompleted = true;
+ this._broadcastChatMessage({
+ body: "",
+ done: true,
+ id,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+ ...continuation && { continuation: true }
+ });
+ break;
+ }
+ const chunk = decoder.decode(value);
+ if ((response.headers.get("content-type") || "").includes("text/event-stream")) {
+ const lines = chunk.split("\n");
+ for (const line of lines) if (line.startsWith("data: ") && line !== "data: [DONE]") try {
+ const data = JSON.parse(line.slice(6));
+ switch (data.type) {
+ case "text-start": {
+ const textPart = {
+ type: "text",
+ text: "",
+ providerMetadata: data.providerMetadata,
+ state: "streaming"
+ };
+ activeTextParts[data.id] = textPart;
+ message.parts.push(textPart);
+ break;
+ }
+ case "text-delta": {
+ const textPart = activeTextParts[data.id];
+ textPart.text += data.delta;
+ textPart.providerMetadata = data.providerMetadata ?? textPart.providerMetadata;
+ break;
+ }
+ case "text-end": {
+ const textPart = activeTextParts[data.id];
+ textPart.state = "done";
+ textPart.providerMetadata = data.providerMetadata ?? textPart.providerMetadata;
+ delete activeTextParts[data.id];
+ break;
+ }
+ case "reasoning-start": {
+ const reasoningPart = {
+ type: "reasoning",
+ text: "",
+ providerMetadata: data.providerMetadata,
+ state: "streaming"
+ };
+ activeReasoningParts[data.id] = reasoningPart;
+ message.parts.push(reasoningPart);
+ break;
+ }
+ case "reasoning-delta": {
+ const reasoningPart = activeReasoningParts[data.id];
+ reasoningPart.text += data.delta;
+ reasoningPart.providerMetadata = data.providerMetadata ?? reasoningPart.providerMetadata;
+ break;
+ }
+ case "reasoning-end": {
+ const reasoningPart = activeReasoningParts[data.id];
+ reasoningPart.providerMetadata = data.providerMetadata ?? reasoningPart.providerMetadata;
+ reasoningPart.state = "done";
+ delete activeReasoningParts[data.id];
+ break;
+ }
+ case "file":
+ message.parts.push({
+ type: "file",
+ mediaType: data.mediaType,
+ url: data.url
+ });
+ break;
+ case "source-url":
+ message.parts.push({
+ type: "source-url",
+ sourceId: data.sourceId,
+ url: data.url,
+ title: data.title,
+ providerMetadata: data.providerMetadata
+ });
+ break;
+ case "source-document":
+ message.parts.push({
+ type: "source-document",
+ sourceId: data.sourceId,
+ mediaType: data.mediaType,
+ title: data.title,
+ filename: data.filename,
+ providerMetadata: data.providerMetadata
+ });
+ break;
+ case "tool-input-start": {
+ const toolInvocations = message.parts.filter(isToolUIPart$1);
+ partialToolCalls[data.toolCallId] = {
+ text: "",
+ toolName: data.toolName,
+ index: toolInvocations.length,
+ dynamic: data.dynamic
+ };
+ if (data.dynamic) updateDynamicToolPart({
+ toolCallId: data.toolCallId,
+ toolName: data.toolName,
+ state: "input-streaming",
+ input: void 0
+ });
+ else updateToolPart({
+ toolCallId: data.toolCallId,
+ toolName: data.toolName,
+ state: "input-streaming",
+ input: void 0
+ });
+ break;
+ }
+ case "tool-input-delta": {
+ const partialToolCall = partialToolCalls[data.toolCallId];
+ partialToolCall.text += data.inputTextDelta;
+ const partialArgs = (await parsePartialJson(partialToolCall.text)).value;
+ if (partialToolCall.dynamic) updateDynamicToolPart({
+ toolCallId: data.toolCallId,
+ toolName: partialToolCall.toolName,
+ state: "input-streaming",
+ input: partialArgs
+ });
+ else updateToolPart({
+ toolCallId: data.toolCallId,
+ toolName: partialToolCall.toolName,
+ state: "input-streaming",
+ input: partialArgs
+ });
+ break;
+ }
+ case "tool-input-available":
+ if (data.dynamic) updateDynamicToolPart({
+ toolCallId: data.toolCallId,
+ toolName: data.toolName,
+ state: "input-available",
+ input: data.input,
+ providerMetadata: data.providerMetadata
+ });
+ else updateToolPart({
+ toolCallId: data.toolCallId,
+ toolName: data.toolName,
+ state: "input-available",
+ input: data.input,
+ providerExecuted: data.providerExecuted,
+ providerMetadata: data.providerMetadata
+ });
+ break;
+ case "tool-input-error":
+ if (data.dynamic) updateDynamicToolPart({
+ toolCallId: data.toolCallId,
+ toolName: data.toolName,
+ state: "output-error",
+ input: data.input,
+ errorText: data.errorText,
+ providerMetadata: data.providerMetadata
+ });
+ else updateToolPart({
+ toolCallId: data.toolCallId,
+ toolName: data.toolName,
+ state: "output-error",
+ input: void 0,
+ rawInput: data.input,
+ errorText: data.errorText,
+ providerExecuted: data.providerExecuted,
+ providerMetadata: data.providerMetadata
+ });
+ break;
+ case "tool-output-available":
+ if (data.dynamic) {
+ const toolInvocation = message.parts.filter((part) => part.type === "dynamic-tool").find((invocation) => invocation.toolCallId === data.toolCallId);
+ if (!toolInvocation) throw new Error("Tool invocation not found");
+ updateDynamicToolPart({
+ toolCallId: data.toolCallId,
+ toolName: toolInvocation.toolName,
+ state: "output-available",
+ input: toolInvocation.input,
+ output: data.output,
+ preliminary: data.preliminary
+ });
+ } else {
+ const toolInvocation = message.parts.filter(isToolUIPart$1).find((invocation) => invocation.toolCallId === data.toolCallId);
+ if (!toolInvocation) throw new Error("Tool invocation not found");
+ updateToolPart({
+ toolCallId: data.toolCallId,
+ toolName: getToolName$1(toolInvocation),
+ state: "output-available",
+ input: toolInvocation.input,
+ output: data.output,
+ providerExecuted: data.providerExecuted,
+ preliminary: data.preliminary
+ });
+ }
+ break;
+ case "tool-output-error":
+ if (data.dynamic) {
+ const toolInvocation = message.parts.filter((part) => part.type === "dynamic-tool").find((invocation) => invocation.toolCallId === data.toolCallId);
+ if (!toolInvocation) throw new Error("Tool invocation not found");
+ updateDynamicToolPart({
+ toolCallId: data.toolCallId,
+ toolName: toolInvocation.toolName,
+ state: "output-error",
+ input: toolInvocation.input,
+ errorText: data.errorText
+ });
+ } else {
+ const toolInvocation = message.parts.filter(isToolUIPart$1).find((invocation) => invocation.toolCallId === data.toolCallId);
+ if (!toolInvocation) throw new Error("Tool invocation not found");
+ updateToolPart({
+ toolCallId: data.toolCallId,
+ toolName: getToolName$1(toolInvocation),
+ state: "output-error",
+ input: toolInvocation.input,
+ rawInput: "rawInput" in toolInvocation ? toolInvocation.rawInput : void 0,
+ errorText: data.errorText
+ });
+ }
+ break;
+ case "start-step":
+ message.parts.push({ type: "step-start" });
+ break;
+ case "finish-step":
+ activeTextParts = {};
+ activeReasoningParts = {};
+ break;
+ case "start":
+ if (data.messageId != null) message.id = data.messageId;
+ await updateMessageMetadata(data.messageMetadata);
+ break;
+ case "finish":
+ await updateMessageMetadata(data.messageMetadata);
+ break;
+ case "message-metadata":
+ await updateMessageMetadata(data.messageMetadata);
+ break;
+ case "error":
+ this._broadcastChatMessage({
+ error: true,
+ body: data.errorText ?? JSON.stringify(data),
+ done: false,
+ id,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+ });
+ break;
+ }
+ let eventToSend = data;
+ if (data.type === "finish" && "finishReason" in data) {
+ const { finishReason, ...rest } = data;
+ eventToSend = {
+ ...rest,
+ type: "finish",
+ messageMetadata: { finishReason }
+ };
+ }
+ const chunkBody = JSON.stringify(eventToSend);
+ this._storeStreamChunk(streamId, chunkBody);
+ this._broadcastChatMessage({
+ body: chunkBody,
+ done: false,
+ id,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+ ...continuation && { continuation: true }
+ });
+ } catch (_error) {}
+ } else if (chunk.length > 0) {
+ message.parts.push({
+ type: "text",
+ text: chunk
+ });
+ const chunkBody = JSON.stringify({
+ type: "text-delta",
+ delta: chunk
+ });
+ this._storeStreamChunk(streamId, chunkBody);
+ this._broadcastChatMessage({
+ body: chunkBody,
+ done: false,
+ id,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+ ...continuation && { continuation: true }
+ });
+ }
+ }
+ } catch (error) {
+ if (!streamCompleted) {
+ this._markStreamError(streamId);
+ this._broadcastChatMessage({
+ body: error instanceof Error ? error.message : "Stream error",
+ done: true,
+ error: true,
+ id,
+ type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+ ...continuation && { continuation: true }
+ });
+ }
+ throw error;
+ } finally {
+ reader.releaseLock();
+ }
+ if (message.parts.length > 0) if (continuation) {
+ let lastAssistantIdx = -1;
+ for (let i = this.messages.length - 1; i >= 0; i--) if (this.messages[i].role === "assistant") {
+ lastAssistantIdx = i;
+ break;
+ }
+ if (lastAssistantIdx >= 0) {
+ const lastAssistant = this.messages[lastAssistantIdx];
+ const mergedMessage = {
+ ...lastAssistant,
+ parts: [...lastAssistant.parts, ...message.parts]
+ };
+ const updatedMessages = [...this.messages];
+ updatedMessages[lastAssistantIdx] = mergedMessage;
+ await this.persistMessages(updatedMessages, excludeBroadcastIds);
+ } else await this.persistMessages([...this.messages, message], excludeBroadcastIds);
+ } else await this.persistMessages([...this.messages, message], excludeBroadcastIds);
+ this._streamingMessage = null;
+ if (this._streamCompletionResolve) {
+ this._streamCompletionResolve();
+ this._streamCompletionResolve = null;
+ this._streamCompletionPromise = null;
+ }
+ });
+ }
+ /**
+ * Mark a stream as errored and clean up state.
+ * @param streamId - The stream to mark as errored
+ * @internal Protected for testing purposes.
+ */
+ _markStreamError(streamId) {
+ this._flushChunkBuffer();
+ this.sql`
+ update cf_ai_chat_stream_metadata
+ set status = 'error', completed_at = ${Date.now()}
+ where id = ${streamId}
+ `;
+ this._activeStreamId = null;
+ this._activeRequestId = null;
+ this._streamChunkIndex = 0;
+ }
+ /**
+ * For the given message id, look up its associated AbortController
+ * If the AbortController does not exist, create and store one in memory
+ *
+ * returns the AbortSignal associated with the AbortController
+ */
+ _getAbortSignal(id) {
+ if (typeof id !== "string") return;
+ if (!this._chatMessageAbortControllers.has(id)) this._chatMessageAbortControllers.set(id, new AbortController());
+ return this._chatMessageAbortControllers.get(id)?.signal;
+ }
+ /**
+ * Remove an abort controller from the cache of pending message responses
+ */
+ _removeAbortController(id) {
+ this._chatMessageAbortControllers.delete(id);
+ }
+ /**
+ * Propagate an abort signal for any requests associated with the given message id
+ */
+ _cancelChatRequest(id) {
+ if (this._chatMessageAbortControllers.has(id)) this._chatMessageAbortControllers.get(id)?.abort();
+ }
+ /**
+ * Abort all pending requests and clear the cache of AbortControllers
+ */
+ _destroyAbortControllers() {
+ for (const controller of this._chatMessageAbortControllers.values()) controller?.abort();
+ this._chatMessageAbortControllers.clear();
+ }
+ /**
+ * When the DO is destroyed, cancel all pending requests and clean up resources
+ */
+ async destroy() {
+ this._destroyAbortControllers();
+ this._flushChunkBuffer();
+ this.sql`drop table if exists cf_ai_chat_stream_chunks`;
+ this.sql`drop table if exists cf_ai_chat_stream_metadata`;
+ this._activeStreamId = null;
+ this._activeRequestId = null;
+ await super.destroy();
+ }
  };
+
+ //#endregion
+ export { AIChatAgent, createToolsFromClientSchemas };
  //# sourceMappingURL=ai-chat-agent.js.map
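
For orientation, here is a minimal, hypothetical sketch of how a consumer might wire up the surface this diff introduces. Only two things are taken from the diff itself: `createToolsFromClientSchemas` is now exported alongside `AIChatAgent`, and `onChatMessage` receives `clientTools` (plus `abortSignal`) in its options. The `agents/ai-chat-agent` import path, the OpenAI provider, the model name, and the surrounding AI SDK v5 calls (`convertToModelMessages`, `streamText`, `toUIMessageStreamResponse`) are assumptions about a typical setup, not facts stated in this diff.

// Hypothetical consumer sketch; not part of the package diff above.
import { AIChatAgent, createToolsFromClientSchemas } from "agents/ai-chat-agent";
import { openai } from "@ai-sdk/openai";
import { convertToModelMessages, streamText } from "ai";

export class MyChatAgent extends AIChatAgent {
  async onChatMessage(onFinish, options) {
    // Tools declared on the client arrive as plain schemas. They get no
    // execute function, so the base class round-trips their calls to the
    // browser and applies the CF_AGENT_TOOL_RESULT replies for us.
    const clientSideTools = createToolsFromClientSchemas(options?.clientTools);

    const result = streamText({
      model: openai("gpt-4o"), // assumed provider/model; swap in your own
      messages: convertToModelMessages(this.messages),
      tools: { ...clientSideTools /* , ...serverTools */ },
      abortSignal: options?.abortSignal,
      onFinish // lets the agent clear its AbortController when the run ends
    });

    // AIChatAgent reads this UI-message SSE stream, persists the parts,
    // and buffers chunks in SQLite so an interrupted stream can resume.
    return result.toUIMessageStreamResponse();
  }
}

Server-side tools with their own execute functions can be merged into the same tools object; the client-side ones simply stream their tool-call input and wait for the client's result.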