agents 0.0.0-293b546 → 0.0.0-29938d4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +121 -0
- package/dist/ai-chat-agent.d.ts +158 -24
- package/dist/ai-chat-agent.js +380 -60
- package/dist/ai-chat-agent.js.map +1 -1
- package/dist/ai-chat-v5-migration.js +154 -2
- package/dist/ai-chat-v5-migration.js.map +1 -0
- package/dist/ai-react.d.ts +136 -16
- package/dist/ai-react.js +252 -74
- package/dist/ai-react.js.map +1 -1
- package/dist/{ai-types-81H_-Uxh.d.ts → ai-types-0OnT3FHg.d.ts} +26 -2
- package/dist/{ai-types-CrMqkwc_.js → ai-types-DEtF_8Km.js} +5 -1
- package/dist/ai-types-DEtF_8Km.js.map +1 -0
- package/dist/ai-types.d.ts +1 -1
- package/dist/ai-types.js +1 -1
- package/dist/cli/index.d.ts +1 -0
- package/dist/{cli.js → cli/index.js} +7 -6
- package/dist/cli/index.js.map +1 -0
- package/dist/{client-BAQA84dr.d.ts → client-CdM5I962.d.ts} +2 -2
- package/dist/client-DFotUKH_.d.ts +834 -0
- package/dist/{client-B3SR12TQ.js → client-DjTPRM8-.js} +2 -2
- package/dist/{client-B3SR12TQ.js.map → client-DjTPRM8-.js.map} +1 -1
- package/dist/{client-C8VrzljV.js → client-QZa2Rq0l.js} +371 -187
- package/dist/client-QZa2Rq0l.js.map +1 -0
- package/dist/client.d.ts +1 -2
- package/dist/client.js +1 -2
- package/dist/codemode/ai.js +6 -6
- package/dist/codemode/ai.js.map +1 -1
- package/dist/context-BkKbAa1R.js +8 -0
- package/dist/context-BkKbAa1R.js.map +1 -0
- package/dist/context-DcbQ8o7k.d.ts +24 -0
- package/dist/context.d.ts +6 -0
- package/dist/context.js +3 -0
- package/dist/{do-oauth-client-provider-C2CHH5x-.d.ts → do-oauth-client-provider--To1Tsjj.d.ts} +20 -5
- package/dist/{do-oauth-client-provider-CwqK5SXm.js → do-oauth-client-provider-B1fVIshX.js} +69 -8
- package/dist/do-oauth-client-provider-B1fVIshX.js.map +1 -0
- package/dist/{index-7kI1zprE.d.ts → index-CT2tCrLr.d.ts} +59 -60
- package/dist/{index-BUle9RiP.d.ts → index-DLuxm_9W.d.ts} +2 -2
- package/dist/index.d.ts +31 -34
- package/dist/index.js +5 -5
- package/dist/mcp/client.d.ts +2 -4
- package/dist/mcp/client.js +2 -2
- package/dist/mcp/do-oauth-client-provider.d.ts +1 -1
- package/dist/mcp/do-oauth-client-provider.js +1 -1
- package/dist/mcp/index.d.ts +19 -12
- package/dist/mcp/index.js +55 -60
- package/dist/mcp/index.js.map +1 -1
- package/dist/{mcp-BwPscEiF.d.ts → mcp-CPSfGUgd.d.ts} +1 -1
- package/dist/observability/index.d.ts +1 -2
- package/dist/observability/index.js +5 -5
- package/dist/react.d.ts +134 -10
- package/dist/react.js +56 -56
- package/dist/react.js.map +1 -1
- package/dist/schedule.d.ts +18 -72
- package/dist/{serializable-faDkMCai.d.ts → serializable-Crsj26mx.d.ts} +1 -1
- package/dist/serializable.d.ts +1 -1
- package/dist/{src-xjQt2wBU.js → src-BZDh910Z.js} +26 -25
- package/dist/src-BZDh910Z.js.map +1 -0
- package/package.json +29 -14
- package/dist/ai-chat-v5-migration-BSiGZmYU.js +0 -155
- package/dist/ai-chat-v5-migration-BSiGZmYU.js.map +0 -1
- package/dist/ai-types-CrMqkwc_.js.map +0 -1
- package/dist/cli.d.ts +0 -8
- package/dist/cli.js.map +0 -1
- package/dist/client-BG2wUgN5.d.ts +0 -1462
- package/dist/client-C8VrzljV.js.map +0 -1
- package/dist/do-oauth-client-provider-CwqK5SXm.js.map +0 -1
- package/dist/react-9nVfoERh.d.ts +0 -113
- package/dist/src-xjQt2wBU.js.map +0 -1
package/dist/ai-chat-agent.js
CHANGED
```diff
@@ -1,12 +1,35 @@
-import { t as
-import "./
-import "./client-
-import "./
-import
-import {
+import { t as agentContext } from "./context-BkKbAa1R.js";
+import { t as MessageType } from "./ai-types-DEtF_8Km.js";
+import "./client-DjTPRM8-.js";
+import "./client-QZa2Rq0l.js";
+import "./do-oauth-client-provider-B1fVIshX.js";
+import { t as Agent } from "./src-BZDh910Z.js";
+import { autoTransformMessages } from "./ai-chat-v5-migration.js";
+import { jsonSchema, tool } from "ai";
 import { nanoid } from "nanoid";
 
 //#region src/ai-chat-agent.ts
+/**
+* Converts client tool schemas to AI SDK tool format.
+*
+* These tools have no `execute` function - when the AI model calls them,
+* the tool call is sent back to the client for execution.
+*
+* @param clientTools - Array of tool schemas from the client
+* @returns Record of AI SDK tools that can be spread into your tools object
+*/
+function createToolsFromClientSchemas(clientTools) {
+if (!clientTools || clientTools.length === 0) return {};
+const seenNames = /* @__PURE__ */ new Set();
+for (const t of clientTools) {
+if (seenNames.has(t.name)) console.warn(`[createToolsFromClientSchemas] Duplicate tool name "${t.name}" found. Later definitions will override earlier ones.`);
+seenNames.add(t.name);
+}
+return Object.fromEntries(clientTools.map((t) => [t.name, tool({
+description: t.description ?? "",
+inputSchema: jsonSchema(t.parameters ?? { type: "object" })
+})]));
+}
 /** Number of chunks to buffer before flushing to SQLite */
 const CHUNK_BUFFER_SIZE = 10;
 /** Maximum buffer size to prevent memory issues on rapid reconnections */
@@ -27,6 +50,9 @@ var AIChatAgent = class extends Agent {
 super(ctx, env);
 this._activeStreamId = null;
 this._activeRequestId = null;
+this._streamingMessage = null;
+this._streamCompletionPromise = null;
+this._streamCompletionResolve = null;
 this._streamChunkIndex = 0;
 this._chunkBuffer = [];
 this._isFlushingChunks = false;
@@ -71,7 +97,7 @@ var AIChatAgent = class extends Agent {
 }
 if (data.type === MessageType.CF_AGENT_USE_CHAT_REQUEST && data.init.method === "POST") {
 const { body } = data.init;
-const { messages } = JSON.parse(body);
+const { messages, clientTools } = JSON.parse(body);
 const transformedMessages = autoTransformMessages(messages);
 this._broadcastChatMessage({
 messages: transformedMessages,
@@ -88,26 +114,36 @@ var AIChatAgent = class extends Agent {
 const chatMessageId = data.id;
 const abortSignal = this._getAbortSignal(chatMessageId);
 return this._tryCatchChat(async () => {
-
-this
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
+return agentContext.run({
+agent: this,
+connection,
+request: void 0,
+email: void 0
+}, async () => {
+const response = await this.onChatMessage(async (_finishResult) => {
+this._removeAbortController(chatMessageId);
+this.observability?.emit({
+displayMessage: "Chat message response",
+id: data.id,
+payload: {},
+timestamp: Date.now(),
+type: "message:response"
+}, this.ctx);
+}, {
+abortSignal,
+clientTools
+});
+if (response) await this._reply(data.id, response, [connection.id]);
+else {
+console.warn(`[AIChatAgent] onChatMessage returned no response for chatMessageId: ${chatMessageId}`);
+this._broadcastChatMessage({
+body: "No response was generated by the agent.",
+done: true,
+id: data.id,
+type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+}, [connection.id]);
+}
+});
 });
 }
 if (data.type === MessageType.CF_AGENT_CHAT_CLEAR) {
@@ -135,6 +171,42 @@ var AIChatAgent = class extends Agent {
 if (this._activeStreamId && this._activeRequestId && this._activeRequestId === data.id) this._sendStreamChunks(connection, this._activeStreamId, this._activeRequestId);
 return;
 }
+if (data.type === MessageType.CF_AGENT_TOOL_RESULT) {
+const { toolCallId, toolName, output, autoContinue } = data;
+this._applyToolResult(toolCallId, toolName, output).then((applied) => {
+if (applied && autoContinue) {
+const waitForStream = async () => {
+if (this._streamCompletionPromise) await this._streamCompletionPromise;
+else await new Promise((resolve) => setTimeout(resolve, 500));
+};
+waitForStream().then(() => {
+const continuationId = nanoid();
+const abortSignal = this._getAbortSignal(continuationId);
+this._tryCatchChat(async () => {
+return agentContext.run({
+agent: this,
+connection,
+request: void 0,
+email: void 0
+}, async () => {
+const response = await this.onChatMessage(async (_finishResult) => {
+this._removeAbortController(continuationId);
+this.observability?.emit({
+displayMessage: "Chat message response (tool continuation)",
+id: continuationId,
+payload: {},
+timestamp: Date.now(),
+type: "message:response"
+}, this.ctx);
+}, { abortSignal });
+if (response) await this._reply(continuationId, response, [], { continuation: true });
+});
+});
+});
+}
+});
+return;
+}
 }
 return _onMessage(connection, message);
 };
@@ -143,6 +215,7 @@ var AIChatAgent = class extends Agent {
 * Restore active stream state if the agent was restarted during streaming.
 * Called during construction to recover any interrupted streams.
 * Validates stream freshness to avoid sending stale resume notifications.
+* @internal Protected for testing purposes.
 */
 _restoreActiveStream() {
 const activeStreams = this.sql`
@@ -213,6 +286,7 @@ var AIChatAgent = class extends Agent {
 * Buffer a stream chunk for batch write to SQLite.
 * @param streamId - The stream this chunk belongs to
 * @param body - The serialized chunk body
+* @internal Protected for testing purposes.
 */
 _storeStreamChunk(streamId, body) {
 if (this._chunkBuffer.length >= CHUNK_BUFFER_MAX_SIZE) this._flushChunkBuffer();
@@ -228,6 +302,7 @@ var AIChatAgent = class extends Agent {
 /**
 * Flush buffered chunks to SQLite in a single batch.
 * Uses a lock to prevent concurrent flush operations.
+* @internal Protected for testing purposes.
 */
 _flushChunkBuffer() {
 if (this._isFlushingChunks || this._chunkBuffer.length === 0) return;
@@ -249,6 +324,7 @@ var AIChatAgent = class extends Agent {
 * Creates metadata entry in SQLite and sets up tracking state.
 * @param requestId - The unique ID of the chat request
 * @returns The generated stream ID
+* @internal Protected for testing purposes.
 */
 _startStream(requestId) {
 this._flushChunkBuffer();
@@ -265,6 +341,7 @@ var AIChatAgent = class extends Agent {
 /**
 * Mark a stream as completed and flush any pending chunks.
 * @param streamId - The stream to mark as completed
+* @internal Protected for testing purposes.
 */
 _completeStream(streamId) {
 this._flushChunkBuffer();
@@ -331,7 +408,7 @@ var AIChatAgent = class extends Agent {
 /**
 * Handle incoming chat messages and generate a response
 * @param onFinish Callback to be called when the response is finished
-* @param options
+* @param options Options including abort signal and client-defined tools
 * @returns Response to send to the client or undefined
 */
 async onChatMessage(onFinish, options) {
@@ -349,46 +426,262 @@ var AIChatAgent = class extends Agent {
 });
 }
 async persistMessages(messages, excludeBroadcastIds = []) {
-
+const mergedMessages = this._mergeIncomingWithServerState(messages);
+for (const message of mergedMessages) {
+const sanitizedMessage = this._sanitizeMessageForPersistence(message);
+const messageToSave = this._resolveMessageForToolMerge(sanitizedMessage);
+this.sql`
 insert into cf_ai_chat_agent_messages (id, message)
-values (${
+values (${messageToSave.id}, ${JSON.stringify(messageToSave)})
 on conflict(id) do update set message = excluded.message
 `;
+}
 this.messages = autoTransformMessages(this._loadMessagesFromDb());
 this._broadcastChatMessage({
-messages,
+messages: mergedMessages,
 type: MessageType.CF_AGENT_CHAT_MESSAGES
 }, excludeBroadcastIds);
 }
-
+/**
+* Merges incoming messages with existing server state.
+* This preserves tool outputs that the server has (via _applyToolResult)
+* but the client doesn't have yet.
+*
+* @param incomingMessages - Messages from the client
+* @returns Messages with server's tool outputs preserved
+*/
+_mergeIncomingWithServerState(incomingMessages) {
+const serverToolOutputs = /* @__PURE__ */ new Map();
+for (const msg of this.messages) {
+if (msg.role !== "assistant") continue;
+for (const part of msg.parts) if ("toolCallId" in part && "state" in part && part.state === "output-available" && "output" in part) serverToolOutputs.set(part.toolCallId, part.output);
+}
+if (serverToolOutputs.size === 0) return incomingMessages;
+return incomingMessages.map((msg) => {
+if (msg.role !== "assistant") return msg;
+let hasChanges = false;
+const updatedParts = msg.parts.map((part) => {
+if ("toolCallId" in part && "state" in part && part.state === "input-available" && serverToolOutputs.has(part.toolCallId)) {
+hasChanges = true;
+return {
+...part,
+state: "output-available",
+output: serverToolOutputs.get(part.toolCallId)
+};
+}
+return part;
+});
+return hasChanges ? {
+...msg,
+parts: updatedParts
+} : msg;
+});
+}
+/**
+* Resolves a message for persistence, handling tool result merging.
+* If the message contains tool parts with output-available state, checks if there's
+* an existing message with the same toolCallId that should be updated instead of
+* creating a duplicate. This prevents the "Duplicate item found" error from OpenAI
+* when client-side tool results arrive in a new request.
+*
+* @param message - The message to potentially merge
+* @returns The message with the correct ID (either original or merged)
+*/
+_resolveMessageForToolMerge(message) {
+if (message.role !== "assistant") return message;
+for (const part of message.parts) if ("toolCallId" in part && "state" in part && part.state === "output-available") {
+const toolCallId = part.toolCallId;
+const existingMessage = this._findMessageByToolCallId(toolCallId);
+if (existingMessage && existingMessage.id !== message.id) return {
+...message,
+id: existingMessage.id
+};
+}
+return message;
+}
+/**
+* Finds an existing assistant message that contains a tool part with the given toolCallId.
+* Used to detect when a tool result should update an existing message rather than
+* creating a new one.
+*
+* @param toolCallId - The tool call ID to search for
+* @returns The existing message if found, undefined otherwise
+*/
+_findMessageByToolCallId(toolCallId) {
+for (const msg of this.messages) {
+if (msg.role !== "assistant") continue;
+for (const part of msg.parts) if ("toolCallId" in part && part.toolCallId === toolCallId) return msg;
+}
+}
+/**
+* Sanitizes a message for persistence by removing ephemeral provider-specific
+* data that should not be stored or sent back in subsequent requests.
+*
+* This handles two issues with the OpenAI Responses API:
+*
+* 1. **Duplicate item IDs**: The AI SDK's @ai-sdk/openai provider (v2.0.x+)
+* defaults to using OpenAI's Responses API which assigns unique itemIds
+* to each message part. When these IDs are persisted and sent back,
+* OpenAI rejects them as duplicates.
+*
+* 2. **Empty reasoning parts**: OpenAI may return reasoning parts with empty
+* text and encrypted content. These cause "Non-OpenAI reasoning parts are
+* not supported" warnings when sent back via convertToModelMessages().
+*
+* @param message - The message to sanitize
+* @returns A new message with ephemeral provider data removed
+*/
+_sanitizeMessageForPersistence(message) {
+const sanitizedParts = message.parts.filter((part) => {
+if (part.type === "reasoning") {
+const reasoningPart = part;
+if (!reasoningPart.text || reasoningPart.text.trim() === "") return false;
+}
+return true;
+}).map((part) => {
+let sanitizedPart = part;
+if ("providerMetadata" in sanitizedPart && sanitizedPart.providerMetadata && typeof sanitizedPart.providerMetadata === "object" && "openai" in sanitizedPart.providerMetadata) sanitizedPart = this._stripOpenAIMetadata(sanitizedPart, "providerMetadata");
+if ("callProviderMetadata" in sanitizedPart && sanitizedPart.callProviderMetadata && typeof sanitizedPart.callProviderMetadata === "object" && "openai" in sanitizedPart.callProviderMetadata) sanitizedPart = this._stripOpenAIMetadata(sanitizedPart, "callProviderMetadata");
+return sanitizedPart;
+});
+return {
+...message,
+parts: sanitizedParts
+};
+}
+/**
+* Helper to strip OpenAI-specific ephemeral fields from a metadata object.
+* Removes itemId and reasoningEncryptedContent while preserving other fields.
+*/
+_stripOpenAIMetadata(part, metadataKey) {
+const metadata = part[metadataKey];
+if (!metadata?.openai) return part;
+const { itemId: _itemId, reasoningEncryptedContent: _rec, ...restOpenai } = metadata.openai;
+const hasOtherOpenaiFields = Object.keys(restOpenai).length > 0;
+const { openai: _openai, ...restMetadata } = metadata;
+let newMetadata;
+if (hasOtherOpenaiFields) newMetadata = {
+...restMetadata,
+openai: restOpenai
+};
+else if (Object.keys(restMetadata).length > 0) newMetadata = restMetadata;
+const { [metadataKey]: _oldMeta, ...restPart } = part;
+if (newMetadata) return {
+...restPart,
+[metadataKey]: newMetadata
+};
+return restPart;
+}
+/**
+* Applies a tool result to an existing assistant message.
+* This is used when the client sends CF_AGENT_TOOL_RESULT for client-side tools.
+* The server is the source of truth, so we update the message here and broadcast
+* the update to all clients.
+*
+* @param toolCallId - The tool call ID this result is for
+* @param toolName - The name of the tool
+* @param output - The output from the tool execution
+* @returns true if the result was applied, false if the message was not found
+*/
+async _applyToolResult(toolCallId, _toolName, output) {
+let message;
+if (this._streamingMessage) {
+for (const part of this._streamingMessage.parts) if ("toolCallId" in part && part.toolCallId === toolCallId) {
+message = this._streamingMessage;
+break;
+}
+}
+if (!message) for (let attempt = 0; attempt < 10; attempt++) {
+message = this._findMessageByToolCallId(toolCallId);
+if (message) break;
+await new Promise((resolve) => setTimeout(resolve, 100));
+}
+if (!message) {
+console.warn(`[AIChatAgent] _applyToolResult: Could not find message with toolCallId ${toolCallId} after retries`);
+return false;
+}
+const isStreamingMessage = message === this._streamingMessage;
+let updated = false;
+if (isStreamingMessage) {
+for (const part of message.parts) if ("toolCallId" in part && part.toolCallId === toolCallId && "state" in part && part.state === "input-available") {
+part.state = "output-available";
+part.output = output;
+updated = true;
+break;
+}
+} else {
+const updatedParts = message.parts.map((part) => {
+if ("toolCallId" in part && part.toolCallId === toolCallId && "state" in part && part.state === "input-available") {
+updated = true;
+return {
+...part,
+state: "output-available",
+output
+};
+}
+return part;
+});
+if (updated) {
+const updatedMessage = this._sanitizeMessageForPersistence({
+...message,
+parts: updatedParts
+});
+this.sql`
+update cf_ai_chat_agent_messages
+set message = ${JSON.stringify(updatedMessage)}
+where id = ${message.id}
+`;
+this.messages = autoTransformMessages(this._loadMessagesFromDb());
+}
+}
+if (!updated) {
+console.warn(`[AIChatAgent] _applyToolResult: Tool part with toolCallId ${toolCallId} not in input-available state`);
+return false;
+}
+if (!isStreamingMessage) {
+const broadcastMessage = this._findMessageByToolCallId(toolCallId);
+if (broadcastMessage) this._broadcastChatMessage({
+type: MessageType.CF_AGENT_MESSAGE_UPDATED,
+message: broadcastMessage
+});
+}
+return true;
+}
+async _reply(id, response, excludeBroadcastIds = [], options = {}) {
+const { continuation = false } = options;
 return this._tryCatchChat(async () => {
 if (!response.body) {
 this._broadcastChatMessage({
 body: "",
 done: true,
 id,
-type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+...continuation && { continuation: true }
 });
 return;
 }
 const streamId = this._startStream(id);
-const { getToolName, isToolUIPart, parsePartialJson } = await import("ai");
+const { getToolName: getToolName$1, isToolUIPart: isToolUIPart$1, parsePartialJson } = await import("ai");
 const reader = response.body.getReader();
 const message = {
 id: `assistant_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
 role: "assistant",
 parts: []
 };
+this._streamingMessage = message;
+this._streamCompletionPromise = new Promise((resolve) => {
+this._streamCompletionResolve = resolve;
+});
 let activeTextParts = {};
 let activeReasoningParts = {};
 const partialToolCalls = {};
-function updateDynamicToolPart(options) {
-const part = message.parts.find((part$1) => part$1.type === "dynamic-tool" && part$1.toolCallId === options.toolCallId);
-const anyOptions = options;
+function updateDynamicToolPart(options$1) {
+const part = message.parts.find((part$1) => part$1.type === "dynamic-tool" && part$1.toolCallId === options$1.toolCallId);
+const anyOptions = options$1;
 const anyPart = part;
 if (part != null) {
-part.state = options.state;
-anyPart.toolName = options.toolName;
+part.state = options$1.state;
+anyPart.toolName = options$1.toolName;
 anyPart.input = anyOptions.input;
 anyPart.output = anyOptions.output;
 anyPart.errorText = anyOptions.errorText;
@@ -397,9 +690,9 @@ var AIChatAgent = class extends Agent {
 if (anyOptions.providerMetadata != null && part.state === "input-available") part.callProviderMetadata = anyOptions.providerMetadata;
 } else message.parts.push({
 type: "dynamic-tool",
-toolName: options.toolName,
-toolCallId: options.toolCallId,
-state: options.state,
+toolName: options$1.toolName,
+toolCallId: options$1.toolCallId,
+state: options$1.state,
 input: anyOptions.input,
 output: anyOptions.output,
 errorText: anyOptions.errorText,
@@ -407,12 +700,12 @@ var AIChatAgent = class extends Agent {
 ...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
 });
 }
-function updateToolPart(options) {
-const part = message.parts.find((part$1) => isToolUIPart(part$1) && part$1.toolCallId === options.toolCallId);
-const anyOptions = options;
+function updateToolPart(options$1) {
+const part = message.parts.find((part$1) => isToolUIPart$1(part$1) && part$1.toolCallId === options$1.toolCallId);
+const anyOptions = options$1;
 const anyPart = part;
 if (part != null) {
-part.state = options.state;
+part.state = options$1.state;
 anyPart.input = anyOptions.input;
 anyPart.output = anyOptions.output;
 anyPart.errorText = anyOptions.errorText;
@@ -421,9 +714,9 @@ var AIChatAgent = class extends Agent {
 anyPart.providerExecuted = anyOptions.providerExecuted ?? part.providerExecuted;
 if (anyOptions.providerMetadata != null && part.state === "input-available") part.callProviderMetadata = anyOptions.providerMetadata;
 } else message.parts.push({
-type: `tool-${options.toolName}`,
-toolCallId: options.toolCallId,
-state: options.state,
+type: `tool-${options$1.toolName}`,
+toolCallId: options$1.toolCallId,
+state: options$1.state,
 input: anyOptions.input,
 output: anyOptions.output,
 rawInput: anyOptions.rawInput,
@@ -450,7 +743,8 @@ var AIChatAgent = class extends Agent {
 body: "",
 done: true,
 id,
-type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+...continuation && { continuation: true }
 });
 break;
 }
@@ -535,7 +829,7 @@ var AIChatAgent = class extends Agent {
 });
 break;
 case "tool-input-start": {
-const toolInvocations = message.parts.filter(isToolUIPart);
+const toolInvocations = message.parts.filter(isToolUIPart$1);
 partialToolCalls[data.toolCallId] = {
 text: "",
 toolName: data.toolName,
@@ -624,11 +918,11 @@ var AIChatAgent = class extends Agent {
 preliminary: data.preliminary
 });
 } else {
-const toolInvocation = message.parts.filter(isToolUIPart).find((invocation) => invocation.toolCallId === data.toolCallId);
+const toolInvocation = message.parts.filter(isToolUIPart$1).find((invocation) => invocation.toolCallId === data.toolCallId);
 if (!toolInvocation) throw new Error("Tool invocation not found");
 updateToolPart({
 toolCallId: data.toolCallId,
-toolName: getToolName(toolInvocation),
+toolName: getToolName$1(toolInvocation),
 state: "output-available",
 input: toolInvocation.input,
 output: data.output,
@@ -649,11 +943,11 @@ var AIChatAgent = class extends Agent {
 errorText: data.errorText
 });
 } else {
-const toolInvocation = message.parts.filter(isToolUIPart).find((invocation) => invocation.toolCallId === data.toolCallId);
+const toolInvocation = message.parts.filter(isToolUIPart$1).find((invocation) => invocation.toolCallId === data.toolCallId);
 if (!toolInvocation) throw new Error("Tool invocation not found");
 updateToolPart({
 toolCallId: data.toolCallId,
-toolName: getToolName(toolInvocation),
+toolName: getToolName$1(toolInvocation),
 state: "output-error",
 input: toolInvocation.input,
 rawInput: "rawInput" in toolInvocation ? toolInvocation.rawInput : void 0,
@@ -703,7 +997,8 @@ var AIChatAgent = class extends Agent {
 body: chunkBody,
 done: false,
 id,
-type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+...continuation && { continuation: true }
 });
 } catch (_error) {}
 } else if (chunk.length > 0) {
@@ -720,7 +1015,8 @@ var AIChatAgent = class extends Agent {
 body: chunkBody,
 done: false,
 id,
-type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+...continuation && { continuation: true }
 });
 }
 }
@@ -732,19 +1028,43 @@ var AIChatAgent = class extends Agent {
 done: true,
 error: true,
 id,
-type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+type: MessageType.CF_AGENT_USE_CHAT_RESPONSE,
+...continuation && { continuation: true }
 });
 }
 throw error;
 } finally {
 reader.releaseLock();
 }
-if (message.parts.length > 0)
+if (message.parts.length > 0) if (continuation) {
+let lastAssistantIdx = -1;
+for (let i = this.messages.length - 1; i >= 0; i--) if (this.messages[i].role === "assistant") {
+lastAssistantIdx = i;
+break;
+}
+if (lastAssistantIdx >= 0) {
+const lastAssistant = this.messages[lastAssistantIdx];
+const mergedMessage = {
+...lastAssistant,
+parts: [...lastAssistant.parts, ...message.parts]
+};
+const updatedMessages = [...this.messages];
+updatedMessages[lastAssistantIdx] = mergedMessage;
+await this.persistMessages(updatedMessages, excludeBroadcastIds);
+} else await this.persistMessages([...this.messages, message], excludeBroadcastIds);
+} else await this.persistMessages([...this.messages, message], excludeBroadcastIds);
+this._streamingMessage = null;
+if (this._streamCompletionResolve) {
+this._streamCompletionResolve();
+this._streamCompletionResolve = null;
+this._streamCompletionPromise = null;
+}
 });
 }
 /**
 * Mark a stream as errored and clean up state.
 * @param streamId - The stream to mark as errored
+* @internal Protected for testing purposes.
 */
 _markStreamError(streamId) {
 this._flushChunkBuffer();
@@ -802,5 +1122,5 @@ var AIChatAgent = class extends Agent {
 };
 
 //#endregion
-export { AIChatAgent };
+export { AIChatAgent, createToolsFromClientSchemas };
 //# sourceMappingURL=ai-chat-agent.js.map
```
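For orientation, and not part of the published diff itself: the new `createToolsFromClientSchemas` export is meant to pair with the `clientTools` option that `onChatMessage` now receives. Below is a minimal sketch of how a consumer might wire the two together, assuming the `agents/ai-chat-agent` export path, the `@ai-sdk/openai` provider, and an illustrative `MyChatAgent` class name (none of which are confirmed by this diff).

```js
// Sketch only: wiring the new client-tool support into an AIChatAgent subclass.
// Import path, model choice, and class name are assumptions for illustration.
import { AIChatAgent, createToolsFromClientSchemas } from "agents/ai-chat-agent";
import { openai } from "@ai-sdk/openai";
import { convertToModelMessages, streamText } from "ai";

export class MyChatAgent extends AIChatAgent {
  async onChatMessage(onFinish, options) {
    // options.clientTools carries the schemas sent in the chat request body;
    // the helper turns them into execute-less AI SDK tools, so any call the
    // model makes is routed back to the client for execution.
    const clientTools = createToolsFromClientSchemas(options?.clientTools);

    const result = streamText({
      model: openai("gpt-4o-mini"),
      messages: convertToModelMessages(this.messages),
      tools: { ...clientTools },
      abortSignal: options?.abortSignal,
      onFinish
    });

    return result.toUIMessageStreamResponse();
  }
}
```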