agents 0.0.0-ff431ff → 0.0.0-ff45307
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -3
- package/dist/ai-chat-agent.d.ts +94 -13
- package/dist/ai-chat-agent.js +346 -74
- package/dist/ai-chat-agent.js.map +1 -1
- package/dist/{ai-chat-v5-migration-DBHGW4Hv.js → ai-chat-v5-migration-BSiGZmYU.js} +1 -1
- package/dist/{ai-chat-v5-migration-DBHGW4Hv.js.map → ai-chat-v5-migration-BSiGZmYU.js.map} +1 -1
- package/dist/ai-chat-v5-migration.js +1 -1
- package/dist/ai-react.d.ts +14 -9
- package/dist/ai-react.js +179 -29
- package/dist/ai-react.js.map +1 -1
- package/dist/{ai-types-D5YoPrBZ.d.ts → ai-types-81H_-Uxh.d.ts} +15 -7
- package/dist/{ai-types-B3aQaFv3.js → ai-types-CrMqkwc_.js} +5 -1
- package/dist/ai-types-CrMqkwc_.js.map +1 -0
- package/dist/ai-types.d.ts +1 -1
- package/dist/ai-types.js +1 -1
- package/dist/cli/index.d.ts +1 -0
- package/dist/{cli.js → cli/index.js} +7 -6
- package/dist/cli/index.js.map +1 -0
- package/dist/{client-BfiZ3HQd.js → client-B3SR12TQ.js} +2 -2
- package/dist/{client-BfiZ3HQd.js.map → client-B3SR12TQ.js.map} +1 -1
- package/dist/{client-CbWe9FBd.d.ts → client-BAQA84dr.d.ts} +2 -2
- package/dist/client-CFhjXCiO.js +1093 -0
- package/dist/client-CFhjXCiO.js.map +1 -0
- package/dist/client-CwqTTb-B.d.ts +833 -0
- package/dist/client.d.ts +2 -2
- package/dist/client.js +2 -2
- package/dist/codemode/ai.js +5 -5
- package/dist/{do-oauth-client-provider-DGc5pP0l.d.ts → do-oauth-client-provider-C2CHH5x-.d.ts} +1 -1
- package/dist/{do-oauth-client-provider-CswoD5Lu.js → do-oauth-client-provider-CwqK5SXm.js} +2 -1
- package/dist/do-oauth-client-provider-CwqK5SXm.js.map +1 -0
- package/dist/{index-DhJCaDWd.d.ts → index-BUle9RiP.d.ts} +2 -2
- package/dist/{index-DxXJbYBM.d.ts → index-DJ4vV2-x.d.ts} +53 -26
- package/dist/index.d.ts +6 -6
- package/dist/index.js +5 -5
- package/dist/mcp/client.d.ts +4 -4
- package/dist/mcp/client.js +2 -1
- package/dist/mcp/do-oauth-client-provider.d.ts +1 -1
- package/dist/mcp/do-oauth-client-provider.js +1 -1
- package/dist/mcp/index.d.ts +73 -34
- package/dist/mcp/index.js +95 -18
- package/dist/mcp/index.js.map +1 -1
- package/dist/mcp/x402.js +10 -6
- package/dist/mcp/x402.js.map +1 -1
- package/dist/{mcp-Dw5vDrY8.d.ts → mcp-BwPscEiF.d.ts} +1 -1
- package/dist/observability/index.d.ts +2 -2
- package/dist/observability/index.js +5 -5
- package/dist/{react-BIh615qN.d.ts → react-w4GT20p4.d.ts} +33 -35
- package/dist/react.d.ts +9 -9
- package/dist/react.js +2 -2
- package/dist/react.js.map +1 -1
- package/dist/{serializable-CymX8ovI.d.ts → serializable-faDkMCai.d.ts} +1 -1
- package/dist/serializable.d.ts +1 -1
- package/dist/{src-nFNV3Ttx.js → src-tXpYCgas.js} +99 -131
- package/dist/src-tXpYCgas.js.map +1 -0
- package/package.json +48 -40
- package/dist/ai-types-B3aQaFv3.js.map +0 -1
- package/dist/cli.d.ts +0 -8
- package/dist/cli.js.map +0 -1
- package/dist/client-DbS2uI6p.d.ts +0 -5315
- package/dist/client-JMskg2fw.js +0 -793
- package/dist/client-JMskg2fw.js.map +0 -1
- package/dist/do-oauth-client-provider-CswoD5Lu.js.map +0 -1
- package/dist/src-nFNV3Ttx.js.map +0 -1
package/README.md
CHANGED

```diff
@@ -549,10 +549,12 @@ export class MyMCP extends McpAgent<Env, State, {}> {
     };
   });

-    this.server.
+    this.server.registerTool(
       "add",
-
-
+      {
+        description: "Add to the counter, stored in the MCP",
+        inputSchema: { a: z.number() }
+      },
       async ({ a }) => {
         this.setState({ ...this.state, counter: this.state.counter + a });

```
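For reference, the updated README snippet assembles into the call below: `registerTool` takes the tool name, a config object carrying `description` and `inputSchema`, and then the async handler. This is a sketch pieced together from the visible hunk only; the `z` import from `zod`, the surrounding `McpAgent` subclass, and the handler's return value are assumed from the rest of the README rather than shown in this diff.

```ts
// Sketch of the registerTool usage shown in the README hunk above.
// Assumptions: `z` is imported from "zod"; `this.server`, `this.state`, and
// `this.setState` come from the surrounding McpAgent subclass; the returned
// tool result is illustrative and not part of the visible diff.
this.server.registerTool(
  "add",
  {
    description: "Add to the counter, stored in the MCP",
    inputSchema: { a: z.number() }
  },
  async ({ a }) => {
    this.setState({ ...this.state, counter: this.state.counter + a });
    return { content: [{ type: "text", text: `Counter is now ${this.state.counter}` }] };
  }
);
```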
package/dist/ai-chat-agent.d.ts
CHANGED

```diff
@@ -1,14 +1,9 @@
-import "./client-
-import "./mcp-
-import "./do-oauth-client-provider-
-import "./index-
-import "./ai-types-
-import {
-  n as AgentContext,
-  s as Connection,
-  t as Agent,
-  x as WSMessage
-} from "./index-DxXJbYBM.js";
+import "./client-CwqTTb-B.js";
+import "./mcp-BwPscEiF.js";
+import "./do-oauth-client-provider-C2CHH5x-.js";
+import "./index-BUle9RiP.js";
+import "./ai-types-81H_-Uxh.js";
+import { n as AgentContext, t as Agent } from "./index-DJ4vV2-x.js";
 import { StreamTextOnFinishCallback, ToolSet, UIMessage } from "ai";

 //#region src/ai-chat-agent.d.ts
@@ -25,12 +20,92 @@ declare class AIChatAgent<Env = unknown, State = unknown> extends Agent<
    * useful to propagate request cancellation signals for any external calls made by the agent
    */
   private _chatMessageAbortControllers;
+  /**
+   * Currently active stream ID for resumable streaming.
+   * Stored in memory for quick access; persisted in stream_metadata table.
+   * @internal Protected for testing purposes.
+   */
+  protected _activeStreamId: string | null;
+  /**
+   * Request ID associated with the active stream.
+   * @internal Protected for testing purposes.
+   */
+  protected _activeRequestId: string | null;
+  /**
+   * Current chunk index for the active stream
+   */
+  private _streamChunkIndex;
+  /**
+   * Buffer for stream chunks pending write to SQLite.
+   * Chunks are batched and flushed when buffer reaches CHUNK_BUFFER_SIZE.
+   */
+  private _chunkBuffer;
+  /**
+   * Lock to prevent concurrent flush operations
+   */
+  private _isFlushingChunks;
+  /**
+   * Timestamp of the last cleanup operation for old streams
+   */
+  private _lastCleanupTime;
   /** Array of chat messages for the current conversation */
   messages: UIMessage[];
   constructor(ctx: AgentContext, env: Env);
+  /**
+   * Restore active stream state if the agent was restarted during streaming.
+   * Called during construction to recover any interrupted streams.
+   * Validates stream freshness to avoid sending stale resume notifications.
+   * @internal Protected for testing purposes.
+   */
+  protected _restoreActiveStream(): void;
+  /**
+   * Notify a connection about an active stream that can be resumed.
+   * The client should respond with CF_AGENT_STREAM_RESUME_ACK to receive chunks.
+   * Uses in-memory state for request ID - no extra DB lookup needed.
+   * @param connection - The WebSocket connection to notify
+   */
+  private _notifyStreamResuming;
+  /**
+   * Send stream chunks to a connection after receiving ACK.
+   * @param connection - The WebSocket connection
+   * @param streamId - The stream to replay
+   * @param requestId - The original request ID
+   */
+  private _sendStreamChunks;
+  /**
+   * Buffer a stream chunk for batch write to SQLite.
+   * @param streamId - The stream this chunk belongs to
+   * @param body - The serialized chunk body
+   * @internal Protected for testing purposes.
+   */
+  protected _storeStreamChunk(streamId: string, body: string): void;
+  /**
+   * Flush buffered chunks to SQLite in a single batch.
+   * Uses a lock to prevent concurrent flush operations.
+   * @internal Protected for testing purposes.
+   */
+  protected _flushChunkBuffer(): void;
+  /**
+   * Start tracking a new stream for resumable streaming.
+   * Creates metadata entry in SQLite and sets up tracking state.
+   * @param requestId - The unique ID of the chat request
+   * @returns The generated stream ID
+   * @internal Protected for testing purposes.
+   */
+  protected _startStream(requestId: string): string;
+  /**
+   * Mark a stream as completed and flush any pending chunks.
+   * @param streamId - The stream to mark as completed
+   * @internal Protected for testing purposes.
+   */
+  protected _completeStream(streamId: string): void;
+  /**
+   * Clean up old completed streams if enough time has passed since last cleanup.
+   * This prevents database growth while avoiding cleanup overhead on every stream completion.
+   */
+  private _maybeCleanupOldStreams;
   private _broadcastChatMessage;
   private _loadMessagesFromDb;
-  onMessage(connection: Connection, message: WSMessage): Promise<void>;
   onRequest(request: Request): Promise<Response>;
   private _tryCatchChat;
   /**
@@ -55,6 +130,12 @@ declare class AIChatAgent<Env = unknown, State = unknown> extends Agent<
     excludeBroadcastIds?: string[]
   ): Promise<void>;
   private _reply;
+  /**
+   * Mark a stream as errored and clean up state.
+   * @param streamId - The stream to mark as errored
+   * @internal Protected for testing purposes.
+   */
+  protected _markStreamError(streamId: string): void;
   /**
    * For the given message id, look up its associated AbortController
    * If the AbortController does not exist, create and store one in memory
@@ -75,7 +156,7 @@ declare class AIChatAgent<Env = unknown, State = unknown> extends Agent<
    */
   private _destroyAbortControllers;
   /**
-   * When the DO is destroyed, cancel all pending requests
+   * When the DO is destroyed, cancel all pending requests and clean up resources
    */
   destroy(): Promise<void>;
 }
```
package/dist/ai-chat-agent.js
CHANGED

```diff
@@ -1,11 +1,22 @@
-import { t as MessageType } from "./ai-types-
-import "./client-
-import "./client-
-import "./do-oauth-client-provider-
-import { t as Agent } from "./src-
-import { r as autoTransformMessages } from "./ai-chat-v5-migration-
+import { t as MessageType } from "./ai-types-CrMqkwc_.js";
+import "./client-B3SR12TQ.js";
+import "./client-CFhjXCiO.js";
+import "./do-oauth-client-provider-CwqK5SXm.js";
+import { t as Agent } from "./src-tXpYCgas.js";
+import { r as autoTransformMessages } from "./ai-chat-v5-migration-BSiGZmYU.js";
+import { nanoid } from "nanoid";

 //#region src/ai-chat-agent.ts
+/** Number of chunks to buffer before flushing to SQLite */
+const CHUNK_BUFFER_SIZE = 10;
+/** Maximum buffer size to prevent memory issues on rapid reconnections */
+const CHUNK_BUFFER_MAX_SIZE = 100;
+/** Maximum age for a "streaming" stream before considering it stale (ms) - 5 minutes */
+const STREAM_STALE_THRESHOLD_MS = 300 * 1e3;
+/** Default cleanup interval for old streams (ms) - every 10 minutes */
+const CLEANUP_INTERVAL_MS = 600 * 1e3;
+/** Default age threshold for cleaning up completed streams (ms) - 24 hours */
+const CLEANUP_AGE_THRESHOLD_MS = 1440 * 60 * 1e3;
 const decoder = new TextDecoder();
 /**
 * Extension of Agent with built-in chat capabilities
@@ -14,13 +25,284 @@ const decoder = new TextDecoder();
 var AIChatAgent = class extends Agent {
   constructor(ctx, env) {
     super(ctx, env);
+    this._activeStreamId = null;
+    this._activeRequestId = null;
+    this._streamChunkIndex = 0;
+    this._chunkBuffer = [];
+    this._isFlushingChunks = false;
+    this._lastCleanupTime = 0;
     this.sql`create table if not exists cf_ai_chat_agent_messages (
       id text primary key,
       message text not null,
       created_at datetime default current_timestamp
     )`;
+    this.sql`create table if not exists cf_ai_chat_stream_chunks (
+      id text primary key,
+      stream_id text not null,
+      body text not null,
+      chunk_index integer not null,
+      created_at integer not null
+    )`;
+    this.sql`create table if not exists cf_ai_chat_stream_metadata (
+      id text primary key,
+      request_id text not null,
+      status text not null,
+      created_at integer not null,
+      completed_at integer
+    )`;
+    this.sql`create index if not exists idx_stream_chunks_stream_id
+      on cf_ai_chat_stream_chunks(stream_id, chunk_index)`;
     this.messages = autoTransformMessages(this._loadMessagesFromDb());
     this._chatMessageAbortControllers = /* @__PURE__ */ new Map();
+    this._restoreActiveStream();
+    const _onConnect = this.onConnect.bind(this);
+    this.onConnect = async (connection, ctx$1) => {
+      if (this._activeStreamId) this._notifyStreamResuming(connection);
+      return _onConnect(connection, ctx$1);
+    };
+    const _onMessage = this.onMessage.bind(this);
+    this.onMessage = async (connection, message) => {
+      if (typeof message === "string") {
+        let data;
+        try {
+          data = JSON.parse(message);
+        } catch (_error) {
+          return _onMessage(connection, message);
+        }
+        if (data.type === MessageType.CF_AGENT_USE_CHAT_REQUEST && data.init.method === "POST") {
+          const { body } = data.init;
+          const { messages } = JSON.parse(body);
+          const transformedMessages = autoTransformMessages(messages);
+          this._broadcastChatMessage({
+            messages: transformedMessages,
+            type: MessageType.CF_AGENT_CHAT_MESSAGES
+          }, [connection.id]);
+          await this.persistMessages(transformedMessages, [connection.id]);
+          this.observability?.emit({
+            displayMessage: "Chat message request",
+            id: data.id,
+            payload: {},
+            timestamp: Date.now(),
+            type: "message:request"
+          }, this.ctx);
+          const chatMessageId = data.id;
+          const abortSignal = this._getAbortSignal(chatMessageId);
+          return this._tryCatchChat(async () => {
+            const response = await this.onChatMessage(async (_finishResult) => {
+              this._removeAbortController(chatMessageId);
+              this.observability?.emit({
+                displayMessage: "Chat message response",
+                id: data.id,
+                payload: {},
+                timestamp: Date.now(),
+                type: "message:response"
+              }, this.ctx);
+            }, abortSignal ? { abortSignal } : void 0);
+            if (response) await this._reply(data.id, response);
+            else {
+              console.warn(`[AIChatAgent] onChatMessage returned no response for chatMessageId: ${chatMessageId}`);
+              this._broadcastChatMessage({
+                body: "No response was generated by the agent.",
+                done: true,
+                id: data.id,
+                type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+              }, [connection.id]);
+            }
+          });
+        }
+        if (data.type === MessageType.CF_AGENT_CHAT_CLEAR) {
+          this._destroyAbortControllers();
+          this.sql`delete from cf_ai_chat_agent_messages`;
+          this.sql`delete from cf_ai_chat_stream_chunks`;
+          this.sql`delete from cf_ai_chat_stream_metadata`;
+          this._activeStreamId = null;
+          this._activeRequestId = null;
+          this._streamChunkIndex = 0;
+          this.messages = [];
+          this._broadcastChatMessage({ type: MessageType.CF_AGENT_CHAT_CLEAR }, [connection.id]);
+          return;
+        }
+        if (data.type === MessageType.CF_AGENT_CHAT_MESSAGES) {
+          const transformedMessages = autoTransformMessages(data.messages);
+          await this.persistMessages(transformedMessages, [connection.id]);
+          return;
+        }
+        if (data.type === MessageType.CF_AGENT_CHAT_REQUEST_CANCEL) {
+          this._cancelChatRequest(data.id);
+          return;
+        }
+        if (data.type === MessageType.CF_AGENT_STREAM_RESUME_ACK) {
+          if (this._activeStreamId && this._activeRequestId && this._activeRequestId === data.id) this._sendStreamChunks(connection, this._activeStreamId, this._activeRequestId);
+          return;
+        }
+      }
+      return _onMessage(connection, message);
+    };
+  }
+  /**
+   * Restore active stream state if the agent was restarted during streaming.
+   * Called during construction to recover any interrupted streams.
+   * Validates stream freshness to avoid sending stale resume notifications.
+   * @internal Protected for testing purposes.
+   */
+  _restoreActiveStream() {
+    const activeStreams = this.sql`
+      select * from cf_ai_chat_stream_metadata
+      where status = 'streaming'
+      order by created_at desc
+      limit 1
+    `;
+    if (activeStreams && activeStreams.length > 0) {
+      const stream = activeStreams[0];
+      const streamAge = Date.now() - stream.created_at;
+      if (streamAge > STREAM_STALE_THRESHOLD_MS) {
+        this.sql`delete from cf_ai_chat_stream_chunks where stream_id = ${stream.id}`;
+        this.sql`delete from cf_ai_chat_stream_metadata where id = ${stream.id}`;
+        console.warn(`[AIChatAgent] Deleted stale stream ${stream.id} (age: ${Math.round(streamAge / 1e3)}s)`);
+        return;
+      }
+      this._activeStreamId = stream.id;
+      this._activeRequestId = stream.request_id;
+      const lastChunk = this.sql`
+        select max(chunk_index) as max_index
+        from cf_ai_chat_stream_chunks
+        where stream_id = ${this._activeStreamId}
+      `;
+      this._streamChunkIndex = lastChunk && lastChunk[0]?.max_index != null ? lastChunk[0].max_index + 1 : 0;
+    }
+  }
+  /**
+   * Notify a connection about an active stream that can be resumed.
+   * The client should respond with CF_AGENT_STREAM_RESUME_ACK to receive chunks.
+   * Uses in-memory state for request ID - no extra DB lookup needed.
+   * @param connection - The WebSocket connection to notify
+   */
+  _notifyStreamResuming(connection) {
+    if (!this._activeStreamId || !this._activeRequestId) return;
+    connection.send(JSON.stringify({
+      type: MessageType.CF_AGENT_STREAM_RESUMING,
+      id: this._activeRequestId
+    }));
+  }
+  /**
+   * Send stream chunks to a connection after receiving ACK.
+   * @param connection - The WebSocket connection
+   * @param streamId - The stream to replay
+   * @param requestId - The original request ID
+   */
+  _sendStreamChunks(connection, streamId, requestId) {
+    this._flushChunkBuffer();
+    const chunks = this.sql`
+      select * from cf_ai_chat_stream_chunks
+      where stream_id = ${streamId}
+      order by chunk_index asc
+    `;
+    for (const chunk of chunks || []) connection.send(JSON.stringify({
+      body: chunk.body,
+      done: false,
+      id: requestId,
+      type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+    }));
+    if (this._activeStreamId !== streamId) connection.send(JSON.stringify({
+      body: "",
+      done: true,
+      id: requestId,
+      type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+    }));
+  }
+  /**
+   * Buffer a stream chunk for batch write to SQLite.
+   * @param streamId - The stream this chunk belongs to
+   * @param body - The serialized chunk body
+   * @internal Protected for testing purposes.
+   */
+  _storeStreamChunk(streamId, body) {
+    if (this._chunkBuffer.length >= CHUNK_BUFFER_MAX_SIZE) this._flushChunkBuffer();
+    this._chunkBuffer.push({
+      id: nanoid(),
+      streamId,
+      body,
+      index: this._streamChunkIndex
+    });
+    this._streamChunkIndex++;
+    if (this._chunkBuffer.length >= CHUNK_BUFFER_SIZE) this._flushChunkBuffer();
+  }
+  /**
+   * Flush buffered chunks to SQLite in a single batch.
+   * Uses a lock to prevent concurrent flush operations.
+   * @internal Protected for testing purposes.
+   */
+  _flushChunkBuffer() {
+    if (this._isFlushingChunks || this._chunkBuffer.length === 0) return;
+    this._isFlushingChunks = true;
+    try {
+      const chunks = this._chunkBuffer;
+      this._chunkBuffer = [];
+      const now = Date.now();
+      for (const chunk of chunks) this.sql`
+        insert into cf_ai_chat_stream_chunks (id, stream_id, body, chunk_index, created_at)
+        values (${chunk.id}, ${chunk.streamId}, ${chunk.body}, ${chunk.index}, ${now})
+      `;
+    } finally {
+      this._isFlushingChunks = false;
+    }
+  }
+  /**
+   * Start tracking a new stream for resumable streaming.
+   * Creates metadata entry in SQLite and sets up tracking state.
+   * @param requestId - The unique ID of the chat request
+   * @returns The generated stream ID
+   * @internal Protected for testing purposes.
+   */
+  _startStream(requestId) {
+    this._flushChunkBuffer();
+    const streamId = nanoid();
+    this._activeStreamId = streamId;
+    this._activeRequestId = requestId;
+    this._streamChunkIndex = 0;
+    this.sql`
+      insert into cf_ai_chat_stream_metadata (id, request_id, status, created_at)
+      values (${streamId}, ${requestId}, 'streaming', ${Date.now()})
+    `;
+    return streamId;
+  }
+  /**
+   * Mark a stream as completed and flush any pending chunks.
+   * @param streamId - The stream to mark as completed
+   * @internal Protected for testing purposes.
+   */
+  _completeStream(streamId) {
+    this._flushChunkBuffer();
+    this.sql`
+      update cf_ai_chat_stream_metadata
+      set status = 'completed', completed_at = ${Date.now()}
+      where id = ${streamId}
+    `;
+    this._activeStreamId = null;
+    this._activeRequestId = null;
+    this._streamChunkIndex = 0;
+    this._maybeCleanupOldStreams();
+  }
+  /**
+   * Clean up old completed streams if enough time has passed since last cleanup.
+   * This prevents database growth while avoiding cleanup overhead on every stream completion.
+   */
+  _maybeCleanupOldStreams() {
+    const now = Date.now();
+    if (now - this._lastCleanupTime < CLEANUP_INTERVAL_MS) return;
+    this._lastCleanupTime = now;
+    const cutoff = now - CLEANUP_AGE_THRESHOLD_MS;
+    this.sql`
+      delete from cf_ai_chat_stream_chunks
+      where stream_id in (
+        select id from cf_ai_chat_stream_metadata
+        where status = 'completed' and completed_at < ${cutoff}
+      )
+    `;
+    this.sql`
+      delete from cf_ai_chat_stream_metadata
+      where status = 'completed' and completed_at < ${cutoff}
+    `;
   }
   _broadcastChatMessage(message, exclude) {
     this.broadcast(JSON.stringify(message), exclude);
@@ -35,68 +317,8 @@ var AIChatAgent = class extends Agent {
       }
     }).filter((msg) => msg !== null);
   }
-  async onMessage(connection, message) {
-    if (typeof message === "string") {
-      let data;
-      try {
-        data = JSON.parse(message);
-      } catch (_error) {
-        return;
-      }
-      if (data.type === MessageType.CF_AGENT_USE_CHAT_REQUEST && data.init.method === "POST") {
-        const { body } = data.init;
-        const { messages } = JSON.parse(body);
-        const transformedMessages = autoTransformMessages(messages);
-        this._broadcastChatMessage({
-          messages: transformedMessages,
-          type: MessageType.CF_AGENT_CHAT_MESSAGES
-        }, [connection.id]);
-        await this.persistMessages(transformedMessages, [connection.id]);
-        this.observability?.emit({
-          displayMessage: "Chat message request",
-          id: data.id,
-          payload: {},
-          timestamp: Date.now(),
-          type: "message:request"
-        }, this.ctx);
-        const chatMessageId = data.id;
-        const abortSignal = this._getAbortSignal(chatMessageId);
-        return this._tryCatchChat(async () => {
-          const response = await this.onChatMessage(async (_finishResult) => {
-            this._removeAbortController(chatMessageId);
-            this.observability?.emit({
-              displayMessage: "Chat message response",
-              id: data.id,
-              payload: {},
-              timestamp: Date.now(),
-              type: "message:response"
-            }, this.ctx);
-          }, abortSignal ? { abortSignal } : void 0);
-          if (response) await this._reply(data.id, response);
-          else {
-            console.warn(`[AIChatAgent] onChatMessage returned no response for chatMessageId: ${chatMessageId}`);
-            this._broadcastChatMessage({
-              body: "No response was generated by the agent.",
-              done: true,
-              id: data.id,
-              type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
-            }, [connection.id]);
-          }
-        });
-      }
-      if (data.type === MessageType.CF_AGENT_CHAT_CLEAR) {
-        this._destroyAbortControllers();
-        this.sql`delete from cf_ai_chat_agent_messages`;
-        this.messages = [];
-        this._broadcastChatMessage({ type: MessageType.CF_AGENT_CHAT_CLEAR }, [connection.id]);
-      } else if (data.type === MessageType.CF_AGENT_CHAT_MESSAGES) {
-        const transformedMessages = autoTransformMessages(data.messages);
-        await this.persistMessages(transformedMessages, [connection.id]);
-      } else if (data.type === MessageType.CF_AGENT_CHAT_REQUEST_CANCEL) this._cancelChatRequest(data.id);
-    }
-  }
   async onRequest(request) {
-    return this._tryCatchChat(() => {
+    return this._tryCatchChat(async () => {
       if (new URL(request.url).pathname.endsWith("/get-messages")) {
         const messages = this._loadMessagesFromDb();
         return Response.json(messages);
@@ -154,6 +376,7 @@ var AIChatAgent = class extends Agent {
        });
        return;
      }
+     const streamId = this._startStream(id);
      const { getToolName, isToolUIPart, parsePartialJson } = await import("ai");
      const reader = response.body.getReader();
      const message = {
@@ -221,10 +444,13 @@ var AIChatAgent = class extends Agent {
          ...metadata
        } : metadata;
      }
+     let streamCompleted = false;
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) {
+           this._completeStream(streamId);
+           streamCompleted = true;
            this._broadcastChatMessage({
              body: "",
              done: true,
@@ -467,29 +693,54 @@ var AIChatAgent = class extends Agent {
                });
                break;
              }
+             let eventToSend = data;
+             if (data.type === "finish" && "finishReason" in data) {
+               const { finishReason, ...rest } = data;
+               eventToSend = {
+                 ...rest,
+                 type: "finish",
+                 messageMetadata: { finishReason }
+               };
+             }
+             const chunkBody = JSON.stringify(eventToSend);
+             this._storeStreamChunk(streamId, chunkBody);
              this._broadcastChatMessage({
-               body:
+               body: chunkBody,
                done: false,
                id,
                type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
              });
-           } catch (
+           } catch (_error) {}
          } else if (chunk.length > 0) {
            message.parts.push({
              type: "text",
              text: chunk
            });
+           const chunkBody = JSON.stringify({
+             type: "text-delta",
+             delta: chunk
+           });
+           this._storeStreamChunk(streamId, chunkBody);
            this._broadcastChatMessage({
-             body:
-             type: "text-delta",
-             delta: chunk
-             }),
+             body: chunkBody,
              done: false,
              id,
              type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
            });
          }
        }
+     } catch (error) {
+       if (!streamCompleted) {
+         this._markStreamError(streamId);
+         this._broadcastChatMessage({
+           body: error instanceof Error ? error.message : "Stream error",
+           done: true,
+           error: true,
+           id,
+           type: MessageType.CF_AGENT_USE_CHAT_RESPONSE
+         });
+       }
+       throw error;
      } finally {
        reader.releaseLock();
      }
@@ -497,6 +748,22 @@ var AIChatAgent = class extends Agent {
     });
   }
   /**
+   * Mark a stream as errored and clean up state.
+   * @param streamId - The stream to mark as errored
+   * @internal Protected for testing purposes.
+   */
+  _markStreamError(streamId) {
+    this._flushChunkBuffer();
+    this.sql`
+      update cf_ai_chat_stream_metadata
+      set status = 'error', completed_at = ${Date.now()}
+      where id = ${streamId}
+    `;
+    this._activeStreamId = null;
+    this._activeRequestId = null;
+    this._streamChunkIndex = 0;
+  }
+  /**
    * For the given message id, look up its associated AbortController
    * If the AbortController does not exist, create and store one in memory
    *
@@ -527,10 +794,15 @@ var AIChatAgent = class extends Agent {
     this._chatMessageAbortControllers.clear();
   }
   /**
-   * When the DO is destroyed, cancel all pending requests
+   * When the DO is destroyed, cancel all pending requests and clean up resources
    */
   async destroy() {
     this._destroyAbortControllers();
+    this._flushChunkBuffer();
+    this.sql`drop table if exists cf_ai_chat_stream_chunks`;
+    this.sql`drop table if exists cf_ai_chat_stream_metadata`;
+    this._activeStreamId = null;
+    this._activeRequestId = null;
     await super.destroy();
   }
 };
```