agents 0.0.0-eeb70e2 → 0.0.0-f0c6dce
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +131 -25
- package/dist/ai-chat-agent.d.ts +12 -8
- package/dist/ai-chat-agent.js +166 -59
- package/dist/ai-chat-agent.js.map +1 -1
- package/dist/ai-chat-v5-migration.d.ts +152 -0
- package/dist/ai-chat-v5-migration.js +19 -0
- package/dist/ai-chat-v5-migration.js.map +1 -0
- package/dist/ai-react.d.ts +63 -72
- package/dist/ai-react.js +161 -54
- package/dist/ai-react.js.map +1 -1
- package/dist/ai-types.d.ts +36 -19
- package/dist/ai-types.js +6 -0
- package/dist/chunk-AVYJQSLW.js +17 -0
- package/dist/chunk-AVYJQSLW.js.map +1 -0
- package/dist/chunk-MWQSU7GK.js +1301 -0
- package/dist/chunk-MWQSU7GK.js.map +1 -0
- package/dist/{chunk-BZXOAZUX.js → chunk-PVQZBKN7.js} +5 -5
- package/dist/chunk-PVQZBKN7.js.map +1 -0
- package/dist/{chunk-VCSB47AK.js → chunk-QEVM4BVL.js} +10 -10
- package/dist/chunk-QEVM4BVL.js.map +1 -0
- package/dist/chunk-UJVEAURM.js +150 -0
- package/dist/chunk-UJVEAURM.js.map +1 -0
- package/dist/{chunk-OYJXQRRH.js → chunk-VYENMKFS.js} +182 -35
- package/dist/chunk-VYENMKFS.js.map +1 -0
- package/dist/client-B9tFv5gX.d.ts +4607 -0
- package/dist/client.d.ts +2 -2
- package/dist/client.js +2 -1
- package/dist/index.d.ts +166 -22
- package/dist/index.js +13 -4
- package/dist/mcp/client.d.ts +9 -781
- package/dist/mcp/client.js +1 -1
- package/dist/mcp/do-oauth-client-provider.js +1 -1
- package/dist/mcp/index.d.ts +38 -10
- package/dist/mcp/index.js +233 -59
- package/dist/mcp/index.js.map +1 -1
- package/dist/observability/index.d.ts +46 -0
- package/dist/observability/index.js +11 -0
- package/dist/observability/index.js.map +1 -0
- package/dist/react.d.ts +12 -8
- package/dist/react.js +12 -10
- package/dist/react.js.map +1 -1
- package/dist/schedule.d.ts +81 -7
- package/dist/schedule.js +19 -6
- package/dist/schedule.js.map +1 -1
- package/package.json +83 -70
- package/src/index.ts +857 -170
- package/dist/chunk-BZXOAZUX.js.map +0 -1
- package/dist/chunk-OYJXQRRH.js.map +0 -1
- package/dist/chunk-P3RZJ72N.js +0 -783
- package/dist/chunk-P3RZJ72N.js.map +0 -1
- package/dist/chunk-VCSB47AK.js.map +0 -1
package/README.md
CHANGED
@@ -66,13 +66,13 @@ export class AIAgent extends Agent {
   async onRequest(request) {
     // Connect with AI capabilities
     const ai = new OpenAI({
-      apiKey: this.env.OPENAI_API_KEY
+      apiKey: this.env.OPENAI_API_KEY
     });
 
     // Process and understand
     const response = await ai.chat.completions.create({
       model: "gpt-4",
-      messages: [{ role: "user", content: await request.text() }]
+      messages: [{ role: "user", content: await request.text() }]
     });
 
     return new Response(response.choices[0].message.content);
@@ -96,17 +96,17 @@ Define your agent's domain:
     "bindings": [
       {
         "name": "AIAgent",
-        "class_name": "AIAgent"
-      }
-    ]
+        "class_name": "AIAgent"
+      }
+    ]
   },
   "migrations": [
     {
       "tag": "v1",
       // Mandatory for the Agent to store state
-      "new_sqlite_classes": ["AIAgent"]
-    }
-  ]
+      "new_sqlite_classes": ["AIAgent"]
+    }
+  ]
 }
 ```
 
@@ -123,7 +123,7 @@ const agent = env.AIAgent.get(id);
 await agent.processTask({
   type: "analysis",
   context: "incoming_data",
-  parameters: initialConfig
+  parameters: initialConfig
 });
 
 // Or reconnect with an existing one
@@ -143,7 +143,7 @@ export class APIAgent extends Agent {
 
     return Response.json({
       insight: await this.process(data),
-      moment: Date.now()
+      moment: Date.now()
     });
   }
 }
@@ -166,7 +166,7 @@ export class DialogueAgent extends Agent {
 }
 ```
 
-#### Client
+#### Client Communication
 
 For direct connection to your agent:
 
@@ -175,7 +175,7 @@ import { AgentClient } from "agents/client";
 
 const connection = new AgentClient({
   agent: "dialogue-agent",
-  name: "insight-seeker"
+  name: "insight-seeker"
 });
 
 connection.addEventListener("message", (event) => {
@@ -185,7 +185,7 @@ connection.addEventListener("message", (event) => {
 connection.send(
   JSON.stringify({
     type: "inquiry",
-    content: "What patterns do you see?"
+    content: "What patterns do you see?"
   })
 );
 ```
@@ -205,14 +205,14 @@ function AgentInterface() {
       console.log("Understanding received:", message.data);
     },
     onOpen: () => console.log("Connection established"),
-    onClose: () => console.log("Connection closed")
+    onClose: () => console.log("Connection closed")
   });
 
   const inquire = () => {
     connection.send(
       JSON.stringify({
         type: "inquiry",
-        content: "What insights have you gathered?"
+        content: "What insights have you gathered?"
       })
     );
   };
@@ -235,14 +235,14 @@ export class ThinkingAgent extends Agent {
     this.setState({
       ...this.state,
       insights: [...(this.state.insights || []), newInsight],
-      understanding: this.state.understanding + 1
+      understanding: this.state.understanding + 1
     });
   }
 
   onStateUpdate(state, source) {
     console.log("Understanding deepened:", {
       newState: state,
-      origin: source
+      origin: source
     });
   }
 }
@@ -259,7 +259,7 @@ function StateInterface() {
 
   const agent = useAgent({
     agent: "thinking-agent",
-    onStateUpdate: (newState) => setState(newState)
+    onStateUpdate: (newState) => setState(newState)
   });
 
   const increment = () => {
@@ -289,7 +289,7 @@ export class TimeAwareAgent extends Agent {
 
     // Daily synthesis
     this.schedule("0 0 * * *", "dailySynthesis", {
-      depth: "comprehensive"
+      depth: "comprehensive"
     });
 
     // Milestone review
@@ -325,11 +325,11 @@ export class DialogueAgent extends AIChatAgent {
         const stream = streamText({
           model: openai("gpt-4o"),
           messages: this.messages,
-          onFinish
+          onFinish // call onFinish so that messages get saved
         });
 
         stream.mergeIntoDataStream(dataStream);
-      }
+      }
     });
   }
 }
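Note: the hunk above is the middle of the README's `onChatMessage` example. A minimal sketch of the handler it is excerpted from, assuming the AI SDK v4 `createDataStreamResponse` wrapper that this README version still targets; only the inner `streamText` block appears verbatim in the diff, the rest is illustrative.

```typescript
import { openai } from "@ai-sdk/openai";
import { createDataStreamResponse, streamText } from "ai";
import type { StreamTextOnFinishCallback, ToolSet } from "ai";
import { AIChatAgent } from "agents/ai-chat-agent";

export class DialogueAgent extends AIChatAgent {
  async onChatMessage(onFinish: StreamTextOnFinishCallback<ToolSet>) {
    // Assumed v4-style wrapper around the streamed response.
    return createDataStreamResponse({
      execute: async (dataStream) => {
        const stream = streamText({
          model: openai("gpt-4o"),
          messages: this.messages,
          onFinish // call onFinish so that messages get saved
        });

        stream.mergeIntoDataStream(dataStream);
      }
    });
  }
}
```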
@@ -346,14 +346,14 @@ import { useAgentChat } from "agents/ai-react";
 function ChatInterface() {
   // Connect to the agent
   const agent = useAgent({
-    agent: "dialogue-agent"
+    agent: "dialogue-agent"
   });
 
   // Set up the chat interaction
   const { messages, input, handleInputChange, handleSubmit, clearHistory } =
     useAgentChat({
       agent,
-      maxSteps: 5
+      maxSteps: 5
     });
 
   return (
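Note: the hunk above cuts off at `return (`. A purely illustrative sketch of how the component might finish, using only the values destructured from `useAgentChat`; the markup is an assumption, and message-content rendering is left out because the message shape depends on the AI SDK version in use.

```tsx
import { useAgent } from "agents/react";
import { useAgentChat } from "agents/ai-react";

function ChatInterface() {
  const agent = useAgent({ agent: "dialogue-agent" });
  const { messages, input, handleInputChange, handleSubmit, clearHistory } =
    useAgentChat({ agent, maxSteps: 5 });

  return (
    <div>
      <ul>
        {messages.map((m) => (
          // Only id and role are rendered here; plug in your own content renderer.
          <li key={m.id}>{m.role}</li>
        ))}
      </ul>
      <form onSubmit={handleSubmit}>
        <input value={input} onChange={handleInputChange} />
      </form>
      <button type="button" onClick={clearHistory}>
        Clear history
      </button>
    </div>
  );
}
```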
@@ -393,6 +393,112 @@ This creates:
 - Intuitive input handling
 - Easy conversation reset
 
+### 🔗 MCP (Model Context Protocol) Integration
+
+Agents can seamlessly integrate with the Model Context Protocol, allowing them to act as both MCP servers (providing tools to AI assistants) and MCP clients (using tools from other services).
+
+#### Creating an MCP Server
+
+```typescript
+import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
+import { McpAgent } from "agents/mcp";
+import { z } from "zod";
+
+type Env = {
+  MyMCP: DurableObjectNamespace<MyMCP>;
+};
+
+type State = { counter: number };
+
+export class MyMCP extends McpAgent<Env, State, {}> {
+  server = new McpServer({
+    name: "Demo",
+    version: "1.0.0"
+  });
+
+  initialState: State = {
+    counter: 1
+  };
+
+  async init() {
+    this.server.resource("counter", "mcp://resource/counter", (uri) => {
+      return {
+        contents: [{ text: String(this.state.counter), uri: uri.href }]
+      };
+    });
+
+    this.server.tool(
+      "add",
+      "Add to the counter, stored in the MCP",
+      { a: z.number() },
+      async ({ a }) => {
+        this.setState({ ...this.state, counter: this.state.counter + a });
+
+        return {
+          content: [
+            {
+              text: String(`Added ${a}, total is now ${this.state.counter}`),
+              type: "text"
+            }
+          ]
+        };
+      }
+    );
+  }
+
+  onStateUpdate(state: State) {
+    console.log({ stateUpdate: state });
+  }
+}
+
+// HTTP Streamable transport (recommended)
+export default MyMCP.serve("/mcp", {
+  binding: "MyMCP"
+});
+
+// Or SSE transport for legacy compatibility
+// export default MyMCP.serveSSE("/mcp", { binding: "MyMCP" });
+```
+
+#### Using MCP Tools
+
+```typescript
+import { MCPClientManager } from "agents/mcp";
+
+const client = new MCPClientManager("my-app", "1.0.0");
+
+// Connect to an MCP server
+await client.connect("https://weather-service.com/mcp", {
+  transport: { type: "streamable-http" }
+});
+
+// Use tools from the server
+const weather = await client.callTool({
+  serverId: "weather-service",
+  name: "getWeather",
+  arguments: { location: "San Francisco" }
+});
+```
+
+#### AI SDK Integration
+
+```typescript
+import { generateText } from "ai";
+
+// Convert MCP tools for AI use
+const result = await generateText({
+  model: openai("gpt-4"),
+  tools: client.getAITools(),
+  prompt: "What's the weather in Tokyo?"
+});
+```
+
+**Transport Options:**
+
+- **Auto**: Automatically determine the correct transport
+- **HTTP Streamable**: Best performance, batch requests, session management
+- **SSE**: Simple setup, legacy compatibility
+
 ### 💬 The Path Forward
 
 We're developing new dimensions of agent capability:
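Note: tying the two halves of the new MCP section together, a short sketch of calling the counter server's `add` tool through `MCPClientManager`; the worker URL and `serverId` are placeholders, mirroring the weather example above rather than documenting exact return values.

```typescript
import { MCPClientManager } from "agents/mcp";

const client = new MCPClientManager("my-app", "1.0.0");

// Placeholder URL for a deployment of the MyMCP server defined above.
await client.connect("https://my-mcp-worker.example.com/mcp", {
  transport: { type: "streamable-http" }
});

// Invoke the "add" tool registered in MyMCP.init().
const result = await client.callTool({
  serverId: "my-mcp-worker", // placeholder, as in the weather example
  name: "add",
  arguments: { a: 2 }
});
```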
@@ -418,8 +524,8 @@ Welcome to the future of intelligent agents. Create something meaningful. 🌟
 Contributions are welcome, but are especially welcome when:
 
 - You have opened an issue as a Request for Comment (RFC) to discuss your proposal, show your thinking, and iterate together.
--
-- You're willing to accept feedback and make sure the changes fit the goals of the `agents`
+- Not "AI slop": LLMs are powerful tools, but contributions entirely authored by vibe coding are unlikely to meet the quality bar, and will be rejected.
+- You're willing to accept feedback and make sure the changes fit the goals of the `agents` SDK. Not everything will, and that's OK.
 
 Small fixes, type bugs, and documentation improvements can be raised directly as PRs.
 
package/dist/ai-chat-agent.d.ts
CHANGED
@@ -1,15 +1,19 @@
+import { UIMessage, StreamTextOnFinishCallback, ToolSet } from "ai";
 import { Agent, AgentContext } from "./index.js";
-import { Message, StreamTextOnFinishCallback, ToolSet } from "ai";
 import { Connection, WSMessage } from "partyserver";
+import "cloudflare:workers";
+import "@modelcontextprotocol/sdk/client/index.js";
 import "@modelcontextprotocol/sdk/types.js";
-import "./
+import "./client-B9tFv5gX.js";
 import "zod";
-import "@modelcontextprotocol/sdk/
+import "@modelcontextprotocol/sdk/shared/protocol.js";
 import "@modelcontextprotocol/sdk/client/sse.js";
+import "@modelcontextprotocol/sdk/client/streamableHttp.js";
 import "./mcp/do-oauth-client-provider.js";
 import "@modelcontextprotocol/sdk/client/auth.js";
 import "@modelcontextprotocol/sdk/shared/auth.js";
-import "
+import "./observability/index.js";
+import "./ai-types.js";
 
 /**
  * Extension of Agent with built-in chat capabilities
@@ -25,7 +29,7 @@ declare class AIChatAgent<Env = unknown, State = unknown> extends Agent<
    */
   private _chatMessageAbortControllers;
   /** Array of chat messages for the current conversation */
-  messages:
+  messages: UIMessage[];
   constructor(ctx: AgentContext, env: Env);
   private _broadcastChatMessage;
   onMessage(connection: Connection, message: WSMessage): Promise<void>;
@@ -44,12 +48,12 @@ declare class AIChatAgent<Env = unknown, State = unknown> extends Agent<
     }
   ): Promise<Response | undefined>;
   /**
-   * Save messages on the server side
+   * Save messages on the server side
    * @param messages Chat messages to save
    */
-  saveMessages(messages:
+  saveMessages(messages: UIMessage[]): Promise<void>;
   persistMessages(
-    messages:
+    messages: UIMessage[],
     excludeBroadcastIds?: string[]
   ): Promise<void>;
   private _reply;
package/dist/ai-chat-agent.js
CHANGED
@@ -1,12 +1,15 @@
+import {
+  autoTransformMessages
+} from "./chunk-UJVEAURM.js";
 import {
   Agent
-} from "./chunk-
-import "./chunk-
-import "./chunk-
-import "./chunk-
+} from "./chunk-MWQSU7GK.js";
+import "./chunk-VYENMKFS.js";
+import "./chunk-PVQZBKN7.js";
+import "./chunk-QEVM4BVL.js";
+import "./chunk-AVYJQSLW.js";
 
 // src/ai-chat-agent.ts
-import { appendResponseMessages } from "ai";
 var decoder = new TextDecoder();
 var AIChatAgent = class extends Agent {
   constructor(ctx, env) {
@@ -16,9 +19,10 @@ var AIChatAgent = class extends Agent {
       message text not null,
       created_at datetime default current_timestamp
     )`;
-
+    const rawMessages = (this.sql`select * from cf_ai_chat_agent_messages` || []).map((row) => {
       return JSON.parse(row.message);
     });
+    this.messages = autoTransformMessages(rawMessages);
     this._chatMessageAbortControllers = /* @__PURE__ */ new Map();
   }
   _broadcastChatMessage(message, exclude) {
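Note: `autoTransformMessages` is imported from a compiled chunk and its implementation is not part of this diff; given the new `ai-chat-v5-migration` files in the file list, it presumably upgrades previously stored v4-style messages to the part-based shape before they are assigned to `this.messages`. A purely hypothetical sketch of that idea, not the package's code:

```typescript
// Hypothetical: convert { role, content } rows into { role, parts } messages,
// passing through anything that already has parts.
type LegacyOrUIMessage =
  | { id?: string; role: string; content: string }
  | { id?: string; role: string; parts: Array<{ type: string; [k: string]: unknown }> };

function autoTransformMessagesSketch(messages: LegacyOrUIMessage[]) {
  return messages.map((m, i) => {
    if ("parts" in m) return m; // already part-based
    return {
      id: m.id ?? `msg_${i}`,
      role: m.role,
      parts: [{ type: "text", text: m.content }]
    };
  });
}
```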
@@ -29,67 +33,92 @@ var AIChatAgent = class extends Agent {
     let data;
     try {
       data = JSON.parse(message);
-    } catch (
+    } catch (_error) {
       return;
     }
-    if (data.type === "cf_agent_use_chat_request" && data.init.method === "POST") {
+    if (data.type === "cf_agent_use_chat_request" /* CF_AGENT_USE_CHAT_REQUEST */ && data.init.method === "POST") {
       const {
-        method,
-        keepalive,
-        headers,
-        body
+        // method,
+        // keepalive,
+        // headers,
+        body
         // we're reading this
-
-
-        credentials,
-        mode,
-        referrer,
-        referrerPolicy,
-        window
+        //
+        // // these might not exist?
         // dispatcher,
         // duplex
       } = data.init;
       const { messages } = JSON.parse(body);
+      const transformedMessages = autoTransformMessages(messages);
       this._broadcastChatMessage(
         {
-
-
+          messages: transformedMessages,
+          type: "cf_agent_chat_messages" /* CF_AGENT_CHAT_MESSAGES */
         },
         [connection.id]
       );
-      await this.persistMessages(
+      await this.persistMessages(transformedMessages, [connection.id]);
+      this.observability?.emit(
+        {
+          displayMessage: "Chat message request",
+          id: data.id,
+          payload: {},
+          timestamp: Date.now(),
+          type: "message:request"
+        },
+        this.ctx
+      );
       const chatMessageId = data.id;
       const abortSignal = this._getAbortSignal(chatMessageId);
       return this._tryCatchChat(async () => {
         const response = await this.onChatMessage(
-          async (
-            const finalMessages = appendResponseMessages({
-              messages,
-              responseMessages: response2.messages
-            });
-            await this.persistMessages(finalMessages, [connection.id]);
+          async (_finishResult) => {
             this._removeAbortController(chatMessageId);
+            this.observability?.emit(
+              {
+                displayMessage: "Chat message response",
+                id: data.id,
+                payload: {},
+                timestamp: Date.now(),
+                type: "message:response"
+              },
+              this.ctx
+            );
           },
           abortSignal ? { abortSignal } : void 0
         );
         if (response) {
           await this._reply(data.id, response);
+        } else {
+          console.warn(
+            `[AIChatAgent] onChatMessage returned no response for chatMessageId: ${chatMessageId}`
+          );
+          this._broadcastChatMessage(
+            {
+              body: "No response was generated by the agent.",
+              done: true,
+              id: data.id,
+              type: "cf_agent_use_chat_response" /* CF_AGENT_USE_CHAT_RESPONSE */
+            },
+            [connection.id]
+          );
         }
       });
     }
-    if (data.type === "cf_agent_chat_clear") {
+    if (data.type === "cf_agent_chat_clear" /* CF_AGENT_CHAT_CLEAR */) {
       this._destroyAbortControllers();
       this.sql`delete from cf_ai_chat_agent_messages`;
       this.messages = [];
       this._broadcastChatMessage(
         {
-          type: "cf_agent_chat_clear"
+          type: "cf_agent_chat_clear" /* CF_AGENT_CHAT_CLEAR */
         },
         [connection.id]
       );
-    } else if (data.type === "cf_agent_chat_messages") {
-
-
+    } else if (data.type === "cf_agent_chat_messages" /* CF_AGENT_CHAT_MESSAGES */) {
+      const transformedMessages = autoTransformMessages(data.messages);
+      await this.persistMessages(transformedMessages, [connection.id]);
+    } else if (data.type === "cf_agent_chat_request_cancel" /* CF_AGENT_CHAT_REQUEST_CANCEL */) {
       this._cancelChatRequest(data.id);
     }
   }
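Note: the handler above only acts on a few known frame types. An illustrative `cf_agent_use_chat_request` frame as a client would send it over the WebSocket; the field values are made up, while the field names (`type`, `id`, `init.method`, `init.body`, and the `messages` inside `body`) are the ones the handler reads.

```typescript
// Illustrative frame; the handler JSON-parses the outer string, then
// JSON-parses init.body again to get the messages array.
const exampleFrame = JSON.stringify({
  type: "cf_agent_use_chat_request",
  id: "chat_123",
  init: {
    method: "POST",
    body: JSON.stringify({
      messages: [
        { id: "user_1", role: "user", parts: [{ type: "text", text: "Hello" }] }
      ]
    })
  }
});
// connection.send(exampleFrame);
```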
@@ -125,24 +154,11 @@ var AIChatAgent = class extends Agent {
     );
   }
   /**
-   * Save messages on the server side
+   * Save messages on the server side
    * @param messages Chat messages to save
    */
   async saveMessages(messages) {
     await this.persistMessages(messages);
-    const response = await this.onChatMessage(async ({ response: response2 }) => {
-      const finalMessages = appendResponseMessages({
-        messages,
-        responseMessages: response2.messages
-      });
-      await this.persistMessages(finalMessages, []);
-    });
-    if (response) {
-      for await (const chunk of response.body) {
-        decoder.decode(chunk);
-      }
-      response.body?.cancel();
-    }
   }
   async persistMessages(messages, excludeBroadcastIds = []) {
     this.sql`delete from cf_ai_chat_agent_messages`;
@@ -152,29 +168,120 @@ var AIChatAgent = class extends Agent {
     this.messages = messages;
     this._broadcastChatMessage(
       {
-
-
+        messages,
+        type: "cf_agent_chat_messages" /* CF_AGENT_CHAT_MESSAGES */
       },
       excludeBroadcastIds
     );
   }
   async _reply(id, response) {
     return this._tryCatchChat(async () => {
-
-        const body = decoder.decode(chunk);
+      if (!response.body) {
         this._broadcastChatMessage({
+          body: "",
+          done: true,
           id,
-          type: "cf_agent_use_chat_response"
-          body,
-          done: false
+          type: "cf_agent_use_chat_response" /* CF_AGENT_USE_CHAT_RESPONSE */
         });
+        return;
+      }
+      const reader = response.body.getReader();
+      let fullResponseText = "";
+      const toolCalls = /* @__PURE__ */ new Map();
+      try {
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) {
+            this._broadcastChatMessage({
+              body: "",
+              done: true,
+              id,
+              type: "cf_agent_use_chat_response" /* CF_AGENT_USE_CHAT_RESPONSE */
+            });
+            break;
+          }
+          const chunk = decoder.decode(value);
+          const lines = chunk.split("\n");
+          for (const line of lines) {
+            if (line.startsWith("data: ") && line !== "data: [DONE]") {
+              try {
+                const data = JSON.parse(line.slice(6));
+                switch (data.type) {
+                  // SSE event signaling the tool input is ready. We track by
+                  // `toolCallId` so we can persist it as a tool part in the message.
+                  case "tool-input-available": {
+                    const { toolCallId, toolName, input } = data;
+                    toolCalls.set(toolCallId, {
+                      toolCallId,
+                      toolName,
+                      input,
+                      type: toolName ? `tool-${toolName}` : "dynamic-tool",
+                      state: "input-available"
+                    });
+                    break;
+                  }
+                  // SSE event signaling the tool output is ready. We should've
+                  // already received the input in a previous event so an entry
+                  // with `toolCallId` should already be present
+                  case "tool-output-available": {
+                    const { toolCallId, output, isError, errorText } = data;
+                    const toolPart = toolCalls.get(toolCallId);
+                    if (toolPart)
+                      toolCalls.set(toolCallId, {
+                        ...toolPart,
+                        output,
+                        isError,
+                        errorText,
+                        state: "output-available"
+                      });
+                    break;
+                  }
+                  case "error": {
+                    this._broadcastChatMessage({
+                      error: true,
+                      body: data.errorText ?? JSON.stringify(data),
+                      done: false,
+                      id,
+                      type: "cf_agent_use_chat_response" /* CF_AGENT_USE_CHAT_RESPONSE */
+                    });
+                    return;
+                  }
+                  case "text-delta": {
+                    if (data.delta) fullResponseText += data.delta;
+                    break;
+                  }
+                }
+                this._broadcastChatMessage({
+                  body: JSON.stringify(data),
+                  done: false,
+                  id,
+                  type: "cf_agent_use_chat_response" /* CF_AGENT_USE_CHAT_RESPONSE */
+                });
+              } catch (_e) {
+              }
+            }
+          }
+        }
+      } finally {
+        reader.releaseLock();
       }
-
-
-
-        body: "",
-        done: true
+      const messageParts = [];
+      Array.from(toolCalls.values()).forEach((t) => {
+        messageParts.push(t);
       });
+      if (fullResponseText.trim()) {
+        messageParts.push({ type: "text", text: fullResponseText });
+      }
+      if (messageParts.length > 0) {
+        await this.persistMessages([
+          ...this.messages,
+          {
+            id: `assistant_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
+            role: "assistant",
+            parts: messageParts
+          }
+        ]);
+      }
     });
   }
   /**
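Note: for reference, an illustrative fragment of the stream the new `_reply` loop consumes; the `type` values are the ones handled in the switch above ("text-delta", "tool-input-available", "tool-output-available", "error") plus the `data: [DONE]` terminator it skips, while the payload values are made up.

```typescript
// _reply splits each decoded chunk on newlines, strips the "data: " prefix,
// JSON-parses the payload, and switches on its type.
const exampleStream = [
  'data: {"type":"text-delta","delta":"The weather in Tokyo"}',
  'data: {"type":"tool-input-available","toolCallId":"call_1","toolName":"getWeather","input":{"location":"Tokyo"}}',
  'data: {"type":"tool-output-available","toolCallId":"call_1","output":{"temperature":21}}',
  'data: {"type":"text-delta","delta":" is 21°C."}',
  "data: [DONE]"
].join("\n");
```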