deepagentsdk 0.9.2 → 0.11.0
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry, and is provided for informational purposes only.
- package/README.md +4 -4
- package/package.json +15 -9
- package/src/adapters/elements/index.ts +27 -0
- package/src/adapters/elements/messageAdapter.ts +165 -0
- package/src/adapters/elements/statusAdapter.ts +39 -0
- package/src/adapters/elements/types.ts +97 -0
- package/src/adapters/elements/useElementsAdapter.ts +261 -0
- package/src/agent.ts +34 -6
- package/src/types/events.ts +1 -0
package/README.md
CHANGED
@@ -1,13 +1,13 @@
-#
+# Deep Agent SDK
 
 <p align="center">
-<img src="assets/www-hero.png" alt="
+<img src="assets/www-hero.png" alt="Deep Agent SDK" width="100%" />
 </p>
 
 [](https://www.npmjs.com/package/deepagentsdk)
 [](https://opensource.org/licenses/MIT)
-[](https://deepwiki.com/chrispangg/
-[](https://deepagentsdk.
+[](https://deepwiki.com/chrispangg/deepagentsdk)
+[](https://deepagentsdk.dev/docs)
 
 > **Note:** This package requires [Bun](https://bun.sh) runtime. It uses Bun-specific features and TypeScript imports.
 
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "deepagentsdk",
-  "version": "0.9.2",
+  "version": "0.11.0",
   "description": "Deep Agent implementation using Vercel AI SDK - build controllable AI agents with planning, filesystem, and subagent capabilities",
   "main": "./src/index.ts",
   "module": "./src/index.ts",
@@ -16,6 +16,10 @@
     },
     "./cli": {
       "import": "./src/cli/index.ts"
+    },
+    "./elements": {
+      "import": "./src/adapters/elements/index.ts",
+      "types": "./src/adapters/elements/index.ts"
     }
   },
   "files": [
@@ -33,20 +37,20 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/chrispangg/
+    "url": "git+https://github.com/chrispangg/deepagentsdk.git"
   },
-  "homepage": "https://github.com/chrispangg/
+  "homepage": "https://github.com/chrispangg/deepagentsdk#readme",
   "bugs": {
-    "url": "https://github.com/chrispangg/
+    "url": "https://github.com/chrispangg/deepagentsdk/issues"
   },
   "dependencies": {
-    "@ai-sdk/anthropic": "^3.0.
-    "@ai-sdk/openai": "^3.0.
-    "@ai-sdk/react": "^3.0.
+    "@ai-sdk/anthropic": "^3.0.9",
+    "@ai-sdk/openai": "^3.0.7",
+    "@ai-sdk/react": "^3.0.9",
     "@inkjs/ui": "^1.0.0",
     "@mozilla/readability": "^0.6.0",
     "@tavily/core": "^0.6.1",
-    "ai": "^6.0.
+    "ai": "^6.0.19",
     "fast-glob": "^3.3.3",
     "ink": "^5.1.0",
     "jsdom": "^25.0.1",
@@ -63,6 +67,7 @@
     "@opentelemetry/instrumentation": "^0.208.0",
     "@opentelemetry/sdk-logs": "^0.208.0",
     "@opentelemetry/sdk-node": "^0.208.0",
+    "@testing-library/react": "^16.3.1",
     "@types/bun": "latest",
     "@types/jsdom": "^21.1.7",
     "@types/micromatch": "^4.0.9",
@@ -71,6 +76,7 @@
     "@vercel/otel": "^2.1.0",
     "install": "^0.13.0",
     "langfuse-vercel": "^3.38.6",
+    "react-dom": "^18.2.0",
     "typescript": "^5.7.3"
   },
   "peerDependencies": {
@@ -92,4 +98,4 @@
     "bun": ">=1.0.0"
   },
   "license": "MIT"
-}
+}
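The new `./elements` entry in `exports` means the adapter can be imported via a package subpath instead of deep file paths. A minimal sketch of a consumer import (assuming a TypeScript project with `deepagentsdk` installed; the names come from the barrel file added below):

```ts
// Hypothetical consumer module; "deepagentsdk/elements" resolves to
// src/adapters/elements/index.ts through the exports map above.
import { useElementsAdapter } from "deepagentsdk/elements";
import type { UIMessage, UIStatus } from "deepagentsdk/elements";

// Re-export for use in UI components elsewhere in the app.
export { useElementsAdapter };
export type { UIMessage, UIStatus };
```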
package/src/adapters/elements/index.ts
ADDED
@@ -0,0 +1,27 @@
+/**
+ * AI SDK Elements adapter for deepagentsdk
+ *
+ * This adapter enables deepagentsdk to work seamlessly with Vercel AI SDK Elements
+ * UI components by transforming agent events to the UIMessage format expected by Elements.
+ *
+ * @module adapters/elements
+ * @see https://ai-sdk.dev/elements
+ */
+
+export { useElementsAdapter } from "./useElementsAdapter.js";
+export type { UseElementsAdapterOptions } from "./useElementsAdapter.js";
+
+export { mapAgentStatusToUIStatus } from "./statusAdapter.js";
+export {
+  convertEventsToUIMessages,
+  extractToolParts,
+} from "./messageAdapter.js";
+
+export type {
+  UIMessage,
+  UIMessagePart,
+  UIStatus,
+  PromptInputMessage,
+  ToolUIPart,
+  UseElementsAdapterReturn,
+} from "./types.js";
package/src/adapters/elements/messageAdapter.ts
ADDED
@@ -0,0 +1,165 @@
+/**
+ * Message transformation adapter for AI SDK Elements
+ *
+ * Converts deepagentsdk events to Elements UIMessage format
+ */
+
+import type { AgentEventLog } from "../../cli/hooks/useAgent.js";
+import type { UIMessage, UIMessagePart, UIStatus } from "./types.js";
+
+/**
+ * Converts agent event log to UIMessage format expected by Elements
+ *
+ * @param events - Array of agent events from useAgent hook
+ * @param streamingText - Current streaming text (if any)
+ * @param uiStatus - Current UI status
+ * @returns Array of UIMessage objects for Elements Message component
+ *
+ * Conversion logic:
+ * 1. Group events by role (user/assistant)
+ * 2. Convert each event type to appropriate UIMessagePart
+ * 3. Handle streaming text as in-progress message
+ * 4. Preserve event order and tool call/result pairing
+ */
+export function convertEventsToUIMessages(
+  events: AgentEventLog[],
+  streamingText: string,
+  uiStatus: UIStatus
+): UIMessage[] {
+  const messages: UIMessage[] = [];
+  let currentAssistantParts: UIMessagePart[] = [];
+  let messageIdCounter = 0;
+
+  const generateMessageId = (): string => {
+    return `msg-${Date.now()}-${++messageIdCounter}`;
+  };
+
+  for (const eventLog of events) {
+    const event = eventLog.event;
+
+    switch (event.type) {
+      case "user-message":
+        // Flush any pending assistant parts before user message
+        if (currentAssistantParts.length > 0) {
+          messages.push({
+            id: generateMessageId(),
+            role: "assistant",
+            parts: currentAssistantParts,
+            status: "ready",
+          });
+          currentAssistantParts = [];
+        }
+
+        // Add user message
+        messages.push({
+          id: eventLog.id,
+          role: "user",
+          parts: [{ type: "text", text: event.content }],
+          status: "ready",
+        });
+        break;
+
+      case "text-segment":
+        // Add text segment as separate text part
+        currentAssistantParts.push({
+          type: "text",
+          text: event.text,
+        });
+        break;
+
+      case "tool-call":
+        // Add tool call part
+        currentAssistantParts.push({
+          type: "tool-call",
+          toolCallId: event.toolCallId,
+          toolName: event.toolName,
+          args: event.args,
+        });
+        break;
+
+      case "tool-result":
+        // Add tool result part
+        currentAssistantParts.push({
+          type: "tool-result",
+          toolCallId: event.toolCallId,
+          toolName: event.toolName,
+          result: event.result,
+          isError: event.isError,
+        });
+        break;
+
+      // Ignore other event types for message rendering
+      // (they're handled separately by Elements components like Task, etc.)
+      default:
+        break;
+    }
+  }
+
+  // Add streaming text as in-progress assistant message
+  if (streamingText || currentAssistantParts.length > 0) {
+    if (streamingText) {
+      currentAssistantParts.push({ type: "text", text: streamingText });
+    }
+
+    // Determine status for current message
+    let messageStatus: UIStatus = "ready";
+    if (uiStatus === "streaming") {
+      messageStatus = "streaming";
+    } else if (uiStatus === "submitted") {
+      messageStatus = "submitted";
+    } else if (uiStatus === "error") {
+      messageStatus = "error";
+    }
+
+    messages.push({
+      id: generateMessageId(),
+      role: "assistant",
+      parts: currentAssistantParts,
+      status: messageStatus,
+    });
+  }
+
+  return messages;
+}
+
+/**
+ * Extracts tool parts from the most recent assistant message
+ *
+ * @param messages - UIMessage array
+ * @returns Array of tool parts (tool-call and tool-result)
+ */
+export function extractToolParts(messages: UIMessage[]) {
+  // Get last assistant message
+  const lastAssistantMessage = [...messages]
+    .reverse()
+    .find((m) => m.role === "assistant");
+
+  if (!lastAssistantMessage) {
+    return [];
+  }
+
+  // Extract only tool-related parts
+  return lastAssistantMessage.parts
+    .filter(
+      (part): part is Extract<UIMessagePart, { type: "tool-call" | "tool-result" }> =>
+        part.type === "tool-call" || part.type === "tool-result"
+    )
+    .map((part) => {
+      if (part.type === "tool-call") {
+        return {
+          type: "tool-call" as const,
+          toolCallId: part.toolCallId,
+          toolName: part.toolName,
+          args: part.args,
+        };
+      } else {
+        return {
+          type: "tool-result" as const,
+          toolCallId: part.toolCallId,
+          toolName: part.toolName,
+          result: part.result,
+          isError: part.isError,
+        };
+      }
+    });
+}
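To make the grouping behaviour concrete, here is a small, hypothetical event log pushed through `convertEventsToUIMessages`. The `AgentEventLog` shape (`id`, `type`, `event`, `timestamp`) mirrors what the hook later in this diff produces; the tool name and contents are illustrative, and the cast is only needed because `AgentEventLog` is not re-exported from the barrel:

```ts
import { convertEventsToUIMessages } from "deepagentsdk/elements";
import type { UIMessage } from "deepagentsdk/elements";

// Hypothetical log: one user turn, one tool round-trip, one text segment.
const events = [
  {
    id: "event-1",
    type: "user-message",
    event: { type: "user-message", content: "List my todos" },
    timestamp: new Date(),
  },
  {
    id: "event-2",
    type: "tool-call",
    event: { type: "tool-call", toolCallId: "call-1", toolName: "read_todos", args: {} },
    timestamp: new Date(),
  },
  {
    id: "event-3",
    type: "tool-result",
    event: { type: "tool-result", toolCallId: "call-1", toolName: "read_todos", result: [], isError: false },
    timestamp: new Date(),
  },
  {
    id: "event-4",
    type: "text-segment",
    event: { type: "text-segment", text: "You have no todos yet." },
    timestamp: new Date(),
  },
];

// With no streaming text and a "ready" status this yields two messages:
// a user message with one text part, and an assistant message whose parts
// are [tool-call, tool-result, text] in event order.
const messages: UIMessage[] = convertEventsToUIMessages(events as any, "", "ready");
console.log(messages.length); // 2
```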
package/src/adapters/elements/statusAdapter.ts
ADDED
@@ -0,0 +1,39 @@
+/**
+ * Status mapping adapter for AI SDK Elements
+ *
+ * Maps deepagentsdk AgentStatus to Elements UIStatus
+ */
+
+import type { AgentStatus } from "../../cli/hooks/useAgent.js";
+import type { UIStatus } from "./types.js";
+
+/**
+ * Maps deepagentsdk AgentStatus to Elements UIStatus
+ *
+ * @param agentStatus - The agent status from useAgent hook
+ * @returns The corresponding UI status for Elements components
+ *
+ * Mapping rules:
+ * - idle/done → ready (agent is waiting for input)
+ * - thinking/tool-call/subagent → submitted (agent is processing)
+ * - streaming → streaming (agent is generating text)
+ * - error → error (an error occurred)
+ */
+export function mapAgentStatusToUIStatus(
+  agentStatus: AgentStatus
+): UIStatus {
+  switch (agentStatus) {
+    case "thinking":
+    case "tool-call":
+    case "subagent":
+      return "submitted";
+    case "streaming":
+      return "streaming";
+    case "error":
+      return "error";
+    case "idle":
+    case "done":
+    default:
+      return "ready";
+  }
+}
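In practice the mapping is a direct lookup; for example (assuming the `deepagentsdk/elements` subpath introduced in this release):

```ts
import { mapAgentStatusToUIStatus } from "deepagentsdk/elements";

// "thinking", "tool-call", and "subagent" all collapse to "submitted".
const busy = mapAgentStatusToUIStatus("tool-call");   // "submitted"
const typing = mapAgentStatusToUIStatus("streaming"); // "streaming"
const done = mapAgentStatusToUIStatus("done");        // "ready"
console.log(busy, typing, done);
```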
package/src/adapters/elements/types.ts
ADDED
@@ -0,0 +1,97 @@
+/**
+ * Types for AI SDK Elements adapter
+ *
+ * These types align with Vercel AI SDK Elements UI component expectations.
+ * @see https://ai-sdk.dev/elements
+ */
+
+/**
+ * UI message part types that Elements components expect
+ */
+export type UIMessagePart =
+  | {
+      type: "text";
+      text: string;
+    }
+  | {
+      type: "tool-call";
+      toolCallId: string;
+      toolName: string;
+      args: unknown;
+    }
+  | {
+      type: "tool-result";
+      toolCallId: string;
+      toolName: string;
+      result: unknown;
+      isError?: boolean;
+    };
+
+/**
+ * UI message format expected by Elements Message component
+ */
+export interface UIMessage {
+  id: string;
+  role: "user" | "assistant";
+  parts: UIMessagePart[];
+  status: "submitted" | "streaming" | "ready" | "error";
+}
+
+/**
+ * UI status that Elements components use
+ */
+export type UIStatus = "submitted" | "streaming" | "ready" | "error";
+
+/**
+ * PromptInput component message format
+ */
+export interface PromptInputMessage {
+  text: string;
+}
+
+/**
+ * Tool parts extracted from current message for Tool component
+ */
+export interface ToolUIPart {
+  type: "tool-call" | "tool-result";
+  toolCallId: string;
+  toolName: string;
+  args?: unknown;
+  result?: unknown;
+  isError?: boolean;
+}
+
+/**
+ * Return type for useElementsAdapter hook
+ */
+export interface UseElementsAdapterReturn {
+  /**
+   * Messages formatted for Elements Message component
+   */
+  uiMessages: UIMessage[];
+
+  /**
+   * Current UI status for Elements components
+   */
+  uiStatus: UIStatus;
+
+  /**
+   * Tool parts from current message for Tool component
+   */
+  toolParts: ToolUIPart[];
+
+  /**
+   * Send a message (compatible with PromptInput onSubmit)
+   */
+  sendMessage: (message: PromptInputMessage) => Promise<void>;
+
+  /**
+   * Abort current streaming
+   */
+  abort: () => void;
+
+  /**
+   * Clear all messages
+   */
+  clear: () => void;
+}
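For orientation, a value that satisfies these types, e.g. an assistant message captured mid-stream after one tool round-trip, could look like this (all field values are illustrative):

```ts
import type { UIMessage } from "deepagentsdk/elements";

// Illustrative assistant message: one tool round-trip followed by text.
const example: UIMessage = {
  id: "msg-1700000000000-1",
  role: "assistant",
  parts: [
    { type: "tool-call", toolCallId: "call-1", toolName: "write_todos", args: { todos: [] } },
    { type: "tool-result", toolCallId: "call-1", toolName: "write_todos", result: "ok", isError: false },
    { type: "text", text: "Updated your todo list." },
  ],
  status: "streaming",
};
```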
package/src/adapters/elements/useElementsAdapter.ts
ADDED
@@ -0,0 +1,261 @@
+/**
+ * React hook adapter for AI SDK Elements
+ *
+ * Provides Elements-compatible interface for deepagentsdk
+ */
+
+import { useState, useCallback, useRef, useMemo } from "react";
+import { createDeepAgent } from "../../agent.js";
+import type { LanguageModel, ToolSet } from "ai";
+import type {
+  BackendProtocol,
+  DeepAgentState,
+  DeepAgentEvent,
+} from "../../types.js";
+import {
+  convertEventsToUIMessages,
+  extractToolParts,
+} from "./messageAdapter.js";
+import { mapAgentStatusToUIStatus } from "./statusAdapter.js";
+import type { UseElementsAdapterReturn, PromptInputMessage } from "./types.js";
+import type { AgentStatus, AgentEventLog } from "../../cli/hooks/useAgent.js";
+
+/**
+ * Options for useElementsAdapter hook
+ */
+export interface UseElementsAdapterOptions {
+  /**
+   * Language model instance from AI SDK provider
+   */
+  model: LanguageModel;
+
+  /**
+   * Backend for state management
+   */
+  backend: BackendProtocol;
+
+  /**
+   * Optional tools to provide to the agent
+   */
+  tools?: ToolSet;
+
+  /**
+   * Maximum number of tool loop iterations
+   * @default 10
+   */
+  maxSteps?: number;
+
+  /**
+   * System prompt for the agent
+   */
+  systemPrompt?: string;
+}
+
+let eventCounter = 0;
+
+function createEventId(): string {
+  return `event-${++eventCounter}`;
+}
+
+/**
+ * Hook that adapts deepagentsdk to work with AI SDK Elements UI components
+ *
+ * @param options - Configuration options
+ * @returns Elements-compatible interface
+ *
+ * @example
+ * ```tsx
+ * import { useElementsAdapter } from 'deepagentsdk/elements';
+ * import { Conversation, Message, PromptInput } from '@/components/ai-elements';
+ *
+ * function Chat() {
+ *   const { uiMessages, sendMessage } = useElementsAdapter({
+ *     model,
+ *     backend
+ *   });
+ *
+ *   return (
+ *     <Conversation>
+ *       {uiMessages.map(msg => <Message key={msg.id} from={msg.role} />)}
+ *       <PromptInput onSubmit={sendMessage} />
+ *     </Conversation>
+ *   );
+ * }
+ * ```
+ */
+export function useElementsAdapter(
+  options: UseElementsAdapterOptions
+): UseElementsAdapterReturn {
+  const { model, backend, tools, maxSteps = 10, systemPrompt } = options;
+
+  const [status, setStatus] = useState<AgentStatus>("idle");
+  const [streamingText, setStreamingText] = useState("");
+  const [events, setEvents] = useState<AgentEventLog[]>([]);
+  const [state, setState] = useState<DeepAgentState>({
+    todos: [],
+    files: {},
+  });
+
+  const abortControllerRef = useRef<AbortController | null>(null);
+  const accumulatedTextRef = useRef("");
+
+  // Create agent instance
+  const agentRef = useRef(
+    createDeepAgent({
+      model,
+      maxSteps,
+      systemPrompt,
+      backend,
+      tools,
+    })
+  );
+
+  const addEvent = useCallback(
+    (event: DeepAgentEvent | { type: "text-segment"; text: string }) => {
+      setEvents((prev) => [
+        ...prev,
+        {
+          id: createEventId(),
+          type: event.type,
+          event,
+          timestamp: new Date(),
+        },
+      ]);
+    },
+    []
+  );
+
+  // Flush accumulated text as a text-segment event
+  const flushTextSegment = useCallback(() => {
+    if (accumulatedTextRef.current.trim()) {
+      addEvent({
+        type: "text-segment",
+        text: accumulatedTextRef.current,
+      });
+      accumulatedTextRef.current = "";
+      setStreamingText("");
+    }
+  }, [addEvent]);
+
+  const sendMessage = async (message: PromptInputMessage): Promise<void> => {
+    if (!message.text.trim()) {
+      return; // Ignore empty messages
+    }
+
+    // Reset for new generation
+    setStatus("thinking");
+    setStreamingText("");
+    accumulatedTextRef.current = "";
+
+    // Add user message to events
+    addEvent({ type: "user-message", content: message.text });
+
+    // Create new abort controller
+    abortControllerRef.current = new AbortController();
+
+    try {
+      for await (const event of agentRef.current.streamWithEvents({
+        messages: [{ role: "user", content: message.text }],
+        state,
+        abortSignal: abortControllerRef.current.signal,
+      })) {
+        switch (event.type) {
+          case "text":
+            setStatus("streaming");
+            accumulatedTextRef.current += event.text;
+            setStreamingText(accumulatedTextRef.current);
+            break;
+
+          case "step-start":
+            if (event.stepNumber > 1) {
+              addEvent(event);
+            }
+            break;
+
+          case "tool-call":
+            flushTextSegment();
+            setStatus("tool-call");
+            addEvent(event);
+            break;
+
+          case "tool-result":
+            addEvent(event);
+            break;
+
+          case "todos-changed":
+            flushTextSegment();
+            setStatus("tool-call");
+            setState((prev) => ({ ...prev, todos: event.todos }));
+            addEvent(event);
+            break;
+
+          case "done":
+            flushTextSegment();
+            setStatus("done");
+            setState(event.state);
+            addEvent(event);
+            break;
+
+          case "error":
+            flushTextSegment();
+            setStatus("error");
+            addEvent(event);
+            break;
+
+          default:
+            addEvent(event);
+            break;
+        }
+      }
+
+      setStatus("idle");
+    } catch (err) {
+      if ((err as Error).name === "AbortError") {
+        flushTextSegment();
+        setStatus("idle");
+      } else {
+        flushTextSegment();
+        setStatus("error");
+      }
+    } finally {
+      abortControllerRef.current = null;
+    }
+  };
+
+  const abort = useCallback(() => {
+    if (abortControllerRef.current) {
+      abortControllerRef.current.abort();
+      setStatus("idle");
+    }
+  }, []);
+
+  const clear = useCallback(() => {
+    setEvents([]);
+    setStreamingText("");
+    setStatus("idle");
+  }, []);
+
+  // Convert agent status to UI status
+  const uiStatus = useMemo(
+    () => mapAgentStatusToUIStatus(status),
+    [status]
+  );
+
+  // Convert events to UI messages
+  const uiMessages = useMemo(
+    () => convertEventsToUIMessages(events, streamingText, uiStatus),
+    [events, streamingText, uiStatus]
+  );
+
+  // Extract tool parts from current message
+  const toolParts = useMemo(() => extractToolParts(uiMessages), [uiMessages]);
+
+  return {
+    uiMessages,
+    uiStatus,
+    toolParts,
+    sendMessage,
+    abort,
+    clear,
+  };
+}
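The JSDoc example above shows the intended pairing with Elements components; the sketch below instead renders the adapter output with plain JSX, so it does not assume any particular Elements component API. `model` and `backend` are assumed to be configured elsewhere in the app (the `./agentSetup.js` module is hypothetical):

```tsx
import { useElementsAdapter } from "deepagentsdk/elements";
// Hypothetical module that constructs an AI SDK model and a deepagentsdk backend.
import { model, backend } from "./agentSetup.js";

export function ChatDebugView() {
  const { uiMessages, uiStatus, toolParts, sendMessage, abort } =
    useElementsAdapter({ model, backend });

  return (
    <div>
      <p>status: {uiStatus}</p>
      {uiMessages.map((msg) => (
        <div key={msg.id}>
          <strong>{msg.role}</strong>
          {msg.parts.map((part, i) =>
            part.type === "text" ? (
              <p key={i}>{part.text}</p>
            ) : (
              <pre key={i}>{JSON.stringify(part, null, 2)}</pre>
            )
          )}
        </div>
      ))}
      <p>tool parts in last assistant message: {toolParts.length}</p>
      <button onClick={() => sendMessage({ text: "hello" })}>Send</button>
      <button onClick={abort}>Stop</button>
    </div>
  );
}
```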
package/src/agent.ts
CHANGED
@@ -961,22 +961,50 @@ export class DeepAgent {
     // Yield step start event
     yield { type: "step-start", stepNumber: 1 };
 
-    // Stream text
-    for await (const chunk of result.
+    // Stream all chunks (text, tool calls, etc.)
+    for await (const chunk of result.fullStream) {
       // First, yield any queued events from tool executions
       while (eventQueue.length > 0) {
        const event = eventQueue.shift()!;
        yield event;
-
+
        // If a step finished, yield the next step start
        if (event.type === "step-finish") {
          yield { type: "step-start", stepNumber: event.stepNumber + 1 };
        }
      }
 
-      //
-      if (chunk) {
-        yield { type: "text", text: chunk };
+      // Handle different chunk types from fullStream
+      if (chunk.type === "text-delta") {
+        yield { type: "text", text: chunk.text };
+      } else if (chunk.type === "tool-call") {
+        // Emit tool-call event for UI
+        // Note: chunk has input property (AI SDK v6), but we use args for our event type
+        yield {
+          type: "tool-call",
+          toolName: chunk.toolName,
+          toolCallId: chunk.toolCallId,
+          args: chunk.input,
+        } as DeepAgentEvent;
+      } else if (chunk.type === "tool-result") {
+        // Emit tool-result event for UI
+        // Note: chunk has output property (AI SDK v6), but we use result for our event type
+        yield {
+          type: "tool-result",
+          toolName: chunk.toolName,
+          toolCallId: chunk.toolCallId,
+          result: chunk.output,
+          isError: false,
+        } as DeepAgentEvent;
+      } else if (chunk.type === "tool-error") {
+        // Emit tool-result event with error flag for UI
+        yield {
+          type: "tool-result",
+          toolName: chunk.toolName,
+          toolCallId: chunk.toolCallId,
+          result: chunk.error,
+          isError: true,
+        } as DeepAgentEvent;
       }
     }
 
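Because `streamWithEvents` now reads from `fullStream`, tool activity is visible to non-React consumers as well. A minimal sketch of iterating the event stream directly, assuming `createDeepAgent` is exported from the package root (the adapter above imports it from `src/agent.ts`) and that `model`/`backend` come from a hypothetical `./agentSetup.js`; the prompt is illustrative:

```ts
import { createDeepAgent } from "deepagentsdk";
// Hypothetical module that constructs an AI SDK model and a deepagentsdk backend.
import { model, backend } from "./agentSetup.js";

const agent = createDeepAgent({ model, backend });

for await (const event of agent.streamWithEvents({
  messages: [{ role: "user", content: "Summarize README.md" }],
  state: { todos: [], files: {} },
})) {
  switch (event.type) {
    case "text":
      process.stdout.write(event.text); // streamed assistant text
      break;
    case "tool-call":
      console.log("\n→ tool", event.toolName, event.args);
      break;
    case "tool-result":
      console.log("←", event.toolName, event.isError ? "failed" : "ok");
      break;
    case "done":
      console.log("\nfinal state:", event.state);
      break;
  }
}
```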