@assistant-ui/mcp-docs-server 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.docs/organized/code-examples/with-ai-sdk-v5.md +15 -13
- package/.docs/organized/code-examples/with-cloud.md +19 -25
- package/.docs/organized/code-examples/with-external-store.md +9 -7
- package/.docs/organized/code-examples/with-ffmpeg.md +21 -21
- package/.docs/organized/code-examples/with-langgraph.md +72 -46
- package/.docs/organized/code-examples/with-parent-id-grouping.md +9 -7
- package/.docs/organized/code-examples/with-react-hook-form.md +19 -21
- package/.docs/raw/docs/api-reference/integrations/react-data-stream.mdx +194 -0
- package/.docs/raw/docs/api-reference/overview.mdx +7 -4
- package/.docs/raw/docs/api-reference/primitives/Composer.mdx +31 -0
- package/.docs/raw/docs/api-reference/primitives/Message.mdx +108 -3
- package/.docs/raw/docs/api-reference/primitives/Thread.mdx +59 -0
- package/.docs/raw/docs/api-reference/primitives/ThreadList.mdx +128 -0
- package/.docs/raw/docs/api-reference/primitives/ThreadListItem.mdx +160 -0
- package/.docs/raw/docs/api-reference/runtimes/AssistantRuntime.mdx +0 -11
- package/.docs/raw/docs/api-reference/runtimes/ComposerRuntime.mdx +3 -3
- package/.docs/raw/docs/copilots/assistant-frame.mdx +397 -0
- package/.docs/raw/docs/getting-started.mdx +53 -52
- package/.docs/raw/docs/guides/Attachments.mdx +7 -115
- package/.docs/raw/docs/guides/ToolUI.mdx +3 -3
- package/.docs/raw/docs/guides/Tools.mdx +152 -92
- package/.docs/raw/docs/guides/context-api.mdx +574 -0
- package/.docs/raw/docs/migrations/v0-12.mdx +125 -0
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +134 -55
- package/.docs/raw/docs/runtimes/ai-sdk/v4-legacy.mdx +182 -0
- package/.docs/raw/docs/runtimes/custom/local.mdx +16 -3
- package/.docs/raw/docs/runtimes/data-stream.mdx +287 -0
- package/.docs/raw/docs/runtimes/langgraph/index.mdx +0 -1
- package/.docs/raw/docs/runtimes/langserve.mdx +9 -11
- package/.docs/raw/docs/runtimes/pick-a-runtime.mdx +5 -0
- package/.docs/raw/docs/ui/ThreadList.mdx +54 -16
- package/dist/{chunk-L4K23SWI.js → chunk-NVNFQ5ZO.js} +4 -1
- package/dist/index.js +1 -1
- package/dist/prepare-docs/prepare.js +1 -1
- package/dist/stdio.js +1 -1
- package/package.json +7 -7
- package/.docs/organized/code-examples/local-ollama.md +0 -1135
- package/.docs/organized/code-examples/search-agent-for-e-commerce.md +0 -1721
- package/.docs/organized/code-examples/with-ai-sdk.md +0 -1082
- package/.docs/organized/code-examples/with-openai-assistants.md +0 -1175
- package/.docs/raw/docs/concepts/architecture.mdx +0 -19
- package/.docs/raw/docs/concepts/runtime-layer.mdx +0 -163
- package/.docs/raw/docs/concepts/why.mdx +0 -9
- package/.docs/raw/docs/runtimes/ai-sdk/rsc.mdx +0 -226
- package/.docs/raw/docs/runtimes/ai-sdk/use-assistant-hook.mdx +0 -195
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat-hook.mdx +0 -138
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat-v5.mdx +0 -212
@@ -1,11 +1,13 @@
 ---
-title:
+title: AI SDK v5
 ---

+import { Callout } from "fumadocs-ui/components/callout";
+
 ## Overview

-Integration with the Vercel AI SDK
-
+Integration with the Vercel AI SDK v5 using the new `useChatRuntime` hook from `@assistant-ui/react-ai-sdk`.
+This provides a streamlined way to integrate AI SDK v5 features including the new streamText API and improved TypeScript support.

 ## Getting Started

@@ -23,7 +25,7 @@ cd my-app
 </Step>
 <Step>

-### Install
+### Install AI SDK v5 and `@assistant-ui/react`

 ```sh npm2yarn
 npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai
@@ -38,99 +40,176 @@ npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai

 ```tsx
 import { openai } from "@ai-sdk/openai";
-import {
+import { streamText, UIMessage, convertToModelMessages, tool } from "ai";
+import { frontendTools } from "@assistant-ui/assistant-stream/ai-sdk";
+import { z } from "zod";

+// Allow streaming responses up to 30 seconds
 export const maxDuration = 30;

 export async function POST(req: Request) {
-  const {
+  const {
+    messages,
+    system,
+    tools,
+  }: {
+    messages: UIMessage[];
+    system?: string; // System message forwarded from AssistantChatTransport
+    tools?: any; // Frontend tools forwarded from AssistantChatTransport
+  } = await req.json();

   const result = streamText({
     model: openai("gpt-4o"),
-
+    system, // Use the system message from the frontend if provided
+    messages: convertToModelMessages(messages),
+    tools: {
+      // Wrap frontend tools with frontendTools helper
+      ...frontendTools(tools),
+      // Backend tools
+      get_current_weather: tool({
+        description: "Get the current weather",
+        inputSchema: z.object({
+          city: z.string(),
+        }),
+        execute: async ({ city }) => {
+          return `The weather in ${city} is sunny`;
+        },
+      }),
+    },
   });

-  return result.
+  return result.toUIMessageStreamResponse();
 }
 ```

 </Step>
 <Step>

-###
+### Wrap your app with `AssistantRuntimeProvider` using `useChatRuntime`

-`@/app/
+`@/app/page.tsx`

 ```tsx
 "use client";

-import {
+import { Thread } from "@/components/assistant-ui/thread";
 import { AssistantRuntimeProvider } from "@assistant-ui/react";
 import { useChatRuntime } from "@assistant-ui/react-ai-sdk";

-export function
-
-}: Readonly<{
-  children: React.ReactNode;
-}>) {
-  const runtime = useChatRuntime({
-    api: "/api/chat",
-  });
+export default function Home() {
+  const runtime = useChatRuntime();

   return (
     <AssistantRuntimeProvider runtime={runtime}>
-
+      <div className="h-full">
+        <Thread />
+      </div>
     </AssistantRuntimeProvider>
   );
 }
 ```

 </Step>
-
+</Steps>

-
+## API Reference

-
+### useChatRuntime

-
-import { MyRuntimeProvider } from '@/app/MyRuntimeProvider';
+Creates a runtime directly with AI SDK v5's `useChat` hook integration.

-
+```tsx
+import { useChatRuntime } from "@assistant-ui/react-ai-sdk";

-
-
-
-
-}>) {
-  return (
-    <MyRuntimeProvider>
-      <html lang="en">
-        <body className={inter.className}>
-          {children}
-        </body>
-      </html>
-    </MyRuntimeProvider>
-  )
-}
+const runtime = useChatRuntime({
+  api: "/api/chat",
+  // All standard useChat options are supported
+});
 ```

-
-
+<Callout type="info">
+  By default, `useChatRuntime` uses `AssistantChatTransport` which automatically
+  forwards system messages and frontend tools to your backend API. This enables
+  your backend to receive the full context from the Assistant UI.
+</Callout>

-
+### Custom Transport Configuration

-
+If you need to customize the transport configuration:

 ```tsx
-
-
-
-
-
-const
-
-
-
-
+import { DefaultChatTransport } from "ai";
+import { AssistantChatTransport } from "@assistant-ui/react-ai-sdk";
+import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+
+// Example 1: Custom API URL while keeping system/tools forwarding
+const runtime = useChatRuntime({
+  transport: new AssistantChatTransport({
+    api: "/my-custom-api/chat", // Custom API URL with forwarding
+  }),
+});
+
+// Example 2: Disable system/tools forwarding
+const runtime = useChatRuntime({
+  api: "/api/chat",
+  transport: new DefaultChatTransport(), // Standard AI SDK transport without forwarding
 });
 ```
+
+<Callout type="warning">
+  When customizing the API URL, you must explicitly use `AssistantChatTransport`
+  if you want to keep frontend system messages and tools forwarding. Simply
+  passing `api` to `useChatRuntime` will use the default transport
+  configuration.
+</Callout>
+
+#### Transport Options
+
+- **`AssistantChatTransport`** (default): Automatically forwards system messages and frontend tools from the Assistant UI context to your backend
+- **`DefaultChatTransport`**: Standard AI SDK transport without automatic forwarding
+
+### Using Frontend Tools with `frontendTools`
+
+When using `AssistantChatTransport`, frontend tools are forwarded to your backend. Use the `frontendTools` helper to properly integrate them:
+
+```tsx
+import { frontendTools } from "@assistant-ui/assistant-stream/ai-sdk";
+
+export async function POST(req: Request) {
+  const { messages, system, tools } = await req.json();
+
+  const result = streamText({
+    model: openai("gpt-4o"),
+    system,
+    messages: convertToModelMessages(messages),
+    tools: {
+      // Wrap frontend tools with the helper
+      ...frontendTools(tools),
+      // Your backend tools
+      myBackendTool: tool({
+        // ...
+      }),
+    },
+  });
+
+  return result.toUIMessageStreamResponse();
+}
+```
+
+The `frontendTools` helper converts frontend tool definitions to the AI SDK format and ensures they are properly handled by the streaming response.
+
+### useAISDKRuntime (Advanced)
+
+For advanced use cases where you need direct access to the `useChat` hook:
+
+```tsx
+import { useChat } from "@ai-sdk/react";
+import { useAISDKRuntime } from "@assistant-ui/react-ai-sdk";
+
+const chat = useChat();
+const runtime = useAISDKRuntime(chat);
+```
+
+## Example
+
+For a complete example, check out the [AI SDK v5 example](https://github.com/assistant-ui/assistant-ui/tree/main/examples/with-ai-sdk-v5) in our repository.
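
The `useAISDKRuntime` section in the hunk above shows only the two-line hook wiring. The following sketch is not part of the package diff; it is one plausible end-to-end component, assuming AI SDK v5's `useChat` from `@ai-sdk/react` accepts a `transport` option and that `Thread` lives at the shadcn-style path used elsewhere in these docs. The endpoint and file layout are illustrative.

```tsx
"use client";

// Sketch only — not part of the package diff. Assumes AI SDK v5's `useChat`
// from "@ai-sdk/react" with a `transport` option; the endpoint and the
// Thread import path are illustrative.
import { useChat } from "@ai-sdk/react";
import { DefaultChatTransport } from "ai";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useAISDKRuntime } from "@assistant-ui/react-ai-sdk";
import { Thread } from "@/components/assistant-ui/thread";

export default function Home() {
  // Direct access to the AI SDK chat object (messages, status, sendMessage, ...)
  const chat = useChat({
    transport: new DefaultChatTransport({ api: "/api/chat" }),
  });

  // Bridge the chat object into an assistant-ui runtime
  const runtime = useAISDKRuntime(chat);

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      <Thread />
    </AssistantRuntimeProvider>
  );
}
```

Because this path bypasses `AssistantChatTransport`, the automatic system-message and frontend-tool forwarding described above does not apply.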
@@ -0,0 +1,182 @@
+---
+title: AI SDK v4 (Legacy)
+---
+
+import { Callout } from "fumadocs-ui/components/callout";
+
+## Overview
+
+If you're using AI SDK v4 (legacy), you can integrate with assistant-ui using the `@assistant-ui/react-data-stream` package and its `useDataStreamRuntime` hook. This provides a compatible runtime that works with AI SDK v4's streaming responses.
+
+<Callout type="warning">
+  AI SDK v4 is now considered legacy. We recommend upgrading to [AI SDK
+  v5](/docs/runtimes/ai-sdk/use-chat) for improved features and better
+  TypeScript support. This documentation is provided for projects that haven't
+  migrated yet.
+</Callout>
+
+## Getting Started
+
+import { Steps, Step } from "fumadocs-ui/components/steps";
+
+### Option 1: Using @assistant-ui/react-data-stream (Recommended)
+
+<Steps>
+<Step>
+### Install the required packages
+
+Install `@assistant-ui/react-data-stream` alongside assistant-ui and AI SDK v4:
+
+```sh npm2yarn
+npm install @assistant-ui/react @assistant-ui/react-data-stream ai@^4
+```
+
+</Step>
+<Step>
+
+### Setup your backend route
+
+Create an API route that uses AI SDK v4's streaming capabilities:
+
+`@/app/api/chat/route.ts`
+
+```tsx
+import { streamText } from "ai";
+import { openai } from "@ai-sdk/openai";
+
+export async function POST(req: Request) {
+  const { messages } = await req.json();
+
+  const result = streamText({
+    model: openai("gpt-4"),
+    messages,
+  });
+
+  return result.toDataStreamResponse();
+}
+```
+
+</Step>
+<Step>
+
+### Use `useDataStreamRuntime` in your component
+
+`@/app/page.tsx`
+
+```tsx
+"use client";
+
+import { Thread } from "@assistant-ui/react";
+import { AssistantRuntimeProvider } from "@assistant-ui/react";
+import { useDataStreamRuntime } from "@assistant-ui/react-data-stream";
+
+export default function Home() {
+  const runtime = useDataStreamRuntime({
+    api: "/api/chat",
+  });
+
+  return (
+    <AssistantRuntimeProvider runtime={runtime}>
+      <div className="h-full">
+        <Thread />
+      </div>
+    </AssistantRuntimeProvider>
+  );
+}
+```
+
+</Step>
+</Steps>
+
+### Option 2: Using @assistant-ui/react-ai-sdk v0.1.10 (Legacy)
+
+Alternatively, you can use the older version of the AI SDK integration package, though this version is no longer actively maintained:
+
+```sh npm2yarn
+npm install @assistant-ui/react @assistant-ui/react-ai-sdk@0.1.10 ai@^4
+```
+
+<Callout type="warning">
+  Version 0.1.10 of `@assistant-ui/react-ai-sdk` is no longer actively
+  maintained. We recommend using the `@assistant-ui/react-data-stream` approach
+  or upgrading to AI SDK v5 for continued support.
+</Callout>
+
+With this legacy version, you would use the `useVercelUseChatRuntime` hook:
+
+```tsx
+"use client";
+
+import { useChat } from "ai/react";
+import { Thread } from "@assistant-ui/react";
+import { AssistantRuntimeProvider } from "@assistant-ui/react";
+import { useVercelUseChatRuntime } from "@assistant-ui/react-ai-sdk";
+
+export default function Home() {
+  const chat = useChat({
+    api: "/api/chat",
+  });
+  const runtime = useVercelUseChatRuntime(chat);
+
+  return (
+    <AssistantRuntimeProvider runtime={runtime}>
+      <div className="h-full">
+        <Thread />
+      </div>
+    </AssistantRuntimeProvider>
+  );
+}
+```
+
+## API Reference
+
+### `useDataStreamRuntime`
+
+The `useDataStreamRuntime` hook creates a runtime compatible with assistant-ui from AI SDK v4's streaming responses.
+
+```tsx
+import { useDataStreamRuntime } from "@assistant-ui/react-data-stream";
+
+const runtime = useDataStreamRuntime({
+  api: "/api/chat",
+  // Options similar to AI SDK v4's useChat
+  initialMessages: [],
+  onFinish: (message) => {
+    console.log("Message completed:", message);
+  },
+  onError: (error) => {
+    console.error("Chat error:", error);
+  },
+});
+```
+
+#### Options
+
+The `useDataStreamRuntime` hook accepts options similar to AI SDK v4's `useChat` hook:
+
+- **`api`**: The API endpoint for chat requests (required)
+- **`initialMessages`**: Initial messages to populate the chat
+- **`onFinish`**: Callback when a message completes streaming
+- **`onError`**: Callback for handling errors
+- **`headers`**: Additional headers to send with requests
+- **`body`**: Additional body parameters to send with requests
+
+<Callout type="info">
+  The `useDataStreamRuntime` API is designed to be familiar to developers
+  already using AI SDK v4's `useChat` hook, making migration straightforward.
+</Callout>
+
+## Migration to AI SDK v5
+
+When you're ready to upgrade to AI SDK v5:
+
+1. Replace `@assistant-ui/react-data-stream` with `@assistant-ui/react-ai-sdk`
+2. Update your backend to use AI SDK v5's `streamText` API
+3. Switch from `useDataStreamRuntime` to `useChatRuntime`
+4. Take advantage of improved TypeScript support and automatic system/tool forwarding
+
+See our [AI SDK v5 documentation](/docs/runtimes/ai-sdk/use-chat) for the complete migration guide.
+
+## Example
+
+For a working example with AI SDK v4, you can adapt the patterns from our [AI SDK examples](https://github.com/assistant-ui/assistant-ui/tree/main/examples) using the `@assistant-ui/react-data-stream` package instead of the v5 integration.
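
The migration list at the end of the new v4-legacy page above is prose only. As a rough sketch, assuming the v5 APIs shown earlier in this diff (`convertToModelMessages`, `toUIMessageStreamResponse`, `useChatRuntime`), the before/after shapes look approximately like this; the model and route names are illustrative.

```tsx
// Rough migration sketch — not part of the package diff.

// Backend route, AI SDK v4 (before):
//   const result = streamText({ model: openai("gpt-4"), messages });
//   return result.toDataStreamResponse();

// Backend route, AI SDK v5 (after):
import { openai } from "@ai-sdk/openai";
import { streamText, convertToModelMessages, type UIMessage } from "ai";

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();

  const result = streamText({
    model: openai("gpt-4o"), // illustrative model choice
    messages: convertToModelMessages(messages),
  });

  return result.toUIMessageStreamResponse();
}

// Frontend component:
//   v4: const runtime = useDataStreamRuntime({ api: "/api/chat" });
//   v5: const runtime = useChatRuntime(); // from "@assistant-ui/react-ai-sdk"
```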
@@ -414,6 +414,8 @@ import {
   type RemoteThreadListAdapter,
   type ThreadHistoryAdapter,
 } from "@assistant-ui/react";
+import { createAssistantStream } from "assistant-stream";
+import { useMemo } from "react";

 // Implement your custom adapter with proper message persistence
 const myDatabaseAdapter: RemoteThreadListAdapter = {
@@ -453,9 +455,16 @@ const myDatabaseAdapter: RemoteThreadListAdapter = {

   async generateTitle(remoteId, messages) {
     // Generate title from messages using your AI
-    const
-
-
+    const newTitle = await generateTitle(messages);
+
+    // Persist the title in your DB
+    await db.threads.update(remoteId, { title: newTitle });
+
+    // IMPORTANT: Return an AssistantStream so the UI updates
+    return createAssistantStream((controller) => {
+      controller.appendText(newTitle);
+      controller.close();
+    });
   },
 };

@@ -528,6 +537,10 @@ export function MyRuntimeProvider({ children }) {
 }
 ```

+<Callout type="info" title="Returning a title from generateTitle">
+  The `generateTitle` method must return an <code>AssistantStream</code> containing the title text. The easiest, type-safe way is to use <code>createAssistantStream</code> and call <code>controller.appendText(newTitle)</code> followed by <code>controller.close()</code>. Returning a raw <code>ReadableStream</code> won't update the thread list UI.
+</Callout>
+
 #### Understanding the Architecture

 <Callout type="info">