@assistant-ui/mcp-docs-server 0.1.6 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.docs/organized/code-examples/with-ai-sdk-v5.md +12 -12
- package/.docs/organized/code-examples/with-cloud.md +16 -24
- package/.docs/organized/code-examples/with-external-store.md +6 -6
- package/.docs/organized/code-examples/with-ffmpeg.md +18 -20
- package/.docs/organized/code-examples/with-langgraph.md +6 -8
- package/.docs/organized/code-examples/with-parent-id-grouping.md +6 -6
- package/.docs/organized/code-examples/with-react-hook-form.md +16 -20
- package/.docs/raw/docs/api-reference/overview.mdx +1 -4
- package/.docs/raw/docs/getting-started.mdx +33 -33
- package/.docs/raw/docs/guides/Attachments.mdx +1 -102
- package/.docs/raw/docs/guides/ToolUI.mdx +3 -3
- package/.docs/raw/docs/guides/Tools.mdx +101 -84
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +134 -55
- package/.docs/raw/docs/runtimes/ai-sdk/v4-legacy.mdx +182 -0
- package/.docs/raw/docs/runtimes/langgraph/index.mdx +0 -1
- package/.docs/raw/docs/runtimes/langserve.mdx +9 -11
- package/package.json +6 -6
- package/.docs/organized/code-examples/local-ollama.md +0 -1135
- package/.docs/organized/code-examples/search-agent-for-e-commerce.md +0 -1721
- package/.docs/organized/code-examples/with-ai-sdk.md +0 -1082
- package/.docs/organized/code-examples/with-openai-assistants.md +0 -1175
- package/.docs/raw/docs/runtimes/ai-sdk/rsc.mdx +0 -226
- package/.docs/raw/docs/runtimes/ai-sdk/use-assistant-hook.mdx +0 -195
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat-hook.mdx +0 -138
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat-v5.mdx +0 -212
package/.docs/raw/docs/runtimes/ai-sdk/rsc.mdx
@@ -1,226 +0,0 @@
- ---
- title: Vercel AI SDK RSC Runtime
- ---
-
- ## Overview
-
- Integration with the Vercel AI SDK React Server Components. It allows streaming React components directly from the server.
- Integrates with OpenAI, Anthropic, Mistral, Perplexity, AWS Bedrock, Azure, Google Gemini, Hugging Face, Fireworks, Cohere, LangChain, Replicate, Ollama, and more.
-
- ## Example
-
- [RSC Example App](https://assistant-ui-rsc-example.vercel.app/)
-
- ## Getting Started
-
- import { Steps, Step } from "fumadocs-ui/components/steps";
- import { Callout } from "fumadocs-ui/components/callout";
-
- <Steps>
- <Step>
- ### Create a Next.JS project
-
- ```sh
- npx create-next-app@latest my-app
- cd my-app
- ```
-
- </Step>
- <Step>
-
- ### Install Vercel AI SDK and `@assistant-ui/react-ai-sdk`
-
- ```sh npm2yarn
- npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai zod nanoid
- ```
-
- </Step>
- <Step>
-
- ### Setup `actions.tsx`
-
- `@/app/actions.tsx`
-
- ```tsx
- "use server";
-
- import { createAI, getMutableAIState, streamUI } from "ai/rsc";
- import { openai } from "@ai-sdk/openai";
- import { ReactNode } from "react";
- import { z } from "zod";
- import { nanoid } from "nanoid";
-
- export interface ServerMessage {
-   role: "user" | "assistant";
-   content: string;
- }
-
- export interface ClientMessage {
-   id: string;
-   role: "user" | "assistant";
-   display: ReactNode;
- }
-
- export async function continueConversation(
-   input: string,
- ): Promise<ClientMessage> {
-   "use server";
-
-   const history = getMutableAIState();
-
-   const result = await streamUI({
-     model: openai("gpt-3.5-turbo"),
-     messages: [...history.get(), { role: "user", content: input }],
-     text: ({ content, done }) => {
-       if (done) {
-         history.done((messages: ServerMessage[]) => [
-           ...messages,
-           { role: "assistant", content },
-         ]);
-       }
-
-       return <div>{content}</div>;
-     },
-     tools: {
-       deploy: {
-         description: "Deploy repository to vercel",
-         parameters: z.object({
-           repositoryName: z
-             .string()
-             .describe("The name of the repository, example: vercel/ai-chatbot"),
-         }),
-         generate: async function* ({ repositoryName }) {
-           yield <div>Cloning repository {repositoryName}...</div>; // [!code highlight:5]
-           await new Promise((resolve) => setTimeout(resolve, 3000));
-           yield <div>Building repository {repositoryName}...</div>;
-           await new Promise((resolve) => setTimeout(resolve, 2000));
-           return <div>{repositoryName} deployed!</div>;
-         },
-       },
-     },
-   });
-
-   return {
-     id: nanoid(),
-     role: "assistant",
-     display: result.value,
-   };
- }
-
- export const AI = createAI<ServerMessage[], ClientMessage[]>({
-   actions: {
-     continueConversation,
-   },
-   initialAIState: [],
-   initialUIState: [],
- });
- ```
-
- </Step>
- <Step>
-
- ### Define a `MyRuntimeProvider` component
-
- `@/app/MyRuntimeProvider.tsx`
-
- ```tsx
- "use client";
-
- import {
-   type AppendMessage,
-   AssistantRuntimeProvider,
- } from "@assistant-ui/react";
- import { useVercelRSCRuntime } from "@assistant-ui/react-ai-sdk";
- import { useActions, useUIState } from "ai/rsc";
- import { nanoid } from "nanoid";
-
- import type { AI } from "./actions";
-
- export function MyRuntimeProvider({
-   children,
- }: Readonly<{
-   children: React.ReactNode;
- }>) {
-   const { continueConversation } = useActions();
-   const [messages, setMessages] = useUIState<typeof AI>();
-
-   const onNew = async (m: AppendMessage) => {
-     if (m.content[0]?.type !== "text")
-       throw new Error("Only text messages are supported");
-
-     const input = m.content[0].text;
-     setMessages((currentConversation) => [
-       ...currentConversation,
-       { id: nanoid(), role: "user", display: input },
-     ]);
-
-     const message = await continueConversation(input);
-
-     setMessages((currentConversation) => [...currentConversation, message]);
-   };
-
-   const runtime = useVercelRSCRuntime({ messages, onNew });
-
-   return (
-     <AssistantRuntimeProvider runtime={runtime}>
-       {children}
-     </AssistantRuntimeProvider>
-   );
- }
- ```
-
- </Step>
- <Step>
-
- ### Wrap your app in `AI` and `MyRuntimeProvider`
-
- `@/app/layout.tsx`
-
- ```tsx {1-2,12-13,19-20}
- import { AI } from '@/app/actions';
- import { MyRuntimeProvider } from '@/app/MyRuntimeProvider';
-
- ...
-
- export default function RootLayout({
-   children,
- }: Readonly<{
-   children: React.ReactNode;
- }>) {
-   return (
-     <AI>
-       <MyRuntimeProvider>
-         <html lang="en">
-           <body className={inter.className}>
-             {children}
-           </body>
-         </html>
-       </MyRuntimeProvider>
-     </AI>
-   )
- }
- ```
-
- </Step>
- </Steps>
-
- ## Set up RSCDisplay
-
- Pass the `RSCDisplay` component to your `MessagePrimitive.Parts`:
-
- ```tsx
- <MessagePrimitive.Parts components={{ Text: RSCDisplay }} />
- ```
-
- (if you are using react-ui: `<Thread assistantMessage={{ components: { Text: RSCDisplay } }} />`)
-
- ## Accessing AI SDK Messages
-
- You can use the `getExternalStoreMessages` utility to convert `ThreadMessage`s back to your message format.
-
- ```tsx
- const MyAssistantMessage = () => {
-   const myMessage = useMessage((m) => getExternalStoreMessages(m)[0]);
-   // ...
- };
- ```
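The removed RSC page only ever showed the `RSCDisplay` wiring as a one-line fragment. For reference, a minimal self-contained sketch of that pattern, assuming `RSCDisplay` is exported from `@assistant-ui/react-ai-sdk` (the package the removed page installs):

```tsx
"use client";

import { MessagePrimitive } from "@assistant-ui/react";
import { RSCDisplay } from "@assistant-ui/react-ai-sdk";

// Custom assistant message that renders the server-streamed React
// nodes instead of plain text parts.
export const MyAssistantMessage = () => (
  <MessagePrimitive.Root>
    <MessagePrimitive.Parts components={{ Text: RSCDisplay }} />
  </MessagePrimitive.Root>
);
```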
package/.docs/raw/docs/runtimes/ai-sdk/use-assistant-hook.mdx
@@ -1,195 +0,0 @@
- ---
- title: useAssistant Hook Integration
- ---
-
- ## Overview
-
- Integration with the Vercel AI SDK UI's `useAssistant` hook.
- This allows interaction with the OpenAI Assistants API.
-
- ## Getting Started
-
- import { Steps, Step } from "fumadocs-ui/components/steps";
-
- <Steps>
- <Step>
- ### Create a Next.JS project
-
- ```sh
- npx create-next-app@latest my-app
- cd my-app
- ```
-
- </Step>
- <Step>
-
- ### Install Vercel AI SDK and `@assistant-ui/react-ai-sdk`
-
- ```sh npm2yarn
- npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai openai
- ```
-
- </Step>
- <Step>
-
- ### Setup a backend route under `/api/assistant`
-
- `/app/api/assistant/route.ts`
-
- ```tsx
- import { AssistantResponse } from "ai";
- import OpenAI from "openai";
- import type { Run } from "openai/resources/beta/threads/runs/runs";
-
- const openai = new OpenAI();
-
- // Allow streaming responses up to 30 seconds
- export const maxDuration = 30;
-
- export async function POST(req: Request) {
-   // Parse the request body
-   const input: {
-     threadId: string | null;
-     message: string;
-   } = await req.json();
-
-   // Create a thread if needed
-   const threadId = input.threadId ?? (await openai.beta.threads.create({})).id;
-
-   // Add a message to the thread
-   const createdMessage = await openai.beta.threads.messages.create(threadId, {
-     role: "user",
-     content: input.message,
-   });
-
-   return AssistantResponse(
-     { threadId, messageId: createdMessage.id },
-     async ({ forwardStream, sendDataMessage }) => {
-       // Run the assistant on the thread
-       const runStream = openai.beta.threads.runs.stream(threadId, {
-         assistant_id:
-           process.env.ASSISTANT_ID ??
-           (() => {
-             throw new Error("ASSISTANT_ID is not set");
-           })(),
-       });
-
-       // forward run status would stream message deltas
-       let runResult: Run = await forwardStream(runStream);
-
-       // status can be: queued, in_progress, requires_action, cancelling, cancelled, failed, completed, or expired
-       while (
-         runResult?.status === "requires_action" &&
-         runResult.required_action?.type === "submit_tool_outputs"
-       ) {
-         const tool_outputs =
-           runResult.required_action.submit_tool_outputs.tool_calls.map(
-             (toolCall: any) => {
-               const parameters = JSON.parse(toolCall.function.arguments);
-
-               switch (toolCall.function.name) {
-                 // configure your tool calls here
-
-                 default:
-                   throw new Error(
-                     `Unknown tool call function: ${toolCall.function.name}`,
-                   );
-               }
-             },
-           );
-
-         runResult = await forwardStream(
-           openai.beta.threads.runs.submitToolOutputsStream(
-             threadId,
-             runResult.id,
-             { tool_outputs },
-           ),
-         );
-       }
-     },
-   );
- }
- ```
-
- </Step>
- <Step>
-
- ### Define a `MyRuntimeProvider` component
-
- `@/app/MyRuntimeProvider.tsx`
-
- ```tsx
- "use client";
-
- import { useAssistant } from "@ai-sdk/react";
- import { AssistantRuntimeProvider } from "@assistant-ui/react";
- import { useVercelUseAssistantRuntime } from "@assistant-ui/react-ai-sdk";
-
- export function MyRuntimeProvider({
-   children,
- }: Readonly<{
-   children: React.ReactNode;
- }>) {
-   const assistant = useAssistant({
-     api: "/api/assistant",
-   });
-
-   const runtime = useVercelUseAssistantRuntime(assistant);
-
-   return (
-     <AssistantRuntimeProvider runtime={runtime}>
-       {children}
-     </AssistantRuntimeProvider>
-   );
- }
- ```
-
- </Step>
- <Step>
-
- ### Wrap your app in `MyRuntimeProvider`
-
- `@/app/layout.tsx`
-
- ```tsx {1,11,17}
- import { MyRuntimeProvider } from '@/app/MyRuntimeProvider';
-
- ...
-
- export default function RootLayout({
-   children,
- }: Readonly<{
-   children: React.ReactNode;
- }>) {
-   return (
-     <MyRuntimeProvider>
-       <html lang="en">
-         <body className={inter.className}>
-           {children}
-         </body>
-       </html>
-     </MyRuntimeProvider>
-   )
- }
- ```
-
- </Step>
- </Steps>
-
- ## Accessing AI SDK Messages
-
- You can use `getExternalStoreMessages` utility to convert `ThreadMessage`s back to `Message`s from AI SDK.
-
- ```tsx
- const MyAssistantMessage = () => {
-   const aiSDKMessages = useMessage((m) => getExternalStoreMessages(m));
-   // ...
- };
-
- const WeatherToolUI = makeAssistantToolUI({
-   render: () => {
-     const aiSDKMessage = useMessagePart((p) => getExternalStoreMessages(p)[0]);
-     // ...
-   },
- });
- ```
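The removed `useAssistant` route left the tool-call `switch` as a stub (`// configure your tool calls here`). As a hedged sketch only: one possible branch, using a hypothetical `get_weather` function tool and a canned result (neither is part of the removed doc), returning the `{ tool_call_id, output }` shape that `submitToolOutputsStream` expects:

```tsx
import type { RequiredActionFunctionToolCall } from "openai/resources/beta/threads/runs/runs";

// Hypothetical handler for the map() callback above; "get_weather"
// and its canned payload are illustrative assumptions.
const handleToolCall = (toolCall: RequiredActionFunctionToolCall) => {
  const parameters = JSON.parse(toolCall.function.arguments);

  switch (toolCall.function.name) {
    case "get_weather":
      // A real implementation would call a weather service here.
      return {
        tool_call_id: toolCall.id,
        output: JSON.stringify({ city: parameters.city, tempC: 21 }),
      };
    default:
      throw new Error(
        `Unknown tool call function: ${toolCall.function.name}`,
      );
  }
};
```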
package/.docs/raw/docs/runtimes/ai-sdk/use-chat-hook.mdx
@@ -1,138 +0,0 @@
- ---
- title: useChat Hook Integration (Legacy)
- ---
-
- ## Overview
-
- Integration with the Vercel AI SDK UI's `useChat` hook.
- It allows integration with OpenAI, Anthropic, Mistral, Perplexity, AWS Bedrock, Azure, Google Gemini, Hugging Face, Fireworks, Cohere, LangChain, Replicate, Ollama, and more.
-
- ## Getting Started
-
- import { Steps, Step } from "fumadocs-ui/components/steps";
-
- <Steps>
- <Step>
- ### Create a Next.JS project
-
- ```sh
- npx create-next-app@latest my-app
- cd my-app
- ```
-
- </Step>
- <Step>
-
- ### Install Vercel AI SDK and `@assistant-ui/react`
-
- ```sh npm2yarn
- npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai
- ```
-
- </Step>
- <Step>
-
- ### Setup a backend route under `/api/chat`
-
- `@/app/api/chat/route.ts`
-
- ```tsx
- import { openai } from "@ai-sdk/openai";
- import { streamText } from "ai";
-
- export const maxDuration = 30;
-
- export async function POST(req: Request) {
-   const { messages } = await req.json();
-
-   const result = streamText({
-     model: openai("gpt-4o"),
-     messages: convertToCoreMessages(messages),
-   });
-
-   return result.toDataStreamResponse();
- }
- ```
-
- </Step>
- <Step>
-
- ### Define a `MyRuntimeProvider` component
-
- `@/app/MyRuntimeProvider.tsx`
-
- ```tsx
- "use client";
-
- import { useChat } from "@ai-sdk/react";
- import { AssistantRuntimeProvider } from "@assistant-ui/react";
- import { useVercelUseChatRuntime } from "@assistant-ui/react-ai-sdk";
-
- export function MyRuntimeProvider({
-   children,
- }: Readonly<{
-   children: React.ReactNode;
- }>) {
-   const chat = useChat({
-     api: "/api/chat",
-   });
-
-   const runtime = useVercelUseChatRuntime(chat);
-
-   return (
-     <AssistantRuntimeProvider runtime={runtime}>
-       {children}
-     </AssistantRuntimeProvider>
-   );
- }
- ```
-
- </Step>
- <Step>
-
- ### Wrap your app in `MyRuntimeProvider`
-
- `@/app/layout.tsx`
-
- ```tsx {1,11,17}
- import { MyRuntimeProvider } from '@/app/MyRuntimeProvider';
-
- ...
-
- export default function RootLayout({
-   children,
- }: Readonly<{
-   children: React.ReactNode;
- }>) {
-   return (
-     <MyRuntimeProvider>
-       <html lang="en">
-         <body className={inter.className}>
-           {children}
-         </body>
-       </html>
-     </MyRuntimeProvider>
-   )
- }
- ```
-
- </Step>
- </Steps>
-
- ## Accessing AI SDK Messages
-
- You can use the `getExternalStoreMessages` utility to convert `ThreadMessage`s back to `Message`s from AI SDK.
-
- ```tsx
- const MyAssistantMessage = () => {
-   const aiSDKMessages = useMessage((m) => getExternalStoreMessages(m));
-   // ...
- };
-
- const WeatherToolUI = makeAssistantToolUI({
-   render: () => {
-     const aiSDKMessage = useMessagePart((p) => getExternalStoreMessages(p)[0]);
-     // ...
-   },
- });
- ```
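Note that the removed legacy `/api/chat` route called `convertToCoreMessages` without importing it. For anyone still pinned to this setup, a corrected sketch of the same route; the only change is importing `convertToCoreMessages` from "ai", where it ships alongside `streamText`:

```tsx
import { openai } from "@ai-sdk/openai";
import { convertToCoreMessages, streamText } from "ai";

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export async function POST(req: Request) {
  const { messages } = await req.json();

  // Convert UI messages to core messages before streaming
  const result = streamText({
    model: openai("gpt-4o"),
    messages: convertToCoreMessages(messages),
  });

  return result.toDataStreamResponse();
}
```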