@assistant-ui/mcp-docs-server 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/.docs/organized/code-examples/with-ai-sdk-v5.md +15 -13
  2. package/.docs/organized/code-examples/with-cloud.md +19 -25
  3. package/.docs/organized/code-examples/with-external-store.md +9 -7
  4. package/.docs/organized/code-examples/with-ffmpeg.md +21 -21
  5. package/.docs/organized/code-examples/with-langgraph.md +72 -46
  6. package/.docs/organized/code-examples/with-parent-id-grouping.md +9 -7
  7. package/.docs/organized/code-examples/with-react-hook-form.md +19 -21
  8. package/.docs/raw/docs/api-reference/integrations/react-data-stream.mdx +194 -0
  9. package/.docs/raw/docs/api-reference/overview.mdx +7 -4
  10. package/.docs/raw/docs/api-reference/primitives/Composer.mdx +31 -0
  11. package/.docs/raw/docs/api-reference/primitives/Message.mdx +108 -3
  12. package/.docs/raw/docs/api-reference/primitives/Thread.mdx +59 -0
  13. package/.docs/raw/docs/api-reference/primitives/ThreadList.mdx +128 -0
  14. package/.docs/raw/docs/api-reference/primitives/ThreadListItem.mdx +160 -0
  15. package/.docs/raw/docs/api-reference/runtimes/AssistantRuntime.mdx +0 -11
  16. package/.docs/raw/docs/api-reference/runtimes/ComposerRuntime.mdx +3 -3
  17. package/.docs/raw/docs/copilots/assistant-frame.mdx +397 -0
  18. package/.docs/raw/docs/getting-started.mdx +53 -52
  19. package/.docs/raw/docs/guides/Attachments.mdx +7 -115
  20. package/.docs/raw/docs/guides/ToolUI.mdx +3 -3
  21. package/.docs/raw/docs/guides/Tools.mdx +152 -92
  22. package/.docs/raw/docs/guides/context-api.mdx +574 -0
  23. package/.docs/raw/docs/migrations/v0-12.mdx +125 -0
  24. package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +134 -55
  25. package/.docs/raw/docs/runtimes/ai-sdk/v4-legacy.mdx +182 -0
  26. package/.docs/raw/docs/runtimes/custom/local.mdx +16 -3
  27. package/.docs/raw/docs/runtimes/data-stream.mdx +287 -0
  28. package/.docs/raw/docs/runtimes/langgraph/index.mdx +0 -1
  29. package/.docs/raw/docs/runtimes/langserve.mdx +9 -11
  30. package/.docs/raw/docs/runtimes/pick-a-runtime.mdx +5 -0
  31. package/.docs/raw/docs/ui/ThreadList.mdx +54 -16
  32. package/dist/{chunk-L4K23SWI.js → chunk-NVNFQ5ZO.js} +4 -1
  33. package/dist/index.js +1 -1
  34. package/dist/prepare-docs/prepare.js +1 -1
  35. package/dist/stdio.js +1 -1
  36. package/package.json +7 -7
  37. package/.docs/organized/code-examples/local-ollama.md +0 -1135
  38. package/.docs/organized/code-examples/search-agent-for-e-commerce.md +0 -1721
  39. package/.docs/organized/code-examples/with-ai-sdk.md +0 -1082
  40. package/.docs/organized/code-examples/with-openai-assistants.md +0 -1175
  41. package/.docs/raw/docs/concepts/architecture.mdx +0 -19
  42. package/.docs/raw/docs/concepts/runtime-layer.mdx +0 -163
  43. package/.docs/raw/docs/concepts/why.mdx +0 -9
  44. package/.docs/raw/docs/runtimes/ai-sdk/rsc.mdx +0 -226
  45. package/.docs/raw/docs/runtimes/ai-sdk/use-assistant-hook.mdx +0 -195
  46. package/.docs/raw/docs/runtimes/ai-sdk/use-chat-hook.mdx +0 -138
  47. package/.docs/raw/docs/runtimes/ai-sdk/use-chat-v5.mdx +0 -212
package/.docs/raw/docs/concepts/architecture.mdx
@@ -1,19 +0,0 @@
1
- ---
2
- title: Architecture
3
- ---
4
-
5
- import Image from "next/image";
6
- import architecture from "@/assets/docs/architecture.png";
7
-
8
- ### Architecture
9
-
10
- `assistant-ui` consists of two parts, **_Runtime_** and **_UI Components_**.
11
-
12
- <Image
13
- src={architecture}
14
- alt="Architecture diagram, UI components connected to the runtime layer and the runtime layer connected to LLM and tools"
15
- height={300}
16
- className="mx-auto my-2 dark:hue-rotate-180 dark:invert"
17
- />
18
-
19
- The Runtime and UI Components are set up independently, and both are required.
package/.docs/raw/docs/concepts/runtime-layer.mdx
@@ -1,163 +0,0 @@
1
- ---
2
- title: Runtime Layer
3
- ---
4
-
5
- assistant-ui components are full-stack components. This means that they include not only the UI presentation, but also the logic to communicate with an external system. This logic is handled by the runtime layer and its APIs.
6
-
7
- You interact with the runtime layer in two ways:
8
-
9
- - defining a runtime for your app
10
- - using the runtime APIs to interact with the runtime
11
-
12
- ## Defining a runtime
13
-
14
- assistant-ui ships with two low-level runtimes:
15
-
16
- - `useLocalRuntime`
17
- - `useExternalStoreRuntime`
18
-
19
- Both of these runtimes let you implement your own runtime. The conceptual difference between the two is that `useLocalRuntime` takes ownership of the data layer, while `useExternalStoreRuntime` does not.
20
-
21
- If you have a stateful API to integrate, use `useExternalStoreRuntime`; if you have a stateless API to integrate, use `useLocalRuntime`.
22
-
23
- ### Higher level runtimes
24
-
25
- For many services and APIs, assistant-ui provides deeper integrations. These are built with the two low-level runtimes mentioned above.
26
-
27
- - `useChatRuntime`: Connect to Vercel AI SDK backends
28
- - `useVercelUseChatRuntime`: Integrate with Vercel AI SDK's `useChat` hook
29
- - `useVercelUseAssistantRuntime`: Integrate with Vercel AI SDK's `useAssistant` hook (OpenAI Assistants API)
30
- - `useVercelRSCRuntime`: Integrate with Vercel AI SDK React Server Components
31
- - `useLangGraphRuntime`: Connect to LangGraph Cloud
32
- - ...
33
-
34
- ### Runtime Providers
35
-
36
- The following components accept a `runtime` prop:
37
-
38
- - `AssistantRuntimeProvider`
39
- - `Thread`
40
-
41
- These components put the Runtime in the React Context, so that all child components can access the runtime.
42
-
43
- ### Runtime Adapters
44
-
45
- Most runtimes accept additional adapters to configure extra integrations:
46
-
47
- - ChatModelAdapter: Configures the backend API
48
- - AttachmentAdapter: Configures the file/media attachment API
49
- - SpeechSynthesisAdapter: Configures the speech API
50
- - FeedbackAdapter: Configures the feedback API
51
- - SuggestionAdapter: Configures dynamic suggestion generation based on conversation context
52
-
53
- ## Using the runtime APIs
54
-
55
- The same API used by the assistant-ui components is also available to you. This allows you to build your own UI components and integrate them with the runtime layer.
56
-
57
- ### Runtime Hierarchy
58
-
59
- The runtime API is nested as follows:
60
-
61
- import { File, Folder, Files } from "fumadocs-ui/components/files";
62
-
63
- <Files>
64
- <Folder name="AssistantRuntime" defaultOpen>
65
- <Folder name="ThreadListRuntime" defaultOpen>
66
- <Folder name="ThreadRuntime" defaultOpen>
67
- <Folder name="MessageRuntime" defaultOpen>
68
- <Folder
69
- name="MessagePartRuntime (Text / Reasoning / Image / Audio / Tool-Call / UI)"
70
- defaultOpen
71
- ></Folder>
72
- <Folder name="MessageAttachmentRuntime" defaultOpen></Folder>
73
- <Folder name="EditComposerRuntime" defaultOpen>
74
- <Folder name="EditComposerAttachmentRuntime" defaultOpen></Folder>
75
- </Folder>
76
- </Folder>
77
- <Folder name="ThreadComposerRuntime" defaultOpen>
78
- <Folder name="ThreadComposerAttachmentRuntime" defaultOpen></Folder>
79
- </Folder>
80
- </Folder>
81
- </Folder>
82
- </Folder>
83
- </Files>
84
-
85
- The AssistantRuntime (which encompasses everything) is sometimes simply called `Runtime`.
86
-
87
- ### Runtime Context Provider Components
88
-
89
- The following components provide the runtime APIs:
90
-
91
- ```tsx
92
- // provides AssistantRuntime, ThreadListRuntime, ThreadRuntime, ComposerRuntime (ThreadComposer)
93
- <AssistantRuntimeProvider runtime={runtime} />
94
-
95
- // renders every message, provides MessageRuntime, ComposerRuntime (EditComposer)
96
- <ThreadPrimitive.Messages components={{ Message, ... }} />
97
-
98
- // renders every message part, provides MessagePartRuntime
99
- <MessagePrimitive.Parts components={{ Text, Reasoning, Image, Audio, UI, tools }} />
100
-
101
- // renders every attachment, provides AttachmentRuntime (Thread or EditComposer)
102
- <ComposerPrimitive.Attachments components={{ Attachment, ... }} />
103
-
104
- // renders every attachment, provides AttachmentRuntime (Message)
105
- <MessagePrimitive.Attachments components={{ Attachment, ... }} />
106
-
107
- // provides a custom TextMessagePartRuntime
108
- <TextMessagePartProvider text="Hello!" />
109
- ```
110
-
111
- ### Accessing runtime APIs
112
-
113
- You can access the runtime APIs with React hooks:
114
-
115
- ```tsx
116
- const runtime = useAssistantRuntime();
117
- const threadRuntime = useThreadRuntime();
118
- const messageRuntime = useMessageRuntime();
119
- const messagePartRuntime = useMessagePartRuntime();
120
-
121
- // thread manager has no separate hook (1:1 relationship with assistant runtime)
122
- const threadListRuntime = useAssistantRuntime().threads;
123
-
124
- // composer runtime is multi-use
125
- const composerRuntime = useComposerRuntime(); // refers to edit composer if available, otherwise thread composer
126
-
127
- // thread composer has no separate hook (1:1 relationship with thread runtime)
128
- const threadComposer = useThreadRuntime().composer;
129
-
130
- // edit composer has no separate hook (1:1 relationship with message runtime)
131
- const editComposerRuntime = useMessageRuntime().composer;
132
-
133
- // attachment runtime is multi-use
134
- const attachmentRuntime = useAttachmentRuntime(); // refers to the closest attachment runtime
135
- const threadComposerAttachmentRuntime = useThreadComposerAttachmentRuntime();
136
- const editComposerAttachmentRuntime = useEditComposerAttachmentRuntime();
137
- const messageAttachmentRuntime = useMessageAttachmentRuntime();
138
- ```
139
-
140
- ### Accessing runtime state
141
-
142
- Most runtimes also expose their state through two methods, `getState` and `subscribe`. The following helper hooks subscribe to the state so that your component updates when the state changes:
143
-
144
- ```tsx
145
- useThreadList(); // get thread manager state
146
- useThread(); // get thread state
147
- useMessage(); // get message state
148
- useMessagePart(); // get message part state
149
- useComposer(); // get composer state
150
- useThreadComposer(); // get thread composer state
151
- useEditComposer(); // get edit composer state
152
- useAttachment(); // get attachment state
153
- useThreadComposerAttachment(); // get thread composer attachment state
154
- useEditComposerAttachment(); // get edit composer attachment state
155
- useMessageAttachment(); // get message attachment state
156
- ```
157
-
158
- You might not want to subscribe to every update. In that case, pass a selector callback to the hook:
159
-
160
- ```tsx
161
- // only subscribe to role changes
162
- const role = useMessage((state) => state.role);
163
- ```
package/.docs/raw/docs/concepts/why.mdx
@@ -1,9 +0,0 @@
1
- ---
2
- title: Why assistant-ui?
3
- ---
4
-
5
- assistant-ui is a collection of powerful, modular primitives to build AI chat interfaces.
6
-
7
- The modular approach means that you can incrementally adopt assistant-ui (e.g., use the backend connectors and bring your own components, or use the frontend components and bring your own backend).
8
- You can also partially opt out of assistant-ui whenever you hit any limitation in the library.
9
-
package/.docs/raw/docs/runtimes/ai-sdk/rsc.mdx
@@ -1,226 +0,0 @@
1
- ---
2
- title: Vercel AI SDK RSC Runtime
3
- ---
4
-
5
- ## Overview
6
-
7
- Integration with the Vercel AI SDK's React Server Components (RSC) support. It allows streaming React components directly from the server.
8
- Integrates with OpenAI, Anthropic, Mistral, Perplexity, AWS Bedrock, Azure, Google Gemini, Hugging Face, Fireworks, Cohere, LangChain, Replicate, Ollama, and more.
9
-
10
- ## Example
11
-
12
- [RSC Example App](https://assistant-ui-rsc-example.vercel.app/)
13
-
14
- ## Getting Started
15
-
16
- import { Steps, Step } from "fumadocs-ui/components/steps";
17
- import { Callout } from "fumadocs-ui/components/callout";
18
-
19
- <Steps>
20
- <Step>
21
- ### Create a Next.js project
22
-
23
- ```sh
24
- npx create-next-app@latest my-app
25
- cd my-app
26
- ```
27
-
28
- </Step>
29
- <Step>
30
-
31
- ### Install Vercel AI SDK and `@assistant-ui/react-ai-sdk`
32
-
33
- ```sh npm2yarn
34
- npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai zod nanoid
35
- ```
36
-
37
- </Step>
38
- <Step>
39
-
40
- ### Set up `actions.tsx`
41
-
42
- `@/app/actions.tsx`
43
-
44
- ```tsx
45
- "use server";
46
-
47
- import { createAI, getMutableAIState, streamUI } from "ai/rsc";
48
- import { openai } from "@ai-sdk/openai";
49
- import { ReactNode } from "react";
50
- import { z } from "zod";
51
- import { nanoid } from "nanoid";
52
-
53
- export interface ServerMessage {
54
- role: "user" | "assistant";
55
- content: string;
56
- }
57
-
58
- export interface ClientMessage {
59
- id: string;
60
- role: "user" | "assistant";
61
- display: ReactNode;
62
- }
63
-
64
- export async function continueConversation(
65
- input: string,
66
- ): Promise<ClientMessage> {
67
- "use server";
68
-
69
- const history = getMutableAIState();
70
-
71
- const result = await streamUI({
72
- model: openai("gpt-3.5-turbo"),
73
- messages: [...history.get(), { role: "user", content: input }],
74
- text: ({ content, done }) => {
75
- if (done) {
76
- history.done((messages: ServerMessage[]) => [
77
- ...messages,
78
- { role: "assistant", content },
79
- ]);
80
- }
81
-
82
- return <div>{content}</div>;
83
- },
84
- tools: {
85
- deploy: {
86
- description: "Deploy repository to vercel",
87
- parameters: z.object({
88
- repositoryName: z
89
- .string()
90
- .describe("The name of the repository, example: vercel/ai-chatbot"),
91
- }),
92
- generate: async function* ({ repositoryName }) {
93
- yield <div>Cloning repository {repositoryName}...</div>; // [!code highlight:5]
94
- await new Promise((resolve) => setTimeout(resolve, 3000));
95
- yield <div>Building repository {repositoryName}...</div>;
96
- await new Promise((resolve) => setTimeout(resolve, 2000));
97
- return <div>{repositoryName} deployed!</div>;
98
- },
99
- },
100
- },
101
- });
102
-
103
- return {
104
- id: nanoid(),
105
- role: "assistant",
106
- display: result.value,
107
- };
108
- }
109
-
110
- export const AI = createAI<ServerMessage[], ClientMessage[]>({
111
- actions: {
112
- continueConversation,
113
- },
114
- initialAIState: [],
115
- initialUIState: [],
116
- });
117
- ```
118
-
119
- </Step>
120
- <Step>
121
-
122
- ### Define a `MyRuntimeProvider` component
123
-
124
- `@/app/MyRuntimeProvider.tsx`
125
-
126
- ```tsx
127
- "use client";
128
-
129
- import {
130
- type AppendMessage,
131
- AssistantRuntimeProvider,
132
- } from "@assistant-ui/react";
133
- import { useVercelRSCRuntime } from "@assistant-ui/react-ai-sdk";
134
- import { useActions, useUIState } from "ai/rsc";
135
- import { nanoid } from "nanoid";
136
-
137
- import type { AI } from "./actions";
138
-
139
- export function MyRuntimeProvider({
140
- children,
141
- }: Readonly<{
142
- children: React.ReactNode;
143
- }>) {
144
- const { continueConversation } = useActions();
145
- const [messages, setMessages] = useUIState<typeof AI>();
146
-
147
- const onNew = async (m: AppendMessage) => {
148
- if (m.content[0]?.type !== "text")
149
- throw new Error("Only text messages are supported");
150
-
151
- const input = m.content[0].text;
152
- setMessages((currentConversation) => [
153
- ...currentConversation,
154
- { id: nanoid(), role: "user", display: input },
155
- ]);
156
-
157
- const message = await continueConversation(input);
158
-
159
- setMessages((currentConversation) => [...currentConversation, message]);
160
- };
161
-
162
- const runtime = useVercelRSCRuntime({ messages, onNew });
163
-
164
- return (
165
- <AssistantRuntimeProvider runtime={runtime}>
166
- {children}
167
- </AssistantRuntimeProvider>
168
- );
169
- }
170
- ```
171
-
172
- </Step>
173
- <Step>
174
-
175
- ### Wrap your app in `AI` and `MyRuntimeProvider`
176
-
177
- `@/app/layout.tsx`
178
-
179
- ```tsx {1-2,12-13,19-20}
180
- import { AI } from '@/app/actions';
181
- import { MyRuntimeProvider } from '@/app/MyRuntimeProvider';
182
-
183
- ...
184
-
185
- export default function RootLayout({
186
- children,
187
- }: Readonly<{
188
- children: React.ReactNode;
189
- }>) {
190
- return (
191
- <AI>
192
- <MyRuntimeProvider>
193
- <html lang="en">
194
- <body className={inter.className}>
195
- {children}
196
- </body>
197
- </html>
198
- </MyRuntimeProvider>
199
- </AI>
200
- )
201
- }
202
- ```
203
-
204
- </Step>
205
- </Steps>
206
-
207
- ## Set up RSCDisplay
208
-
209
- Pass the `RSCDisplay` component to your `MessagePrimitive.Parts`:
210
-
211
- ```tsx
212
- <MessagePrimitive.Parts components={{ Text: RSCDisplay }} />
213
- ```
214
-
215
- (if you are using react-ui: `<Thread assistantMessage={{ components: { Text: RSCDisplay } }} />`)
216
-
217
- ## Accessing AI SDK Messages
218
-
219
- You can use the `getExternalStoreMessages` utility to convert `ThreadMessage`s back to your message format.
220
-
221
- ```tsx
222
- const MyAssistantMessage = () => {
223
- const myMessage = useMessage((m) => getExternalStoreMessages(m)[0]);
224
- // ...
225
- };
226
- ```
package/.docs/raw/docs/runtimes/ai-sdk/use-assistant-hook.mdx
@@ -1,195 +0,0 @@
1
- ---
2
- title: useAssistant Hook Integration
3
- ---
4
-
5
- ## Overview
6
-
7
- Integration with the Vercel AI SDK UI's `useAssistant` hook.
8
- This allows interaction with the OpenAI Assistants API.
9
-
10
- ## Getting Started
11
-
12
- import { Steps, Step } from "fumadocs-ui/components/steps";
13
-
14
- <Steps>
15
- <Step>
16
- ### Create a Next.JS project
17
-
18
- ```sh
19
- npx create-next-app@latest my-app
20
- cd my-app
21
- ```
22
-
23
- </Step>
24
- <Step>
25
-
26
- ### Install Vercel AI SDK and `@assistant-ui/react-ai-sdk`
27
-
28
- ```sh npm2yarn
29
- npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai openai
30
- ```
31
-
32
- </Step>
33
- <Step>
34
-
35
- ### Setup a backend route under `/api/assistant`
36
-
37
- `/app/api/assistant/route.ts`
38
-
39
- ```tsx
40
- import { AssistantResponse } from "ai";
41
- import OpenAI from "openai";
42
- import type { Run } from "openai/resources/beta/threads/runs/runs";
43
-
44
- const openai = new OpenAI();
45
-
46
- // Allow streaming responses up to 30 seconds
47
- export const maxDuration = 30;
48
-
49
- export async function POST(req: Request) {
50
- // Parse the request body
51
- const input: {
52
- threadId: string | null;
53
- message: string;
54
- } = await req.json();
55
-
56
- // Create a thread if needed
57
- const threadId = input.threadId ?? (await openai.beta.threads.create({})).id;
58
-
59
- // Add a message to the thread
60
- const createdMessage = await openai.beta.threads.messages.create(threadId, {
61
- role: "user",
62
- content: input.message,
63
- });
64
-
65
- return AssistantResponse(
66
- { threadId, messageId: createdMessage.id },
67
- async ({ forwardStream, sendDataMessage }) => {
68
- // Run the assistant on the thread
69
- const runStream = openai.beta.threads.runs.stream(threadId, {
70
- assistant_id:
71
- process.env.ASSISTANT_ID ??
72
- (() => {
73
- throw new Error("ASSISTANT_ID is not set");
74
- })(),
75
- });
76
-
77
- // forward run status would stream message deltas
78
- let runResult: Run = await forwardStream(runStream);
79
-
80
- // status can be: queued, in_progress, requires_action, cancelling, cancelled, failed, completed, or expired
81
- while (
82
- runResult?.status === "requires_action" &&
83
- runResult.required_action?.type === "submit_tool_outputs"
84
- ) {
85
- const tool_outputs =
86
- runResult.required_action.submit_tool_outputs.tool_calls.map(
87
- (toolCall: any) => {
88
- const parameters = JSON.parse(toolCall.function.arguments);
89
-
90
- switch (toolCall.function.name) {
91
- // configure your tool calls here
92
-
93
- default:
94
- throw new Error(
95
- `Unknown tool call function: ${toolCall.function.name}`,
96
- );
97
- }
98
- },
99
- );
100
-
101
- runResult = await forwardStream(
102
- openai.beta.threads.runs.submitToolOutputsStream(
103
- threadId,
104
- runResult.id,
105
- { tool_outputs },
106
- ),
107
- );
108
- }
109
- },
110
- );
111
- }
112
- ```
113
-
114
- </Step>
115
- <Step>
116
-
117
- ### Define a `MyRuntimeProvider` component
118
-
119
- `@/app/MyRuntimeProvider.tsx`
120
-
121
- ```tsx
122
- "use client";
123
-
124
- import { useAssistant } from "@ai-sdk/react";
125
- import { AssistantRuntimeProvider } from "@assistant-ui/react";
126
- import { useVercelUseAssistantRuntime } from "@assistant-ui/react-ai-sdk";
127
-
128
- export function MyRuntimeProvider({
129
- children,
130
- }: Readonly<{
131
- children: React.ReactNode;
132
- }>) {
133
- const assistant = useAssistant({
134
- api: "/api/assistant",
135
- });
136
-
137
- const runtime = useVercelUseAssistantRuntime(assistant);
138
-
139
- return (
140
- <AssistantRuntimeProvider runtime={runtime}>
141
- {children}
142
- </AssistantRuntimeProvider>
143
- );
144
- }
145
- ```
146
-
147
- </Step>
148
- <Step>
149
-
150
- ### Wrap your app in `MyRuntimeProvider`
151
-
152
- `@/app/layout.tsx`
153
-
154
- ```tsx {1,11,17}
155
- import { MyRuntimeProvider } from '@/app/MyRuntimeProvider';
156
-
157
- ...
158
-
159
- export default function RootLayout({
160
- children,
161
- }: Readonly<{
162
- children: React.ReactNode;
163
- }>) {
164
- return (
165
- <MyRuntimeProvider>
166
- <html lang="en">
167
- <body className={inter.className}>
168
- {children}
169
- </body>
170
- </html>
171
- </MyRuntimeProvider>
172
- )
173
- }
174
- ```
175
-
176
- </Step>
177
- </Steps>
178
-
179
- ## Accessing AI SDK Messages
180
-
181
- You can use `getExternalStoreMessages` utility to convert `ThreadMessage`s back to `Message`s from AI SDK.
182
-
183
- ```tsx
184
- const MyAssistantMessage = () => {
185
- const aiSDKMessages = useMessage((m) => getExternalStoreMessages(m));
186
- // ...
187
- };
188
-
189
- const WeatherToolUI = makeAssistantToolUI({
190
- render: () => {
191
- const aiSDKMessage = useMessagePart((p) => getExternalStoreMessages(p)[0]);
192
- // ...
193
- },
194
- });
195
- ```