@assistant-ui/mcp-docs-server 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. package/.docs/organized/code-examples/local-ollama.md +10 -10
  2. package/.docs/organized/code-examples/search-agent-for-e-commerce.md +14 -14
  3. package/.docs/organized/code-examples/{with-vercel-ai-rsc.md → with-ai-sdk-v5.md} +259 -229
  4. package/.docs/organized/code-examples/with-ai-sdk.md +11 -10
  5. package/.docs/organized/code-examples/with-cloud.md +9 -8
  6. package/.docs/organized/code-examples/with-external-store.md +6 -6
  7. package/.docs/organized/code-examples/with-ffmpeg.md +15 -16
  8. package/.docs/organized/code-examples/with-langgraph.md +9 -8
  9. package/.docs/organized/code-examples/with-openai-assistants.md +9 -9
  10. package/.docs/organized/code-examples/with-parent-id-grouping.md +1377 -0
  11. package/.docs/organized/code-examples/with-react-hook-form.md +14 -15
  12. package/.docs/raw/docs/about-assistantui.mdx +9 -0
  13. package/.docs/raw/docs/cloud/persistence/ai-sdk.mdx +89 -32
  14. package/.docs/raw/docs/cloud/persistence/langgraph.mdx +187 -32
  15. package/.docs/raw/docs/guides/Latex.mdx +107 -0
  16. package/.docs/raw/docs/guides/ToolUI.mdx +0 -32
  17. package/.docs/raw/docs/runtimes/ai-sdk/use-chat-v5.mdx +212 -0
  18. package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +1 -1
  19. package/.docs/raw/docs/runtimes/custom/local.mdx +1 -1
  20. package/.docs/raw/docs/runtimes/mastra/separate-server-integration.mdx +2 -2
  21. package/.docs/raw/docs/ui/PartGrouping.mdx +540 -0
  22. package/.docs/raw/docs/ui/ToolGroup.mdx +96 -0
  23. package/dist/{chunk-JS4PWCVA.js → chunk-L4K23SWI.js} +1 -1
  24. package/dist/index.js +1 -1
  25. package/dist/stdio.js +1 -1
  26. package/package.json +7 -7
package/.docs/raw/docs/runtimes/ai-sdk/use-chat-v5.mdx
@@ -0,0 +1,212 @@
+ ---
+ title: AI SDK v5 with useChatRuntime
+ ---
+
+ import { Callout } from "fumadocs-ui/components/callout";
+
+
+ ## Overview
+
+ Integration with the Vercel AI SDK v5 using the new `useChatRuntime` hook from `@assistant-ui/react-ai-sdk`.
+ This provides a streamlined way to integrate AI SDK v5 features including the new streamText API and improved TypeScript support.
+
+ ## Getting Started
+
+ import { Steps, Step } from "fumadocs-ui/components/steps";
+
+ <Steps>
+ <Step>
+ ### Create a Next.js project
+
+ ```sh
+ npx create-next-app@latest my-app
+ cd my-app
+ ```
+
+ </Step>
+ <Step>
+
+ ### Install AI SDK v5 and `@assistant-ui/react`
+
+ ```sh npm2yarn
+ npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai
+ ```
+
+ </Step>
+ <Step>
+
+ ### Set up a backend route under `/api/chat`
+
+ `@/app/api/chat/route.ts`
+
+ ```tsx
+ import { openai } from "@ai-sdk/openai";
+ import {
+   streamText,
+   UIMessage,
+   convertToModelMessages,
+   tool,
+ } from "ai";
+ import { frontendTools } from "@assistant-ui/assistant-stream/ai-sdk";
+ import { z } from "zod";
+
+ // Allow streaming responses up to 30 seconds
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages, system, tools }: {
+     messages: UIMessage[];
+     system?: string; // System message forwarded from AssistantChatTransport
+     tools?: any; // Frontend tools forwarded from AssistantChatTransport
+   } = await req.json();
+
+   const result = streamText({
+     model: openai("gpt-4o"),
+     system, // Use the system message from the frontend if provided
+     messages: convertToModelMessages(messages),
+     tools: {
+       // Wrap frontend tools with frontendTools helper
+       ...frontendTools(tools),
+       // Backend tools
+       get_current_weather: tool({
+         description: "Get the current weather",
+         inputSchema: z.object({
+           city: z.string(),
+         }),
+         execute: async ({ city }) => {
+           return `The weather in ${city} is sunny`;
+         },
+       }),
+     },
+   });
+
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ </Step>
+ <Step>
+
+ ### Wrap your app with `AssistantRuntimeProvider` using `useChatRuntime`
+
+ `@/app/page.tsx`
+
+ ```tsx
+ "use client";
+
+ import { Thread } from "@/components/assistant-ui/thread";
+ import { AssistantRuntimeProvider } from "@assistant-ui/react";
+ import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+
+ export default function Home() {
+   const runtime = useChatRuntime();
+
+   return (
+     <AssistantRuntimeProvider runtime={runtime}>
+       <div className="h-full">
+         <Thread />
+       </div>
+     </AssistantRuntimeProvider>
+   );
+ }
+ ```
+
+ </Step>
+ </Steps>
+
+ ## API Reference
+
+ ### useChatRuntime
+
+ Creates a runtime directly with AI SDK v5's `useChat` hook integration.
+
+ ```tsx
+ import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+
+ const runtime = useChatRuntime({
+   api: "/api/chat",
+   // All standard useChat options are supported
+ });
+ ```
+
+ <Callout type="info">
+ By default, `useChatRuntime` uses `AssistantChatTransport` which automatically forwards system messages and frontend tools to your backend API. This enables your backend to receive the full context from the Assistant UI.
+ </Callout>
+
+ ### Custom Transport Configuration
+
+ If you need to customize the transport configuration:
+
+ ```tsx
+ import { DefaultChatTransport } from "ai";
+ import { AssistantChatTransport } from "@assistant-ui/react-ai-sdk";
+ import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+
+ // Example 1: Custom API URL while keeping system/tools forwarding
+ const runtime = useChatRuntime({
+   transport: new AssistantChatTransport({
+     api: "/my-custom-api/chat" // Custom API URL with forwarding
+   })
+ });
+
+ // Example 2: Disable system/tools forwarding
+ const runtime = useChatRuntime({
+   api: "/api/chat",
+   transport: new DefaultChatTransport() // Standard AI SDK transport without forwarding
+ });
+ ```
+
+ <Callout type="warning">
+ When customizing the API URL, you must explicitly use `AssistantChatTransport` if you want to keep frontend system messages and tools forwarding. Simply passing `api` to `useChatRuntime` will use the default transport configuration.
+ </Callout>
+
+ #### Transport Options
+
+ - **`AssistantChatTransport`** (default): Automatically forwards system messages and frontend tools from the Assistant UI context to your backend
+ - **`DefaultChatTransport`**: Standard AI SDK transport without automatic forwarding
+
+ ### Using Frontend Tools with `frontendTools`
+
+ When using `AssistantChatTransport`, frontend tools are forwarded to your backend. Use the `frontendTools` helper to properly integrate them:
+
+ ```tsx
+ import { frontendTools } from "@assistant-ui/assistant-stream/ai-sdk";
+
+ export async function POST(req: Request) {
+   const { messages, system, tools } = await req.json();
+
+   const result = streamText({
+     model: openai("gpt-4o"),
+     system,
+     messages: convertToModelMessages(messages),
+     tools: {
+       // Wrap frontend tools with the helper
+       ...frontendTools(tools),
+       // Your backend tools
+       myBackendTool: tool({
+         // ...
+       }),
+     },
+   });
+
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ The `frontendTools` helper converts frontend tool definitions to the AI SDK format and ensures they are properly handled by the streaming response.
+
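+ As a rough illustration of the client side (a sketch assuming the `tool` and `makeAssistantTool` helpers from `@assistant-ui/react`; check the Tool UI guide for the exact API in your version), a frontend tool that gets forwarded to the route above might be defined like this:
+
+ ```tsx
+ import { makeAssistantTool, tool } from "@assistant-ui/react";
+ import { z } from "zod";
+
+ // Hypothetical example tool: it executes in the browser; only its schema is
+ // forwarded to the backend by AssistantChatTransport.
+ const getUserLocation = tool({
+   parameters: z.object({}),
+   execute: async () => ({ city: "Berlin" }),
+ });
+
+ export const GetUserLocationTool = makeAssistantTool({
+   ...getUserLocation,
+   toolName: "get_user_location",
+ });
+
+ // Render <GetUserLocationTool /> anywhere inside <AssistantRuntimeProvider>.
+ ```
+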
+ ### useAISDKRuntime (Advanced)
+
+ For advanced use cases where you need direct access to the `useChat` hook:
+
+ ```tsx
+ import { useChat } from "@ai-sdk/react";
+ import { useAISDKRuntime } from "@assistant-ui/react-ai-sdk";
+
+ const chat = useChat();
+ const runtime = useAISDKRuntime(chat);
+ ```
+
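+ As a rough sketch of how this fits together (assuming AI SDK v5's `DefaultChatTransport` and the same `Thread` component used in the setup above), the resulting runtime is passed to the provider exactly like `useChatRuntime`:
+
+ ```tsx
+ "use client";
+
+ import { useChat } from "@ai-sdk/react";
+ import { DefaultChatTransport } from "ai";
+ import { AssistantRuntimeProvider } from "@assistant-ui/react";
+ import { useAISDKRuntime } from "@assistant-ui/react-ai-sdk";
+ import { Thread } from "@/components/assistant-ui/thread";
+
+ export default function Home() {
+   // Full control over the useChat instance (custom transport, callbacks, etc.)
+   const chat = useChat({
+     transport: new DefaultChatTransport({ api: "/api/chat" }),
+   });
+   const runtime = useAISDKRuntime(chat);
+
+   return (
+     <AssistantRuntimeProvider runtime={runtime}>
+       <Thread />
+     </AssistantRuntimeProvider>
+   );
+ }
+ ```
+
+ Note that `DefaultChatTransport` does not forward frontend tools or system messages, so this variant behaves like a plain AI SDK setup.
+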
+ ## Example
+
+ For a complete example, check out the [AI SDK v5 example](https://github.com/assistant-ui/assistant-ui/tree/main/examples/with-ai-sdk-v5) in our repository.
package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx
@@ -38,7 +38,7 @@ npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai
 
  ```tsx
  import { openai } from "@ai-sdk/openai";
- import { streamText } from "ai";
+ import { convertToCoreMessages, streamText } from "ai";
 
  export const maxDuration = 30;
 
package/.docs/raw/docs/runtimes/custom/local.mdx
@@ -169,7 +169,7 @@ Use `LocalRuntime` if you need:
  ### Use the Thread component
 
  ```tsx title="app/page.tsx"
- import { Thread } from "@assistant-ui/react";
+ import { Thread } from 'components/assistant-ui/thread.tsx'
 
  export default function Page() {
    return <Thread />;
package/.docs/raw/docs/runtimes/mastra/separate-server-integration.mdx
@@ -140,13 +140,13 @@ Open the main page file in your Assistant UI frontend project (usually `app/page
  ```tsx {10} title="app/page.tsx"
  "use client";
  import { Thread } from "@/components/assistant-ui/thread";
- import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+ import { useDataStreamRuntime } from "@assistant-ui/react-data-stream";
  import { AssistantRuntimeProvider } from "@assistant-ui/react";
  import { ThreadList } from "@/components/assistant-ui/thread-list";
 
  export default function Home() {
    // Point the runtime to the Mastra server endpoint
-   const runtime = useChatRuntime({
+   const runtime = useDataStreamRuntime({
      api: "http://localhost:4111/api/agents/chefAgent/stream",
    });