@assistant-ui/mcp-docs-server 0.1.5 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -199,14 +199,12 @@ export default function RootLayout({
  "use client";

  import { Thread } from "@/components/assistant-ui/thread";
- import { useChat } from "@ai-sdk/react";
  import { AssistantRuntimeProvider } from "@assistant-ui/react";
- import { useAISDKRuntime } from "@assistant-ui/react-ai-sdk";
+ import { useChatRuntime } from "@assistant-ui/react-ai-sdk";

  export default function Home() {
- const chat = useChat();
-
- const runtime = useAISDKRuntime(chat);
+ // Using the new simplified useChatRuntime hook
+ const runtime = useChatRuntime();

  return (
  <AssistantRuntimeProvider runtime={runtime}>
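For reference, the resulting `app/page.tsx` after this hunk would look roughly as follows; the `<Thread />` child and closing JSX are not visible in the hunk and are assumed from the assistant-ui template:

```tsx
"use client";

import { Thread } from "@/components/assistant-ui/thread";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useChatRuntime } from "@assistant-ui/react-ai-sdk";

export default function Home() {
  // useChatRuntime replaces the previous useChat() + useAISDKRuntime() pair
  const runtime = useChatRuntime();

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      {/* Assumed from the template; not shown in the hunk above */}
      <Thread />
    </AssistantRuntimeProvider>
  );
}
```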
@@ -243,6 +241,7 @@ export default function Home() {
  },
  "iconLibrary": "lucide"
  }
+
  ```

  ## components/assistant-ui/markdown-text.tsx
@@ -1086,21 +1085,25 @@ This example demonstrates how to use `@assistant-ui/react-ai-sdk-v5` with the Ve
  ## Getting Started

  1. Install dependencies:
+
  ```bash
  npm install
  ```

  2. Set up your environment variables:
+
  ```bash
  cp .env.example .env.local
  ```

  Add your Anthropic API key to `.env.local`:
+
  ```
  ANTHROPIC_API_KEY=your-api-key-here
  ```

  3. Run the development server:
+
  ```bash
  npm run dev
  ```
@@ -1110,13 +1113,45 @@ Open [http://localhost:3000](http://localhost:3000) to see the result.
  ## Key Features

  - Uses the new AI SDK v5 with `@ai-sdk/react` and `@ai-sdk/anthropic`
- - Integrates with `@assistant-ui/react` using the v5-compatible runtime
+ - Integrates with `@assistant-ui/react` using the new `useChatRuntime` hook
  - No RSC support (client-side only)
- - Uses the `useChat` hook from AI SDK v5
+ - Simplified integration with the `useChatRuntime` hook that wraps AI SDK v5's `useChat`
+ - Automatically uses `AssistantChatTransport` to pass system messages and frontend tools to the backend
+
+ ## Custom Transport Configuration
+
+ By default, `useChatRuntime` uses `AssistantChatTransport` which automatically forwards system messages and frontend tools to the backend.
+
+ ### Custom API URL with Forwarding
+
+ When customizing the API URL, you must explicitly use `AssistantChatTransport` to keep system/tools forwarding:
+
+ ```typescript
+ import { AssistantChatTransport } from "@assistant-ui/react-ai-sdk";
+
+ const runtime = useChatRuntime({
+ transport: new AssistantChatTransport({
+ api: "/my-custom-api/chat", // Custom URL with system/tools forwarding
+ }),
+ });
+ ```
+
+ ### Disable System/Tools Forwarding
+
+ To use the standard AI SDK transport without forwarding:
+
+ ```typescript
+ import { DefaultChatTransport } from "ai";
+
+ const runtime = useChatRuntime({
+ transport: new DefaultChatTransport(), // No system/tools forwarding
+ });
+ ```

  ## API Route

  The API route at `/api/chat` uses the new `streamText` function from AI SDK v5 to handle chat completions.
+
  ```

  ## tsconfig.json
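The README above only describes the `/api/chat` route in prose. A minimal sketch of such a route with AI SDK v5 is shown below; the Anthropic provider and the model id are assumptions based on this example's dependencies, not the exact file from the package:

```tsx
// app/api/chat/route.ts — illustrative sketch only
import { anthropic } from "@ai-sdk/anthropic";
import { convertToModelMessages, streamText, type UIMessage } from "ai";

export const maxDuration = 30;

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();

  const result = streamText({
    // Model id is an assumption; use whichever Anthropic model the example ships with
    model: anthropic("claude-3-5-sonnet-latest"),
    messages: convertToModelMessages(messages),
  });

  return result.toUIMessageStreamResponse();
}
```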
@@ -273,6 +273,7 @@ export default function Home() {
  },
  "iconLibrary": "lucide"
  }
+
  ```

  ## components/assistant-ui/markdown-text.tsx
@@ -279,6 +279,7 @@ export default function Home() {
  },
  "iconLibrary": "lucide"
  }
+
  ```

  ## components/assistant-ui/markdown-text.tsx
@@ -1254,7 +1254,6 @@ export default nextConfig;
  "tailwind-merge": "^3.3.1",
  "tw-animate-css": "^1.3.6",
  "zod": "^4.0.14",
- "zod-to-json-schema": "^3.24.6",
  "zustand": "^5.0.7"
  },
  "devDependencies": {
@@ -1784,6 +1784,7 @@ To run the example, run the following commands:
  npm install
  npm run dev
  ```
+
  ```

  ## tsconfig.json
@@ -1297,6 +1297,7 @@ This example demonstrates how to use the parent ID feature in assistant-ui to gr
  ## How it works

  1. **Message Structure**: The example uses the external store runtime with predefined messages that include parts with `parentId` fields:
+
  ```typescript
  {
  type: "text",
@@ -1335,10 +1336,12 @@ Open [http://localhost:3000](http://localhost:3000) to see the example.
  ## Use Cases

  This pattern is useful for:
+
  - Grouping research sources with their related findings
  - Organizing multi-step tool executions
  - Creating hierarchical content structures
  - Showing related content in collapsible sections
+
  ```

  ## tsconfig.json
@@ -1676,7 +1676,6 @@ export default nextConfig;
  "tailwind-merge": "^3.3.1",
  "tw-animate-css": "^1.3.6",
  "zod": "^4.0.14",
- "zod-to-json-schema": "^3.24.6",
  "zustand": "^5.0.7"
  },
  "devDependencies": {
@@ -53,29 +53,55 @@ export const MarkdownText = memo(MarkdownTextImpl);
  </Step>
  </Steps>

- ## Examples
+ ## Supported Formats

- ### Inline math
+ By default, remark-math supports:
+ - `$...$` for inline math
+ - `$$...$$` for display math
+ - Fenced code blocks with the `math` language identifier

- Single dollar signs for inline math: `$E = mc^2$`
+ ## Supporting Alternative LaTeX Delimiters

- ### Block math
+ Many language models generate LaTeX using different delimiter formats:
+ - `\(...\)` for inline math
+ - `\[...\]` for display math
+ - Custom formats like `[/math]...[/math]`

- Double dollar signs for block math:
+ You can use the `preprocess` prop to normalize these formats:

- ```
- $$
- \int_{a}^{b} f(x) \, dx = F(b) - F(a)
- $$
- ```
+ ```tsx title="/components/assistant-ui/markdown-text.tsx"
+ const MarkdownTextImpl = () => {
+ return (
+ <MarkdownTextPrimitive
+ remarkPlugins={[remarkGfm, remarkMath]}
+ rehypePlugins={[rehypeKatex]}
+ preprocess={normalizeCustomMathTags} // [!code ++]
+ className="aui-md"
+ components={defaultComponents}
+ />
+ );
+ };

- ### Fenced code blocks
+ // Your LaTeX preprocessing function
+ function normalizeCustomMathTags(input: string): string {
+ return (
+ input
+ // Convert [/math]...[/math] to $$...$$
+ .replace(/\[\/math\]([\s\S]*?)\[\/math\]/g, (_, content) => `$$${content.trim()}$$`)

- Fenced code blocks with the `math` language identifier:
+ // Convert [/inline]...[/inline] to $...$
+ .replace(/\[\/inline\]([\s\S]*?)\[\/inline\]/g, (_, content) => `$${content.trim()}$`)

- ````
- ```math
- \sum_{i=1}^{n} i = \frac{n(n+1)}{2}
+ // Convert \( ... \) to $...$ (inline math) - handles both single and double backslashes
+ .replace(/\\{1,2}\(([\s\S]*?)\\{1,2}\)/g, (_, content) => `$${content.trim()}$`)
+
+ // Convert \[ ... \] to $$...$$ (block math) - handles both single and double backslashes
+ .replace(/\\{1,2}\[([\s\S]*?)\\{1,2}\]/g, (_, content) => `$$${content.trim()}$$`)
+ );
+ }
  ```
- ````
+
+ <Callout type="tip">
+ The preprocessing function runs before markdown parsing, allowing you to transform any delimiter format into the standard `$` and `$$` format.
+ </Callout>

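As a quick illustration of what the `preprocess` function added above does, assuming `normalizeCustomMathTags` from the snippet is in scope (inputs made up for demonstration):

```tsx
// Inline \( ... \) delimiters are rewritten to single-dollar math
normalizeCustomMathTags("Euler: \\(e^{i\\pi} + 1 = 0\\)");
// => "Euler: $e^{i\\pi} + 1 = 0$"

// Display \[ ... \] delimiters are rewritten to double-dollar math
normalizeCustomMathTags("\\[\\int_a^b f(x)\\,dx\\]");
// => "$$\\int_a^b f(x)\\,dx$$"
```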
@@ -1,17 +1,14 @@
  ---
- title: AI SDK v5 (@alpha)
+ title: AI SDK v5 with useChatRuntime
  ---

  import { Callout } from "fumadocs-ui/components/callout";

- <Callout type="warn">
- This integration is currently in **alpha**. APIs may change before the stable release.
- </Callout>

  ## Overview

- Integration with the Vercel AI SDK v5's `useChat` hook using the `@assistant-ui/react-ai-sdk` package with the `@alpha` tag.
- This version supports the latest AI SDK v5 features including the new streamText API and improved TypeScript support.
+ Integration with the Vercel AI SDK v5 using the new `useChatRuntime` hook from `@assistant-ui/react-ai-sdk`.
+ This provides a streamlined way to integrate AI SDK v5 features including the new streamText API and improved TypeScript support.

  ## Getting Started

@@ -29,10 +26,10 @@ cd my-app
  </Step>
  <Step>

- ### Install AI SDK v5 and `@assistant-ui/react` with alpha tag
+ ### Install AI SDK v5 and `@assistant-ui/react`

  ```sh npm2yarn
- npm install @assistant-ui/react @assistant-ui/react-ai-sdk@alpha ai @ai-sdk/openai
+ npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/openai
  ```

  </Step>
@@ -50,18 +47,27 @@ import {
  convertToModelMessages,
  tool,
  } from "ai";
+ import { frontendTools } from "@assistant-ui/assistant-stream/ai-sdk";
  import { z } from "zod";

  // Allow streaming responses up to 30 seconds
  export const maxDuration = 30;

  export async function POST(req: Request) {
- const { messages }: { messages: UIMessage[] } = await req.json();
+ const { messages, system, tools }: {
+ messages: UIMessage[];
+ system?: string; // System message forwarded from AssistantChatTransport
+ tools?: any; // Frontend tools forwarded from AssistantChatTransport
+ } = await req.json();

  const result = streamText({
  model: openai("gpt-4o"),
+ system, // Use the system message from the frontend if provided
  messages: convertToModelMessages(messages),
  tools: {
+ // Wrap frontend tools with frontendTools helper
+ ...frontendTools(tools),
+ // Backend tools
  get_current_weather: tool({
  description: "Get the current weather",
  inputSchema: z.object({
@@ -81,7 +87,7 @@ export async function POST(req: Request) {
  </Step>
  <Step>

- ### Wrap your app with `AssistantRuntimeProvider` with AI SDK v5 runtime
+ ### Wrap your app with `AssistantRuntimeProvider` using `useChatRuntime`

  `@/app/page.tsx`

@@ -89,13 +95,11 @@ export async function POST(req: Request) {
  "use client";

  import { Thread } from "@/components/assistant-ui/thread";
- import { useChat } from "@ai-sdk/react";
  import { AssistantRuntimeProvider } from "@assistant-ui/react";
- import { useAISDKRuntime } from "@assistant-ui/react-ai-sdk";
+ import { useChatRuntime } from "@assistant-ui/react-ai-sdk";

  export default function Home() {
- const chat = useChat();
- const runtime = useAISDKRuntime(chat);
+ const runtime = useChatRuntime();

  return (
  <AssistantRuntimeProvider runtime={runtime}>
@@ -112,9 +116,88 @@ export default function Home() {

  ## API Reference

- ### useAISDKRuntime
+ ### useChatRuntime
+
+ Creates a runtime directly with AI SDK v5's `useChat` hook integration.
+
+ ```tsx
+ import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+
+ const runtime = useChatRuntime({
+ api: "/api/chat",
+ // All standard useChat options are supported
+ });
+ ```
+
+ <Callout type="info">
+ By default, `useChatRuntime` uses `AssistantChatTransport` which automatically forwards system messages and frontend tools to your backend API. This enables your backend to receive the full context from the Assistant UI.
+ </Callout>
+
+ ### Custom Transport Configuration
+
+ If you need to customize the transport configuration:
+
+ ```tsx
+ import { DefaultChatTransport } from "ai";
+ import { AssistantChatTransport } from "@assistant-ui/react-ai-sdk";
+ import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+
+ // Example 1: Custom API URL while keeping system/tools forwarding
+ const runtime = useChatRuntime({
+ transport: new AssistantChatTransport({
+ api: "/my-custom-api/chat" // Custom API URL with forwarding
+ })
+ });
+
+ // Example 2: Disable system/tools forwarding
+ const runtime = useChatRuntime({
+ api: "/api/chat",
+ transport: new DefaultChatTransport() // Standard AI SDK transport without forwarding
+ });
+ ```
+
+ <Callout type="warning">
+ When customizing the API URL, you must explicitly use `AssistantChatTransport` if you want to keep frontend system messages and tools forwarding. Simply passing `api` to `useChatRuntime` will use the default transport configuration.
+ </Callout>
+
+ #### Transport Options
+
+ - **`AssistantChatTransport`** (default): Automatically forwards system messages and frontend tools from the Assistant UI context to your backend
+ - **`DefaultChatTransport`**: Standard AI SDK transport without automatic forwarding
+
+ ### Using Frontend Tools with `frontendTools`
+
+ When using `AssistantChatTransport`, frontend tools are forwarded to your backend. Use the `frontendTools` helper to properly integrate them:
+
+ ```tsx
+ import { frontendTools } from "@assistant-ui/assistant-stream/ai-sdk";
+
+ export async function POST(req: Request) {
+ const { messages, system, tools } = await req.json();
+
+ const result = streamText({
+ model: openai("gpt-4o"),
+ system,
+ messages: convertToModelMessages(messages),
+ tools: {
+ // Wrap frontend tools with the helper
+ ...frontendTools(tools),
+ // Your backend tools
+ myBackendTool: tool({
+ // ...
+ }),
+ },
+ });
+
+ return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ The `frontendTools` helper converts frontend tool definitions to the AI SDK format and ensures they are properly handled by the streaming response.
+
+ ### useAISDKRuntime (Advanced)

- Creates a runtime adapter for AI SDK v5's `useChat` hook.
+ For advanced use cases where you need direct access to the `useChat` hook:

  ```tsx
  import { useChat } from "@ai-sdk/react";
@@ -126,4 +209,4 @@ const runtime = useAISDKRuntime(chat);

  ## Example

- For a complete example, check out the [AI SDK v5 example](https://github.com/assistant-ui/assistant-ui/tree/main/examples/with-ai-sdk-v5) in our repository.
+ For a complete example, check out the [AI SDK v5 example](https://github.com/assistant-ui/assistant-ui/tree/main/examples/with-ai-sdk-v5) in our repository.
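The `useAISDKRuntime` (Advanced) snippet above is cut off by the hunk boundary; pieced together from the lines removed earlier in this diff, the advanced pattern would look roughly like this (the `<Thread />` child is assumed from the template):

```tsx
"use client";

import { useChat } from "@ai-sdk/react";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useAISDKRuntime } from "@assistant-ui/react-ai-sdk";
import { Thread } from "@/components/assistant-ui/thread";

export default function Home() {
  // Keep direct access to the AI SDK v5 useChat state, then wrap it in a runtime
  const chat = useChat();
  const runtime = useAISDKRuntime(chat);

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      <Thread /> {/* assumed from the template */}
    </AssistantRuntimeProvider>
  );
}
```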
@@ -169,7 +169,7 @@ Use `LocalRuntime` if you need:
  ### Use the Thread component

  ```tsx title="app/page.tsx"
- import { Thread } from "@assistant-ui/react";
+ import { Thread } from 'components/assistant-ui/thread.tsx'

  export default function Page() {
  return <Thread />;
@@ -140,13 +140,13 @@ Open the main page file in your Assistant UI frontend project (usually `app/page
  ```tsx {10} title="app/page.tsx"
  "use client";
  import { Thread } from "@/components/assistant-ui/thread";
- import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
+ import { useDataStreamRuntime } from "@assistant-ui/react-data-stream";
  import { AssistantRuntimeProvider } from "@assistant-ui/react";
  import { ThreadList } from "@/components/assistant-ui/thread-list";

  export default function Home() {
  // Point the runtime to the Mastra server endpoint
- const runtime = useChatRuntime({
+ const runtime = useDataStreamRuntime({
  api: "http://localhost:4111/api/agents/chefAgent/stream",
  });

@@ -1,7 +1,7 @@
  import { logger, IS_PREPARE_MODE, MDX_EXTENSION, DOCS_PATH, MAX_FILE_SIZE, CODE_EXAMPLES_PATH, MD_EXTENSION } from './chunk-M2RKUM66.js';
  import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
  import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
- import { z } from 'zod';
+ import { z } from 'zod/v3';
  import { lstat, stat, readdir, readFile } from 'fs/promises';
  import path, { dirname, join, extname, normalize, relative } from 'path';
  import matter from 'gray-matter';
package/dist/index.js CHANGED
@@ -1 +1 @@
- export { runServer, server } from './chunk-JS4PWCVA.js';
+ export { runServer, server } from './chunk-L4K23SWI.js';
package/dist/stdio.js CHANGED
@@ -1,5 +1,5 @@
  #!/usr/bin/env node
- import { runServer } from './chunk-JS4PWCVA.js';
+ import { runServer } from './chunk-L4K23SWI.js';

  // src/stdio.ts
  void runServer().catch((error) => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@assistant-ui/mcp-docs-server",
- "version": "0.1.5",
+ "version": "0.1.6",
  "description": "MCP server for assistant-ui documentation and examples",
  "type": "module",
  "main": "dist/index.js",