@assistant-ui/mcp-docs-server 0.1.12 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.docs/organized/code-examples/with-ag-ui.md +1089 -0
- package/.docs/organized/code-examples/with-ai-sdk-v5.md +12 -21
- package/.docs/organized/code-examples/with-assistant-transport.md +10 -19
- package/.docs/organized/code-examples/with-cloud.md +7 -16
- package/.docs/organized/code-examples/with-external-store.md +6 -15
- package/.docs/organized/code-examples/with-ffmpeg.md +14 -21
- package/.docs/organized/code-examples/with-langgraph.md +5 -14
- package/.docs/organized/code-examples/with-parent-id-grouping.md +6 -15
- package/.docs/organized/code-examples/with-react-hook-form.md +10 -19
- package/.docs/raw/docs/api-reference/context-providers/AssistantRuntimeProvider.mdx +6 -1
- package/.docs/raw/docs/api-reference/integrations/vercel-ai-sdk.mdx +179 -70
- package/.docs/raw/docs/cloud/authorization.mdx +2 -2
- package/.docs/raw/docs/copilots/model-context.mdx +4 -5
- package/.docs/raw/docs/copilots/motivation.mdx +4 -4
- package/.docs/raw/docs/getting-started.mdx +8 -4
- package/.docs/raw/docs/guides/Attachments.mdx +2 -2
- package/.docs/raw/docs/guides/Tools.mdx +5 -5
- package/.docs/raw/docs/guides/context-api.mdx +5 -5
- package/.docs/raw/docs/migrations/v0-12.mdx +2 -2
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +6 -2
- package/.docs/raw/docs/runtimes/custom/custom-thread-list.mdx +9 -0
- package/.docs/raw/docs/runtimes/custom/local.mdx +77 -4
- package/.docs/raw/docs/runtimes/langgraph/index.mdx +7 -4
- package/.docs/raw/docs/runtimes/langserve.mdx +3 -8
- package/.docs/raw/docs/runtimes/mastra/full-stack-integration.mdx +13 -11
- package/.docs/raw/docs/runtimes/mastra/separate-server-integration.mdx +50 -31
- package/.docs/raw/docs/ui/PartGrouping.mdx +2 -2
- package/.docs/raw/docs/ui/Reasoning.mdx +174 -0
- package/dist/chunk-M2RKUM66.js +3 -3
- package/dist/chunk-NVNFQ5ZO.js +2 -2
- package/package.json +5 -6
@@ -136,7 +136,7 @@ export const OPTIONS = () => {
 
 // ---cut---
 import { Client } from "@langchain/langgraph-sdk";
-import { LangChainMessage } from "@assistant-ui/react-langgraph";
+import { LangChainMessage, LangGraphSendMessageConfig } from "@assistant-ui/react-langgraph";
 
 const createClient = () => {
   const apiUrl = process.env["NEXT_PUBLIC_LANGGRAPH_API_URL"] || "/api";
@@ -160,6 +160,7 @@ export const getThreadState = async (
 export const sendMessage = async (params: {
   threadId: string;
   messages: LangChainMessage;
+  config?: LangGraphSendMessageConfig;
 }) => {
   const client = createClient();
   return client.runs.stream(
@@ -170,6 +171,7 @@ export const sendMessage = async (params: {
       messages: params.messages,
     },
     streamMode: "messages",
+    ...params.config
   },
   );
 };
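For reference, the chatApi hunks above assemble into roughly the following helper. This is a sketch, not the full file from the package: the assistant ID lookup and the `input` wrapper are assumptions based on the surrounding context, and the other exports (`createThread`, `getThreadState`) are omitted.

```typescript
import { Client } from "@langchain/langgraph-sdk";
import {
  LangChainMessage,
  LangGraphSendMessageConfig,
} from "@assistant-ui/react-langgraph";

const createClient = () => {
  const apiUrl = process.env["NEXT_PUBLIC_LANGGRAPH_API_URL"] || "/api";
  return new Client({ apiUrl });
};

export const sendMessage = async (params: {
  threadId: string;
  messages: LangChainMessage;
  config?: LangGraphSendMessageConfig;
}) => {
  const client = createClient();
  return client.runs.stream(
    params.threadId,
    // Assumption: the assistant ID comes from an env var, as in the default template
    process.env["NEXT_PUBLIC_LANGGRAPH_ASSISTANT_ID"]!,
    {
      input: { messages: params.messages },
      streamMode: "messages",
      // Forward any per-run options passed down from useLangGraphRuntime
      ...params.config,
    },
  );
};
```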
@@ -195,12 +197,13 @@ import { createThread, getThreadState, sendMessage } from "@/lib/chatApi";
 
 export function MyAssistant() {
   const runtime = useLangGraphRuntime({
-    stream: async (messages, { initialize }) => {
+    stream: async (messages, { initialize, config }) => {
       const { externalId } = await initialize();
       if (!externalId) throw new Error("Thread not found");
       return sendMessage({
         threadId: externalId,
         messages,
+        config
       });
     },
     create: async () => {
@@ -306,11 +309,11 @@ The `useLangGraphRuntime` hook now includes built-in thread management capabilit
 
 ```typescript
 const runtime = useLangGraphRuntime({
-  stream: async (messages, { initialize }) => {
+  stream: async (messages, { initialize, config }) => {
     // initialize() creates or loads a thread and returns its IDs
     const { remoteId, externalId } = await initialize();
     // Use externalId (your backend's thread ID) for API calls
-    return sendMessage({ threadId: externalId, messages });
+    return sendMessage({ threadId: externalId, messages, config });
   },
   create: async () => {
     // Called when creating a new thread
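The snippet above is the thread-management half of the picture; wiring the resulting runtime into the UI typically looks like the sketch below. The `Thread` import path and the body of `create` are assumptions (they follow the default assistant-ui LangGraph template, not these hunks):

```tsx
"use client";

import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useLangGraphRuntime } from "@assistant-ui/react-langgraph";
import { Thread } from "@/components/assistant-ui/thread"; // assumed template path
import { createThread, sendMessage } from "@/lib/chatApi";

export function MyAssistant() {
  const runtime = useLangGraphRuntime({
    stream: async (messages, { initialize, config }) => {
      const { externalId } = await initialize();
      if (!externalId) throw new Error("Thread not found");
      return sendMessage({ threadId: externalId, messages, config });
    },
    create: async () => {
      // Assumption: createThread wraps client.threads.create() and returns a thread_id
      const { thread_id } = await createThread();
      return { externalId: thread_id };
    },
  });

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      <Thread />
    </AssistantRuntimeProvider>
  );
}
```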
@@ -26,7 +26,7 @@ cd my-app
 </Step>
 <Step>
 
-### Install `@langchain/core`,
+### Install `@langchain/core`, AI SDK and `@assistant-ui/react`
 
 ```sh npm2yarn
 npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/react @langchain/core
@@ -71,19 +71,14 @@ export async function POST(req: Request) {
 
 import { useChat } from "@ai-sdk/react";
 import { AssistantRuntimeProvider } from "@assistant-ui/react";
-import {
+import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
 
 export function MyRuntimeProvider({
   children,
 }: Readonly<{
   children: React.ReactNode;
 }>) {
-  const
-    api: "/api/chat",
-    unstable_AISDKInterop: true,
-  });
-
-  const runtime = useVercelUseChatRuntime(chat);
+  const runtime = useChatRuntime();
 
   return (
     <AssistantRuntimeProvider runtime={runtime}>
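Put together, the migrated provider from this hunk is only a few lines. A minimal self-contained sketch, assuming the default behavior of `useChatRuntime` (it POSTs to `/api/chat` unless you pass a custom transport):

```tsx
"use client";

import type { ReactNode } from "react";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useChatRuntime } from "@assistant-ui/react-ai-sdk";

export function MyRuntimeProvider({
  children,
}: Readonly<{
  children: ReactNode;
}>) {
  // No options needed: the runtime targets /api/chat by default
  const runtime = useChatRuntime();

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      {children}
    </AssistantRuntimeProvider>
  );
}
```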
@@ -52,7 +52,7 @@ export async function POST(req: Request) {
     messages,
   });
 
-  return result.
+  return result.toUIMessageStreamResponse();
 }
 ```
 
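For context, `result.toUIMessageStreamResponse()` is the AI SDK v5 way of returning a UI message stream from a route handler. A sketch of the full route, assuming the OpenAI provider and a `gpt-4o` model (the model choice is illustrative, not taken from this diff):

```typescript
import { openai } from "@ai-sdk/openai";
import { convertToModelMessages, streamText, type UIMessage } from "ai";

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();

  const result = streamText({
    model: openai("gpt-4o"),
    messages: convertToModelMessages(messages),
  });

  // Stream the result back as a UI message stream that assistant-ui can consume
  return result.toUIMessageStreamResponse();
}
```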
@@ -63,10 +63,10 @@ This default route uses the Vercel AI SDK directly with OpenAI. In the following
 
 ### Install Mastra Packages
 
-Add the
+Add the `@mastra/core` package and its peer dependency `zod` (which you can use later inside tools for example). Also add `@mastra/ai-sdk` to convert Mastra's stream to an AI SDK-compatible format:
 
 ```bash npm2yarn
-npm install @mastra/core@latest @mastra/
+npm install @mastra/core@latest @mastra/ai-sdk@latest zod@latest
 ```
 
 </Step>
@@ -123,7 +123,6 @@ These files will be used in the next steps to define your Mastra agent and confi
 Now, let's define the behavior of our AI agent. Open the `mastra/agents/chefAgent.ts` file and add the following code:
 
 ```typescript title="mastra/agents/chefAgent.ts"
-import { openai } from "@ai-sdk/openai";
 import { Agent } from "@mastra/core/agent";
 
 export const chefAgent = new Agent({
@@ -131,14 +130,14 @@ export const chefAgent = new Agent({
   instructions:
     "You are Michel, a practical and experienced home chef. " +
     "You help people cook with whatever ingredients they have available.",
-  model: openai
+  model: "openai/gpt-4o-mini",
 });
 ```
 
 This code creates a new Mastra `Agent` named `chef-agent`.
 
 - `instructions`: Defines the agent's persona and primary goal.
-- `model`: Specifies the language model the agent will use (in this case, OpenAI's GPT-4o Mini via
+- `model`: Specifies the language model the agent will use (in this case, OpenAI's GPT-4o Mini via Mastra's model router).
 
 Make sure you have set up your OpenAI API key as described in the [Getting Started guide](/docs/getting-started).
 
@@ -151,7 +150,6 @@ Next, register the agent with your Mastra instance. Open the `mastra/index.ts` f
 
 ```typescript title="mastra/index.ts"
 import { Mastra } from "@mastra/core";
-
 import { chefAgent } from "./agents/chefAgent";
 
 export const mastra = new Mastra({
@@ -169,6 +167,8 @@ This code initializes Mastra and makes the `chefAgent` available for use in your
 Now, update your API route (`app/api/chat/route.ts`) to use the Mastra agent you just configured. Replace the existing content with the following:
 
 ```typescript title="app/api/chat/route.ts"
+import { createUIMessageStreamResponse } from "ai";
+import { toAISdkFormat } from "@mastra/ai-sdk";
 import { mastra } from "@/mastra"; // Adjust the import path if necessary
 
 // Allow streaming responses up to 30 seconds
@@ -182,10 +182,12 @@ export async function POST(req: Request) {
   const agent = mastra.getAgent("chefAgent");
 
   // Stream the response using the agent
-  const
+  const stream = await agent.stream(messages);
 
-  //
-  return
+  // Create a Response that streams the UI message stream to the client
+  return createUIMessageStreamResponse({
+    stream: toAISdkFormat(stream, { from: "agent" }),
+  });
 }
 ```
 
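Assembled from the two route hunks above, the full Mastra-backed route looks roughly like this. The request parsing line and the `maxDuration` export are assumptions carried over from the original route shown earlier in the guide:

```typescript
import { createUIMessageStreamResponse } from "ai";
import { toAISdkFormat } from "@mastra/ai-sdk";
import { mastra } from "@/mastra"; // Adjust the import path if necessary

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export async function POST(req: Request) {
  // Assumed body shape: the default chat payload sent by the assistant-ui runtime
  const { messages } = await req.json();

  const agent = mastra.getAgent("chefAgent");

  // Stream the response using the agent
  const stream = await agent.stream(messages);

  // Create a Response that streams the UI message stream to the client
  return createUIMessageStreamResponse({
    stream: toAISdkFormat(stream, { from: "agent" }),
  });
}
```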
@@ -194,7 +196,7 @@ Key changes:
 - We import the `mastra` instance created in `mastra/index.ts`. Make sure the import path (`@/mastra`) is correct for your project setup (you might need `~/mastra`, `../../../mastra`, etc., depending on your path aliases and project structure).
 - We retrieve the `chefAgent` using `mastra.getAgent("chefAgent")`.
 - Instead of calling the AI SDK's `streamText` directly, we call `agent.stream(messages)` to process the chat messages using the agent's configuration and model.
-- The result is still returned in a format compatible with assistant-ui using `
+- The result is still returned in a format compatible with assistant-ui using `createUIMessageStreamResponse()` and `toAISdkFormat()`.
 
 Your API route is now powered by Mastra!
 
@@ -29,6 +29,12 @@ Once the setup is complete, navigate into your new Mastra project directory (the
 cd your-mastra-server-directory # Replace with the actual directory name
 ```
 
+In the next steps you'll need to use the `@mastra/ai-sdk` package. Add it to your Mastra project:
+
+```bash
+npm install @mastra/ai-sdk@latest
+```
+
 You now have a basic Mastra server project ready.
 
 <Callout title="API Keys">
@@ -47,10 +53,9 @@ You now have a basic Mastra server project ready.
 
 Next, let's define an agent within your Mastra server project. We'll create a `chefAgent` similar to the one used in the full-stack guide.
 
-Open or create the agent file (e.g., `src/agents/chefAgent.ts` within your Mastra project) and add the following code:
+Open or create the agent file (e.g., `src/mastra/agents/chefAgent.ts` within your Mastra project) and add the following code:
 
-```typescript title="src/agents/chefAgent.ts"
-import { openai } from "@ai-sdk/openai";
+```typescript title="src/mastra/agents/chefAgent.ts"
 import { Agent } from "@mastra/core/agent";
 
 export const chefAgent = new Agent({
@@ -58,7 +63,7 @@ export const chefAgent = new Agent({
   instructions:
     "You are Michel, a practical and experienced home chef. " +
     "You help people cook with whatever ingredients they have available.",
-  model: openai
+  model: "openai/gpt-4o-mini",
 });
 ```
 
@@ -70,11 +75,11 @@ This defines the agent's behavior, but it's not yet active in the Mastra server.
 
 ### Register the Agent
 
-Now, you need to register the `chefAgent` with your Mastra instance so the server knows about it. Open your main Mastra configuration file (this is often `src/index.ts` in projects created with `create-mastra`).
+Now, you need to register the `chefAgent` with your Mastra instance so the server knows about it. Open your main Mastra configuration file (this is often `src/mastra/index.ts` in projects created with `create-mastra`).
 
 Import the `chefAgent` and add it to the `agents` object when initializing Mastra:
 
-```typescript title="src/index.ts"
+```typescript title="src/mastra/index.ts"
 import { Mastra } from "@mastra/core";
 import { chefAgent } from "./agents/chefAgent"; // Adjust path if necessary
 
@@ -83,7 +88,34 @@ export const mastra = new Mastra({
 });
 ```
 
-Make sure you adapt this code to fit the existing structure of your `src/index.ts` file generated by `create-mastra`. The key is to import your agent and include it in the `agents` configuration object.
+Make sure you adapt this code to fit the existing structure of your `src/mastra/index.ts` file generated by `create-mastra`. The key is to import your agent and include it in the `agents` configuration object.
+
+</Step>
+
+<Step>
+
+### Register the Chat Route
+
+Still inside `src/mastra/index.ts`, register a chat route for the `chefAgent` now. You can do this by using `chatRoute()` from `@mastra/ai-sdk`. You need to place this inside `server.apiRoutes` of your Mastra configuration:
+
+```typescript title="src/mastra/index.ts" {3,7-13}
+import { Mastra } from "@mastra/core";
+import { chefAgent } from "./agents/chefAgent";
+import { chatRoute } from "@mastra/ai-sdk";
+
+export const mastra = new Mastra({
+  agents: { chefAgent },
+  server: {
+    apiRoutes: [
+      chatRoute({
+        path: "/chat/:agentId",
+      }),
+    ],
+  },
+});
+```
+
+Make sure you adapt this code to fit the existing structure of your `src/mastra/index.ts` file generated by `create-mastra`. This will make all agents available in AI SDK-compatible formats, including the `chefAgent` at the endpoint `/chat/chefAgent`.
 
 </Step>
 
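Once the route is registered and the dev server (next step) is running, a quick way to sanity-check the endpoint is a plain `fetch`. The request body shape below is an assumption (the AI SDK UI-message payload); adjust it if your client sends a different format:

```typescript
// Rough smoke test: POST one user message to the generated chat route.
const res = await fetch("http://localhost:4111/chat/chefAgent", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    messages: [
      {
        id: "msg-1",
        role: "user",
        parts: [{ type: "text", text: "I have rice and eggs. What can I cook?" }],
      },
    ],
  }),
});

// Expect a 200 and a streamed body
console.log(res.status, res.headers.get("content-type"));
```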
@@ -97,7 +129,7 @@ With the agent defined and registered, start the Mastra development server:
 npm run dev
 ```
 
-By default, the Mastra server will run on `http://localhost:4111`.
+By default, the Mastra server will run on `http://localhost:4111`. Keep this server running for the next steps where we'll set up the assistant-ui frontend to connect to it.
 
 </Step>
 
@@ -135,38 +167,25 @@ In the next step, we will configure this frontend to communicate with the separa
 
 The default assistant-ui setup configures the chat runtime to use a local API route (`/api/chat`) within the Next.js project. Since our Mastra agent is running on a separate server, we need to update the frontend to point to that server's endpoint.
 
-Open the
+Open the file in your assistant-ui frontend project that contains the `useChatRuntime` hook (usually `app/assistant.tsx` or `src/app/assistant.tsx`). Find the `useChatRuntime` hook and change the `api` property to the full URL of your Mastra agent's stream endpoint:
 
-```tsx {
+```tsx {8} title="app/assistant.tsx"
 "use client";
-
-
-
-
-} from "@assistant-ui/react-ai-sdk";
-import { AssistantRuntimeProvider } from "@assistant-ui/react";
-import { ThreadList } from "@/components/assistant-ui/thread-list";
-
-export default function Home() {
-  // Point the runtime to the Mastra server endpoint
+
+// Rest of the imports...
+
+export const Assistant = () => {
   const runtime = useChatRuntime({
     transport: new AssistantChatTransport({
-      api: "
+      api: "http://localhost:4111/chat/chefAgent",
     }),
   });
 
-
-
-    <main className="grid h-dvh grid-cols-[200px_1fr] gap-x-2 px-4 py-4">
-      <ThreadList />
-      <Thread />
-    </main>
-  </AssistantRuntimeProvider>
-);
-}
+  // Rest of the component...
+};
 ```
 
-Replace `"http://localhost:4111/
+Replace `"http://localhost:4111/chat/chefAgent"` with the actual URL if your Mastra server runs on a different port or host, or if your agent has a different name.
 
 Now, the assistant-ui frontend will send chat requests directly to your running Mastra server.
 
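For orientation, the edited frontend component from this hunk roughly ends up as the sketch below; the `Thread` import and the render body are assumptions reconstructed from the removed lines and the default assistant-ui template:

```tsx
"use client";

import { AssistantRuntimeProvider } from "@assistant-ui/react";
import {
  AssistantChatTransport,
  useChatRuntime,
} from "@assistant-ui/react-ai-sdk";
import { Thread } from "@/components/assistant-ui/thread"; // assumed template path

export const Assistant = () => {
  const runtime = useChatRuntime({
    transport: new AssistantChatTransport({
      // Point the runtime at the Mastra server's chat route
      api: "http://localhost:4111/chat/chefAgent",
    }),
  });

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      <Thread />
    </AssistantRuntimeProvider>
  );
};
```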
@@ -515,12 +515,12 @@ Adjust group appearance based on content:
 
 ```tsx
 import { FC, PropsWithChildren } from "react";
-import {
+import { useAssistantState } from "@assistant-ui/react";
 
 const DynamicGroup: FC<
   PropsWithChildren<{ groupKey: string | undefined; indices: number[] }>
 > = ({ groupKey, indices, children }) => {
-  const parts =
+  const parts = useAssistantState(({ message }) => message.content);
   const groupParts = indices.map((i) => parts[i]);
 
   // Analyze group content
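To show where that selector can lead, here is one possible completion of the `DynamicGroup` example. The `type === "tool-call"` check and the class names are illustrative assumptions, not part of the documented component:

```tsx
import { FC, PropsWithChildren } from "react";
import { useAssistantState } from "@assistant-ui/react";

const DynamicGroup: FC<
  PropsWithChildren<{ groupKey: string | undefined; indices: number[] }>
> = ({ groupKey, indices, children }) => {
  const parts = useAssistantState(({ message }) => message.content);
  const groupParts = indices.map((i) => parts[i]);

  // Analyze group content: flag groups that contain at least one tool call
  const hasToolCalls = groupParts.some((part) => part?.type === "tool-call");

  return (
    <div
      data-group-key={groupKey}
      className={hasToolCalls ? "rounded-md border bg-muted/50 p-2" : "p-2"}
    >
      {children}
    </div>
  );
};
```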
@@ -0,0 +1,174 @@
+---
+title: Reasoning
+---
+
+import { Steps, Step } from "fumadocs-ui/components/steps";
+import { Tab, Tabs } from "fumadocs-ui/components/tabs";
+
+## Overview
+
+The Reasoning component displays AI reasoning or thinking messages in a collapsible UI. Consecutive reasoning message parts are automatically grouped together with smooth animations and a shimmer effect while streaming.
+
+## Getting Started
+
+<Steps>
+<Step>
+
+### Add `reasoning`
+
+<Tabs items={["assistant-ui", "shadcn (namespace)", "shadcn"]}>
+<Tab>
+
+```sh
+npx assistant-ui@latest add reasoning
+```
+
+</Tab>
+<Tab>
+
+```sh
+npx shadcn@latest add @assistant-ui/reasoning
+```
+
+</Tab>
+<Tab>
+
+```sh
+npx shadcn@latest add "https://r.assistant-ui.com/reasoning"
+```
+
+</Tab>
+</Tabs>
+
+This adds a `/components/assistant-ui/reasoning.tsx` file to your project, which you can adjust as needed.
+
+</Step>
+<Step>
+
+### Use in your application
+
+Pass the `Reasoning` and `ReasoningGroup` components to the `MessagePrimitive.Parts` component:
+
+```tsx title="/app/components/assistant-ui/thread.tsx" {2,10-11}
+import { MessagePrimitive } from "@assistant-ui/react";
+import { Reasoning, ReasoningGroup } from "@/components/assistant-ui/reasoning";
+
+const AssistantMessage: FC = () => {
+  return (
+    <MessagePrimitive.Root className="...">
+      <div className="...">
+        <MessagePrimitive.Parts
+          components={{
+            Reasoning: Reasoning,
+            ReasoningGroup: ReasoningGroup
+          }}
+        />
+      </div>
+      <AssistantActionBar />
+
+      <BranchPicker className="..." />
+    </MessagePrimitive.Root>
+  );
+};
+```
+
+</Step>
+</Steps>
+
+## How It Works
+
+The component consists of two parts:
+
+1. **Reasoning**: Renders individual reasoning message part content
+2. **ReasoningGroup**: Wraps consecutive reasoning parts in a collapsible container
+
+Consecutive reasoning parts are automatically grouped together by the `ReasoningGroup` component, similar to how `ToolGroup` handles tool calls.
+
+### Reasoning
+
+The Reasoning component doesn't accept additional props—it renders the reasoning text content with markdown support.
+
+## Examples
+
+### Basic Usage
+
+```tsx title="/app/components/assistant-ui/thread.tsx"
+<MessagePrimitive.Parts
+  components={{
+    Reasoning,
+    ReasoningGroup
+  }}
+/>
+```
+
+### Custom Styling
+
+Since the component is copied to your project, you can customize it directly by modifying the `reasoning.tsx` file. The internal components (`ReasoningRoot`, `ReasoningTrigger`, `ReasoningContent`, `ReasoningText`) accept `className` props for styling:
+
+```tsx title="/components/assistant-ui/reasoning.tsx"
+const ReasoningGroupImpl: ReasoningGroupComponent = ({
+  // ... existing code ...
+  return (
+    <ReasoningRoot className="rounded-lg border bg-muted/50 p-4">
+      <ReasoningTrigger
+        active={isReasoningStreaming}
+        className="font-semibold text-foreground"
+      />
+      <ReasoningContent
+        aria-busy={isReasoningStreaming}
+        className="mt-2"
+      >
+        <ReasoningText className="text-base">{children}</ReasoningText>
+      </ReasoningContent>
+    </ReasoningRoot>
+  );
+};
+```
+
+You can also customize the individual internal components:
+
+```tsx title="/components/assistant-ui/reasoning.tsx"
+const ReasoningRoot: FC<PropsWithChildren<{ className?: string }>> = ({
+  // ... existing code ...
+  return (
+    <Collapsible
+      // ...
+      className={cn("aui-reasoning-root mb-4 w-full rounded-lg border bg-muted/50 p-4", className)}
+      // ...
+    >
+      {children}
+    </Collapsible>
+  );
+};
+
+const ReasoningTrigger: FC<{ active: boolean; className?: string }> = ({
+  // ... existing code ...
+  <CollapsibleTrigger
+    className={cn(
+      "aui-reasoning-trigger group/trigger -mb-2 flex max-w-[75%] items-center gap-2 py-2 text-sm font-semibold text-foreground transition-colors hover:text-foreground",
+      className,
+    )}
+  >
+    {/* ... existing content ... */}
+  </CollapsibleTrigger>
+);
+```
+
+## Technical Details
+
+### Scroll Lock
+
+The component uses the `useScrollLock` hook (exported from `@assistant-ui/react`) to prevent page jumps when collapsing the reasoning section. This maintains the scroll position during the collapse animation.
+
+### Animation Timing
+
+The component uses CSS custom properties for animation timing:
+- `--animation-duration`: Controls expand/collapse animation (default: 200ms)
+- `--shimmer-duration`: Controls the shimmer effect speed (default: 1000ms)
+
+These can be customized by modifying the CSS variables in your component.
+
+## Related Components
+
+- [ToolGroup](/docs/ui/ToolGroup) - Similar grouping pattern for tool calls
+- [PartGrouping](/docs/ui/PartGrouping) - Experimental API for grouping message parts
package/dist/chunk-M2RKUM66.js
CHANGED

@@ -2,9 +2,9 @@ import { fileURLToPath } from 'url';
 import { dirname, join } from 'path';
 
 // src/constants.ts
-var __dirname = dirname(fileURLToPath(import.meta.url));
-var ROOT_DIR = join(__dirname, "../../../");
-var PACKAGE_DIR = join(__dirname, "../");
+var __dirname$1 = dirname(fileURLToPath(import.meta.url));
+var ROOT_DIR = join(__dirname$1, "../../../");
+var PACKAGE_DIR = join(__dirname$1, "../");
 var EXAMPLES_PATH = join(ROOT_DIR, "examples");
 var DOCS_BASE = join(PACKAGE_DIR, ".docs");
 var DOCS_PATH = join(DOCS_BASE, "raw/docs");
package/dist/chunk-NVNFQ5ZO.js
CHANGED

@@ -377,9 +377,9 @@ var examplesTools = {
     }
   }
 };
-var __dirname = dirname(fileURLToPath(import.meta.url));
+var __dirname$1 = dirname(fileURLToPath(import.meta.url));
 var packageJson = JSON.parse(
-  readFileSync(join(__dirname, "../package.json"), "utf-8")
+  readFileSync(join(__dirname$1, "../package.json"), "utf-8")
 );
 var server = new McpServer({
   name: "assistant-ui-docs",
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@assistant-ui/mcp-docs-server",
-  "version": "0.1.
+  "version": "0.1.14",
   "description": "MCP server for assistant-ui documentation and examples",
   "type": "module",
   "main": "dist/index.js",
@@ -8,18 +8,17 @@
     "assistant-ui-mcp": "./dist/stdio.js"
   },
   "dependencies": {
-    "@modelcontextprotocol/sdk": "^1.
+    "@modelcontextprotocol/sdk": "^1.22.0",
     "zod": "^4.1.12",
     "gray-matter": "^4.0.3",
     "cross-env": "^10.1.0"
   },
   "devDependencies": {
-    "@types/node": "^24.
-    "tsup": "^8.5.
+    "@types/node": "^24.10.1",
+    "tsup": "^8.5.1",
     "tsx": "^4.20.6",
     "typescript": "^5.9.3",
-    "vitest": "^4.0.
-    "eslint": "^9.38.0"
+    "vitest": "^4.0.10"
   },
   "files": [
     "dist",