@assistant-ui/mcp-docs-server 0.1.19 → 0.1.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/.docs/organized/code-examples/with-ag-ui.md +172 -1633
  2. package/.docs/organized/code-examples/with-ai-sdk-v6.md +42 -1640
  3. package/.docs/organized/code-examples/with-assistant-transport.md +40 -1743
  4. package/.docs/organized/code-examples/with-cloud.md +71 -1745
  5. package/.docs/organized/code-examples/with-custom-thread-list.md +87 -1723
  6. package/.docs/organized/code-examples/with-elevenlabs-scribe.md +70 -1637
  7. package/.docs/organized/code-examples/with-external-store.md +67 -1624
  8. package/.docs/organized/code-examples/with-ffmpeg.md +71 -1629
  9. package/.docs/organized/code-examples/with-langgraph.md +95 -1893
  10. package/.docs/organized/code-examples/with-parent-id-grouping.md +57 -1654
  11. package/.docs/organized/code-examples/with-react-hook-form.md +220 -2163
  12. package/.docs/organized/code-examples/with-react-router.md +66 -1318
  13. package/.docs/organized/code-examples/with-store.md +31 -31
  14. package/.docs/organized/code-examples/with-tanstack.md +77 -861
  15. package/.docs/organized/code-examples/with-tap-runtime.md +812 -0
  16. package/.docs/raw/docs/(docs)/cli.mdx +66 -0
  17. package/.docs/raw/docs/(docs)/copilots/make-assistant-tool-ui.mdx +0 -1
  18. package/.docs/raw/docs/(docs)/copilots/make-assistant-tool.mdx +0 -1
  19. package/.docs/raw/docs/(docs)/copilots/model-context.mdx +4 -4
  20. package/.docs/raw/docs/(docs)/copilots/motivation.mdx +3 -3
  21. package/.docs/raw/docs/(docs)/devtools.mdx +0 -1
  22. package/.docs/raw/docs/(docs)/guides/attachments.mdx +2 -3
  23. package/.docs/raw/docs/(docs)/guides/context-api.mdx +117 -117
  24. package/.docs/raw/docs/(docs)/guides/suggestions.mdx +296 -0
  25. package/.docs/raw/docs/(docs)/guides/tools.mdx +336 -513
  26. package/.docs/raw/docs/(docs)/index.mdx +33 -410
  27. package/.docs/raw/docs/(docs)/installation.mdx +450 -0
  28. package/.docs/raw/docs/(docs)/llm.mdx +209 -0
  29. package/.docs/raw/docs/(reference)/api-reference/context-providers/assistant-runtime-provider.mdx +0 -1
  30. package/.docs/raw/docs/(reference)/api-reference/context-providers/text-message-part-provider.mdx +0 -1
  31. package/.docs/raw/docs/(reference)/api-reference/integrations/react-data-stream.mdx +48 -3
  32. package/.docs/raw/docs/(reference)/api-reference/integrations/react-hook-form.mdx +0 -1
  33. package/.docs/raw/docs/(reference)/api-reference/integrations/vercel-ai-sdk.mdx +0 -1
  34. package/.docs/raw/docs/(reference)/api-reference/overview.mdx +9 -3
  35. package/.docs/raw/docs/(reference)/api-reference/primitives/action-bar-more.mdx +20 -52
  36. package/.docs/raw/docs/(reference)/api-reference/primitives/action-bar.mdx +16 -39
  37. package/.docs/raw/docs/(reference)/api-reference/primitives/assistant-if.mdx +49 -50
  38. package/.docs/raw/docs/(reference)/api-reference/primitives/assistant-modal.mdx +3 -11
  39. package/.docs/raw/docs/(reference)/api-reference/primitives/attachment.mdx +0 -3
  40. package/.docs/raw/docs/(reference)/api-reference/primitives/branch-picker.mdx +0 -1
  41. package/.docs/raw/docs/(reference)/api-reference/primitives/composer.mdx +5 -16
  42. package/.docs/raw/docs/(reference)/api-reference/primitives/composition.mdx +0 -1
  43. package/.docs/raw/docs/(reference)/api-reference/primitives/error.mdx +0 -1
  44. package/.docs/raw/docs/(reference)/api-reference/primitives/message-part.mdx +1 -2
  45. package/.docs/raw/docs/(reference)/api-reference/primitives/message.mdx +0 -1
  46. package/.docs/raw/docs/(reference)/api-reference/primitives/suggestion.mdx +152 -0
  47. package/.docs/raw/docs/(reference)/api-reference/primitives/thread-list-item-more.mdx +0 -1
  48. package/.docs/raw/docs/(reference)/api-reference/primitives/thread-list-item.mdx +1 -2
  49. package/.docs/raw/docs/(reference)/api-reference/primitives/thread-list.mdx +1 -2
  50. package/.docs/raw/docs/(reference)/api-reference/primitives/thread.mdx +28 -40
  51. package/.docs/raw/docs/(reference)/api-reference/runtimes/assistant-runtime.mdx +0 -1
  52. package/.docs/raw/docs/(reference)/api-reference/runtimes/attachment-runtime.mdx +1 -2
  53. package/.docs/raw/docs/(reference)/api-reference/runtimes/composer-runtime.mdx +2 -3
  54. package/.docs/raw/docs/(reference)/api-reference/runtimes/message-part-runtime.mdx +1 -2
  55. package/.docs/raw/docs/(reference)/api-reference/runtimes/message-runtime.mdx +1 -2
  56. package/.docs/raw/docs/(reference)/api-reference/runtimes/thread-list-item-runtime.mdx +0 -1
  57. package/.docs/raw/docs/(reference)/api-reference/runtimes/thread-list-runtime.mdx +0 -1
  58. package/.docs/raw/docs/(reference)/api-reference/runtimes/thread-runtime.mdx +1 -2
  59. package/.docs/raw/docs/(reference)/legacy/styled/assistant-modal.mdx +0 -1
  60. package/.docs/raw/docs/(reference)/legacy/styled/decomposition.mdx +5 -5
  61. package/.docs/raw/docs/(reference)/legacy/styled/markdown.mdx +0 -1
  62. package/.docs/raw/docs/(reference)/legacy/styled/thread.mdx +0 -1
  63. package/.docs/raw/docs/(reference)/migrations/v0-12.mdx +207 -33
  64. package/.docs/raw/docs/(reference)/react-compatibility.mdx +0 -1
  65. package/.docs/raw/docs/cloud/persistence/ai-sdk.mdx +0 -1
  66. package/.docs/raw/docs/cloud/persistence/langgraph.mdx +0 -1
  67. package/.docs/raw/docs/runtimes/ai-sdk/v4-legacy.mdx +0 -1
  68. package/.docs/raw/docs/runtimes/ai-sdk/v5-legacy.mdx +118 -0
  69. package/.docs/raw/docs/runtimes/ai-sdk/v6.mdx +198 -0
  70. package/.docs/raw/docs/runtimes/assistant-transport.mdx +3 -3
  71. package/.docs/raw/docs/runtimes/custom/custom-thread-list.mdx +5 -6
  72. package/.docs/raw/docs/runtimes/custom/external-store.mdx +9 -11
  73. package/.docs/raw/docs/runtimes/custom/local.mdx +43 -36
  74. package/.docs/raw/docs/runtimes/data-stream.mdx +35 -3
  75. package/.docs/raw/docs/runtimes/langgraph/index.mdx +1 -2
  76. package/.docs/raw/docs/runtimes/langgraph/tutorial/part-3.mdx +0 -1
  77. package/.docs/raw/docs/runtimes/langserve.mdx +0 -1
  78. package/.docs/raw/docs/runtimes/mastra/full-stack-integration.mdx +0 -1
  79. package/.docs/raw/docs/runtimes/mastra/separate-server-integration.mdx +0 -1
  80. package/.docs/raw/docs/ui/accordion.mdx +259 -0
  81. package/.docs/raw/docs/ui/assistant-modal.mdx +1 -3
  82. package/.docs/raw/docs/ui/assistant-sidebar.mdx +1 -3
  83. package/.docs/raw/docs/ui/attachment.mdx +0 -2
  84. package/.docs/raw/docs/ui/badge.mdx +138 -0
  85. package/.docs/raw/docs/ui/diff-viewer.mdx +279 -0
  86. package/.docs/raw/docs/ui/file.mdx +152 -0
  87. package/.docs/raw/docs/ui/image.mdx +100 -0
  88. package/.docs/raw/docs/ui/markdown.mdx +0 -1
  89. package/.docs/raw/docs/ui/mermaid.mdx +0 -1
  90. package/.docs/raw/docs/ui/model-selector.mdx +224 -0
  91. package/.docs/raw/docs/ui/part-grouping.mdx +4 -5
  92. package/.docs/raw/docs/ui/reasoning.mdx +6 -5
  93. package/.docs/raw/docs/ui/scrollbar.mdx +26 -9
  94. package/.docs/raw/docs/ui/select.mdx +245 -0
  95. package/.docs/raw/docs/ui/sources.mdx +6 -5
  96. package/.docs/raw/docs/ui/streamdown.mdx +348 -0
  97. package/.docs/raw/docs/ui/syntax-highlighting.mdx +8 -63
  98. package/.docs/raw/docs/ui/tabs.mdx +259 -0
  99. package/.docs/raw/docs/ui/thread-list.mdx +98 -16
  100. package/.docs/raw/docs/ui/thread.mdx +57 -73
  101. package/.docs/raw/docs/ui/tool-fallback.mdx +0 -1
  102. package/.docs/raw/docs/ui/tool-group.mdx +1 -3
  103. package/README.md +3 -3
  104. package/package.json +4 -4
  105. package/src/tools/tests/examples.test.ts +1 -1
  106. package/.docs/raw/docs/(docs)/about-assistantui.mdx +0 -54
  107. package/.docs/raw/docs/(docs)/mcp-docs-server.mdx +0 -321
  108. package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +0 -219
package/.docs/raw/docs/(docs)/installation.mdx
@@ -0,0 +1,450 @@
+ ---
+ title: Installation
+ description: Get assistant-ui running in 5 minutes with npm and your first chat component.
+ ---
+
+
+ ## Quick Start
+
+ The fastest way to get started with assistant-ui.
+
+ ![animated gif showing the steps to create a new project](../../../../../.github/assets/assistant-ui-starter.gif)
+
+ <Steps>
+ <Step>
+
+ ### Initialize assistant-ui
+
+ **Create a new project:**
+
+ ```sh
+ npx assistant-ui@latest create
+ ```
+
+ Or choose a template:
+
+ ```sh
+ # Assistant Cloud - with persistence and thread management
+ npx assistant-ui@latest create -t cloud
+
+ # LangGraph
+ npx assistant-ui@latest create -t langgraph
+
+ # MCP support
+ npx assistant-ui@latest create -t mcp
+ ```
+
+ **Add to an existing project:**
+
+ ```sh
+ npx assistant-ui@latest init
+ ```
+
+ </Step>
+ <Step>
+
+ ### Add API key
+
+ Create a `.env` file with your API key:
+
+ ```
+ OPENAI_API_KEY="sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ </Step>
+ <Step>
+
+ ### Start the app
+
+ ```sh
+ npm run dev
+ ```
+
+ </Step>
+ </Steps>
+
+ ## Manual Setup
+
+ If you prefer not to use the CLI, you can install components manually.
+
+ <Steps>
+ <Step>
+
+ ### Add assistant-ui
+
+ <InstallCommand shadcn={["thread", "thread-list"]} manualSetupInstructions />
+
+ </Step>
+ <Step>
+
+ ### Setup Backend Endpoint
+
+ Install provider SDK:
+
+ <Tabs groupId="provider" items={["OpenAI", "Anthropic", "Azure", "AWS", "Gemini", "GCP", "Groq", "Fireworks", "Cohere", "Ollama", "Chrome AI"]}>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/openai"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/anthropic"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/azure"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/amazon-bedrock"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/google"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/google-vertex"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/openai"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/openai"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "@ai-sdk/cohere"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "ollama-ai-provider-v2"]} />
+ </Tab>
+ <Tab>
+ <InstallCommand npm={["ai", "@assistant-ui/react-ai-sdk", "chrome-ai"]} />
+ </Tab>
+ </Tabs>
+
+ Add an API endpoint:
+
+ <Tabs groupId="provider" items={["OpenAI", "Anthropic", "Azure", "AWS", "Gemini", "GCP", "Groq", "Fireworks", "Cohere", "Ollama", "Chrome AI"]}>
+ ```ts title="/app/api/chat/route.ts" tab="OpenAI"
+ import { openai } from "@ai-sdk/openai";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: openai("gpt-4o-mini"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Anthropic"
+ import { anthropic } from "@ai-sdk/anthropic";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: anthropic("claude-3-5-sonnet-20240620"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Azure"
+ import { azure } from "@ai-sdk/azure";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: azure("your-deployment-name"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="AWS"
+ import { bedrock } from "@ai-sdk/amazon-bedrock";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: bedrock("anthropic.claude-3-5-sonnet-20240620-v1:0"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Gemini"
+ import { google } from "@ai-sdk/google";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: google("gemini-2.0-flash"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="GCP"
+ import { vertex } from "@ai-sdk/google-vertex";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: vertex("gemini-1.5-pro"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Groq"
+ import { createOpenAI } from "@ai-sdk/openai";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ const groq = createOpenAI({
+   apiKey: process.env.GROQ_API_KEY ?? "",
+   baseURL: "https://api.groq.com/openai/v1",
+ });
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: groq("llama3-70b-8192"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Fireworks"
+ import { createOpenAI } from "@ai-sdk/openai";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ const fireworks = createOpenAI({
+   apiKey: process.env.FIREWORKS_API_KEY ?? "",
+   baseURL: "https://api.fireworks.ai/inference/v1",
+ });
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: fireworks("accounts/fireworks/models/firefunction-v2"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Cohere"
+ import { cohere } from "@ai-sdk/cohere";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: cohere("command-r-plus"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Ollama"
+ import { ollama } from "ollama-ai-provider-v2";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: ollama("llama3"),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ ```ts title="/app/api/chat/route.ts" tab="Chrome AI"
+ import { chromeai } from "chrome-ai";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export const maxDuration = 30;
+
+ export async function POST(req: Request) {
+   const { messages } = await req.json();
+   const result = streamText({
+     model: chromeai(),
+     messages: convertToModelMessages(messages),
+   });
+   return result.toUIMessageStreamResponse();
+ }
+ ```
+
+ </Tabs>
+
+ Define environment variables:
+
+ <Tabs groupId="provider" items={["OpenAI", "Anthropic", "Azure", "AWS", "Gemini", "GCP", "Groq", "Fireworks", "Cohere", "Ollama", "Chrome AI"]}>
+
+ ```sh title="/.env.local" tab="OpenAI"
+ OPENAI_API_KEY="sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="Anthropic"
+ ANTHROPIC_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="Azure"
+ AZURE_RESOURCE_NAME="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ AZURE_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="AWS"
+ AWS_ACCESS_KEY_ID="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ AWS_SECRET_ACCESS_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ AWS_REGION="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="Gemini"
+ GOOGLE_GENERATIVE_AI_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="GCP"
+ GOOGLE_VERTEX_PROJECT="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ GOOGLE_VERTEX_LOCATION="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ GOOGLE_APPLICATION_CREDENTIALS="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="Groq"
+ GROQ_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="Fireworks"
+ FIREWORKS_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh title="/.env.local" tab="Cohere"
+ COHERE_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ ```
+
+ ```sh tab="Ollama"
+ <none>
+ ```
+
+ ```sh tab="Chrome AI"
+ <none>
+ ```
+
+ </Tabs>
+
+ If you aren't using Next.js, you can also deploy this endpoint to Cloudflare Workers, or any other serverless platform.
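+
+ For example, a Cloudflare Worker version of the OpenAI route above could look roughly like this. This is a sketch rather than part of the generated template, and the `OPENAI_API_KEY` environment binding name is an assumption:
+
+ ```ts
+ // Hypothetical Cloudflare Worker sketch of the same chat endpoint.
+ import { createOpenAI } from "@ai-sdk/openai";
+ import { convertToModelMessages, streamText } from "ai";
+
+ export default {
+   async fetch(request: Request, env: { OPENAI_API_KEY: string }): Promise<Response> {
+     // Workers expose secrets via env bindings instead of process.env.
+     const openai = createOpenAI({ apiKey: env.OPENAI_API_KEY });
+     const { messages } = await request.json();
+     const result = streamText({
+       model: openai("gpt-4o-mini"),
+       messages: convertToModelMessages(messages),
+     });
+     // toUIMessageStreamResponse() returns a standard Response, so it works outside Next.js.
+     return result.toUIMessageStreamResponse();
+   },
+ };
+ ```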
367
+
368
+ </Step>
369
+
370
+ <Step>
371
+
372
+ ### Use it in your app
373
+
374
+ <Tabs items={["Thread", "AssistantModal"]}>
375
+
376
+ ```tsx title="/app/page.tsx" tab="Thread"
377
+ import { AssistantRuntimeProvider } from "@assistant-ui/react";
378
+ import { useChatRuntime, AssistantChatTransport } from "@assistant-ui/react-ai-sdk";
379
+ import { ThreadList } from "@/components/assistant-ui/thread-list";
380
+ import { Thread } from "@/components/assistant-ui/thread";
381
+
382
+ const MyApp = () => {
383
+ const runtime = useChatRuntime({
384
+ transport: new AssistantChatTransport({
385
+ api: "/api/chat",
386
+ }),
387
+ });
388
+
389
+ return (
390
+ <AssistantRuntimeProvider runtime={runtime}>
391
+ <div>
392
+ <ThreadList />
393
+ <Thread />
394
+ </div>
395
+ </AssistantRuntimeProvider>
396
+ );
397
+ };
398
+ ```
399
+
400
+ ```tsx title="/app/page.tsx" tab="AssistantModal"
401
+ // run `npx shadcn@latest add https://r.assistant-ui.com/assistant-modal.json`
402
+
403
+ import { AssistantRuntimeProvider } from "@assistant-ui/react";
404
+ import { useChatRuntime, AssistantChatTransport } from "@assistant-ui/react-ai-sdk";
405
+ import { AssistantModal } from "@/components/assistant-ui/assistant-modal";
406
+
407
+ const MyApp = () => {
408
+ const runtime = useChatRuntime({
409
+ transport: new AssistantChatTransport({
410
+ api: "/api/chat",
411
+ }),
412
+ });
413
+
414
+ return (
415
+ <AssistantRuntimeProvider runtime={runtime}>
416
+ <AssistantModal />
417
+ </AssistantRuntimeProvider>
418
+ );
419
+ };
420
+ ```
421
+
422
+ </Tabs>
423
+
424
+ </Step>
425
+ </Steps>
426
+
427
+ ## What's Next?
428
+
429
+ <Cards>
430
+ <Card
431
+ title="Pick a Runtime"
432
+ description="Choose the right runtime for your needs"
433
+ href="/docs/runtimes/pick-a-runtime"
434
+ />
435
+ <Card
436
+ title="Generative UI"
437
+ description="Create rich UI components for tool executions"
438
+ href="/docs/guides/tool-ui"
439
+ />
440
+ <Card
441
+ title="Add Persistence"
442
+ description="Save and restore chat conversations"
443
+ href="/docs/cloud/overview"
444
+ />
445
+ <Card
446
+ title="Examples"
447
+ description="Explore full implementations and demos"
448
+ href="https://github.com/assistant-ui/assistant-ui/tree/main/examples"
449
+ />
450
+ </Cards>
package/.docs/raw/docs/(docs)/llm.mdx
@@ -0,0 +1,209 @@
+ ---
+ title: "AI-Assisted Development"
+ description: Use AI tools to build with assistant-ui faster. AI-accessible documentation, Claude Code skills, and MCP integration.
+ ---
+
+ import { FileText } from "lucide-react";
+
+ Build faster with AI assistants that understand assistant-ui. This page covers all the ways to give your AI tools access to assistant-ui documentation and context.
+
+ ## AI Accessible Documentation
+
+ Our docs are designed to be easily accessible to AI assistants:
+
+ <Cards>
+   <Card icon={<FileText className="text-blue-300" />} title="/llms.txt" href="/llms.txt" external>
+     Structured index of all documentation pages. Point your AI here for a quick overview.
+   </Card>
+
+   <Card icon={<FileText className="text-green-300" />} title="/llms-full.txt" href="/llms-full.txt" external>
+     Complete documentation in a single file. Use this for full context.
+   </Card>
+
+   <Card icon={<FileText className="text-purple-300" />} title=".mdx suffix">
+     Add `.mdx` to any page's URL to get raw markdown content (e.g., `/docs/installation.mdx`).
+   </Card>
+ </Cards>
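+
+ For example, the `.mdx` suffix can be fetched with a plain HTTP request, assuming the same host as the documentation link below:
+
+ ```sh
+ # Fetch the installation page as raw markdown
+ curl https://www.assistant-ui.com/docs/installation.mdx
+ ```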
+
+ ### Context Files
+
+ Add assistant-ui context to your project's `CLAUDE.md` or `.cursorrules`:
+
+ ```md
+ ## assistant-ui
+
+ This project uses assistant-ui for chat interfaces.
+
+ Documentation: https://www.assistant-ui.com/llms-full.txt
+
+ Key patterns:
+ - Use AssistantRuntimeProvider at the app root
+ - Thread component for full chat interface
+ - AssistantModal for floating chat widget
+ - useChatRuntime hook with AI SDK transport
+ ```
+
+ ## Skills
+
+ Install assistant-ui skills for AI tools:
+
+ ```sh
+ npx skills add assistant-ui/skills
+ ```
+
+ | Skill | Purpose |
+ |-------|---------|
+ | `/assistant-ui` | General architecture and overview guide |
+ | `/setup` | Project setup and configuration (AI SDK, LangGraph, custom backends) |
+ | `/primitives` | UI component primitives (Thread, Composer, Message, etc.) |
+ | `/runtime` | Runtime system and state management |
+ | `/tools` | Tool registration and tool UI |
+ | `/streaming` | Streaming protocol with assistant-stream |
+ | `/cloud` | Cloud persistence and authorization |
+ | `/thread-list` | Multi-thread management |
+ | `/update` | Update assistant-ui and AI SDK to latest versions |
+
+ Use by typing the command in Claude Code, e.g., `/assistant-ui` for the main guide or `/setup` when setting up a project.
+
+ ## MCP
+
+ `@assistant-ui/mcp-docs-server` provides direct access to assistant-ui documentation and examples in your IDE via the Model Context Protocol.
+
+ Once installed, your AI assistant will understand everything about assistant-ui - just ask naturally:
+
+ - "Add a chat interface with streaming support to my app"
+ - "How do I integrate assistant-ui with the Vercel AI SDK?"
+ - "My Thread component isn't updating, what could be wrong?"
+
+ ### Quick Install (CLI)
+
+ ```bash
+ npx assistant-ui mcp
+ ```
+
+ Or specify your IDE directly:
+
+ ```bash
+ npx assistant-ui mcp --cursor
+ npx assistant-ui mcp --windsurf
+ npx assistant-ui mcp --vscode
+ npx assistant-ui mcp --zed
+ npx assistant-ui mcp --claude-code
+ npx assistant-ui mcp --claude-desktop
+ ```
+
+ ### Manual Installation
+
+ <Tabs items={["Cursor", "Windsurf", "VSCode", "Zed", "Claude Code", "Claude Desktop"]}>
+ <Tab>
+ <a href="cursor://anysphere.cursor-deeplink/mcp/install?name=assistant-ui&config=eyJjb21tYW5kIjoibnB4IiwiYXJncyI6WyIteSIsIkBhc3Npc3RhbnQtdWkvbWNwLWRvY3Mtc2VydmVyIl19">
+ <img src="https://cursor.com/deeplink/mcp-install-dark.svg" alt="Install in Cursor" className="not-prose" />
+ </a>
+
+ Or add to `.cursor/mcp.json`:
+
+ ```json
+ {
+   "mcpServers": {
+     "assistant-ui": {
+       "command": "npx",
+       "args": ["-y", "@assistant-ui/mcp-docs-server"]
+     }
+   }
+ }
+ ```
+
+ After adding, open Cursor Settings → MCP → find "assistant-ui" and click enable.
+ </Tab>
+ <Tab>
+ Add to `~/.codeium/windsurf/mcp_config.json`:
+
+ ```json
+ {
+   "mcpServers": {
+     "assistant-ui": {
+       "command": "npx",
+       "args": ["-y", "@assistant-ui/mcp-docs-server"]
+     }
+   }
+ }
+ ```
+
+ After adding, fully quit and re-open Windsurf.
+ </Tab>
+ <Tab>
+ Add to `.vscode/mcp.json` in your project:
+
+ ```json
+ {
+   "servers": {
+     "assistant-ui": {
+       "command": "npx",
+       "args": ["-y", "@assistant-ui/mcp-docs-server"],
+       "type": "stdio"
+     }
+   }
+ }
+ ```
+
+ Enable MCP in Settings → search "MCP" → enable "Chat > MCP". Use GitHub Copilot Chat in Agent mode.
+ </Tab>
+ <Tab>
+ Add to your Zed settings file:
+ - macOS: `~/.zed/settings.json`
+ - Linux: `~/.config/zed/settings.json`
+ - Windows: `%APPDATA%\Zed\settings.json`
+
+ Or open via `Cmd/Ctrl + ,` → "Open JSON Settings"
+
+ ```json
+ {
+   "context_servers": {
+     "assistant-ui": {
+       "command": {
+         "path": "npx",
+         "args": ["-y", "@assistant-ui/mcp-docs-server"]
+       }
+     }
+   }
+ }
+ ```
+
+ The server starts automatically with the Assistant Panel.
+ </Tab>
+ <Tab>
+ ```bash
+ claude mcp add assistant-ui -- npx -y @assistant-ui/mcp-docs-server
+ ```
+
+ The server starts automatically once added.
+ </Tab>
+ <Tab>
+ Add to `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS) or `%APPDATA%\Claude\claude_desktop_config.json` (Windows):
+
+ ```json
+ {
+   "mcpServers": {
+     "assistant-ui": {
+       "command": "npx",
+       "args": ["-y", "@assistant-ui/mcp-docs-server"]
+     }
+   }
+ }
+ ```
+
+ Restart Claude Desktop after updating the configuration.
+ </Tab>
+ </Tabs>
+
+ ### Available Tools
+
+ | Tool | Description |
+ |------|-------------|
+ | `assistantUIDocs` | Access documentation: getting started, component APIs, runtime docs, integration guides |
+ | `assistantUIExamples` | Browse code examples: AI SDK, LangGraph, OpenAI Assistants, tool UI patterns |
+
+ ### Troubleshooting
+
+ - **Server not starting**: Ensure `npx` is installed and working. Check configuration file syntax.
+ - **Tool calls failing**: Restart the MCP server and/or your IDE. Update to latest IDE version.
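+
+ As a quick sanity check, you can also run the same command the configs above use; if `npx` and the package are working, the server should start and wait for an MCP client on stdio:
+
+ ```sh
+ npx -y @assistant-ui/mcp-docs-server
+ ```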
package/.docs/raw/docs/(reference)/api-reference/context-providers/assistant-runtime-provider.mdx
@@ -3,7 +3,6 @@ title: <AssistantRuntimeProvider />
  description: Root provider that connects your runtime to assistant-ui components.
  ---

- import { ParametersTable } from "@/components/docs/tables/ParametersTable";
  import { AssistantRuntimeProvider } from "@/generated/typeDocs";

  The `AssistantRuntimeProvider` provides data and APIs used by assistant-ui components.
package/.docs/raw/docs/(reference)/api-reference/context-providers/text-message-part-provider.mdx
@@ -3,7 +3,6 @@ title: <TextMessagePartProvider />
  description: Context provider for reusing text components outside of message content.
  ---

- import { ParametersTable } from "@/components/docs/tables/ParametersTable";
  import { AssistantRuntimeProvider } from "@/generated/typeDocs";

  The `TextMessagePartProvider` provides data and APIs for `TextMessagePart` components.