@assistant-ui/mcp-docs-server 0.1.18 → 0.1.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. package/.docs/organized/code-examples/with-ag-ui.md +375 -85
  2. package/.docs/organized/code-examples/{with-ai-sdk-v5.md → with-ai-sdk-v6.md} +386 -94
  3. package/.docs/organized/code-examples/with-assistant-transport.md +374 -84
  4. package/.docs/organized/code-examples/with-cloud.md +405 -100
  5. package/.docs/organized/code-examples/with-custom-thread-list.md +412 -105
  6. package/.docs/organized/code-examples/with-elevenlabs-scribe.md +2241 -0
  7. package/.docs/organized/code-examples/with-external-store.md +374 -83
  8. package/.docs/organized/code-examples/with-ffmpeg.md +377 -87
  9. package/.docs/organized/code-examples/with-langgraph.md +403 -98
  10. package/.docs/organized/code-examples/with-parent-id-grouping.md +374 -83
  11. package/.docs/organized/code-examples/with-react-hook-form.md +379 -89
  12. package/.docs/organized/code-examples/with-react-router.md +2167 -0
  13. package/.docs/organized/code-examples/with-store.md +5 -5
  14. package/.docs/organized/code-examples/with-tanstack.md +10 -10
  15. package/.docs/raw/blog/2025-01-31-changelog/index.mdx +0 -2
  16. package/.docs/raw/docs/{about-assistantui.mdx → (docs)/about-assistantui.mdx} +2 -1
  17. package/.docs/raw/docs/{architecture.mdx → (docs)/architecture.mdx} +3 -2
  18. package/.docs/raw/docs/{cli.mdx → (docs)/cli.mdx} +1 -19
  19. package/.docs/raw/docs/{copilots → (docs)/copilots}/make-assistant-readable.mdx +1 -0
  20. package/.docs/raw/docs/{copilots → (docs)/copilots}/make-assistant-tool-ui.mdx +2 -1
  21. package/.docs/raw/docs/{copilots → (docs)/copilots}/make-assistant-tool.mdx +2 -1
  22. package/.docs/raw/docs/{copilots → (docs)/copilots}/model-context.mdx +1 -0
  23. package/.docs/raw/docs/{copilots → (docs)/copilots}/motivation.mdx +1 -0
  24. package/.docs/raw/docs/{copilots → (docs)/copilots}/use-assistant-instructions.mdx +1 -0
  25. package/.docs/raw/docs/{devtools.mdx → (docs)/devtools.mdx} +4 -4
  26. package/.docs/raw/docs/{guides/Attachments.mdx → (docs)/guides/attachments.mdx} +4 -5
  27. package/.docs/raw/docs/{guides/Branching.mdx → (docs)/guides/branching.mdx} +2 -1
  28. package/.docs/raw/docs/{guides → (docs)/guides}/context-api.mdx +1 -0
  29. package/.docs/raw/docs/(docs)/guides/dictation.mdx +370 -0
  30. package/.docs/raw/docs/{guides/Editing.mdx → (docs)/guides/editing.mdx} +1 -0
  31. package/.docs/raw/docs/{guides/Latex.mdx → (docs)/guides/latex.mdx} +1 -2
  32. package/.docs/raw/docs/{guides/Speech.mdx → (docs)/guides/speech.mdx} +9 -10
  33. package/.docs/raw/docs/{guides/ToolUI.mdx → (docs)/guides/tool-ui.mdx} +15 -14
  34. package/.docs/raw/docs/{guides/Tools.mdx → (docs)/guides/tools.mdx} +10 -7
  35. package/.docs/raw/docs/{getting-started.mdx → (docs)/index.mdx} +17 -22
  36. package/.docs/raw/docs/{mcp-docs-server.mdx → (docs)/mcp-docs-server.mdx} +1 -2
  37. package/.docs/raw/docs/{api-reference/context-providers/AssistantRuntimeProvider.mdx → (reference)/api-reference/context-providers/assistant-runtime-provider.mdx} +2 -1
  38. package/.docs/raw/docs/{api-reference/context-providers/TextMessagePartProvider.mdx → (reference)/api-reference/context-providers/text-message-part-provider.mdx} +2 -1
  39. package/.docs/raw/docs/{api-reference → (reference)/api-reference}/integrations/react-data-stream.mdx +2 -1
  40. package/.docs/raw/docs/{api-reference → (reference)/api-reference}/integrations/react-hook-form.mdx +2 -1
  41. package/.docs/raw/docs/{api-reference → (reference)/api-reference}/integrations/vercel-ai-sdk.mdx +2 -2
  42. package/.docs/raw/docs/{api-reference → (reference)/api-reference}/overview.mdx +1 -1
  43. package/.docs/raw/docs/(reference)/api-reference/primitives/action-bar-more.mdx +327 -0
  44. package/.docs/raw/docs/{api-reference/primitives/ActionBar.mdx → (reference)/api-reference/primitives/action-bar.mdx} +3 -1
  45. package/.docs/raw/docs/{api-reference/primitives/AssistantIf.mdx → (reference)/api-reference/primitives/assistant-if.mdx} +2 -2
  46. package/.docs/raw/docs/{api-reference/primitives/AssistantModal.mdx → (reference)/api-reference/primitives/assistant-modal.mdx} +3 -1
  47. package/.docs/raw/docs/{api-reference/primitives/Attachment.mdx → (reference)/api-reference/primitives/attachment.mdx} +3 -2
  48. package/.docs/raw/docs/{api-reference/primitives/BranchPicker.mdx → (reference)/api-reference/primitives/branch-picker.mdx} +2 -1
  49. package/.docs/raw/docs/{api-reference/primitives/Composer.mdx → (reference)/api-reference/primitives/composer.mdx} +101 -2
  50. package/.docs/raw/docs/{api-reference → (reference)/api-reference}/primitives/composition.mdx +1 -0
  51. package/.docs/raw/docs/{api-reference/primitives/Error.mdx → (reference)/api-reference/primitives/error.mdx} +2 -1
  52. package/.docs/raw/docs/{api-reference/primitives/MessagePart.mdx → (reference)/api-reference/primitives/message-part.mdx} +2 -2
  53. package/.docs/raw/docs/{api-reference/primitives/Message.mdx → (reference)/api-reference/primitives/message.mdx} +2 -1
  54. package/.docs/raw/docs/(reference)/api-reference/primitives/thread-list-item-more.mdx +221 -0
  55. package/.docs/raw/docs/{api-reference/primitives/ThreadListItem.mdx → (reference)/api-reference/primitives/thread-list-item.mdx} +2 -1
  56. package/.docs/raw/docs/{api-reference/primitives/ThreadList.mdx → (reference)/api-reference/primitives/thread-list.mdx} +2 -1
  57. package/.docs/raw/docs/{api-reference/primitives/Thread.mdx → (reference)/api-reference/primitives/thread.mdx} +2 -1
  58. package/.docs/raw/docs/{api-reference/runtimes/AssistantRuntime.mdx → (reference)/api-reference/runtimes/assistant-runtime.mdx} +2 -1
  59. package/.docs/raw/docs/{api-reference/runtimes/AttachmentRuntime.mdx → (reference)/api-reference/runtimes/attachment-runtime.mdx} +3 -2
  60. package/.docs/raw/docs/{api-reference/runtimes/ComposerRuntime.mdx → (reference)/api-reference/runtimes/composer-runtime.mdx} +2 -1
  61. package/.docs/raw/docs/{api-reference/runtimes/MessagePartRuntime.mdx → (reference)/api-reference/runtimes/message-part-runtime.mdx} +3 -2
  62. package/.docs/raw/docs/{api-reference/runtimes/MessageRuntime.mdx → (reference)/api-reference/runtimes/message-runtime.mdx} +3 -2
  63. package/.docs/raw/docs/{api-reference/runtimes/ThreadListItemRuntime.mdx → (reference)/api-reference/runtimes/thread-list-item-runtime.mdx} +2 -1
  64. package/.docs/raw/docs/{api-reference/runtimes/ThreadListRuntime.mdx → (reference)/api-reference/runtimes/thread-list-runtime.mdx} +2 -1
  65. package/.docs/raw/docs/{api-reference/runtimes/ThreadRuntime.mdx → (reference)/api-reference/runtimes/thread-runtime.mdx} +3 -5
  66. package/.docs/raw/docs/{legacy/styled/AssistantModal.mdx → (reference)/legacy/styled/assistant-modal.mdx} +2 -3
  67. package/.docs/raw/docs/{legacy/styled/Decomposition.mdx → (reference)/legacy/styled/decomposition.mdx} +1 -0
  68. package/.docs/raw/docs/{legacy/styled/Markdown.mdx → (reference)/legacy/styled/markdown.mdx} +2 -4
  69. package/.docs/raw/docs/{legacy/styled/Scrollbar.mdx → (reference)/legacy/styled/scrollbar.mdx} +2 -1
  70. package/.docs/raw/docs/{legacy/styled/ThreadWidth.mdx → (reference)/legacy/styled/thread-width.mdx} +1 -0
  71. package/.docs/raw/docs/{legacy/styled/Thread.mdx → (reference)/legacy/styled/thread.mdx} +2 -3
  72. package/.docs/raw/docs/{migrations → (reference)/migrations}/deprecation-policy.mdx +1 -0
  73. package/.docs/raw/docs/{migrations → (reference)/migrations}/react-langgraph-v0-7.mdx +1 -2
  74. package/.docs/raw/docs/{migrations → (reference)/migrations}/v0-11.mdx +1 -0
  75. package/.docs/raw/docs/{migrations → (reference)/migrations}/v0-12.mdx +1 -0
  76. package/.docs/raw/docs/{react-compatibility.mdx → (reference)/react-compatibility.mdx} +2 -3
  77. package/.docs/raw/docs/cloud/authorization.mdx +1 -0
  78. package/.docs/raw/docs/cloud/overview.mdx +1 -0
  79. package/.docs/raw/docs/cloud/persistence/ai-sdk.mdx +2 -3
  80. package/.docs/raw/docs/cloud/persistence/langgraph.mdx +5 -7
  81. package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +9 -8
  82. package/.docs/raw/docs/runtimes/ai-sdk/v4-legacy.mdx +2 -3
  83. package/.docs/raw/docs/runtimes/assistant-transport.mdx +7 -6
  84. package/.docs/raw/docs/runtimes/custom/custom-thread-list.mdx +2 -3
  85. package/.docs/raw/docs/runtimes/custom/external-store.mdx +6 -8
  86. package/.docs/raw/docs/runtimes/custom/local.mdx +12 -8
  87. package/.docs/raw/docs/runtimes/data-stream.mdx +32 -4
  88. package/.docs/raw/docs/runtimes/helicone.mdx +1 -0
  89. package/.docs/raw/docs/runtimes/langgraph/index.mdx +3 -3
  90. package/.docs/raw/docs/runtimes/langgraph/tutorial/index.mdx +1 -0
  91. package/.docs/raw/docs/runtimes/langgraph/tutorial/introduction.mdx +1 -0
  92. package/.docs/raw/docs/runtimes/langgraph/tutorial/part-1.mdx +1 -0
  93. package/.docs/raw/docs/runtimes/langgraph/tutorial/part-2.mdx +1 -0
  94. package/.docs/raw/docs/runtimes/langgraph/tutorial/part-3.mdx +2 -1
  95. package/.docs/raw/docs/runtimes/langserve.mdx +2 -2
  96. package/.docs/raw/docs/runtimes/mastra/full-stack-integration.mdx +4 -5
  97. package/.docs/raw/docs/runtimes/mastra/overview.mdx +1 -0
  98. package/.docs/raw/docs/runtimes/mastra/separate-server-integration.mdx +3 -4
  99. package/.docs/raw/docs/runtimes/pick-a-runtime.mdx +2 -4
  100. package/.docs/raw/docs/ui/assistant-modal.mdx +163 -0
  101. package/.docs/raw/docs/ui/assistant-sidebar.mdx +90 -0
  102. package/.docs/raw/docs/ui/attachment.mdx +227 -0
  103. package/.docs/raw/docs/ui/{Markdown.mdx → markdown.mdx} +11 -6
  104. package/.docs/raw/docs/ui/{Mermaid.mdx → mermaid.mdx} +12 -5
  105. package/.docs/raw/docs/ui/{PartGrouping.mdx → part-grouping.mdx} +4 -6
  106. package/.docs/raw/docs/ui/reasoning.mdx +148 -0
  107. package/.docs/raw/docs/ui/{Scrollbar.mdx → scrollbar.mdx} +9 -1
  108. package/.docs/raw/docs/ui/sources.mdx +87 -0
  109. package/.docs/raw/docs/ui/{SyntaxHighlighting.mdx → syntax-highlighting.mdx} +9 -5
  110. package/.docs/raw/docs/ui/thread-list.mdx +275 -0
  111. package/.docs/raw/docs/ui/{Thread.mdx → thread.mdx} +5 -6
  112. package/.docs/raw/docs/ui/tool-fallback.mdx +112 -0
  113. package/.docs/raw/docs/ui/tool-group.mdx +214 -0
  114. package/dist/tools/docs.js +1 -1
  115. package/dist/tools/examples.js +1 -1
  116. package/dist/tools/examples.js.map +1 -1
  117. package/package.json +5 -5
  118. package/src/tools/docs.ts +1 -1
  119. package/src/tools/examples.ts +1 -1
  120. package/src/tools/tests/docs.test.ts +18 -16
  121. package/src/tools/tests/examples.test.ts +5 -5
  122. package/src/tools/tests/path-traversal.test.ts +3 -3
  123. package/src/utils/tests/security.test.ts +3 -3
  124. package/.docs/raw/docs/index.mdx +0 -7
  125. package/.docs/raw/docs/ui/AssistantModal.mdx +0 -45
  126. package/.docs/raw/docs/ui/AssistantSidebar.mdx +0 -41
  127. package/.docs/raw/docs/ui/Attachment.mdx +0 -84
  128. package/.docs/raw/docs/ui/Reasoning.mdx +0 -152
  129. package/.docs/raw/docs/ui/ThreadList.mdx +0 -90
  130. package/.docs/raw/docs/ui/ToolFallback.mdx +0 -63
  131. package/.docs/raw/docs/ui/ToolGroup.mdx +0 -96
  132. package/.docs/raw/docs/{copilots → (docs)/copilots}/assistant-frame.mdx +0 -0
package/.docs/raw/docs/(docs)/guides/dictation.mdx
@@ -0,0 +1,370 @@
+ ---
+ title: Speech-to-Text (Dictation)
+ ---
+
+ import { DictationSample } from "@/components/docs/samples/dictation";
+
+ assistant-ui supports speech-to-text (dictation) via the `DictationAdapter` interface. This allows users to input messages using their voice.
+
+ <DictationSample />
+
+ ## DictationAdapter
+
+ Currently, the following dictation adapters are supported:
+
+ - `WebSpeechDictationAdapter`: Uses the browser's `Web Speech API` (SpeechRecognition)
+
+ The `WebSpeechDictationAdapter` is supported in Chrome, Edge, and Safari. Check [browser compatibility](https://developer.mozilla.org/en-US/docs/Web/API/SpeechRecognition#browser_compatibility) for details.
+
+ ## Configuration
+
+ ```tsx
+ import { WebSpeechDictationAdapter } from "@assistant-ui/react";
+
+ const runtime = useChatRuntime({
+   api: "/api/chat",
+   adapters: {
+     dictation: new WebSpeechDictationAdapter({
+       // Optional configuration
+       language: "en-US", // Language for recognition (default: browser language)
+       continuous: true, // Keep recording after user stops (default: true)
+       interimResults: true, // Return interim results (default: true)
+     }),
+   },
+ });
+ ```
+
+ ## UI
+
+ The dictation feature uses the `ComposerPrimitive.Dictate` and `ComposerPrimitive.StopDictation` components.
+
+ ```tsx
+ import { ComposerPrimitive } from "@assistant-ui/react";
+ import { MicIcon, SquareIcon } from "lucide-react";
+
+ const ComposerWithDictation = () => (
+   <ComposerPrimitive.Root>
+     <ComposerPrimitive.Input />
+
+     {/* Show Dictate button when not dictating */}
+     <ComposerPrimitive.If dictation={false}>
+       <ComposerPrimitive.Dictate>
+         <MicIcon />
+       </ComposerPrimitive.Dictate>
+     </ComposerPrimitive.If>
+
+     {/* Show Stop button when dictating */}
+     <ComposerPrimitive.If dictation>
+       <ComposerPrimitive.StopDictation>
+         <SquareIcon className="animate-pulse" />
+       </ComposerPrimitive.StopDictation>
+     </ComposerPrimitive.If>
+
+     <ComposerPrimitive.Send />
+   </ComposerPrimitive.Root>
+ );
+ ```
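+
+ If you want to surface the live transcript outside the input, for example as a status readout, a minimal sketch (using `ComposerPrimitive.DictationTranscript`, described under "Real-time Preview" below) could look like this:
+
+ ```tsx
+ import { ComposerPrimitive } from "@assistant-ui/react";
+
+ // Sketch: a live transcript readout that only renders while dictating.
+ const DictationStatus = () => (
+   <ComposerPrimitive.If dictation>
+     <span aria-live="polite">
+       <ComposerPrimitive.DictationTranscript />
+     </span>
+   </ComposerPrimitive.If>
+ );
+ ```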
+
+ ## Browser Compatibility Check
+
+ You can check if the browser supports dictation:
+
+ ```tsx
+ import { WebSpeechDictationAdapter } from "@assistant-ui/react";
+
+ if (WebSpeechDictationAdapter.isSupported()) {
+   // Dictation is available
+ }
+ ```
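+
+ For example, you can skip registering the adapter in browsers without support. A minimal sketch, reusing the `useChatRuntime` configuration from above:
+
+ ```tsx
+ import { WebSpeechDictationAdapter } from "@assistant-ui/react";
+
+ const runtime = useChatRuntime({
+   api: "/api/chat",
+   adapters: {
+     // Only offer dictation where the Web Speech API is available.
+     ...(WebSpeechDictationAdapter.isSupported()
+       ? { dictation: new WebSpeechDictationAdapter() }
+       : {}),
+   },
+ });
+ ```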
+
+ ## Disabling Input During Dictation
+
+ Some dictation services (like ElevenLabs Scribe) return cumulative transcripts that conflict with simultaneous typing. You can disable the text input during dictation:
+
+ ```tsx
+ import type { DictationAdapter } from "@assistant-ui/react";
+
+ class MyAdapter implements DictationAdapter {
+   // Set to true to disable typing while dictating
+   disableInputDuringDictation = true;
+
+   listen() { /* ... */ }
+ }
+ ```
+
+ <Callout type="info">
+   When a message is sent during an active dictation session, the session is automatically stopped.
+ </Callout>
+
+ ## Custom Adapters
+
+ You can create custom adapters to integrate with any dictation service by implementing the `DictationAdapter` interface.
+
+ ### DictationAdapter Interface
+
+ ```tsx
+ import type { DictationAdapter } from "@assistant-ui/react";
+
+ class MyCustomDictationAdapter implements DictationAdapter {
+   // Optional: disable text input while dictating (default: false)
+   disableInputDuringDictation?: boolean;
+
+   listen(): DictationAdapter.Session {
+     // Return a session object that manages the dictation
+     return {
+       status: { type: "starting" },
+
+       stop: async () => {
+         // Stop recognition and finalize results
+       },
+
+       cancel: () => {
+         // Cancel recognition without finalizing
+       },
+
+       onSpeechStart: (callback) => {
+         // Called when speech is detected
+         return () => {}; // Return unsubscribe function
+       },
+
+       onSpeechEnd: (callback) => {
+         // Called when recognition ends with final result
+         return () => {};
+       },
+
+       onSpeech: (callback) => {
+         // Called with transcription results
+         // callback({ transcript: "text", isFinal: true })
+         //
+         // isFinal: true → Append to composer input (default)
+         // isFinal: false → Show as preview only
+         return () => {};
+       },
+     };
+   }
+ }
+ ```
+
+ ### Interim vs Final Results
+
+ The `onSpeech` callback receives results with an optional `isFinal` flag:
+
+ ```tsx
+ onSpeech: (callback) => {
+   // callback({ transcript: "text", isFinal: true })
+   // - isFinal: true → Text is committed to the input
+   // - isFinal: false → Text is shown as preview in the input
+   return () => {};
+ },
+ ```
+
+ **Both interim and final results are displayed directly in the input field**, just like native dictation on iOS/Android. Interim results replace each other until a final result commits the text. This provides seamless real-time feedback while the user speaks.
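+
+ For instance, a session might report this sequence of results over time (a sketch; the callback shape matches the `onSpeech` examples above):
+
+ ```tsx
+ type Result = { transcript: string; isFinal?: boolean };
+
+ const emitExample = (callback: (r: Result) => void) => {
+   callback({ transcript: "hello wor", isFinal: false }); // shown as preview
+   callback({ transcript: "hello world", isFinal: false }); // preview is replaced
+   callback({ transcript: "hello world", isFinal: true }); // committed to the input
+ };
+ ```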
+
+ ### Example: ElevenLabs Scribe v2 Realtime
+
+ [ElevenLabs Scribe](https://elevenlabs.io/docs/capabilities/speech-to-text) provides ultra-low latency (~150ms) real-time transcription via WebSocket.
+
+ #### Install Dependencies
+
+ ```bash
+ npm install @elevenlabs/client
+ ```
+
+ #### Backend API Route
+
+ Create an API route to generate single-use tokens:
+
+ ```ts title="app/api/scribe-token/route.ts"
+ export async function POST() {
+   const response = await fetch(
+     "https://api.elevenlabs.io/v1/single-use-token/realtime_scribe",
+     {
+       method: "POST",
+       headers: {
+         "xi-api-key": process.env.ELEVENLABS_API_KEY!,
+       },
+     }
+   );
+
+   const data = await response.json();
+   return Response.json({ token: data.token });
+ }
+ ```
+
+ #### Frontend Adapter
+
+ ```tsx title="lib/elevenlabs-scribe-adapter.ts"
+ import type { DictationAdapter } from "@assistant-ui/react";
+ import { Scribe, RealtimeEvents } from "@elevenlabs/client";
+
+ export class ElevenLabsScribeAdapter implements DictationAdapter {
+   private tokenEndpoint: string;
+   private languageCode: string;
+
+   // ElevenLabs returns cumulative transcripts, so we disable typing during dictation
+   public disableInputDuringDictation: boolean;
+
+   constructor(options: {
+     tokenEndpoint: string;
+     languageCode?: string;
+     disableInputDuringDictation?: boolean;
+   }) {
+     this.tokenEndpoint = options.tokenEndpoint;
+     this.languageCode = options.languageCode ?? "en";
+     this.disableInputDuringDictation = options.disableInputDuringDictation ?? true;
+   }
+
+   listen(): DictationAdapter.Session {
+     const callbacks = {
+       start: new Set<() => void>(),
+       end: new Set<(r: DictationAdapter.Result) => void>(),
+       speech: new Set<(r: DictationAdapter.Result) => void>(),
+     };
+
+     let connection: ReturnType<typeof Scribe.connect> | null = null;
+     let fullTranscript = "";
+
+     const session: DictationAdapter.Session = {
+       status: { type: "starting" },
+
+       stop: async () => {
+         if (connection) {
+           connection.commit();
+           await new Promise((r) => setTimeout(r, 500));
+           connection.close();
+         }
+         if (fullTranscript) {
+           for (const cb of callbacks.end) cb({ transcript: fullTranscript });
+         }
+       },
+
+       cancel: () => {
+         connection?.close();
+       },
+
+       onSpeechStart: (cb) => {
+         callbacks.start.add(cb);
+         return () => callbacks.start.delete(cb);
+       },
+
+       onSpeechEnd: (cb) => {
+         callbacks.end.add(cb);
+         return () => callbacks.end.delete(cb);
+       },
+
+       onSpeech: (cb) => {
+         callbacks.speech.add(cb);
+         return () => callbacks.speech.delete(cb);
+       },
+     };
+
+     this.connect(session, callbacks, {
+       setConnection: (c) => { connection = c; },
+       getFullTranscript: () => fullTranscript,
+       setFullTranscript: (t) => { fullTranscript = t; },
+     });
+
+     return session;
+   }
+
+   private async connect(
+     session: DictationAdapter.Session,
+     callbacks: {
+       start: Set<() => void>;
+       end: Set<(r: DictationAdapter.Result) => void>;
+       speech: Set<(r: DictationAdapter.Result) => void>;
+     },
+     refs: {
+       setConnection: (c: ReturnType<typeof Scribe.connect>) => void;
+       getFullTranscript: () => string;
+       setFullTranscript: (t: string) => void;
+     }
+   ) {
+     try {
+       // 1. Get token from backend
+       const tokenRes = await fetch(this.tokenEndpoint, { method: "POST" });
+       const { token } = await tokenRes.json();
+
+       // 2. Connect to Scribe with microphone
+       const connection = Scribe.connect({
+         token,
+         modelId: "scribe_v2_realtime",
+         languageCode: this.languageCode,
+         microphone: {
+           echoCancellation: true,
+           noiseSuppression: true,
+         },
+       });
+       refs.setConnection(connection);
+
+       // 3. Handle events
+       connection.on(RealtimeEvents.SESSION_STARTED, () => {
+         (session as { status: DictationAdapter.Status }).status = {
+           type: "running",
+         };
+         for (const cb of callbacks.start) cb();
+       });
+
+       // Partial transcripts → preview (isFinal: false)
+       connection.on(RealtimeEvents.PARTIAL_TRANSCRIPT, (data) => {
+         if (data.text) {
+           for (const cb of callbacks.speech)
+             cb({ transcript: data.text, isFinal: false });
+         }
+       });
+
+       // Committed transcripts → append to input (isFinal: true)
+       connection.on(RealtimeEvents.COMMITTED_TRANSCRIPT, (data) => {
+         if (data.text?.trim()) {
+           refs.setFullTranscript(refs.getFullTranscript() + data.text + " ");
+           for (const cb of callbacks.speech)
+             cb({ transcript: data.text, isFinal: true });
+         }
+       });
+
+       connection.on(RealtimeEvents.ERROR, (error) => {
+         console.error("Scribe error:", error);
+         (session as { status: DictationAdapter.Status }).status = {
+           type: "ended",
+           reason: "error",
+         };
+       });
+     } catch (error) {
+       console.error("ElevenLabs Scribe connection failed:", error);
+       (session as { status: DictationAdapter.Status }).status = {
+         type: "ended",
+         reason: "error",
+       };
+     }
+   }
+ }
+ ```
+
+ #### Usage
+
+ ```tsx
+ const runtime = useChatRuntime({
+   api: "/api/chat",
+   adapters: {
+     dictation: new ElevenLabsScribeAdapter({
+       tokenEndpoint: "/api/scribe-token",
+       languageCode: "en", // Optional: supports 90+ languages
+       disableInputDuringDictation: true, // Default: true (recommended for ElevenLabs)
+     }),
+   },
+ });
+ ```
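+
+ You can also combine this with the browser check from earlier. A sketch, assuming both adapters from this page: use the browser's built-in recognition where it exists and fall back to Scribe elsewhere (for example, Firefox).
+
+ ```tsx
+ import { WebSpeechDictationAdapter } from "@assistant-ui/react";
+ import { ElevenLabsScribeAdapter } from "@/lib/elevenlabs-scribe-adapter";
+
+ // Sketch: prefer the free built-in Web Speech API when available,
+ // otherwise use the Scribe adapter defined above.
+ const dictation = WebSpeechDictationAdapter.isSupported()
+   ? new WebSpeechDictationAdapter()
+   : new ElevenLabsScribeAdapter({ tokenEndpoint: "/api/scribe-token" });
+ ```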
+
+ #### Real-time Preview
+
+ The transcription is displayed directly in the input field as the user speaks — just like native dictation. No additional UI components are needed for basic use cases.
+
+ <Callout type="info">
+   For advanced customization, `composer.dictation?.transcript` contains the current interim transcript, and `ComposerPrimitive.DictationTranscript` can display it separately if desired.
+ </Callout>
+
+ <Callout type="info">
+   For more details, see the [ElevenLabs Scribe documentation](https://elevenlabs.io/docs/capabilities/speech-to-text).
+ </Callout>
+
package/.docs/raw/docs/(docs)/guides/editing.mdx
@@ -1,5 +1,6 @@
  ---
  title: Message Editing
+ description: Allow users to edit their messages with custom editor interfaces.
  ---

  Give the user the ability to edit their message.
package/.docs/raw/docs/(docs)/guides/latex.mdx
@@ -1,9 +1,8 @@
  ---
  title: LaTeX
+ description: Render mathematical expressions in chat messages using KaTeX.
  ---

- import { Steps, Step } from "fumadocs-ui/components/steps";
- import { Callout } from "fumadocs-ui/components/callout";

  Render LaTeX mathematical expressions in chat messages using KaTeX.

package/.docs/raw/docs/(docs)/guides/speech.mdx
@@ -1,33 +1,32 @@
  ---
- title: Speech
+ title: Text-to-Speech (Speech Synthesis)
+ description: Read messages aloud with Web Speech API or custom TTS.
  ---

- import { SpeechSample } from "@/components/samples/speech-sample";
-
- ## Text-to-Speech
+ import { SpeechSample } from "@/components/docs/samples/speech";

  assistant-ui supports text-to-speech via the `SpeechSynthesisAdapter` interface.

  <SpeechSample />

- ### SpeechSynthesisAdapter
+ ## SpeechSynthesisAdapter

  Currently, the following speech synthesis adapters are supported:

- - `WebSpeechSynthesisAdapter`: Uses the browser's `Web Speech API` API
+ - `WebSpeechSynthesisAdapter`: Uses the browser's `Web Speech API`

  Support for other speech synthesis adapters is planned for the future.

- Passing a `SpeechSynthesisAdapter` to the `EdgeRuntime` will enable text-to-speech support.
+ Passing a `SpeechSynthesisAdapter` to the runtime will enable text-to-speech support.

- ### UI
+ ## UI

  By default, a `Read aloud` button will be shown in the assistant message action bar.

  This is implemented using `AssistantActionBar.SpeechControl` which is a wrapper around `AssistantActionBar.Speak` and `AssistantActionBar.StopSpeaking`.
  The underlying primitives are `ActionBarPrimitive.Speak` and `ActionBarPrimitive.StopSpeaking`.

- ### Example
+ ## Example

  The following example uses the `WebSpeechSynthesisAdapter`.

@@ -40,4 +39,4 @@ const runtime = useChatRuntime({
      speech: new WebSpeechSynthesisAdapter(),
    },
  });
- ```
+ ```
package/.docs/raw/docs/(docs)/guides/tool-ui.mdx
@@ -1,10 +1,9 @@
  ---
  title: Generative UI
+ description: Render tool calls as interactive UI instead of plain text.
  ---

- import { ToolUISample } from "@/components/samples/tool-ui-sample";
- import { Steps, Step } from "fumadocs-ui/components/steps";
- import { Callout } from "fumadocs-ui/components/callout";
+ import { ToolUISample } from "@/components/docs/samples/tool-ui";

  Create custom UI components for AI tool calls, providing visual feedback and interactive experiences when tools are executed.

@@ -81,7 +80,7 @@ const WeatherToolUI = makeAssistantToolUI<
    `frontendTools` utility
  </Callout>

- Learn more about creating tools in the [Tools Guide](/docs/guides/Tools).
+ Learn more about creating tools in the [Tools Guide](/docs/guides/tools).

  ### 2. UI-Only for Existing Tools (`makeAssistantToolUI`)

@@ -193,7 +192,7 @@ function App() {
  When using the Vercel AI SDK, define the corresponding tool in your API route:

  ```tsx title="/app/api/chat/route.ts"
- import { streamText, tool } from "ai";
+ import { streamText, tool, zodSchema } from "ai";
  import { z } from "zod";

  export async function POST(req: Request) {
@@ -205,10 +204,12 @@ export async function POST(req: Request) {
      tools: {
        getWeather: tool({
          description: "Get current weather for a location",
-         inputSchema: z.object({
-           location: z.string(),
-           unit: z.enum(["celsius", "fahrenheit"]),
-         }),
+         inputSchema: zodSchema(
+           z.object({
+             location: z.string(),
+             unit: z.enum(["celsius", "fahrenheit"]),
+           }),
+         ),
          execute: async ({ location, unit }) => {
            const weather = await fetchWeatherAPI(location);
            return {
@@ -670,7 +671,7 @@ const ConfirmationToolUI = makeAssistantToolUI<
  });
  ```

- Learn more about tool human input in the [Tools Guide](/docs/guides/Tools#tool-human-input).
+ Learn more about tool human input in the [Tools Guide](/docs/guides/tools#tool-human-input).

  ## Best Practices

@@ -740,7 +741,7 @@ useAssistantToolUI({

  ## Related Guides

- - [Tools Guide](/docs/guides/Tools) - Learn how to create and use tools with AI models
- - [Tool Fallback](/docs/ui/ToolFallback) - Default UI for tools without custom components
- - [API Reference](/docs/api-reference/primitives/MessagePart) - Detailed type definitions and component APIs
- - [Message Primitive](/docs/api-reference/primitives/Message) - Complete Message component documentation
+ - [Tools Guide](/docs/guides/tools) - Learn how to create and use tools with AI models
+ - [Tool Fallback](/docs/ui/tool-fallback) - Default UI for tools without custom components
+ - [API Reference](/docs/api-reference/primitives/message-part) - Detailed type definitions and component APIs
+ - [Message Primitive](/docs/api-reference/primitives/message) - Complete Message component documentation
package/.docs/raw/docs/(docs)/guides/tools.mdx
@@ -1,5 +1,6 @@
  ---
  title: Tools
+ description: Give your assistant actions like API calls, database queries, and more.
  ---

  Tools enable LLMs to take actions and interact with external systems. assistant-ui provides a comprehensive toolkit for creating, managing, and visualizing tool interactions in real-time.
@@ -14,11 +15,11 @@ Tools in assistant-ui are functions that the LLM can call to perform specific tasks
  - Controlling UI elements
  - Executing workflows

- When tools are executed, you can display custom generative UI components that provide rich, interactive visualizations of the tool's execution and results. Learn more in the [Generative UI guide](/docs/guides/ToolUI).
+ When tools are executed, you can display custom generative UI components that provide rich, interactive visualizations of the tool's execution and results. Learn more in the [Generative UI guide](/docs/guides/tool-ui).

  <Callout type="tip">
    If you haven't provided a custom UI for a tool, assistant-ui offers a
-   [`ToolFallback`](/docs/ui/ToolFallback) component that you can add to your
+   [`ToolFallback`](/docs/ui/tool-fallback) component that you can add to your
    codebase to render a default UI for tool executions. You can customize this by
    creating your own Tool UI component for the tool's name.
  </Callout>
@@ -250,10 +251,12 @@ export async function POST(req: Request) {
      tools: {
        queryDatabase: {
          description: "Query the application database",
-         inputSchema: z.object({
-           query: z.string(),
-           table: z.string(),
-         }),
+         inputSchema: zodSchema(
+           z.object({
+             query: z.string(),
+             table: z.string(),
+           }),
+         ),
          execute: async ({ query, table }) => {
            // Server-side database access
            const results = await db.query(query, { table });
@@ -304,7 +307,7 @@ export async function POST(req: Request) {
        // Additional server-side tools
        queryDatabase: {
          description: "Query the application database",
-         inputSchema: z.object({ query: z.string() }),
+         inputSchema: zodSchema(z.object({ query: z.string() })),
          execute: async ({ query }) => {
            return await db.query(query);
          },
package/.docs/raw/docs/(docs)/index.mdx
@@ -1,16 +1,15 @@
  ---
  title: Getting Started
+ description: Get assistant-ui running in 5 minutes with npm and your first chat component.
  ---

- import { Step, Steps } from "fumadocs-ui/components/steps";
- import { Tab, Tabs } from "fumadocs-ui/components/tabs";
- import { Callout } from "fumadocs-ui/components/callout";
- import { Card, Cards } from "fumadocs-ui/components/card";
- import { InstallCommand } from "@/components/docs/install-command";
+ import { InstallCommand } from "@/components/docs/fumadocs/install/install-command";

- ## Start with a new project
+ ## Quick Start

- ![animated gif showing the steps to create a new project](../../../../.github/assets/assistant-ui-starter.gif)
+ The fastest way to get started with assistant-ui.
+
+ ![animated gif showing the steps to create a new project](../../../../../.github/assets/assistant-ui-starter.gif)

  <Steps>
    <Step>
@@ -20,11 +19,13 @@ import { InstallCommand } from "@/components/docs/install-command";
  **Create a new project:**

  ```sh
- # Create a new project with the default template
  npx assistant-ui@latest create
+ ```
+
+ Or choose a template:

- # Or choose one of the following templates:
- # Assistant Cloud for baked in persistence and thread management
+ ```sh
+ # Assistant Cloud - with persistence and thread management
  npx assistant-ui@latest create -t cloud

  # LangGraph
  npx assistant-ui@latest create -t langgraph
@@ -34,10 +35,9 @@ npx assistant-ui@latest create -t langgraph
  npx assistant-ui@latest create -t mcp
  ```

- **Add assistant-ui to an existing React project:**
+ **Add to an existing project:**

  ```sh
- # Add assistant-ui to an existing React project
  npx assistant-ui@latest init
  ```

@@ -46,13 +46,10 @@ npx assistant-ui@latest init

  ### Add API key

- Add a new `.env` file to your project with your OpenAI API key:
+ Create a `.env` file with your API key:

  ```
  OPENAI_API_KEY="sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
-
- # chat history -- sign up for free on https://cloud.assistant-ui.com
- # NEXT_PUBLIC_ASSISTANT_BASE_URL="https://..."
  ```

  </Step>
@@ -67,11 +64,9 @@ npm run dev
  </Step>
  </Steps>

- ## Manual installation
+ ## Add to Existing Project

- <Callout>
-   We recommend `npx assistant-ui init` to setup existing projects.
- </Callout>
+ If you prefer not to use the CLI, you can install components manually.

  <Steps>
    <Step>
@@ -404,7 +399,7 @@ const MyApp = () => {
  ```

  ```tsx title="/app/page.tsx" tab="AssistantModal"
- // run `npx shadcn@latest add @assistant-ui/assistant-modal`
+ // run `npx shadcn@latest add https://r.assistant-ui.com/assistant-modal.json`

  import { AssistantRuntimeProvider } from "@assistant-ui/react";
  import { useChatRuntime, AssistantChatTransport } from "@assistant-ui/react-ai-sdk";
@@ -441,7 +436,7 @@
  <Card
    title="Generative UI"
    description="Create rich UI components for tool executions"
-   href="/docs/guides/ToolUI"
+   href="/docs/guides/tool-ui"
  />
  <Card
    title="Add Persistence"
package/.docs/raw/docs/(docs)/mcp-docs-server.mdx
@@ -1,9 +1,8 @@
  ---
  title: "MCP Docs Server"
- description: "Learn how to use the assistant-ui MCP documentation server in your IDE to access documentation and examples directly."
+ description: Access assistant-ui docs and examples directly in your IDE via MCP.
  ---

- import { Tabs, Tab } from "fumadocs-ui/components/tabs";

  `@assistant-ui/mcp-docs-server` provides direct access to assistant-ui's documentation and examples in Cursor, Windsurf, VSCode, Zed, Claude Code, or any other IDE or tool that supports MCP.

package/.docs/raw/docs/(reference)/api-reference/context-providers/assistant-runtime-provider.mdx
@@ -1,8 +1,9 @@
  ---
  title: <AssistantRuntimeProvider />
+ description: Root provider that connects your runtime to assistant-ui components.
  ---

- import { ParametersTable } from "@/components/docs";
+ import { ParametersTable } from "@/components/docs/tables/ParametersTable";
  import { AssistantRuntimeProvider } from "@/generated/typeDocs";

  The `AssistantRuntimeProvider` provides data and APIs used by assistant-ui components.
package/.docs/raw/docs/(reference)/api-reference/context-providers/text-message-part-provider.mdx
@@ -1,8 +1,9 @@
  ---
  title: <TextMessagePartProvider />
+ description: Context provider for reusing text components outside of message content.
  ---

- import { ParametersTable } from "@/components/docs";
+ import { ParametersTable } from "@/components/docs/tables/ParametersTable";
  import { AssistantRuntimeProvider } from "@/generated/typeDocs";

  The `TextMessagePartProvider` provides data and APIs for `TextMessagePart` components.