@assistant-ui/mcp-docs-server 0.1.26 → 0.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/.docs/organized/code-examples/waterfall.md +1 -1
  2. package/.docs/organized/code-examples/with-a2a.md +1 -1
  3. package/.docs/organized/code-examples/with-ag-ui.md +2 -2
  4. package/.docs/organized/code-examples/with-ai-sdk-v6.md +3 -3
  5. package/.docs/organized/code-examples/with-artifacts.md +3 -3
  6. package/.docs/organized/code-examples/with-assistant-transport.md +1 -1
  7. package/.docs/organized/code-examples/with-chain-of-thought.md +3 -3
  8. package/.docs/organized/code-examples/with-cloud-standalone.md +3 -3
  9. package/.docs/organized/code-examples/with-cloud.md +3 -3
  10. package/.docs/organized/code-examples/with-custom-thread-list.md +3 -3
  11. package/.docs/organized/code-examples/with-elevenlabs-conversational.md +511 -0
  12. package/.docs/organized/code-examples/with-elevenlabs-scribe.md +5 -5
  13. package/.docs/organized/code-examples/with-expo.md +17 -17
  14. package/.docs/organized/code-examples/with-external-store.md +1 -1
  15. package/.docs/organized/code-examples/with-ffmpeg.md +216 -62
  16. package/.docs/organized/code-examples/with-google-adk.md +2 -2
  17. package/.docs/organized/code-examples/with-heat-graph.md +1 -1
  18. package/.docs/organized/code-examples/with-interactables.md +66 -8
  19. package/.docs/organized/code-examples/with-langgraph.md +2 -2
  20. package/.docs/organized/code-examples/with-livekit.md +591 -0
  21. package/.docs/organized/code-examples/with-parent-id-grouping.md +2 -2
  22. package/.docs/organized/code-examples/with-react-hook-form.md +3 -3
  23. package/.docs/organized/code-examples/with-react-ink.md +1 -1
  24. package/.docs/organized/code-examples/with-react-router.md +6 -6
  25. package/.docs/organized/code-examples/with-store.md +7 -2
  26. package/.docs/organized/code-examples/with-tanstack.md +3 -3
  27. package/.docs/organized/code-examples/with-tap-runtime.md +1 -1
  28. package/.docs/raw/docs/(docs)/copilots/model-context.mdx +9 -1
  29. package/.docs/raw/docs/(docs)/guides/interactables.mdx +99 -37
  30. package/.docs/raw/docs/(docs)/guides/tool-ui.mdx +29 -0
  31. package/.docs/raw/docs/(docs)/guides/voice.mdx +333 -0
  32. package/.docs/raw/docs/(reference)/api-reference/primitives/message-part.mdx +23 -0
  33. package/.docs/raw/docs/runtimes/a2a/index.mdx +4 -0
  34. package/.docs/raw/docs/runtimes/ai-sdk/v6.mdx +2 -2
  35. package/.docs/raw/docs/runtimes/assistant-transport.mdx +6 -2
  36. package/.docs/raw/docs/ui/context-display.mdx +2 -2
  37. package/.docs/raw/docs/ui/model-selector.mdx +1 -1
  38. package/.docs/raw/docs/ui/voice.mdx +172 -0
  39. package/package.json +3 -4
@@ -97,6 +97,7 @@ const runtime = useA2ARuntime({ client });
97
97
  | Option | Type | Description |
98
98
  | --- | --- | --- |
99
99
  | `baseUrl` | `string` | Base URL of the A2A server |
100
+ | `basePath` | `string` | Optional path prefix for API endpoints (e.g. `"/v1"`). Does not affect agent card discovery |
100
101
  | `headers` | `Record<string, string>` or `() => Record<string, string>` | Static or dynamic headers (e.g. for auth tokens) |
101
102
  | `tenant` | `string` | Tenant ID for multi-tenant servers (prepended to URL paths) |
102
103
  | `extensions` | `string[]` | Extension URIs to negotiate via `A2A-Extensions` header |
@@ -124,7 +125,10 @@ const runtime = useA2ARuntime({ client });
124
125
  | --- | --- | --- |
125
126
  | `client` | `A2AClient` | Pre-built A2A client instance (provide this OR `baseUrl`) |
126
127
  | `baseUrl` | `string` | A2A server URL (creates a client automatically) |
128
+ | `basePath` | `string` | Path prefix for API endpoints (e.g. `"/v1"`). Only used with `baseUrl` |
129
+ | `tenant` | `string` | Tenant ID for multi-tenant servers. Only used with `baseUrl` |
127
130
  | `headers` | see above | Headers for the auto-created client |
131
+ | `extensions` | `string[]` | Extension URIs to negotiate. Only used with `baseUrl` |
128
132
  | `contextId` | `string` | Initial context ID for the conversation |
129
133
  | `configuration` | `A2ASendMessageConfiguration` | Default send message configuration |
130
134
  | `onError` | `(error: Error) => void` | Error callback |
@@ -111,9 +111,9 @@ Use `messageMetadata` in your Next.js route to attach `usage` from `finish` and
111
111
  import { streamText, convertToModelMessages } from "ai";
112
112
  import { frontendTools } from "@assistant-ui/react-ai-sdk";
113
113
  export async function POST(req: Request) {
114
- const { messages, tools, modelName } = await req.json();
114
+ const { messages, tools, config } = await req.json();
115
115
  const result = streamText({
116
- model: getModel(modelName),
116
+ model: getModel(config?.modelName),
117
117
  messages: await convertToModelMessages(messages),
118
118
  tools: frontendTools(tools),
119
119
  });
@@ -72,11 +72,15 @@ The backend endpoint receives POST requests with the following payload:
72
72
  tools?: Record<string, ToolJSONSchema>, // Tool definitions keyed by tool name
73
73
  threadId: string | null, // The current thread/conversation identifier (null for new threads)
74
74
  parentId?: string | null, // The parent message ID (included when editing or branching)
75
- // ...callSettings (maxTokens, temperature, topP, presencePenalty, frequencyPenalty, seed)
76
- // ...config (apiKey, baseUrl, modelName)
75
+ callSettings?: { maxTokens, temperature, topP, presencePenalty, frequencyPenalty, seed },
76
+ config?: { apiKey, baseUrl, modelName },
77
77
  }
78
78
  ```
79
79
 
80
+ <Callout type="warn">
81
+ **Migrating from top-level fields:** `callSettings` and `config` fields were previously spread at the top level of the request body (e.g. `body.modelName` instead of `body.config.modelName`). Both formats are currently sent for backward compatibility, but the top-level fields are deprecated and will be removed in a future version. Update your backend to read from the nested objects.
82
+ </Callout>
83
+
80
84
  The backend endpoint returns a stream of state snapshots using the `assistant-stream` library ([npm](https://www.npmjs.com/package/assistant-stream) / [PyPI](https://pypi.org/project/assistant-stream/)).
81
85
 
82
86
  ### Handling Commands
@@ -33,9 +33,9 @@ Use `messageMetadata` in your Next.js route to attach `usage` from `finish` and
33
33
  import { streamText, convertToModelMessages } from "ai";
34
34
 
35
35
  export async function POST(req: Request) {
36
- const { messages, modelName } = await req.json();
36
+ const { messages, config } = await req.json();
37
37
  const result = streamText({
38
- model: getModel(modelName),
38
+ model: getModel(config?.modelName),
39
39
  messages: await convertToModelMessages(messages),
40
40
  });
41
41
  return result.toUIMessageStreamResponse({
@@ -50,7 +50,7 @@ const ComposerAction: FC = () => {
50
50
 
51
51
  ### Read the model in your API route
52
52
 
53
- The selected model name is sent as `config.modelName` in the request body:
53
+ The selected model's `id` is sent as `config.modelName` in the request body:
54
54
 
55
55
  ```tsx title="app/api/chat/route.ts" {2,5}
56
56
  export async function POST(req: Request) {
@@ -0,0 +1,172 @@
1
+ ---
2
+ title: Voice
3
+ description: Realtime voice session controls with connect, mute, and status indicator.
4
+ ---
5
+
6
+ import { VoiceSample, VoiceVariantsSample, VoiceStatesSample } from "@/components/docs/samples/voice";
7
+
8
+ A control bar for realtime bidirectional voice sessions with an animated orb indicator. Works with any `RealtimeVoiceAdapter` (LiveKit, ElevenLabs, etc.).
9
+
10
+ <VoiceSample />
11
+
12
+ ## Getting Started
13
+
14
+ <Steps>
15
+ <Step>
16
+
17
+ ### Add the component
18
+
19
+ <InstallCommand shadcn={["voice"]} />
20
+
21
+ This adds `/components/assistant-ui/voice.tsx` to your project, which you can adjust as needed.
22
+
23
+ </Step>
24
+ <Step>
25
+
26
+ ### Configure a voice adapter
27
+
28
+ Pass a `RealtimeVoiceAdapter` to your runtime. See the [Realtime Voice guide](/docs/guides/voice) for details.
29
+
30
+ ```tsx
31
+ const runtime = useChatRuntime({
32
+ adapters: {
33
+ voice: myVoiceAdapter,
34
+ },
35
+ });
36
+ ```
37
+
38
+ </Step>
39
+ <Step>
40
+
41
+ ### Use in your application
42
+
43
+ ```tsx title="app/page.tsx"
44
+ import { Thread } from "@/components/assistant-ui/thread";
45
+ import { VoiceControl } from "@/components/assistant-ui/voice";
46
+ import { AuiIf } from "@assistant-ui/react";
47
+
48
+ export default function Chat() {
49
+ return (
50
+ <div className="flex h-full flex-col">
51
+ <AuiIf condition={(s) => s.thread.capabilities.voice}>
52
+ <VoiceControl />
53
+ </AuiIf>
54
+ <div className="min-h-0 flex-1">
55
+ <Thread />
56
+ </div>
57
+ </div>
58
+ );
59
+ }
60
+ ```
61
+
62
+ </Step>
63
+ </Steps>
64
+
65
+ ## Anatomy
66
+
67
+ The `VoiceControl` component is built with the following hooks and conditionals:
68
+
69
+ ```tsx
70
+ import { AuiIf, useVoiceState, useVoiceControls } from "@assistant-ui/react";
71
+
72
+ <div className="aui-voice-control">
73
+ <VoiceIndicator />
74
+
75
+ <AuiIf condition={(s) => s.thread.voice == null}>
76
+ <VoiceConnectButton />
77
+ </AuiIf>
78
+
79
+ <AuiIf condition={(s) => s.thread.voice?.status.type === "running"}>
80
+ <VoiceMuteButton />
81
+ <VoiceDisconnectButton />
82
+ </AuiIf>
83
+ </div>
84
+ ```
85
+
86
+ ## Examples
87
+
88
+ ### Conditionally show voice controls
89
+
90
+ Only render when a voice adapter is configured:
91
+
92
+ ```tsx
93
+ <AuiIf condition={(s) => s.thread.capabilities.voice}>
94
+ <VoiceControl />
95
+ </AuiIf>
96
+ ```
97
+
98
+ ### Voice toggle in composer
99
+
100
+ Add a compact voice toggle button inside the composer action area:
101
+
102
+ ```tsx
103
+ function ComposerVoiceToggle() {
104
+ const voiceState = useVoiceState();
105
+ const { connect, disconnect } = useVoiceControls();
106
+ const isActive =
107
+ voiceState?.status.type === "running" ||
108
+ voiceState?.status.type === "starting";
109
+
110
+ return (
111
+ <AuiIf condition={(s) => s.thread.capabilities.voice}>
112
+ <button
113
+ type="button"
114
+ onClick={() => (isActive ? disconnect() : connect())}
115
+ aria-label={isActive ? "End voice" : "Start voice"}
116
+ >
117
+ {isActive ? <PhoneOffIcon /> : <PhoneIcon />}
118
+ </button>
119
+ </AuiIf>
120
+ );
121
+ }
122
+ ```
123
+
124
+ ### Custom indicator colors
125
+
126
+ Override the indicator styles by targeting the `aui-voice-indicator` class:
127
+
128
+ ```css
129
+ .aui-voice-indicator {
130
+ /* Override active color */
131
+ &.bg-green-500 {
132
+ background: theme("colors.blue.500");
133
+ }
134
+ }
135
+ ```
136
+
137
+ ## States
138
+
139
+ The `VoiceOrb` responds to five voice session states with distinct animations:
140
+
141
+ <VoiceStatesSample />
142
+
143
+ ## Variants
144
+
145
+ Four built-in color palettes. Size is controlled via `className`.
146
+
147
+ <VoiceVariantsSample />
148
+
149
+ ## Sub-components
150
+
151
+ | Component | Description |
152
+ |-----------|-------------|
153
+ | `VoiceOrb` | Animated orb visual with gradient, glow, and ripple effects. Accepts `state` and `variant` props. |
154
+ | `VoiceControl` | Control bar with status dot, connect/disconnect, and mute/unmute buttons. |
155
+ | `VoiceConnectButton` | Calls `connect()`. Shown when no session is active. |
156
+ | `VoiceMuteButton` | Toggles `mute()`/`unmute()`. Shown when session is running. |
157
+ | `VoiceDisconnectButton` | Calls `disconnect()`. Shown when session is active. |
158
+
159
+ All sub-components are exported and can be used independently for custom layouts.
160
+
161
+ ## State Selectors
162
+
163
+ Use these with `AuiIf` or `useAuiState` to build custom voice UI:
164
+
165
+ | Selector | Type | Description |
166
+ |----------|------|-------------|
167
+ | `s.thread.capabilities.voice` | `boolean` | Whether a voice adapter is configured |
168
+ | `s.thread.voice` | `VoiceSessionState \| undefined` | `undefined` when no session |
169
+ | `s.thread.voice?.status.type` | `"starting" \| "running" \| "ended"` | Session phase |
170
+ | `s.thread.voice?.isMuted` | `boolean` | Microphone muted state |
171
+ | `s.thread.voice?.mode` | `"listening" \| "speaking"` | Who is currently active (user or agent) |
172
+ | `useVoiceVolume()` | `number` | Real-time audio level (0–1), separate from main state to avoid 20Hz re-renders |
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@assistant-ui/mcp-docs-server",
3
- "version": "0.1.26",
3
+ "version": "0.1.27",
4
4
  "description": "MCP server for assistant-ui documentation and examples",
5
5
  "keywords": [
6
6
  "mcp",
@@ -15,7 +15,6 @@
15
15
  "type": "module",
16
16
  "exports": {
17
17
  ".": {
18
- "aui-source": "./src/index.ts",
19
18
  "types": "./dist/index.d.ts",
20
19
  "default": "./dist/index.js"
21
20
  }
@@ -33,14 +32,14 @@
33
32
  ],
34
33
  "sideEffects": false,
35
34
  "dependencies": {
36
- "@modelcontextprotocol/sdk": "^1.28.0",
35
+ "@modelcontextprotocol/sdk": "^1.29.0",
37
36
  "gray-matter": "^4.0.3",
38
37
  "zod": "^4.3.6"
39
38
  },
40
39
  "devDependencies": {
41
40
  "@types/node": "^25.5.0",
42
41
  "tsx": "^4.21.0",
43
- "vitest": "^4.1.1",
42
+ "vitest": "^4.1.2",
44
43
  "@assistant-ui/x-buildutils": "0.0.3"
45
44
  },
46
45
  "publishConfig": {