@assistant-ui/mcp-docs-server 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.docs/organized/code-examples/local-ollama.md +1135 -0
- package/.docs/organized/code-examples/search-agent-for-e-commerce.md +1721 -0
- package/.docs/organized/code-examples/with-ai-sdk.md +1081 -0
- package/.docs/organized/code-examples/with-cloud.md +1164 -0
- package/.docs/organized/code-examples/with-external-store.md +1064 -0
- package/.docs/organized/code-examples/with-ffmpeg.md +1305 -0
- package/.docs/organized/code-examples/with-langgraph.md +1819 -0
- package/.docs/organized/code-examples/with-openai-assistants.md +1175 -0
- package/.docs/organized/code-examples/with-react-hook-form.md +1727 -0
- package/.docs/organized/code-examples/with-vercel-ai-rsc.md +1157 -0
- package/.docs/raw/blog/2024-07-29-hello/index.mdx +65 -0
- package/.docs/raw/blog/2024-09-11/index.mdx +10 -0
- package/.docs/raw/blog/2024-12-15/index.mdx +10 -0
- package/.docs/raw/blog/2025-01-31-changelog/index.mdx +129 -0
- package/.docs/raw/docs/about-assistantui.mdx +44 -0
- package/.docs/raw/docs/api-reference/context-providers/AssistantRuntimeProvider.mdx +30 -0
- package/.docs/raw/docs/api-reference/context-providers/TextContentPartProvider.mdx +26 -0
- package/.docs/raw/docs/api-reference/integrations/react-hook-form.mdx +103 -0
- package/.docs/raw/docs/api-reference/integrations/vercel-ai-sdk.mdx +145 -0
- package/.docs/raw/docs/api-reference/overview.mdx +583 -0
- package/.docs/raw/docs/api-reference/primitives/ActionBar.mdx +264 -0
- package/.docs/raw/docs/api-reference/primitives/AssistantModal.mdx +129 -0
- package/.docs/raw/docs/api-reference/primitives/Attachment.mdx +96 -0
- package/.docs/raw/docs/api-reference/primitives/BranchPicker.mdx +87 -0
- package/.docs/raw/docs/api-reference/primitives/Composer.mdx +204 -0
- package/.docs/raw/docs/api-reference/primitives/ContentPart.mdx +173 -0
- package/.docs/raw/docs/api-reference/primitives/Error.mdx +70 -0
- package/.docs/raw/docs/api-reference/primitives/Message.mdx +181 -0
- package/.docs/raw/docs/api-reference/primitives/Thread.mdx +197 -0
- package/.docs/raw/docs/api-reference/primitives/composition.mdx +21 -0
- package/.docs/raw/docs/api-reference/runtimes/AssistantRuntime.mdx +33 -0
- package/.docs/raw/docs/api-reference/runtimes/AttachmentRuntime.mdx +46 -0
- package/.docs/raw/docs/api-reference/runtimes/ComposerRuntime.mdx +69 -0
- package/.docs/raw/docs/api-reference/runtimes/ContentPartRuntime.mdx +22 -0
- package/.docs/raw/docs/api-reference/runtimes/MessageRuntime.mdx +49 -0
- package/.docs/raw/docs/api-reference/runtimes/ThreadListItemRuntime.mdx +32 -0
- package/.docs/raw/docs/api-reference/runtimes/ThreadListRuntime.mdx +31 -0
- package/.docs/raw/docs/api-reference/runtimes/ThreadRuntime.mdx +48 -0
- package/.docs/raw/docs/architecture.mdx +92 -0
- package/.docs/raw/docs/cloud/authorization.mdx +152 -0
- package/.docs/raw/docs/cloud/overview.mdx +55 -0
- package/.docs/raw/docs/cloud/persistence/ai-sdk.mdx +54 -0
- package/.docs/raw/docs/cloud/persistence/langgraph.mdx +123 -0
- package/.docs/raw/docs/concepts/architecture.mdx +19 -0
- package/.docs/raw/docs/concepts/runtime-layer.mdx +163 -0
- package/.docs/raw/docs/concepts/why.mdx +9 -0
- package/.docs/raw/docs/copilots/make-assistant-readable.mdx +71 -0
- package/.docs/raw/docs/copilots/make-assistant-tool-ui.mdx +76 -0
- package/.docs/raw/docs/copilots/make-assistant-tool.mdx +117 -0
- package/.docs/raw/docs/copilots/model-context.mdx +135 -0
- package/.docs/raw/docs/copilots/motivation.mdx +191 -0
- package/.docs/raw/docs/copilots/use-assistant-instructions.mdx +62 -0
- package/.docs/raw/docs/getting-started.mdx +1133 -0
- package/.docs/raw/docs/guides/Attachments.mdx +640 -0
- package/.docs/raw/docs/guides/Branching.mdx +59 -0
- package/.docs/raw/docs/guides/Editing.mdx +56 -0
- package/.docs/raw/docs/guides/Speech.mdx +43 -0
- package/.docs/raw/docs/guides/ToolUI.mdx +663 -0
- package/.docs/raw/docs/guides/Tools.mdx +496 -0
- package/.docs/raw/docs/index.mdx +7 -0
- package/.docs/raw/docs/legacy/styled/AssistantModal.mdx +85 -0
- package/.docs/raw/docs/legacy/styled/Decomposition.mdx +633 -0
- package/.docs/raw/docs/legacy/styled/Markdown.mdx +86 -0
- package/.docs/raw/docs/legacy/styled/Scrollbar.mdx +71 -0
- package/.docs/raw/docs/legacy/styled/Thread.mdx +84 -0
- package/.docs/raw/docs/legacy/styled/ThreadWidth.mdx +21 -0
- package/.docs/raw/docs/mcp-docs-server.mdx +324 -0
- package/.docs/raw/docs/migrations/deprecation-policy.mdx +41 -0
- package/.docs/raw/docs/migrations/v0-7.mdx +188 -0
- package/.docs/raw/docs/migrations/v0-8.mdx +160 -0
- package/.docs/raw/docs/migrations/v0-9.mdx +75 -0
- package/.docs/raw/docs/react-compatibility.mdx +208 -0
- package/.docs/raw/docs/runtimes/ai-sdk/rsc.mdx +226 -0
- package/.docs/raw/docs/runtimes/ai-sdk/use-assistant-hook.mdx +195 -0
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat-hook.mdx +138 -0
- package/.docs/raw/docs/runtimes/ai-sdk/use-chat.mdx +136 -0
- package/.docs/raw/docs/runtimes/custom/external-store.mdx +1624 -0
- package/.docs/raw/docs/runtimes/custom/local.mdx +1185 -0
- package/.docs/raw/docs/runtimes/helicone.mdx +60 -0
- package/.docs/raw/docs/runtimes/langgraph/index.mdx +320 -0
- package/.docs/raw/docs/runtimes/langgraph/tutorial/index.mdx +11 -0
- package/.docs/raw/docs/runtimes/langgraph/tutorial/introduction.mdx +28 -0
- package/.docs/raw/docs/runtimes/langgraph/tutorial/part-1.mdx +120 -0
- package/.docs/raw/docs/runtimes/langgraph/tutorial/part-2.mdx +336 -0
- package/.docs/raw/docs/runtimes/langgraph/tutorial/part-3.mdx +385 -0
- package/.docs/raw/docs/runtimes/langserve.mdx +126 -0
- package/.docs/raw/docs/runtimes/mastra/full-stack-integration.mdx +218 -0
- package/.docs/raw/docs/runtimes/mastra/overview.mdx +17 -0
- package/.docs/raw/docs/runtimes/mastra/separate-server-integration.mdx +196 -0
- package/.docs/raw/docs/runtimes/pick-a-runtime.mdx +222 -0
- package/.docs/raw/docs/ui/AssistantModal.mdx +46 -0
- package/.docs/raw/docs/ui/AssistantSidebar.mdx +42 -0
- package/.docs/raw/docs/ui/Attachment.mdx +82 -0
- package/.docs/raw/docs/ui/Markdown.mdx +72 -0
- package/.docs/raw/docs/ui/Mermaid.mdx +79 -0
- package/.docs/raw/docs/ui/Scrollbar.mdx +59 -0
- package/.docs/raw/docs/ui/SyntaxHighlighting.mdx +253 -0
- package/.docs/raw/docs/ui/Thread.mdx +47 -0
- package/.docs/raw/docs/ui/ThreadList.mdx +49 -0
- package/.docs/raw/docs/ui/ToolFallback.mdx +64 -0
- package/.docs/raw/docs/ui/primitives/Thread.mdx +197 -0
- package/LICENSE +21 -0
- package/README.md +128 -0
- package/dist/chunk-C7O7EFKU.js +38 -0
- package/dist/chunk-CZCDQ3YH.js +420 -0
- package/dist/index.js +1 -0
- package/dist/prepare-docs/prepare.js +199 -0
- package/dist/stdio.js +8 -0
- package/package.json +43 -0
|
@@ -0,0 +1,640 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Attachments
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
import { AttachmentSample } from "../../../components/samples/attachment-sample";
|
|
6
|
+
import { Steps, Step } from "fumadocs-ui/components/steps";
|
|
7
|
+
import { Callout } from "fumadocs-ui/components/callout";
|
|
8
|
+
|
|
9
|
+
Enable users to attach files to their messages, enhancing conversations with images, documents, and other content.
|
|
10
|
+
|
|
11
|
+
<AttachmentSample />
|
|
12
|
+
|
|
13
|
+
## Overview
|
|
14
|
+
|
|
15
|
+
The attachment system in assistant-ui provides a flexible framework for handling file uploads in your AI chat interface. It consists of:
|
|
16
|
+
|
|
17
|
+
- **Attachment Adapters**: Backend logic for processing attachment files
|
|
18
|
+
- **UI Components**: Pre-built components for attachment display and interaction
|
|
19
|
+
- **Runtime Integration**: Seamless integration with all assistant-ui runtimes
|
|
20
|
+
|
|
21
|
+
## Getting Started
|
|
22
|
+
|
|
23
|
+
<Steps>
|
|
24
|
+
<Step>
|
|
25
|
+
|
|
26
|
+
### Install UI Components
|
|
27
|
+
|
|
28
|
+
First, add the attachment UI components to your project:
|
|
29
|
+
|
|
30
|
+
```sh
|
|
31
|
+
npx shadcn@latest add "https://r.assistant-ui.com/attachment"
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
This adds `/components/assistant-ui/attachment.tsx` to your project.
|
|
35
|
+
|
|
36
|
+
<Callout type="tip">
|
|
37
|
+
**Next steps:** Feel free to adjust these auto-generated components (styling, layout, behavior) to match your application's design system.
|
|
38
|
+
</Callout>
|
|
39
|
+
|
|
40
|
+
</Step>
|
|
41
|
+
<Step>
|
|
42
|
+
|
|
43
|
+
### Configure Attachment Adapter
|
|
44
|
+
|
|
45
|
+
Set up an attachment adapter in your runtime provider:
|
|
46
|
+
|
|
47
|
+
```tsx title="/app/MyRuntimeProvider.tsx"
|
|
48
|
+
import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
|
|
49
|
+
import {
|
|
50
|
+
CompositeAttachmentAdapter,
|
|
51
|
+
SimpleImageAttachmentAdapter,
|
|
52
|
+
SimpleTextAttachmentAdapter,
|
|
53
|
+
} from "@assistant-ui/react";
|
|
54
|
+
|
|
55
|
+
const runtime = useChatRuntime({
|
|
56
|
+
api: "/api/chat",
|
|
57
|
+
adapters: {
|
|
58
|
+
attachments: new CompositeAttachmentAdapter([
|
|
59
|
+
new SimpleImageAttachmentAdapter(),
|
|
60
|
+
new SimpleTextAttachmentAdapter(),
|
|
61
|
+
]),
|
|
62
|
+
},
|
|
63
|
+
});
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
</Step>
|
|
67
|
+
<Step>
|
|
68
|
+
|
|
69
|
+
### Add UI Components
|
|
70
|
+
|
|
71
|
+
Integrate attachment components into your chat interface:
|
|
72
|
+
|
|
73
|
+
```tsx title="/components/assistant-ui/thread.tsx"
|
|
74
|
+
// In your Composer component
|
|
75
|
+
import {
|
|
76
|
+
ComposerAttachments,
|
|
77
|
+
ComposerAddAttachment,
|
|
78
|
+
} from "@/components/assistant-ui/attachment";
|
|
79
|
+
|
|
80
|
+
const Composer = () => {
|
|
81
|
+
return (
|
|
82
|
+
<ComposerPrimitive.Root>
|
|
83
|
+
<ComposerAttachments />
|
|
84
|
+
<ComposerAddAttachment />
|
|
85
|
+
<ComposerPrimitive.Input placeholder="Type a message..." />
|
|
86
|
+
</ComposerPrimitive.Root>
|
|
87
|
+
);
|
|
88
|
+
};
|
|
89
|
+
|
|
90
|
+
// In your UserMessage component
|
|
91
|
+
import { UserMessageAttachments } from "@/components/assistant-ui/attachment";
|
|
92
|
+
|
|
93
|
+
const UserMessage = () => {
|
|
94
|
+
return (
|
|
95
|
+
<MessagePrimitive.Root>
|
|
96
|
+
<UserMessageAttachments />
|
|
97
|
+
<MessagePrimitive.Content />
|
|
98
|
+
</MessagePrimitive.Root>
|
|
99
|
+
);
|
|
100
|
+
};
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
</Step>
|
|
104
|
+
</Steps>
|
|
105
|
+
|
|
106
|
+
## Built-in Attachment Adapters
|
|
107
|
+
|
|
108
|
+
### SimpleImageAttachmentAdapter
|
|
109
|
+
|
|
110
|
+
Handles image files and converts them to data URLs for display in the chat UI. By default, images are shown inline but not sent to the LLM - use the VisionImageAdapter example below to send images to vision-capable models.
|
|
111
|
+
|
|
112
|
+
```tsx
|
|
113
|
+
const imageAdapter = new SimpleImageAttachmentAdapter();
|
|
114
|
+
// Accepts: image/* (JPEG, PNG, GIF, etc.)
|
|
115
|
+
// Output: { type: "image", url: "data:image/..." }
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
### SimpleTextAttachmentAdapter
|
|
119
|
+
|
|
120
|
+
Processes text files and wraps content in formatted tags:
|
|
121
|
+
|
|
122
|
+
```tsx
|
|
123
|
+
const textAdapter = new SimpleTextAttachmentAdapter();
|
|
124
|
+
// Accepts: text/plain, text/html, text/markdown, etc.
|
|
125
|
+
// Output: Content wrapped in <attachment>...</attachment> tags
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
### CompositeAttachmentAdapter
|
|
129
|
+
|
|
130
|
+
Combines multiple adapters to support various file types:
|
|
131
|
+
|
|
132
|
+
```tsx
|
|
133
|
+
const compositeAdapter = new CompositeAttachmentAdapter([
|
|
134
|
+
new SimpleImageAttachmentAdapter(),
|
|
135
|
+
new SimpleTextAttachmentAdapter(),
|
|
136
|
+
// Add more adapters as needed
|
|
137
|
+
]);
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
## Creating Custom Attachment Adapters
|
|
141
|
+
|
|
142
|
+
Build your own adapters for specialized file handling. Below are complete examples for common use cases.
|
|
143
|
+
|
|
144
|
+
### Vision-Capable Image Adapter
|
|
145
|
+
|
|
146
|
+
Send images to vision-capable LLMs like GPT-4V, Claude 3, or Gemini Pro Vision:
|
|
147
|
+
|
|
148
|
+
```tsx
|
|
149
|
+
import {
|
|
150
|
+
AttachmentAdapter,
|
|
151
|
+
PendingAttachment,
|
|
152
|
+
CompleteAttachment,
|
|
153
|
+
} from "@assistant-ui/react";
|
|
154
|
+
|
|
155
|
+
class VisionImageAdapter implements AttachmentAdapter {
|
|
156
|
+
accept = "image/jpeg,image/png,image/webp,image/gif";
|
|
157
|
+
|
|
158
|
+
async add({ file }: { file: File }): Promise<PendingAttachment> {
|
|
159
|
+
// Validate file size (e.g., 20MB limit for most LLMs)
|
|
160
|
+
const maxSize = 20 * 1024 * 1024; // 20MB
|
|
161
|
+
if (file.size > maxSize) {
|
|
162
|
+
throw new Error("Image size exceeds 20MB limit");
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
// Return pending attachment while processing
|
|
166
|
+
return {
|
|
167
|
+
id: crypto.randomUUID(),
|
|
168
|
+
type: "image",
|
|
169
|
+
name: file.name,
|
|
170
|
+
file,
|
|
171
|
+
status: { type: "running" },
|
|
172
|
+
};
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
async send(attachment: PendingAttachment): Promise<CompleteAttachment> {
|
|
176
|
+
// Convert image to base64 data URL
|
|
177
|
+
const base64 = await this.fileToBase64DataURL(attachment.file);
|
|
178
|
+
|
|
179
|
+
// Return in assistant-ui format with image content
|
|
180
|
+
return {
|
|
181
|
+
id: attachment.id,
|
|
182
|
+
type: "image",
|
|
183
|
+
name: attachment.name,
|
|
184
|
+
content: [
|
|
185
|
+
{
|
|
186
|
+
type: "image",
|
|
187
|
+
image: base64, // data:image/jpeg;base64,... format
|
|
188
|
+
},
|
|
189
|
+
],
|
|
190
|
+
status: { type: "complete" },
|
|
191
|
+
};
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
async remove(attachment: PendingAttachment): Promise<void> {
|
|
195
|
+
// Cleanup if needed (e.g., revoke object URLs if you created any)
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
private async fileToBase64DataURL(file: File): Promise<string> {
|
|
199
|
+
return new Promise((resolve, reject) => {
|
|
200
|
+
const reader = new FileReader();
|
|
201
|
+
reader.onload = () => {
|
|
202
|
+
// FileReader result is already a data URL
|
|
203
|
+
resolve(reader.result as string);
|
|
204
|
+
};
|
|
205
|
+
reader.onerror = reject;
|
|
206
|
+
reader.readAsDataURL(file);
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
```
|
|
211
|
+
|
|
212
|
+
### PDF Document Adapter
|
|
213
|
+
|
|
214
|
+
Handle PDF files by extracting text or converting to base64 for processing:
|
|
215
|
+
|
|
216
|
+
```tsx
|
|
217
|
+
import {
|
|
218
|
+
AttachmentAdapter,
|
|
219
|
+
PendingAttachment,
|
|
220
|
+
CompleteAttachment,
|
|
221
|
+
} from "@assistant-ui/react";
|
|
222
|
+
|
|
223
|
+
class PDFAttachmentAdapter implements AttachmentAdapter {
|
|
224
|
+
accept = "application/pdf";
|
|
225
|
+
|
|
226
|
+
async add({ file }: { file: File }): Promise<PendingAttachment> {
|
|
227
|
+
// Validate file size
|
|
228
|
+
const maxSize = 10 * 1024 * 1024; // 10MB limit
|
|
229
|
+
if (file.size > maxSize) {
|
|
230
|
+
throw new Error("PDF size exceeds 10MB limit");
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
return {
|
|
234
|
+
id: crypto.randomUUID(),
|
|
235
|
+
type: "document",
|
|
236
|
+
name: file.name,
|
|
237
|
+
file,
|
|
238
|
+
status: { type: "running" },
|
|
239
|
+
};
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
async send(attachment: PendingAttachment): Promise<CompleteAttachment> {
|
|
243
|
+
// Option 1: Extract text from PDF (requires pdf parsing library)
|
|
244
|
+
// const text = await this.extractTextFromPDF(attachment.file);
|
|
245
|
+
|
|
246
|
+
// Option 2: Convert to base64 for API processing
|
|
247
|
+
const base64Data = await this.fileToBase64(attachment.file);
|
|
248
|
+
|
|
249
|
+
return {
|
|
250
|
+
id: attachment.id,
|
|
251
|
+
type: "document",
|
|
252
|
+
name: attachment.name,
|
|
253
|
+
content: [
|
|
254
|
+
{
|
|
255
|
+
type: "text",
|
|
256
|
+
text: `[PDF Document: ${attachment.name}]\nBase64 data: ${base64Data.substring(0, 50)}...`
|
|
257
|
+
}
|
|
258
|
+
],
|
|
259
|
+
status: { type: "complete" },
|
|
260
|
+
};
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
async remove(attachment: PendingAttachment): Promise<void> {
|
|
264
|
+
// Cleanup if needed
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
private async fileToBase64(file: File): Promise<string> {
|
|
268
|
+
const arrayBuffer = await file.arrayBuffer();
|
|
269
|
+
const bytes = new Uint8Array(arrayBuffer);
|
|
270
|
+
let binary = "";
|
|
271
|
+
bytes.forEach(byte => {
|
|
272
|
+
binary += String.fromCharCode(byte);
|
|
273
|
+
});
|
|
274
|
+
return btoa(binary);
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
// Optional: Extract text from PDF using a library like pdf.js
|
|
278
|
+
private async extractTextFromPDF(file: File): Promise<string> {
|
|
279
|
+
// Implementation would use pdf.js or similar
|
|
280
|
+
// This is a placeholder
|
|
281
|
+
return "Extracted PDF text content";
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
```
|
|
285
|
+
|
|
286
|
+
## Using Custom Adapters
|
|
287
|
+
|
|
288
|
+
### With LocalRuntime
|
|
289
|
+
|
|
290
|
+
When using `LocalRuntime`, you need to handle images in your `ChatModelAdapter` (the adapter that connects to your AI backend):
|
|
291
|
+
|
|
292
|
+
```tsx
|
|
293
|
+
import { useLocalRuntime, ChatModelAdapter } from "@assistant-ui/react";
|
|
294
|
+
|
|
295
|
+
// This adapter connects LocalRuntime to your AI backend
|
|
296
|
+
const MyModelAdapter: ChatModelAdapter = {
|
|
297
|
+
async run({ messages, abortSignal }) {
|
|
298
|
+
// Convert messages to format expected by your vision-capable API
|
|
299
|
+
const formattedMessages = messages.map(msg => {
|
|
300
|
+
if (msg.role === "user" && msg.content.some(part => part.type === "image")) {
|
|
301
|
+
// Format for GPT-4V or similar vision models
|
|
302
|
+
return {
|
|
303
|
+
role: "user",
|
|
304
|
+
content: msg.content.map(part => {
|
|
305
|
+
if (part.type === "text") {
|
|
306
|
+
return { type: "text", text: part.text };
|
|
307
|
+
}
|
|
308
|
+
if (part.type === "image") {
|
|
309
|
+
return {
|
|
310
|
+
type: "image_url",
|
|
311
|
+
image_url: { url: part.image }
|
|
312
|
+
};
|
|
313
|
+
}
|
|
314
|
+
return part;
|
|
315
|
+
})
|
|
316
|
+
};
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
// Regular text messages
|
|
320
|
+
return {
|
|
321
|
+
role: msg.role,
|
|
322
|
+
content: msg.content
|
|
323
|
+
.filter(c => c.type === "text")
|
|
324
|
+
.map(c => c.text)
|
|
325
|
+
.join("\n")
|
|
326
|
+
};
|
|
327
|
+
});
|
|
328
|
+
|
|
329
|
+
// Send to your vision-capable API
|
|
330
|
+
const response = await fetch("/api/vision-chat", {
|
|
331
|
+
method: "POST",
|
|
332
|
+
headers: { "Content-Type": "application/json" },
|
|
333
|
+
body: JSON.stringify({ messages: formattedMessages }),
|
|
334
|
+
signal: abortSignal,
|
|
335
|
+
});
|
|
336
|
+
|
|
337
|
+
const data = await response.json();
|
|
338
|
+
return {
|
|
339
|
+
content: [{ type: "text", text: data.message }],
|
|
340
|
+
};
|
|
341
|
+
},
|
|
342
|
+
};
|
|
343
|
+
|
|
344
|
+
// Create runtime with vision image adapter
|
|
345
|
+
const runtime = useLocalRuntime(MyModelAdapter, {
|
|
346
|
+
adapters: {
|
|
347
|
+
attachments: new VisionImageAdapter()
|
|
348
|
+
},
|
|
349
|
+
});
|
|
350
|
+
```
|
|
351
|
+
|
|
352
|
+
### With Vercel AI SDK
|
|
353
|
+
|
|
354
|
+
If you're using the Vercel AI SDK, images are handled automatically through experimental attachments:
|
|
355
|
+
|
|
356
|
+
```tsx
|
|
357
|
+
// In your API route
|
|
358
|
+
import { streamText } from "ai";
|
|
359
|
+
import { openai } from "@ai-sdk/openai";
|
|
360
|
+
|
|
361
|
+
export async function POST(req: Request) {
|
|
362
|
+
const { messages } = await req.json();
|
|
363
|
+
|
|
364
|
+
const result = streamText({
|
|
365
|
+
model: openai("gpt-4-vision-preview"),
|
|
366
|
+
messages: messages.map(msg => {
|
|
367
|
+
if (msg.experimental_attachments?.length) {
|
|
368
|
+
// Images are automatically formatted for the model
|
|
369
|
+
return {
|
|
370
|
+
...msg,
|
|
371
|
+
experimental_attachments: msg.experimental_attachments
|
|
372
|
+
};
|
|
373
|
+
}
|
|
374
|
+
return msg;
|
|
375
|
+
}),
|
|
376
|
+
});
|
|
377
|
+
|
|
378
|
+
return result.toDataStreamResponse();
|
|
379
|
+
}
|
|
380
|
+
```
|
|
381
|
+
|
|
382
|
+
## Advanced Features
|
|
383
|
+
|
|
384
|
+
### Progress Updates
|
|
385
|
+
|
|
386
|
+
Provide real-time upload progress using async generators:
|
|
387
|
+
|
|
388
|
+
```tsx
|
|
389
|
+
class UploadAttachmentAdapter implements AttachmentAdapter {
|
|
390
|
+
accept = "*/*";
|
|
391
|
+
|
|
392
|
+
async *add({ file }: { file: File }) {
|
|
393
|
+
const id = generateId();
|
|
394
|
+
|
|
395
|
+
// Initial pending state
|
|
396
|
+
yield {
|
|
397
|
+
id,
|
|
398
|
+
type: "file",
|
|
399
|
+
name: file.name,
|
|
400
|
+
file,
|
|
401
|
+
status: { type: "running", progress: 0 },
|
|
402
|
+
} as PendingAttachment;
|
|
403
|
+
|
|
404
|
+
// Simulate upload progress
|
|
405
|
+
for (let progress = 10; progress <= 90; progress += 10) {
|
|
406
|
+
await new Promise((resolve) => setTimeout(resolve, 100));
|
|
407
|
+
|
|
408
|
+
yield {
|
|
409
|
+
id,
|
|
410
|
+
type: "file",
|
|
411
|
+
name: file.name,
|
|
412
|
+
file,
|
|
413
|
+
status: { type: "running", progress },
|
|
414
|
+
} as PendingAttachment;
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
// Return final pending state
|
|
418
|
+
return {
|
|
419
|
+
id,
|
|
420
|
+
type: "file",
|
|
421
|
+
name: file.name,
|
|
422
|
+
file,
|
|
423
|
+
status: { type: "running", progress: 100 },
|
|
424
|
+
} as PendingAttachment;
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
async send(attachment: PendingAttachment): Promise<CompleteAttachment> {
|
|
428
|
+
// Upload the file and return complete attachment
|
|
429
|
+
const url = await this.uploadFile(attachment.file);
|
|
430
|
+
|
|
431
|
+
return {
|
|
432
|
+
id: attachment.id,
|
|
433
|
+
type: attachment.type,
|
|
434
|
+
name: attachment.name,
|
|
435
|
+
content: [
|
|
436
|
+
{
|
|
437
|
+
type: "file",
|
|
438
|
+
data: url, // or base64 data
|
|
439
|
+
mimeType: attachment.file.type,
|
|
440
|
+
},
|
|
441
|
+
],
|
|
442
|
+
status: { type: "complete" },
|
|
443
|
+
};
|
|
444
|
+
}
|
|
445
|
+
|
|
446
|
+
async remove(attachment: PendingAttachment): Promise<void> {
|
|
447
|
+
// Cleanup logic
|
|
448
|
+
}
|
|
449
|
+
|
|
450
|
+
private async uploadFile(file: File): Promise<string> {
|
|
451
|
+
// Your upload logic here
|
|
452
|
+
return "https://example.com/file-url";
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
```
|
|
456
|
+
|
|
457
|
+
### Validation and Error Handling
|
|
458
|
+
|
|
459
|
+
Implement robust validation in your adapters:
|
|
460
|
+
|
|
461
|
+
```tsx
|
|
462
|
+
class ValidatedImageAdapter implements AttachmentAdapter {
|
|
463
|
+
accept = "image/*";
|
|
464
|
+
maxSizeBytes = 5 * 1024 * 1024; // 5MB
|
|
465
|
+
|
|
466
|
+
async add({ file }: { file: File }): Promise<PendingAttachment> {
|
|
467
|
+
// Validate file size
|
|
468
|
+
if (file.size > this.maxSizeBytes) {
|
|
469
|
+
return {
|
|
470
|
+
id: generateId(),
|
|
471
|
+
type: "image",
|
|
472
|
+
name: file.name,
|
|
473
|
+
file,
|
|
474
|
+
status: {
|
|
475
|
+
type: "incomplete",
|
|
476
|
+
reason: "error",
|
|
477
|
+
error: new Error("File size exceeds 5MB limit"),
|
|
478
|
+
},
|
|
479
|
+
};
|
|
480
|
+
}
|
|
481
|
+
|
|
482
|
+
// Validate image dimensions
|
|
483
|
+
try {
|
|
484
|
+
const dimensions = await this.getImageDimensions(file);
|
|
485
|
+
if (dimensions.width > 4096 || dimensions.height > 4096) {
|
|
486
|
+
throw new Error("Image dimensions exceed 4096x4096");
|
|
487
|
+
}
|
|
488
|
+
} catch (error) {
|
|
489
|
+
return {
|
|
490
|
+
id: generateId(),
|
|
491
|
+
type: "image",
|
|
492
|
+
name: file.name,
|
|
493
|
+
file,
|
|
494
|
+
status: {
|
|
495
|
+
type: "incomplete",
|
|
496
|
+
reason: "error",
|
|
497
|
+
error,
|
|
498
|
+
},
|
|
499
|
+
};
|
|
500
|
+
}
|
|
501
|
+
|
|
502
|
+
// Return valid attachment
|
|
503
|
+
return {
|
|
504
|
+
id: generateId(),
|
|
505
|
+
type: "image",
|
|
506
|
+
name: file.name,
|
|
507
|
+
file,
|
|
508
|
+
status: { type: "running" },
|
|
509
|
+
};
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
private async getImageDimensions(file: File) {
|
|
513
|
+
// Implementation to check image dimensions
|
|
514
|
+
}
|
|
515
|
+
}
|
|
516
|
+
```
|
|
517
|
+
|
|
518
|
+
### Multiple File Selection
|
|
519
|
+
|
|
520
|
+
Enable multi-file selection with custom limits:
|
|
521
|
+
|
|
522
|
+
```tsx
|
|
523
|
+
const composer = useComposer();
|
|
524
|
+
|
|
525
|
+
const handleMultipleFiles = async (files: FileList) => {
|
|
526
|
+
const maxFiles = 5;
|
|
527
|
+
const filesToAdd = Array.from(files).slice(0, maxFiles);
|
|
528
|
+
|
|
529
|
+
for (const file of filesToAdd) {
|
|
530
|
+
await composer.addAttachment({ file });
|
|
531
|
+
}
|
|
532
|
+
};
|
|
533
|
+
```
|
|
534
|
+
|
|
535
|
+
## Backend Integration
|
|
536
|
+
|
|
537
|
+
### With Vercel AI SDK
|
|
538
|
+
|
|
539
|
+
Process attachments in your API route:
|
|
540
|
+
|
|
541
|
+
```tsx title="/app/api/chat/route.ts"
|
|
542
|
+
import { streamText } from "ai";
|
|
543
|
+
import { openai } from "@ai-sdk/openai";
|
|
544
|
+
|
|
545
|
+
export async function POST(req: Request) {
|
|
546
|
+
const { messages } = await req.json();
|
|
547
|
+
|
|
548
|
+
// Process messages with attachments
|
|
549
|
+
const processedMessages = messages.map((msg) => {
|
|
550
|
+
if (msg.role === "user" && msg.experimental_attachments) {
|
|
551
|
+
// Handle attachments
|
|
552
|
+
const attachmentContent = msg.experimental_attachments
|
|
553
|
+
.map((att) => {
|
|
554
|
+
if (att.contentType.startsWith("image/")) {
|
|
555
|
+
return `[Image: ${att.name}]`;
|
|
556
|
+
}
|
|
557
|
+
return att.content;
|
|
558
|
+
})
|
|
559
|
+
.join("\n");
|
|
560
|
+
|
|
561
|
+
return {
|
|
562
|
+
...msg,
|
|
563
|
+
content: `${msg.content}\n\nAttachments:\n${attachmentContent}`,
|
|
564
|
+
};
|
|
565
|
+
}
|
|
566
|
+
return msg;
|
|
567
|
+
});
|
|
568
|
+
|
|
569
|
+
const result = streamText({
|
|
570
|
+
model: openai("gpt-4o"),
|
|
571
|
+
messages: processedMessages,
|
|
572
|
+
});
|
|
573
|
+
|
|
574
|
+
return result.toDataStreamResponse();
|
|
575
|
+
}
|
|
576
|
+
```
|
|
577
|
+
|
|
578
|
+
### Custom Backend Handling
|
|
579
|
+
|
|
580
|
+
Implement your own attachment processing:
|
|
581
|
+
|
|
582
|
+
```tsx
|
|
583
|
+
// In your attachment adapter
|
|
584
|
+
class ServerUploadAdapter implements AttachmentAdapter {
|
|
585
|
+
async send(attachment: PendingAttachment): Promise<CompleteAttachment> {
|
|
586
|
+
const formData = new FormData();
|
|
587
|
+
formData.append("file", attachment.file);
|
|
588
|
+
|
|
589
|
+
const response = await fetch("/api/upload", {
|
|
590
|
+
method: "POST",
|
|
591
|
+
body: formData,
|
|
592
|
+
});
|
|
593
|
+
|
|
594
|
+
const { url, id } = await response.json();
|
|
595
|
+
|
|
596
|
+
return {
|
|
597
|
+
id,
|
|
598
|
+
type: attachment.type,
|
|
599
|
+
name: attachment.name,
|
|
600
|
+
content: [
|
|
601
|
+
{
|
|
602
|
+
type: "image",
|
|
603
|
+
url,
|
|
604
|
+
},
|
|
605
|
+
],
|
|
606
|
+
status: { type: "complete" },
|
|
607
|
+
};
|
|
608
|
+
}
|
|
609
|
+
}
|
|
610
|
+
```
|
|
611
|
+
|
|
612
|
+
## Runtime Support
|
|
613
|
+
|
|
614
|
+
Attachments work with all assistant-ui runtimes:
|
|
615
|
+
|
|
616
|
+
- **AI SDK Runtime**: `useChatRuntime`, `useAssistantRuntime`
|
|
617
|
+
- **External Store**: `useExternalStoreRuntime`
|
|
618
|
+
- **LangGraph**: `useLangGraphRuntime`
|
|
619
|
+
- **Custom Runtimes**: Any runtime implementing the attachment interface
|
|
620
|
+
|
|
621
|
+
<Callout type="tip">
|
|
622
|
+
The attachment system is designed to be extensible. You can create adapters
|
|
623
|
+
for any file type, integrate with cloud storage services, or implement custom
|
|
624
|
+
processing logic to fit your specific needs.
|
|
625
|
+
</Callout>
|
|
626
|
+
|
|
627
|
+
## Best Practices
|
|
628
|
+
|
|
629
|
+
1. **File Size Limits**: Always validate file sizes to prevent memory issues
|
|
630
|
+
2. **Type Validation**: Verify file types match your `accept` pattern
|
|
631
|
+
3. **Error Handling**: Provide clear error messages for failed uploads
|
|
632
|
+
4. **Progress Feedback**: Show upload progress for better UX
|
|
633
|
+
5. **Security**: Validate and sanitize file content before processing
|
|
634
|
+
6. **Accessibility**: Ensure attachment UI is keyboard navigable
|
|
635
|
+
|
|
636
|
+
## Resources
|
|
637
|
+
|
|
638
|
+
- [Attachment UI Components](/docs/ui/Attachment) - UI implementation details
|
|
639
|
+
- [API Reference](/docs/api-reference) - Detailed type definitions
|
|
640
|
+
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Message Branching
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
import { BranchingSample } from "../../../components/samples/branching-sample";
|
|
6
|
+
|
|
7
|
+
Switch between different conversation branches.
|
|
8
|
+
|
|
9
|
+
<BranchingSample />
|
|
10
|
+
|
|
11
|
+
A new branch is created when:
|
|
12
|
+
|
|
13
|
+
- a user message is edited
|
|
14
|
+
- an assistant message is reloaded
|
|
15
|
+
|
|
16
|
+
Branches are automatically tracked by assistant-ui by observing changes to the `messages` array.
|
|
17
|
+
|
|
18
|
+
## Enabling branch support
|
|
19
|
+
|
|
20
|
+
You can show a branch picker by using `BranchPickerPrimitive`.
|
|
21
|
+
|
|
22
|
+
```tsx {1, 8, 15-30}
|
|
23
|
+
import { BranchPickerPrimitive } from "@assistant-ui/react";
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
const Message = () => {
|
|
27
|
+
return (
|
|
28
|
+
<MessagePrimitive.Root>
|
|
29
|
+
...
|
|
30
|
+
<BranchPicker /> {/* <-- show the branch picker */}
|
|
31
|
+
...
|
|
32
|
+
</MessagePrimitive.Root>
|
|
33
|
+
);
|
|
34
|
+
};
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
const BranchPicker = () => {
|
|
38
|
+
return (
|
|
39
|
+
<BranchPickerPrimitive.Root hideWhenSingleBranch>
|
|
40
|
+
<BranchPickerPrimitive.Previous />
|
|
41
|
+
<BranchPickerPrimitive.Number /> / <BranchPickerPrimitive.Count />
|
|
42
|
+
<BranchPickerPrimitive.Next />
|
|
43
|
+
</BranchPickerPrimitive.Root>
|
|
44
|
+
);
|
|
45
|
+
};
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
## API
|
|
49
|
+
|
|
50
|
+
You can access the current branch state or navigate via the API as well.
|
|
51
|
+
These APIs rely on the message state and may only be called inside a message component.
|
|
52
|
+
|
|
53
|
+
```tsx
|
|
54
|
+
const hasBranches = useMessageIf({ hasBranches: true }); // whether branchCount is >= 2
|
|
55
|
+
|
|
56
|
+
// navigation
|
|
57
|
+
const goToNextBranch = useGoToNextBranch(); // null if there is no next branch
|
|
58
|
+
const goToPreviousBranch = useGoToPreviousBranch(); // null if there is no previous branch
|
|
59
|
+
```
|