@assistant-ui/react-ai-sdk 0.5.15 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +4 -81
- package/dist/index.d.ts +4 -81
- package/dist/index.js +150 -407
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +154 -411
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -2
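For context on what the rewrite below means for consumers: 0.6.0 drops the package's internal VercelUseChatThreadRuntime / VercelUseAssistantThreadRuntime classes in favor of @assistant-ui/react's external store runtime (useExternalStoreRuntime), while keeping the public useVercelUseChatRuntime and useVercelUseAssistantRuntime hooks. A minimal consumption sketch, assuming the usual useChat hook from ai/react and the AssistantRuntimeProvider component from @assistant-ui/react; the /api/chat endpoint and component name are placeholders, not part of this package:

import { useChat } from "ai/react";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useVercelUseChatRuntime } from "@assistant-ui/react-ai-sdk";

const MyAssistant = () => {
  // Hypothetical chat route; point this at your own endpoint.
  const chat = useChat({ api: "/api/chat" });
  // In 0.6.0 this hook wraps useExternalStoreRuntime internally (see the diff below).
  const runtime = useVercelUseChatRuntime(chat);
  return (
    <AssistantRuntimeProvider runtime={runtime}>
      {/* assistant-ui thread components go here */}
    </AssistantRuntimeProvider>
  );
};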
package/dist/index.js
CHANGED
@@ -66,78 +66,56 @@ var getVercelRSCMessage = (message) => {
   return (0, import_react2.getExternalStoreMessage)(message);
 };
 
-// src/ui/
-var
-
-
-
-
-
-
-
-
-
-
-
-
-
-// src/ui/utils/sliceMessagesUntil.tsx
-var sliceMessagesUntil = (messages, messageId) => {
-  if (messageId == null) return [];
-  let messageIdx = messages.findIndex((m) => m.id === messageId);
-  if (messageIdx === -1)
-    throw new Error(
-      "useVercelAIThreadState: Message not found. This is liekly an internal bug in assistant-ui."
-    );
-  while (messages[messageIdx + 1]?.role === "assistant") {
-    messageIdx++;
+// src/ui/utils/useCachedChunkedMessages.ts
+var import_react3 = require("react");
+var hasItems = (messages) => messages.length > 0;
+var chunkedMessages = (messages) => {
+  const chunks = [];
+  let currentChunk = [];
+  for (const message of messages) {
+    if (message.role === "assistant" || message.role === "data") {
+      currentChunk.push(message);
+    } else {
+      if (hasItems(currentChunk)) {
+        chunks.push(currentChunk);
+        currentChunk = [];
+      }
+      chunks.push([message]);
+    }
   }
-
+  if (hasItems(currentChunk)) {
+    chunks.push(currentChunk);
+  }
+  return chunks;
 };
-
-
-
-
-
-
-  (0, import_react4.useEffect)(() => {
-    useComposer.setState({
-      value: vercel.input,
-      setValue: vercel.setInput
-    });
-  }, [useComposer, vercel.input, vercel.setInput]);
+var shallowArrayEqual = (a, b) => {
+  if (a.length !== b.length) return false;
+  for (let i = 0; i < a.length; i++) {
+    if (a[i] !== b[i]) return false;
+  }
+  return true;
 };
-
-
-
-
-
-
-
-
-
-
-      const cached = this.cache.get(key);
-      const newMessage = converter(m, cached);
-      this.cache.set(key, newMessage);
-      return newMessage;
+var useCachedChunkedMessages = (messages) => {
+  const cache = (0, import_react3.useMemo)(() => /* @__PURE__ */ new WeakMap(), []);
+  return (0, import_react3.useMemo)(() => {
+    return chunkedMessages(messages).map((m) => {
+      const key = m[0];
+      if (!key) return m;
+      const cached = cache.get(key);
+      if (cached && shallowArrayEqual(cached, m)) return cached;
+      cache.set(key, m);
+      return m;
     });
-  }
+  }, [messages, cache]);
 };
 
-// src/ui/utils/
-var
-  if ("isLoading" in vercel) return vercel.isLoading;
-  return vercel.status === "in_progress";
-};
-var vercelToThreadMessage2 = (messages, status) => {
+// src/ui/utils/convertMessage.ts
+var convertMessage = (messages) => {
   const firstMessage = messages[0];
   if (!firstMessage) throw new Error("No messages found");
   const common = {
     id: firstMessage.id,
-    createdAt: firstMessage.createdAt ?? /* @__PURE__ */ new Date(),
-    [symbolInnerAIMessage]: messages
+    createdAt: firstMessage.createdAt ?? /* @__PURE__ */ new Date()
   };
   switch (firstMessage.role) {
     case "user":
@@ -177,8 +155,7 @@ var vercelToThreadMessage2 = (messages, status) => {
             ) ?? [],
             ...typeof message.data === "object" && !Array.isArray(message.data) && message.data?.["type"] === "tool-call" ? [message.data] : []
           ];
-        }),
-        status
+        })
       };
       for (const message of messages) {
         if (typeof message.data === "object" && !Array.isArray(message.data) && message.data?.["type"] === "tool-result") {
@@ -199,367 +176,133 @@ var vercelToThreadMessage2 = (messages, status) => {
       );
   }
 };
-var hasItems = (messages) => messages.length > 0;
-var chunkedMessages = (messages) => {
-  const chunks = [];
-  let currentChunk = [];
-  for (const message of messages) {
-    if (message.role === "assistant" || message.role === "data") {
-      currentChunk.push(message);
-    } else {
-      if (hasItems(currentChunk)) {
-        chunks.push(currentChunk);
-        currentChunk = [];
-      }
-      chunks.push([message]);
-    }
-  }
-  if (hasItems(currentChunk)) {
-    chunks.push(currentChunk);
-  }
-  return chunks;
-};
-var shallowArrayEqual = (a, b) => {
-  if (a.length !== b.length) return false;
-  for (let i = 0; i < a.length; i++) {
-    if (a[i] !== b[i]) return false;
-  }
-  return true;
-};
-var useVercelAIThreadSync = (vercel, updateData) => {
-  const isRunning = getIsRunning(vercel);
-  const converter = (0, import_react5.useMemo)(() => new ThreadMessageConverter(), []);
-  (0, import_react5.useEffect)(() => {
-    const lastMessageId = vercel.messages.at(-1)?.id;
-    const convertCallback = (messages2, cache) => {
-      const status = lastMessageId === messages2[0].id && isRunning ? {
-        type: "running"
-      } : {
-        type: "complete",
-        reason: "unknown"
-      };
-      if (cache && shallowArrayEqual(cache.content, messages2) && (cache.role !== "assistant" || cache.status.type === status.type))
-        return cache;
-      return vercelToThreadMessage2(messages2, status);
-    };
-    const messages = converter.convertMessages(
-      chunkedMessages(vercel.messages),
-      convertCallback,
-      (m) => m[0]
-    );
-    updateData(isRunning, messages);
-  }, [updateData, isRunning, vercel.messages, converter]);
-};
 
-// src/ui/use-chat/
-var
-
-
-
-var
-
-
-
-
-
-
-var VercelUseChatThreadRuntime = class {
-  constructor(vercel) {
-    this.vercel = vercel;
-    this.useVercel = (0, import_zustand.create)(() => ({
-      vercel
-    }));
-  }
-  _subscriptions = /* @__PURE__ */ new Set();
-  repository = new MessageRepository();
-  assistantOptimisticId = null;
-  useVercel;
-  capabilities = CAPABILITIES;
-  messages = [];
-  isDisabled = false;
-  getBranches(messageId) {
-    return this.repository.getBranches(messageId);
-  }
-  switchToBranch(branchId) {
-    this.repository.switchToBranch(branchId);
-    this.updateVercelMessages(this.repository.getMessages());
-  }
-  async append(message) {
-    if (message.content.length !== 1 || message.content[0]?.type !== "text")
-      throw new Error(
-        "Only text content is supported by VercelUseChatRuntime. Use the Edge runtime for image support."
-      );
-    const newMessages = sliceMessagesUntil(
-      this.vercel.messages,
-      message.parentId
-    );
-    this.vercel.setMessages(newMessages);
-    await this.vercel.append({
-      role: message.role,
-      content: message.content[0].text
-    });
-  }
-  async startRun(parentId) {
-    const newMessages = sliceMessagesUntil(this.vercel.messages, parentId);
-    this.vercel.setMessages(newMessages);
-    await this.vercel.reload();
-  }
-  cancelRun() {
-    const previousMessage = this.vercel.messages.at(-1);
-    this.vercel.stop();
-    if (this.assistantOptimisticId) {
-      this.repository.deleteMessage(this.assistantOptimisticId);
-      this.assistantOptimisticId = null;
-    }
-    let messages = this.repository.getMessages();
-    if (previousMessage?.role === "user" && previousMessage.id === messages.at(-1)?.id) {
-      this.vercel.setInput(previousMessage.content);
-      this.repository.deleteMessage(previousMessage.id);
-      messages = this.repository.getMessages();
-    }
-    setTimeout(() => {
-      this.updateVercelMessages(messages);
-    }, 0);
-  }
-  subscribe(callback) {
-    this._subscriptions.add(callback);
-    return () => this._subscriptions.delete(callback);
-  }
-  updateVercelMessages = (messages) => {
-    this.vercel.setMessages(
-      messages.flatMap(getVercelAIMessage).filter((m) => m != null)
-    );
-  };
-  onVercelUpdated() {
-    if (this.useVercel.getState().vercel !== this.vercel) {
-      this.useVercel.setState({ vercel: this.vercel });
-    }
-  }
-  updateData = (isRunning, vm) => {
-    for (let i = 0; i < vm.length; i++) {
-      const message = vm[i];
-      const parent = vm[i - 1];
-      this.repository.addOrUpdateMessage(parent?.id ?? null, message);
-    }
-    if (this.assistantOptimisticId) {
-      this.repository.deleteMessage(this.assistantOptimisticId);
-      this.assistantOptimisticId = null;
-    }
-    if (hasUpcomingMessage(isRunning, vm)) {
-      this.assistantOptimisticId = this.repository.appendOptimisticMessage(
-        vm.at(-1)?.id ?? null,
-        {
-          role: "assistant",
-          content: []
-        }
-      );
+// src/ui/use-chat/useVercelUseChatRuntime.tsx
+var import_react6 = require("@assistant-ui/react");
+
+// src/ui/utils/useInputSync.tsx
+var import_react4 = require("react");
+var import_react5 = require("@assistant-ui/react");
+var useInputSync = (helpers, runtime) => {
+  const helpersRef = (0, import_react4.useRef)(helpers);
+  (0, import_react4.useEffect)(() => {
+    helpersRef.current = helpers;
+    if (runtime.thread.composer.text !== helpers.input) {
+      runtime.thread.composer.setText(helpers.input);
     }
-
-
-    )
-
-
-
-
-
-    useVercelAIThreadSync(vercel, this.updateData);
-    useVercelAIComposerSync(vercel);
-    return null;
-  };
-  addToolResult({ toolCallId, result }) {
-    this.vercel.addToolResult({ toolCallId, result });
-  }
+  }, [helpers, runtime]);
+  (0, import_react4.useEffect)(() => {
+    return (0, import_react5.subscribeToMainThread)(runtime, () => {
+      if (runtime.thread.composer.text !== helpersRef.current.input) {
+        helpersRef.current.setInput(runtime.thread.composer.text);
+      }
+    });
+  }, [runtime]);
 };
 
-// src/ui/
-var
-
-
-
-
-
-
-
-
-  onVercelUpdated() {
-    return this.thread.onVercelUpdated();
-  }
-  getModelConfig() {
-    return this._proxyConfigProvider.getModelConfig();
-  }
-  registerModelConfigProvider(provider) {
-    return this._proxyConfigProvider.registerModelConfigProvider(provider);
-  }
-  switchToThread(threadId) {
-    if (threadId) {
-      throw new Error(
-        "VercelAIRuntime does not yet support switching threads."
-      );
-    }
-    this.thread.vercel.messages = [];
-    this.thread.vercel.input = "";
-    this.thread.vercel.setMessages([]);
-    this.thread.vercel.setInput("");
-    this.thread = new VercelUseChatThreadRuntime(this.thread.vercel);
+// src/ui/utils/sliceMessagesUntil.tsx
+var sliceMessagesUntil = (messages, messageId) => {
+  if (messageId == null) return [];
+  let messageIdx = messages.findIndex((m) => m.id === messageId);
+  if (messageIdx === -1)
+    throw new Error(
+      "useVercelAIThreadState: Message not found. This is liekly an internal bug in assistant-ui."
+    );
+  while (messages[messageIdx + 1]?.role === "assistant") {
+    messageIdx++;
   }
+  return messages.slice(0, messageIdx + 1);
 };
 
 // src/ui/use-chat/useVercelUseChatRuntime.tsx
 var useVercelUseChatRuntime = (chatHelpers) => {
-  const
-  (0,
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  copy: true
-});
-var VercelUseAssistantThreadRuntime = class {
-  constructor(vercel) {
-    this.vercel = vercel;
-    this.useVercel = (0, import_zustand2.create)(() => ({
-      vercel
-    }));
-  }
-  _subscriptions = /* @__PURE__ */ new Set();
-  capabilities = CAPABILITIES2;
-  useVercel;
-  messages = [];
-  isDisabled = false;
-  getBranches() {
-    return EMPTY_BRANCHES;
-  }
-  switchToBranch() {
-    throw new Error(
-      "VercelUseAssistantRuntime does not support switching branches."
-    );
-  }
-  async append(message) {
-    if (message.role !== "user")
-      throw new Error(
-        "Only appending user messages are supported in VercelUseAssistantRuntime. This is likely an internal bug in assistant-ui."
-      );
-    if (message.content.length !== 1 || message.content[0]?.type !== "text")
-      throw new Error("VercelUseAssistantRuntime only supports text content.");
-    if (message.parentId !== (this.messages.at(-1)?.id ?? null))
-      throw new Error(
-        "VercelUseAssistantRuntime does not support editing messages."
+  const messages = useCachedChunkedMessages(chatHelpers.messages);
+  const runtime = (0, import_react6.useExternalStoreRuntime)({
+    isRunning: chatHelpers.isLoading,
+    messages,
+    setMessages: (messages2) => chatHelpers.setMessages(messages2.flat()),
+    onCancel: async () => chatHelpers.stop(),
+    onNew: async (message) => {
+      if (message.content.length !== 1 || message.content[0]?.type !== "text")
+        throw new Error(
+          "Only text content is supported by VercelUseChatRuntime. Use the Edge runtime for image support."
+        );
+      await chatHelpers.append({
+        role: message.role,
+        content: message.content[0].text
+      });
+    },
+    onEdit: async (message) => {
+      if (message.content.length !== 1 || message.content[0]?.type !== "text")
+        throw new Error(
+          "Only text content is supported by VercelUseChatRuntime. Use the Edge runtime for image support."
+        );
+      const newMessages = sliceMessagesUntil(
+        chatHelpers.messages,
+        message.parentId
       );
-
-
-
-
-  }
-  async startRun() {
-    throw new Error("VercelUseAssistantRuntime does not support reloading.");
-  }
-  cancelRun() {
-    const previousMessage = this.vercel.messages.at(-1);
-    this.vercel.stop();
-    if (previousMessage?.role === "user") {
-      this.vercel.setInput(previousMessage.content);
-    }
-  }
-  subscribe(callback) {
-    this._subscriptions.add(callback);
-    return () => this._subscriptions.delete(callback);
-  }
-  onVercelUpdated() {
-    if (this.useVercel.getState().vercel !== this.vercel) {
-      this.useVercel.setState({ vercel: this.vercel });
-    }
-  }
-  updateData = (isRunning, vm) => {
-    if (hasUpcomingMessage2(isRunning, vm)) {
-      vm.push({
-        id: "__optimistic__result",
-        createdAt: /* @__PURE__ */ new Date(),
-        status: { type: "running" },
-        role: "assistant",
-        content: []
+      chatHelpers.setMessages(newMessages);
+      await chatHelpers.append({
+        role: message.role,
+        content: message.content[0].text
       });
-    }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  }
-
-
-
-
-  return isRunning && messages[messages.length - 1]?.role !== "assistant";
-};
-var VercelUseAssistantRuntime = class extends BaseAssistantRuntime2 {
-  _proxyConfigProvider = new ProxyConfigProvider2();
-  constructor(vercel) {
-    super(new VercelUseAssistantThreadRuntime(vercel));
-  }
-  set vercel(vercel) {
-    this.thread.vercel = vercel;
-  }
-  onVercelUpdated() {
-    return this.thread.onVercelUpdated();
-  }
-  getModelConfig() {
-    return this._proxyConfigProvider.getModelConfig();
-  }
-  registerModelConfigProvider(provider) {
-    return this._proxyConfigProvider.registerModelConfigProvider(provider);
-  }
-  switchToThread(threadId) {
-    if (threadId) {
-      throw new Error("VercelAIRuntime does not yet support switching threads");
-    }
-    this.thread.vercel.messages = [];
-    this.thread.vercel.input = "";
-    this.thread.vercel.setMessages([]);
-    this.thread.vercel.setInput("");
-    this.thread = new VercelUseAssistantThreadRuntime(this.thread.vercel);
-  }
+    },
+    onReload: async (parentId) => {
+      const newMessages = sliceMessagesUntil(chatHelpers.messages, parentId);
+      chatHelpers.setMessages(newMessages);
+      await chatHelpers.reload();
+    },
+    onAddToolResult: ({ toolCallId, result }) => {
+      chatHelpers.addToolResult({ toolCallId, result });
+    },
+    // onCopy // TODO
+    onNewThread: () => {
+      chatHelpers.messages = [];
+      chatHelpers.input = "";
+      chatHelpers.setMessages([]);
+      chatHelpers.setInput("");
+    },
+    convertMessage
+  });
+  useInputSync(chatHelpers, runtime);
+  return runtime;
 };
 
 // src/ui/use-assistant/useVercelUseAssistantRuntime.tsx
+var import_react7 = require("@assistant-ui/react");
 var useVercelUseAssistantRuntime = (assistantHelpers) => {
-  const
-
-
-
-
-
-
-
+  const messages = useCachedChunkedMessages(assistantHelpers.messages);
+  const runtime = (0, import_react7.useExternalStoreRuntime)({
+    isRunning: assistantHelpers.status === "in_progress",
+    messages,
+    onCancel: async () => assistantHelpers.stop(),
+    onNew: async (message) => {
+      if (message.content.length !== 1 || message.content[0]?.type !== "text")
+        throw new Error(
+          "VercelUseAssistantRuntime only supports text content."
+        );
+      await assistantHelpers.append({
+        role: message.role,
+        content: message.content[0].text
+      });
+    },
+    onNewThread: () => {
+      assistantHelpers.messages = [];
+      assistantHelpers.input = "";
+      assistantHelpers.setMessages([]);
+      assistantHelpers.setInput("");
+    },
+    convertMessage
   });
+  useInputSync(assistantHelpers, runtime);
   return runtime;
 };
+
+// src/ui/getVercelAIMessage.tsx
+var import_react8 = require("@assistant-ui/react");
+var getVercelAIMessage = (message) => {
+  return (0, import_react8.getExternalStoreMessage)(message);
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   getVercelAIMessage,