ai 5.0.0-alpha.5 → 5.0.0-alpha.7
- package/CHANGELOG.md +27 -0
- package/dist/index.d.mts +275 -435
- package/dist/index.d.ts +275 -435
- package/dist/index.js +481 -593
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +481 -583
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -3
package/dist/index.mjs (CHANGED)
```diff
@@ -449,21 +449,8 @@ function pipeTextStreamToResponse({
   });
 }
 
-// src/ui/append-client-message.ts
-function appendClientMessage({
-  messages,
-  message
-}) {
-  return [
-    ...messages.length > 0 && messages[messages.length - 1].id === message.id ? messages.slice(0, -1) : messages,
-    message
-  ];
-}
-
-// src/ui/call-chat-api.ts
-import {
-  parseJsonEventStream
-} from "@ai-sdk/provider-utils";
+// src/ui/call-completion-api.ts
+import { parseJsonEventStream } from "@ai-sdk/provider-utils";
 
 // src/ui-message-stream/ui-message-stream-parts.ts
 import { z } from "zod";
```
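The block removed above is the `appendClientMessage` helper (its export is also dropped at the bottom of this diff). Code that imported it can inline the replace-or-append logic; a minimal sketch (the helper name here is ours, not the SDK's):

```js
// Replace the last message when the ids match, otherwise append:
// the behavior of the removed appendClientMessage helper.
function appendOrReplaceLast(messages, message) {
  const last = messages[messages.length - 1];
  return [
    ...(last && last.id === message.id ? messages.slice(0, -1) : messages),
    message
  ];
}
```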
```diff
@@ -569,6 +556,138 @@ async function consumeStream({
   }
 }
 
+// src/ui/process-text-stream.ts
+async function processTextStream({
+  stream,
+  onTextPart
+}) {
+  const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) {
+      break;
+    }
+    await onTextPart(value);
+  }
+}
+
+// src/ui/call-completion-api.ts
+var getOriginalFetch = () => fetch;
+async function callCompletionApi({
+  api,
+  prompt,
+  credentials,
+  headers,
+  body,
+  streamProtocol = "data",
+  setCompletion,
+  setLoading,
+  setError,
+  setAbortController,
+  onFinish,
+  onError,
+  fetch: fetch2 = getOriginalFetch()
+}) {
+  var _a17;
+  try {
+    setLoading(true);
+    setError(void 0);
+    const abortController = new AbortController();
+    setAbortController(abortController);
+    setCompletion("");
+    const response = await fetch2(api, {
+      method: "POST",
+      body: JSON.stringify({
+        prompt,
+        ...body
+      }),
+      credentials,
+      headers: {
+        "Content-Type": "application/json",
+        ...headers
+      },
+      signal: abortController.signal
+    }).catch((err) => {
+      throw err;
+    });
+    if (!response.ok) {
+      throw new Error(
+        (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    let result = "";
+    switch (streamProtocol) {
+      case "text": {
+        await processTextStream({
+          stream: response.body,
+          onTextPart: (chunk) => {
+            result += chunk;
+            setCompletion(result);
+          }
+        });
+        break;
+      }
+      case "data": {
+        await consumeStream({
+          stream: parseJsonEventStream({
+            stream: response.body,
+            schema: uiMessageStreamPartSchema
+          }).pipeThrough(
+            new TransformStream({
+              async transform(part) {
+                if (!part.success) {
+                  throw part.error;
+                }
+                const streamPart = part.value;
+                if (streamPart.type === "text") {
+                  result += streamPart.text;
+                  setCompletion(result);
+                } else if (streamPart.type === "error") {
+                  throw new Error(streamPart.errorText);
+                }
+              }
+            })
+          ),
+          onError: (error) => {
+            throw error;
+          }
+        });
+        break;
+      }
+      default: {
+        const exhaustiveCheck = streamProtocol;
+        throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
+      }
+    }
+    if (onFinish) {
+      onFinish(prompt, result);
+    }
+    setAbortController(null);
+    return result;
+  } catch (err) {
+    if (err.name === "AbortError") {
+      setAbortController(null);
+      return null;
+    }
+    if (err instanceof Error) {
+      if (onError) {
+        onError(err);
+      }
+    }
+    setError(err);
+  } finally {
+    setLoading(false);
+  }
+}
+
+// src/ui/chat-store.ts
+import {
+  generateId as generateIdFunc
+} from "@ai-sdk/provider-utils";
+
 // src/ui/process-ui-message-stream.ts
 import {
   validateTypes
```
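`processTextStream` and `callCompletionApi` now sit near the top of the UI section of the bundle (the old copies are removed further down). For the `"text"` protocol the response body is consumed as a plain decoded byte stream; a standalone sketch of that pattern, assuming a web `ReadableStream<Uint8Array>` such as `Response.body`:

```js
// Decode a byte stream chunk-by-chunk and hand each text chunk to a
// callback, mirroring processTextStream above.
async function readTextChunks(stream, onChunk) {
  const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    await onChunk(value);
  }
}

// Hypothetical usage against a completion endpoint:
// const res = await fetch("/api/completion", { method: "POST", body: "{}" });
// await readTextChunks(res.body, (chunk) => console.log(chunk));
```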
```diff
@@ -944,14 +1063,6 @@ async function parsePartialJson(jsonText) {
   return { value: void 0, state: "failed-parse" };
 }
 
-// src/ui/extract-max-tool-invocation-step.ts
-function extractMaxToolInvocationStep(toolInvocations) {
-  return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
-    var _a17;
-    return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
-  }, 0);
-}
-
 // src/ui/get-tool-invocations.ts
 function getToolInvocations(message) {
   return message.parts.filter(
@@ -964,9 +1075,7 @@ function createStreamingUIMessageState({
   lastMessage,
   newMessageId = ""
 } = {}) {
-  var _a17;
   const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
-  const step = isContinuation ? 1 + ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) : 0;
   const message = isContinuation ? lastMessage : {
     id: newMessageId,
     metadata: {},
@@ -977,8 +1086,7 @@ function createStreamingUIMessageState({
     message,
     activeTextPart: void 0,
     activeReasoningPart: void 0,
-    partialToolCalls: {},
-    step
+    partialToolCalls: {}
   };
 }
 function processUIMessageStream({
@@ -1076,13 +1184,11 @@ function processUIMessageStream({
         const toolInvocations = getToolInvocations(state.message);
         state.partialToolCalls[part.toolCallId] = {
           text: "",
-          step: state.step,
           toolName: part.toolName,
           index: toolInvocations.length
         };
         updateToolInvocationPart(part.toolCallId, {
           state: "partial-call",
-          step: state.step,
           toolCallId: part.toolCallId,
           toolName: part.toolName,
           args: void 0
@@ -1098,7 +1204,6 @@ function processUIMessageStream({
         );
         updateToolInvocationPart(part.toolCallId, {
           state: "partial-call",
-          step: partialToolCall.step,
           toolCallId: part.toolCallId,
           toolName: partialToolCall.toolName,
           args: partialArgs
@@ -1109,7 +1214,6 @@ function processUIMessageStream({
       case "tool-call": {
         updateToolInvocationPart(part.toolCallId, {
           state: "call",
-          step: state.step,
           toolCallId: part.toolCallId,
           toolName: part.toolName,
           args: part.args
@@ -1122,7 +1226,6 @@ function processUIMessageStream({
         if (result != null) {
           updateToolInvocationPart(part.toolCallId, {
             state: "result",
-            step: state.step,
             toolCallId: part.toolCallId,
             toolName: part.toolName,
             args: part.args,
@@ -1161,7 +1264,6 @@ function processUIMessageStream({
         break;
       }
       case "finish-step": {
-        state.step += 1;
        state.activeTextPart = void 0;
        state.activeReasoningPart = void 0;
        await updateMessageMetadata(part.metadata);
```
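The hunks above remove the mutable `step` counter from the streaming UI-message state: tool-call parts no longer carry a `step` field and `finish-step` no longer increments anything. The step count is instead derived from the `step-start` parts already present in a message, roughly:

```js
// Step count is now a pure function of the message parts rather than
// stream-side bookkeeping.
function countSteps(message) {
  return message.parts.filter((part) => part.type === "step-start").length;
}

const example = {
  role: "assistant",
  parts: [
    { type: "step-start" },
    { type: "text", text: "searching" },
    { type: "step-start" },
    { type: "text", text: "done" }
  ]
};
console.log(countSteps(example)); // 2
```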
```diff
@@ -1224,408 +1326,64 @@ function isObject(value) {
   return typeof value === "object" && value !== null;
 }
 
-// src/ui/transform-text-to-ui-message-stream.ts
-function transformTextToUiMessageStream({
-  stream
+// src/ui/should-resubmit-messages.ts
+function shouldResubmitMessages({
+  originalMaxToolInvocationStep,
+  originalMessageCount,
+  maxSteps,
+  messages
 }) {
-  return stream.pipeThrough(
-    new TransformStream({
-      start(controller) {
-        controller.enqueue({ type: "start" });
-        controller.enqueue({ type: "start-step" });
-      },
-      async transform(part, controller) {
-        controller.enqueue({ type: "text", text: part });
-      },
-      async flush(controller) {
-        controller.enqueue({ type: "finish-step" });
-        controller.enqueue({ type: "finish" });
-      }
-    })
+  const lastMessage = messages[messages.length - 1];
+  const lastMessageStepStartCount = lastMessage.parts.filter(
+    (part) => part.type === "step-start"
+  ).length;
+  return (
+    // check if the feature is enabled:
+    maxSteps > 1 && // ensure there is a last message:
+    lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
+    (messages.length > originalMessageCount || lastMessageStepStartCount !== originalMaxToolInvocationStep) && // check that next step is possible:
+    isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
+    lastMessageStepStartCount < maxSteps
   );
 }
+function isAssistantMessageWithCompletedToolCalls(message) {
+  if (message.role !== "assistant") {
+    return false;
+  }
+  const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
+    return part.type === "step-start" ? index : lastIndex;
+  }, -1);
+  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
+  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
+}
 
-// src/ui/call-chat-api.ts
-var getOriginalFetch = () => fetch;
-async function fetchUIMessageStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortController,
-  fetch: fetch2 = getOriginalFetch(),
-  requestType = "generate"
-}) {
-  var _a17, _b, _c;
-  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+// src/ui/chat-store.ts
+var ChatStore = class {
+  constructor({
+    chats = {},
+    generateId: generateId3,
+    transport,
+    maxSteps = 1,
+    messageMetadataSchema,
+    dataPartSchemas,
+    createChat
+  }) {
+    this.createChat = createChat;
+    this.chats = new Map(
+      Object.entries(chats).map(([id, chat]) => [
+        id,
+        this.createChat({ messages: chat.messages })
+      ])
     );
+    this.maxSteps = maxSteps;
+    this.transport = transport;
+    this.subscribers = /* @__PURE__ */ new Set();
+    this.generateId = generateId3 != null ? generateId3 : generateIdFunc;
+    this.messageMetadataSchema = messageMetadataSchema;
+    this.dataPartSchemas = dataPartSchemas;
   }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return parseJsonEventStream({
-    stream: response.body,
-    schema: uiMessageStreamPartSchema
-  }).pipeThrough(
-    new TransformStream({
-      async transform(part, controller) {
-        if (!part.success) {
-          throw part.error;
-        }
-        controller.enqueue(part.value);
-      }
-    })
-  );
-}
-async function fetchTextStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortController,
-  fetch: fetch2 = getOriginalFetch(),
-  requestType = "generate"
-}) {
-  var _a17, _b, _c;
-  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return transformTextToUiMessageStream({
-    stream: response.body.pipeThrough(new TextDecoderStream())
-  });
-}
-async function consumeUIMessageStream({
-  stream,
-  onUpdate,
-  onFinish,
-  onToolCall,
-  generateId: generateId3,
-  lastMessage,
-  messageMetadataSchema
-}) {
-  const state = createStreamingUIMessageState({
-    lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
-    newMessageId: generateId3()
-  });
-  const runUpdateMessageJob = async (job) => {
-    await job({
-      state,
-      write: () => {
-        onUpdate({ message: state.message });
-      }
-    });
-  };
-  await consumeStream({
-    stream: processUIMessageStream({
-      stream,
-      onToolCall,
-      messageMetadataSchema,
-      runUpdateMessageJob
-    }),
-    onError: (error) => {
-      throw error;
-    }
-  });
-  onFinish == null ? void 0 : onFinish({ message: state.message });
-}
-async function callChatApi({
-  api,
-  body,
-  streamProtocol = "ui-message",
-  credentials,
-  headers,
-  abortController,
-  onUpdate,
-  onFinish,
-  onToolCall,
-  generateId: generateId3,
-  fetch: fetch2 = getOriginalFetch(),
-  lastMessage,
-  requestType = "generate",
-  messageMetadataSchema
-}) {
-  const stream = streamProtocol === "text" ? await fetchTextStream({
-    api,
-    body,
-    credentials,
-    headers,
-    abortController,
-    fetch: fetch2,
-    requestType
-  }) : await fetchUIMessageStream({
-    api,
-    body,
-    credentials,
-    headers,
-    abortController,
-    fetch: fetch2,
-    requestType
-  });
-  await consumeUIMessageStream({
-    stream,
-    onUpdate,
-    onFinish,
-    onToolCall,
-    generateId: generateId3,
-    lastMessage,
-    messageMetadataSchema
-  });
-}
-
-// src/ui/call-completion-api.ts
-import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-utils";
-
-// src/ui/process-text-stream.ts
-async function processTextStream({
-  stream,
-  onTextPart
-}) {
-  const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
-  while (true) {
-    const { done, value } = await reader.read();
-    if (done) {
-      break;
-    }
-    await onTextPart(value);
-  }
-}
-
-// src/ui/call-completion-api.ts
-var getOriginalFetch2 = () => fetch;
-async function callCompletionApi({
-  api,
-  prompt,
-  credentials,
-  headers,
-  body,
-  streamProtocol = "data",
-  setCompletion,
-  setLoading,
-  setError,
-  setAbortController,
-  onFinish,
-  onError,
-  fetch: fetch2 = getOriginalFetch2()
-}) {
-  var _a17;
-  try {
-    setLoading(true);
-    setError(void 0);
-    const abortController = new AbortController();
-    setAbortController(abortController);
-    setCompletion("");
-    const response = await fetch2(api, {
-      method: "POST",
-      body: JSON.stringify({
-        prompt,
-        ...body
-      }),
-      credentials,
-      headers: {
-        "Content-Type": "application/json",
-        ...headers
-      },
-      signal: abortController.signal
-    }).catch((err) => {
-      throw err;
-    });
-    if (!response.ok) {
-      throw new Error(
-        (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-      );
-    }
-    if (!response.body) {
-      throw new Error("The response body is empty.");
-    }
-    let result = "";
-    switch (streamProtocol) {
-      case "text": {
-        await processTextStream({
-          stream: response.body,
-          onTextPart: (chunk) => {
-            result += chunk;
-            setCompletion(result);
-          }
-        });
-        break;
-      }
-      case "data": {
-        await consumeStream({
-          stream: parseJsonEventStream2({
-            stream: response.body,
-            schema: uiMessageStreamPartSchema
-          }).pipeThrough(
-            new TransformStream({
-              async transform(part) {
-                if (!part.success) {
-                  throw part.error;
-                }
-                const streamPart = part.value;
-                if (streamPart.type === "text") {
-                  result += streamPart.text;
-                  setCompletion(result);
-                } else if (streamPart.type === "error") {
-                  throw new Error(streamPart.errorText);
-                }
-              }
-            })
-          ),
-          onError: (error) => {
-            throw error;
-          }
-        });
-        break;
-      }
-      default: {
-        const exhaustiveCheck = streamProtocol;
-        throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
-      }
-    }
-    if (onFinish) {
-      onFinish(prompt, result);
-    }
-    setAbortController(null);
-    return result;
-  } catch (err) {
-    if (err.name === "AbortError") {
-      setAbortController(null);
-      return null;
-    }
-    if (err instanceof Error) {
-      if (onError) {
-        onError(err);
-      }
-    }
-    setError(err);
-  } finally {
-    setLoading(false);
-  }
-}
-
-// src/ui/chat-store.ts
-import {
-  generateId as generateIdFunc
-} from "@ai-sdk/provider-utils";
-
-// src/ui/should-resubmit-messages.ts
-function shouldResubmitMessages({
-  originalMaxToolInvocationStep,
-  originalMessageCount,
-  maxSteps,
-  messages
-}) {
-  var _a17;
-  const lastMessage = messages[messages.length - 1];
-  return (
-    // check if the feature is enabled:
-    maxSteps > 1 && // ensure there is a last message:
-    lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
-    (messages.length > originalMessageCount || extractMaxToolInvocationStep(getToolInvocations(lastMessage)) !== originalMaxToolInvocationStep) && // check that next step is possible:
-    isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
-    ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps
-  );
-}
-function isAssistantMessageWithCompletedToolCalls(message) {
-  if (message.role !== "assistant") {
-    return false;
-  }
-  const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
-    return part.type === "step-start" ? index : lastIndex;
-  }, -1);
-  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
-  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
-}
-
-// src/ui/update-tool-call-result.ts
-function updateToolCallResult({
-  messages,
-  toolCallId,
-  toolResult: result
-}) {
-  const lastMessage = messages[messages.length - 1];
-  const invocationPart = lastMessage.parts.find(
-    (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
-  );
-  if (invocationPart == null) {
-    return;
-  }
-  invocationPart.toolInvocation = {
-    ...invocationPart.toolInvocation,
-    state: "result",
-    result
-  };
-}
-
-// src/ui/chat-store.ts
-var ChatStore = class {
-  constructor({
-    chats = {},
-    generateId: generateId3,
-    transport,
-    maxSteps = 1,
-    messageMetadataSchema,
-    dataPartSchemas,
-    createChat
-  }) {
-    this.createChat = createChat;
-    this.chats = new Map(
-      Object.entries(chats).map(([id, chat]) => [
-        id,
-        this.createChat({ messages: chat.messages })
-      ])
-    );
-    this.maxSteps = maxSteps;
-    this.transport = transport;
-    this.subscribers = /* @__PURE__ */ new Set();
-    this.generateId = generateId3 != null ? generateId3 : generateIdFunc;
-    this.messageMetadataSchema = messageMetadataSchema;
-    this.dataPartSchemas = dataPartSchemas;
-  }
-  hasChat(id) {
-    return this.chats.has(id);
+  hasChat(id) {
+    return this.chats.has(id);
   }
   addChat(id, messages) {
     this.chats.set(id, this.createChat({ messages }));
```
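In the rewritten `shouldResubmitMessages`, the removed `extractMaxToolInvocationStep` is replaced by counting `step-start` parts, and `isAssistantMessageWithCompletedToolCalls` only inspects the parts after the last `step-start`. An inlined equivalent of that last-step check:

```js
// True when the final step of an assistant message contains tool
// invocations and every one of them already has a result.
function lastStepToolCallsComplete(message) {
  if (message.role !== "assistant") return false;
  const lastStepStart = message.parts.reduce(
    (last, part, index) => (part.type === "step-start" ? index : last),
    -1
  );
  const toolParts = message.parts
    .slice(lastStepStart + 1)
    .filter((part) => part.type === "tool-invocation");
  return (
    toolParts.length > 0 &&
    toolParts.every((part) => "result" in part.toolInvocation)
  );
}
```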
```diff
@@ -1818,14 +1576,15 @@ var ChatStore = class {
     const chat = this.getChatState(chatId);
     this.setStatus({ id: chatId, status: "submitted", error: void 0 });
     const messageCount = chat.messages.length;
-    const
-
-
+    const lastMessage = chat.messages[chat.messages.length - 1];
+    const maxStep = lastMessage.parts.filter(
+      (part) => part.type === "step-start"
+    ).length;
     try {
-      const
+      const lastMessage2 = chat.messages[chat.messages.length - 1];
       const activeResponse = {
         state: createStreamingUIMessageState({
-          lastMessage: chat.snapshot ? chat.snapshot(
+          lastMessage: chat.snapshot ? chat.snapshot(lastMessage2) : lastMessage2,
           newMessageId: this.generateId()
         }),
         abortController: new AbortController()
@@ -1904,109 +1663,27 @@ var ChatStore = class {
         headers,
         body
       });
-    }
-  }
-};
-
-// src/ui/chat-transport.ts
-var DefaultChatTransport = class {
-  constructor({
-    api,
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequestBody
-  }) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequestBody = prepareRequestBody;
-  }
-  submitMessages({
-    chatId,
-    messages,
-    abortController,
-    body,
-    headers,
-    requestType
-  }) {
-    var _a17, _b;
-    return fetchUIMessageStream({
-      api: this.api,
-      headers: {
-        ...this.headers,
-        ...headers
-      },
-      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      })) != null ? _b : {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      },
-      credentials: this.credentials,
-      abortController: () => abortController,
-      fetch: this.fetch,
-      requestType
-    });
-  }
-};
-var TextStreamChatTransport = class {
-  constructor({
-    api,
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequestBody
-  }) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequestBody = prepareRequestBody;
-  }
-  submitMessages({
-    chatId,
-    messages,
-    abortController,
-    body,
-    headers,
-    requestType
-  }) {
-    var _a17, _b;
-    return fetchTextStream({
-      api: this.api,
-      headers: {
-        ...this.headers,
-        ...headers
-      },
-      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      })) != null ? _b : {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      },
-      credentials: this.credentials,
-      abortController: () => abortController,
-      fetch: this.fetch,
-      requestType
-    });
+    }
   }
 };
+function updateToolCallResult({
+  messages,
+  toolCallId,
+  toolResult: result
+}) {
+  const lastMessage = messages[messages.length - 1];
+  const invocationPart = lastMessage.parts.find(
+    (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+  );
+  if (invocationPart == null) {
+    return;
+  }
+  invocationPart.toolInvocation = {
+    ...invocationPart.toolInvocation,
+    state: "result",
+    result
+  };
+}
 
 // src/ui/convert-file-list-to-file-ui-parts.ts
 async function convertFileListToFileUIParts(files) {
@@ -2040,7 +1717,7 @@ async function convertFileListToFileUIParts(files) {
 
 // src/ui/convert-to-model-messages.ts
 function convertToModelMessages(messages, options) {
-  var _a17
+  var _a17;
   const tools = (_a17 = options == null ? void 0 : options.tools) != null ? _a17 : {};
   const modelMessages = [];
   for (const message of messages) {
@@ -2071,6 +1748,9 @@ function convertToModelMessages(messages, options) {
       case "assistant": {
         if (message.parts != null) {
           let processBlock2 = function() {
+            if (block.length === 0) {
+              return;
+            }
             const content = [];
             for (const part of block) {
               switch (part.type) {
@@ -2145,33 +1825,20 @@ function convertToModelMessages(messages, options) {
              });
            }
            block = [];
-            blockHasToolInvocations = false;
-            currentStep++;
          };
          var processBlock = processBlock2;
-          let currentStep = 0;
-          let blockHasToolInvocations = false;
          let block = [];
          for (const part of message.parts) {
            switch (part.type) {
-              case "text":
-
-                processBlock2();
-                }
-                block.push(part);
-                break;
-              }
+              case "text":
+              case "reasoning":
              case "file":
-              case "
+              case "tool-invocation": {
                block.push(part);
                break;
              }
-              case "
-
-                processBlock2();
-                }
-                block.push(part);
-                blockHasToolInvocations = true;
+              case "step-start": {
+                processBlock2();
                break;
              }
            }
```
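With `currentStep` and `blockHasToolInvocations` gone, `convertToModelMessages` flushes a block of assistant parts only at explicit `step-start` boundaries, and the new early return skips empty blocks. The splitting rule, reduced to a sketch:

```js
// Accumulate text/reasoning/file/tool-invocation parts; flush a block at
// each step-start; skip empty blocks.
function splitIntoBlocks(parts) {
  const blocks = [];
  let block = [];
  const flush = () => {
    if (block.length > 0) blocks.push(block);
    block = [];
  };
  for (const part of parts) {
    if (part.type === "step-start") flush();
    else block.push(part);
  }
  flush();
  return blocks;
}
```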
```diff
@@ -2198,6 +1865,113 @@ var convertToCoreMessages = convertToModelMessages;
 import {
   generateId as generateIdFunc2
 } from "@ai-sdk/provider-utils";
+
+// src/ui/default-chat-transport.ts
+import {
+  parseJsonEventStream as parseJsonEventStream2
+} from "@ai-sdk/provider-utils";
+var getOriginalFetch2 = () => fetch;
+async function fetchUIMessageStream({
+  api,
+  body,
+  credentials,
+  headers,
+  abortController,
+  fetch: fetch2 = getOriginalFetch2(),
+  requestType = "generate"
+}) {
+  var _a17, _b, _c;
+  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
+    method: "GET",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+    credentials
+  }) : await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
+    credentials
+  });
+  if (!response.ok) {
+    throw new Error(
+      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  return parseJsonEventStream2({
+    stream: response.body,
+    schema: uiMessageStreamPartSchema
+  }).pipeThrough(
+    new TransformStream({
+      async transform(part, controller) {
+        if (!part.success) {
+          throw part.error;
+        }
+        controller.enqueue(part.value);
+      }
+    })
+  );
+}
+var DefaultChatTransport = class {
+  constructor({
+    api,
+    credentials,
+    headers,
+    body,
+    fetch: fetch2,
+    prepareRequestBody
+  }) {
+    this.api = api;
+    this.credentials = credentials;
+    this.headers = headers;
+    this.body = body;
+    this.fetch = fetch2;
+    this.prepareRequestBody = prepareRequestBody;
+  }
+  submitMessages({
+    chatId,
+    messages,
+    abortController,
+    body,
+    headers,
+    requestType
+  }) {
+    var _a17, _b;
+    return fetchUIMessageStream({
+      api: this.api,
+      headers: {
+        ...this.headers,
+        ...headers
+      },
+      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      })) != null ? _b : {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      },
+      credentials: this.credentials,
+      abortController: () => abortController,
+      fetch: this.fetch,
+      requestType
+    });
+  }
+};
+
+// src/ui/default-chat-store-options.ts
 function defaultChatStoreOptions({
   api = "/api/chat",
   fetch: fetch2,
```
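`fetchUIMessageStream` and `DefaultChatTransport` now live in `src/ui/default-chat-transport.ts` instead of the removed `call-chat-api.ts` and `chat-transport.ts`. A hedged construction sketch using the option names visible in the constructor above (the endpoint and header values are illustrative):

```js
const transport = new DefaultChatTransport({
  api: "/api/chat", // illustrative endpoint
  credentials: "same-origin",
  headers: { "X-Example": "1" }, // illustrative header
  // Optional hook to reshape the POST body before submission.
  prepareRequestBody: ({ chatId, messages }) => ({ chatId, messages })
});
// transport.submitMessages({ chatId, messages, abortController, ... })
// resolves to a stream of validated UI-message stream parts.
```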
```diff
@@ -2228,6 +2002,119 @@ function defaultChatStoreOptions({
   });
 }
 
+// src/ui/transform-text-to-ui-message-stream.ts
+function transformTextToUiMessageStream({
+  stream
+}) {
+  return stream.pipeThrough(
+    new TransformStream({
+      start(controller) {
+        controller.enqueue({ type: "start" });
+        controller.enqueue({ type: "start-step" });
+      },
+      async transform(part, controller) {
+        controller.enqueue({ type: "text", text: part });
+      },
+      async flush(controller) {
+        controller.enqueue({ type: "finish-step" });
+        controller.enqueue({ type: "finish" });
+      }
+    })
+  );
+}
+
+// src/ui/text-stream-chat-transport.ts
+var getOriginalFetch3 = () => fetch;
+async function fetchTextStream({
+  api,
+  body,
+  credentials,
+  headers,
+  abortController,
+  fetch: fetch2 = getOriginalFetch3(),
+  requestType = "generate"
+}) {
+  var _a17, _b, _c;
+  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
+    method: "GET",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+    credentials
+  }) : await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
+    credentials
+  });
+  if (!response.ok) {
+    throw new Error(
+      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  return transformTextToUiMessageStream({
+    stream: response.body.pipeThrough(new TextDecoderStream())
+  });
+}
+var TextStreamChatTransport = class {
+  constructor({
+    api,
+    credentials,
+    headers,
+    body,
+    fetch: fetch2,
+    prepareRequestBody
+  }) {
+    this.api = api;
+    this.credentials = credentials;
+    this.headers = headers;
+    this.body = body;
+    this.fetch = fetch2;
+    this.prepareRequestBody = prepareRequestBody;
+  }
+  submitMessages({
+    chatId,
+    messages,
+    abortController,
+    body,
+    headers,
+    requestType
+  }) {
+    var _a17, _b;
+    return fetchTextStream({
+      api: this.api,
+      headers: {
+        ...this.headers,
+        ...headers
+      },
+      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      })) != null ? _b : {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      },
+      credentials: this.credentials,
+      abortController: () => abortController,
+      fetch: this.fetch,
+      requestType
+    });
+  }
+};
+
 // src/ui-message-stream/handle-ui-message-stream-finish.ts
 function handleUIMessageStreamFinish({
   newMessageId,
```
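The plain-text protocol is handled by the new `src/ui/text-stream-chat-transport.ts`: `transformTextToUiMessageStream` frames decoded text chunks as a synthetic UI-message stream (`start`/`start-step` before, `finish-step`/`finish` after). The framing in isolation:

```js
// Frame a stream of text chunks as UI-message stream parts, as
// transformTextToUiMessageStream does above.
function frameTextStream(textStream) {
  return textStream.pipeThrough(
    new TransformStream({
      start(controller) {
        controller.enqueue({ type: "start" });
        controller.enqueue({ type: "start-step" });
      },
      transform(chunk, controller) {
        controller.enqueue({ type: "text", text: chunk });
      },
      flush(controller) {
        controller.enqueue({ type: "finish-step" });
        controller.enqueue({ type: "finish" });
      }
    })
  );
}
```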
```diff
@@ -4462,11 +4349,17 @@ function validateObjectGenerationInput({
   }
 }
 
+// core/prompt/resolve-language-model.ts
+import { gateway } from "@ai-sdk/gateway";
+function resolveLanguageModel(model) {
+  return typeof model === "string" ? gateway.languageModel(model) : model;
+}
+
 // core/generate-object/generate-object.ts
 var originalGenerateId = createIdGenerator({ prefix: "aiobj", size: 24 });
 async function generateObject(options) {
   const {
-    model,
+    model: modelArg,
     output = "object",
     system,
     prompt,
```
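The new `resolveLanguageModel` means the `model` option of `generateObject`, `streamObject`, `generateText`, and `streamText` may now be a plain string, which is routed through `@ai-sdk/gateway`; a model instance is passed through unchanged. A hedged usage sketch (the model id is illustrative):

```js
import { generateText } from "ai";

// A string model id is resolved via gateway.languageModel(id).
const { text } = await generateText({
  model: "openai/gpt-4o", // illustrative gateway model id
  prompt: "Say hello."
});
```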
```diff
@@ -4483,6 +4376,7 @@ async function generateObject(options) {
     } = {},
     ...settings
   } = options;
+  const model = resolveLanguageModel(modelArg);
   const enumValues = "enum" in options ? options.enum : void 0;
   const {
     schema: inputSchema,
@@ -4942,7 +4836,7 @@ function streamObject(options) {
 }
 var DefaultStreamObjectResult = class {
   constructor({
-    model,
+    model: modelArg,
     headers,
     telemetry,
     settings,
@@ -4967,6 +4861,7 @@ var DefaultStreamObjectResult = class {
     this._warnings = new DelayedPromise();
     this._request = new DelayedPromise();
     this._response = new DelayedPromise();
+    const model = resolveLanguageModel(modelArg);
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg
     });
@@ -5784,7 +5679,7 @@ var originalGenerateId3 = createIdGenerator3({
   size: 24
 });
 async function generateText({
-  model,
+  model: modelArg,
   tools,
   toolChoice,
   system,
@@ -5809,6 +5704,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
+  const model = resolveLanguageModel(modelArg);
   const stopConditions = asArray(stopWhen);
   const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const callSettings = prepareCallSettings(settings);
@@ -5845,7 +5741,7 @@ async function generateText({
     }),
     tracer,
     fn: async (span) => {
-      var _a17, _b, _c, _d;
+      var _a17, _b, _c, _d, _e;
       const callSettings2 = prepareCallSettings(settings);
       let currentModelResponse;
       let currentToolCalls = [];
@@ -5864,16 +5760,18 @@ async function generateText({
       }));
       const promptMessages = await convertToLanguageModelPrompt({
         prompt: {
-          system: initialPrompt.system,
+          system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
           messages: stepInputMessages
         },
         supportedUrls: await model.supportedUrls
       });
-      const stepModel = (
+      const stepModel = resolveLanguageModel(
+        (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+      );
       const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
         tools,
-        toolChoice: (
-        activeTools: (
+        toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+        activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
       });
       currentModelResponse = await retry(
         () => {
```
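This hunk is the visible half of a feature: the per-step prepare callback (`prepareStepResult` in the bundle) may now override `system` and `model` per step, in addition to `toolChoice` and `activeTools`, and the overridden model is itself passed through `resolveLanguageModel`. A hedged sketch of a caller-side callback shape (the public option name and its arguments are inferred, not confirmed by this diff):

```js
// Inferred callback shape: return per-step overrides; anything omitted
// falls back to the top-level options.
const prepareStep = ({ stepNumber }) =>
  stepNumber === 0
    ? { toolChoice: "required", activeTools: ["search"] }
    : { system: "Answer concisely.", model: "openai/gpt-4o-mini" };
```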
```diff
@@ -5916,7 +5814,7 @@ async function generateText({
             }),
             tracer,
             fn: async (span2) => {
-              var _a19, _b2, _c2, _d2,
+              var _a19, _b2, _c2, _d2, _e2, _f, _g, _h;
               const result = await stepModel.doGenerate({
                 ...callSettings2,
                 tools: stepTools,
@@ -5930,7 +5828,7 @@ async function generateText({
               const responseData = {
                 id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
                 timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
-                modelId: (_f = (
+                modelId: (_f = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f : stepModel.modelId,
                 headers: (_g = result.response) == null ? void 0 : _g.headers,
                 body: (_h = result.response) == null ? void 0 : _h.body
               };
@@ -6006,7 +5904,7 @@ async function generateText({
         usage: currentModelResponse.usage,
         warnings: currentModelResponse.warnings,
         providerMetadata: currentModelResponse.providerMetadata,
-        request: (
+        request: (_e = currentModelResponse.request) != null ? _e : {},
         response: {
           ...currentModelResponse.response,
           // deep clone msgs to avoid mutating past messages in multi-step:
@@ -6602,7 +6500,7 @@ function streamText({
   ...settings
 }) {
   return new DefaultStreamTextResult({
-    model,
+    model: resolveLanguageModel(model),
     telemetry,
     headers,
     settings,
@@ -6917,7 +6815,7 @@ var DefaultStreamTextResult = class {
       responseMessages,
       usage
     }) {
-      var _a17, _b, _c;
+      var _a17, _b, _c, _d;
       stepFinish = new DelayedPromise();
       const initialPrompt = await standardizePrompt({
         system,
@@ -6935,16 +6833,18 @@ var DefaultStreamTextResult = class {
       }));
       const promptMessages = await convertToLanguageModelPrompt({
         prompt: {
-          system: initialPrompt.system,
+          system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
           messages: stepInputMessages
         },
         supportedUrls: await model.supportedUrls
       });
-      const stepModel = (
+      const stepModel = resolveLanguageModel(
+        (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+      );
       const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
         tools,
-        toolChoice: (
-        activeTools: (
+        toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+        activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
       });
       const {
         result: { stream: stream2, response, request },
@@ -7050,7 +6950,7 @@ var DefaultStreamTextResult = class {
       streamWithToolResults.pipeThrough(
         new TransformStream({
           async transform(chunk, controller) {
-            var _a18, _b2, _c2,
+            var _a18, _b2, _c2, _d2;
             if (chunk.type === "stream-start") {
               warnings = chunk.warnings;
               return;
@@ -7127,7 +7027,7 @@ var DefaultStreamTextResult = class {
             doStreamSpan.addEvent("ai.stream.finish");
             doStreamSpan.setAttributes({
               "ai.response.msToFinish": msToFinish,
-              "ai.response.avgOutputTokensPerSecond": 1e3 * ((
+              "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
             });
             break;
           }
@@ -7384,7 +7284,7 @@ var DefaultStreamTextResult = class {
   } = {}) {
     const lastMessage = originalMessages[originalMessages.length - 1];
     const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
-    const messageId = isContinuation ? lastMessage.id : newMessageId
+    const messageId = isContinuation ? lastMessage.id : newMessageId;
     const baseStream = this.fullStream.pipeThrough(
       new TransformStream({
         transform: async (part, controller) => {
@@ -7520,7 +7420,7 @@ var DefaultStreamTextResult = class {
     );
     return handleUIMessageStreamFinish({
       stream: baseStream,
-      newMessageId: messageId,
+      newMessageId: messageId != null ? messageId : this.generateId(),
       originalMessages,
       onFinish
     });
@@ -7824,7 +7724,9 @@ var doWrap = ({
 };
 
 // core/registry/custom-provider.ts
-import {
+import {
+  NoSuchModelError as NoSuchModelError2
+} from "@ai-sdk/provider";
 function customProvider({
   languageModels,
   textEmbeddingModels,
@@ -7889,7 +7791,9 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
 _a16 = symbol16;
 
 // core/registry/provider-registry.ts
-import {
+import {
+  NoSuchModelError as NoSuchModelError4
+} from "@ai-sdk/provider";
 function createProviderRegistry(providers, {
   separator = ":"
 } = {}) {
```
```diff
@@ -8634,10 +8538,8 @@ export {
   ToolExecutionError,
   TypeValidationError,
   UnsupportedFunctionalityError,
-  appendClientMessage,
   asSchema5 as asSchema,
   assistantModelMessageSchema,
-  callChatApi,
   callCompletionApi,
   convertFileListToFileUIParts,
   convertToCoreMessages,
@@ -8664,7 +8566,6 @@ export {
   generateImage as experimental_generateImage,
   generateSpeech as experimental_generateSpeech,
   transcribe as experimental_transcribe,
-  extractMaxToolInvocationStep,
   extractReasoningMiddleware,
   generateId2 as generateId,
   generateObject,
@@ -8672,14 +8573,12 @@ export {
   getTextFromDataUrl,
   getToolInvocations,
   hasToolCall,
-  isAssistantMessageWithCompletedToolCalls,
   isDeepEqualData,
   jsonSchema2 as jsonSchema,
   modelMessageSchema,
   parsePartialJson,
   pipeTextStreamToResponse,
   pipeUIMessageStreamToResponse,
-  shouldResubmitMessages,
   simulateReadableStream,
   simulateStreamingMiddleware,
   smoothStream,
@@ -8689,7 +8588,6 @@ export {
   systemModelMessageSchema,
   tool,
   toolModelMessageSchema,
-  updateToolCallResult,
   userModelMessageSchema,
   wrapLanguageModel
 };
```
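Net export surface change: `appendClientMessage`, `callChatApi`, `extractMaxToolInvocationStep`, `isAssistantMessageWithCompletedToolCalls`, `shouldResubmitMessages`, and `updateToolCallResult` are no longer exported; their logic is now internal to `ChatStore` and the transport classes. Imports of `updateToolCallResult`, for example, can be replaced by an inline equivalent (shape copied from the now-internal function above; the name here is ours):

```js
// Inline equivalent of the now-internal updateToolCallResult.
function applyToolResult(messages, toolCallId, result) {
  const lastMessage = messages[messages.length - 1];
  const part = lastMessage.parts.find(
    (p) => p.type === "tool-invocation" && p.toolInvocation.toolCallId === toolCallId
  );
  if (part == null) return;
  part.toolInvocation = { ...part.toolInvocation, state: "result", result };
}
```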