ai 5.0.0-alpha.5 → 5.0.0-alpha.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +27 -0
- package/dist/index.d.mts +275 -435
- package/dist/index.d.ts +275 -435
- package/dist/index.js +481 -593
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +481 -583
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -3
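
The headline changes visible in this diff: model arguments to generateText, streamText, generateObject, and streamObject are now passed through a new resolveLanguageModel helper, so a plain string id is resolved via @ai-sdk/gateway while model instances pass through unchanged; prepareStep results can now override system, model, toolChoice, and activeTools per step; the client chat helpers (appendClientMessage, callChatApi, shouldResubmitMessages, updateToolCallResult, extractMaxToolInvocationStep, isAssistantMessageWithCompletedToolCalls) leave the public export list; and per-invocation step counters are replaced by counting "step-start" message parts. A minimal usage sketch of the string-id resolution, assuming a gateway-style model id (the id below is illustrative, not taken from this diff):

    import { generateText } from "ai";

    // As of 5.0.0-alpha.7, a string model id is resolved through
    // gateway.languageModel(id) from @ai-sdk/gateway; a model object
    // is used as-is.
    const { text } = await generateText({
      model: "openai/gpt-4o-mini", // hypothetical gateway model id
      prompt: "Hello!",
    });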
package/dist/index.js
CHANGED
@@ -53,10 +53,8 @@ __export(src_exports, {
   ToolExecutionError: () => ToolExecutionError,
   TypeValidationError: () => import_provider16.TypeValidationError,
   UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
-  appendClientMessage: () => appendClientMessage,
   asSchema: () => import_provider_utils26.asSchema,
   assistantModelMessageSchema: () => assistantModelMessageSchema,
-  callChatApi: () => callChatApi,
   callCompletionApi: () => callCompletionApi,
   convertFileListToFileUIParts: () => convertFileListToFileUIParts,
   convertToCoreMessages: () => convertToCoreMessages,
@@ -83,7 +81,6 @@ __export(src_exports, {
   experimental_generateImage: () => generateImage,
   experimental_generateSpeech: () => generateSpeech,
   experimental_transcribe: () => transcribe,
-  extractMaxToolInvocationStep: () => extractMaxToolInvocationStep,
   extractReasoningMiddleware: () => extractReasoningMiddleware,
   generateId: () => import_provider_utils26.generateId,
   generateObject: () => generateObject,
@@ -91,14 +88,12 @@ __export(src_exports, {
   getTextFromDataUrl: () => getTextFromDataUrl,
   getToolInvocations: () => getToolInvocations,
   hasToolCall: () => hasToolCall,
-  isAssistantMessageWithCompletedToolCalls: () => isAssistantMessageWithCompletedToolCalls,
   isDeepEqualData: () => isDeepEqualData,
   jsonSchema: () => import_provider_utils26.jsonSchema,
   modelMessageSchema: () => modelMessageSchema,
   parsePartialJson: () => parsePartialJson,
   pipeTextStreamToResponse: () => pipeTextStreamToResponse,
   pipeUIMessageStreamToResponse: () => pipeUIMessageStreamToResponse,
-  shouldResubmitMessages: () => shouldResubmitMessages,
   simulateReadableStream: () => simulateReadableStream,
   simulateStreamingMiddleware: () => simulateStreamingMiddleware,
   smoothStream: () => smoothStream,
@@ -108,7 +103,6 @@ __export(src_exports, {
   systemModelMessageSchema: () => systemModelMessageSchema,
   tool: () => tool,
   toolModelMessageSchema: () => toolModelMessageSchema,
-  updateToolCallResult: () => updateToolCallResult,
   userModelMessageSchema: () => userModelMessageSchema,
   wrapLanguageModel: () => wrapLanguageModel
 });
@@ -540,19 +534,8 @@ function pipeTextStreamToResponse({
   });
 }
 
-// src/ui/append-client-message.ts
-function appendClientMessage({
-  messages,
-  message
-}) {
-  return [
-    ...messages.length > 0 && messages[messages.length - 1].id === message.id ? messages.slice(0, -1) : messages,
-    message
-  ];
-}
-
-// src/ui/call-chat-api.ts
-var import_provider_utils3 = require("@ai-sdk/provider-utils");
+// src/ui/call-completion-api.ts
+var import_provider_utils = require("@ai-sdk/provider-utils");
 
 // src/ui-message-stream/ui-message-stream-parts.ts
 var import_zod = require("zod");
@@ -658,8 +641,138 @@ async function consumeStream({
   }
 }
 
+// src/ui/process-text-stream.ts
+async function processTextStream({
+  stream,
+  onTextPart
+}) {
+  const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) {
+      break;
+    }
+    await onTextPart(value);
+  }
+}
+
+// src/ui/call-completion-api.ts
+var getOriginalFetch = () => fetch;
+async function callCompletionApi({
+  api,
+  prompt,
+  credentials,
+  headers,
+  body,
+  streamProtocol = "data",
+  setCompletion,
+  setLoading,
+  setError,
+  setAbortController,
+  onFinish,
+  onError,
+  fetch: fetch2 = getOriginalFetch()
+}) {
+  var _a17;
+  try {
+    setLoading(true);
+    setError(void 0);
+    const abortController = new AbortController();
+    setAbortController(abortController);
+    setCompletion("");
+    const response = await fetch2(api, {
+      method: "POST",
+      body: JSON.stringify({
+        prompt,
+        ...body
+      }),
+      credentials,
+      headers: {
+        "Content-Type": "application/json",
+        ...headers
+      },
+      signal: abortController.signal
+    }).catch((err) => {
+      throw err;
+    });
+    if (!response.ok) {
+      throw new Error(
+        (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    let result = "";
+    switch (streamProtocol) {
+      case "text": {
+        await processTextStream({
+          stream: response.body,
+          onTextPart: (chunk) => {
+            result += chunk;
+            setCompletion(result);
+          }
+        });
+        break;
+      }
+      case "data": {
+        await consumeStream({
+          stream: (0, import_provider_utils.parseJsonEventStream)({
+            stream: response.body,
+            schema: uiMessageStreamPartSchema
+          }).pipeThrough(
+            new TransformStream({
+              async transform(part) {
+                if (!part.success) {
+                  throw part.error;
+                }
+                const streamPart = part.value;
+                if (streamPart.type === "text") {
+                  result += streamPart.text;
+                  setCompletion(result);
+                } else if (streamPart.type === "error") {
+                  throw new Error(streamPart.errorText);
+                }
+              }
+            })
+          ),
+          onError: (error) => {
+            throw error;
+          }
+        });
+        break;
+      }
+      default: {
+        const exhaustiveCheck = streamProtocol;
+        throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
+      }
+    }
+    if (onFinish) {
+      onFinish(prompt, result);
+    }
+    setAbortController(null);
+    return result;
+  } catch (err) {
+    if (err.name === "AbortError") {
+      setAbortController(null);
+      return null;
+    }
+    if (err instanceof Error) {
+      if (onError) {
+        onError(err);
+      }
+    }
+    setError(err);
+  } finally {
+    setLoading(false);
+  }
+}
+
+// src/ui/chat-store.ts
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
+
 // src/ui/process-ui-message-stream.ts
-var
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
 
 // src/util/merge-objects.ts
 function mergeObjects(base, overrides) {
@@ -695,7 +808,7 @@ function mergeObjects(base, overrides) {
 }
 
 // src/util/parse-partial-json.ts
-var
+var import_provider_utils2 = require("@ai-sdk/provider-utils");
 
 // src/util/fix-json.ts
 function fixJson(input) {
@@ -1020,25 +1133,17 @@ async function parsePartialJson(jsonText) {
   if (jsonText === void 0) {
     return { value: void 0, state: "undefined-input" };
   }
-  let result = await (0,
+  let result = await (0, import_provider_utils2.safeParseJSON)({ text: jsonText });
   if (result.success) {
     return { value: result.value, state: "successful-parse" };
   }
-  result = await (0,
+  result = await (0, import_provider_utils2.safeParseJSON)({ text: fixJson(jsonText) });
   if (result.success) {
     return { value: result.value, state: "repaired-parse" };
   }
   return { value: void 0, state: "failed-parse" };
 }
 
-// src/ui/extract-max-tool-invocation-step.ts
-function extractMaxToolInvocationStep(toolInvocations) {
-  return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
-    var _a17;
-    return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
-  }, 0);
-}
-
 // src/ui/get-tool-invocations.ts
 function getToolInvocations(message) {
   return message.parts.filter(
@@ -1051,9 +1156,7 @@ function createStreamingUIMessageState({
   lastMessage,
   newMessageId = ""
 } = {}) {
-  var _a17;
   const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
-  const step = isContinuation ? 1 + ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) : 0;
   const message = isContinuation ? lastMessage : {
     id: newMessageId,
     metadata: {},
@@ -1064,8 +1167,7 @@ function createStreamingUIMessageState({
     message,
     activeTextPart: void 0,
     activeReasoningPart: void 0,
-    partialToolCalls: {},
-    step
+    partialToolCalls: {}
   };
 }
 function processUIMessageStream({
@@ -1096,7 +1198,7 @@ function processUIMessageStream({
     if (metadata != null) {
       const mergedMetadata = state.message.metadata != null ? mergeObjects(state.message.metadata, metadata) : metadata;
       if (messageMetadataSchema != null) {
-        await (0,
+        await (0, import_provider_utils3.validateTypes)({
           value: mergedMetadata,
           schema: messageMetadataSchema
         });
@@ -1163,13 +1265,11 @@ function processUIMessageStream({
         const toolInvocations = getToolInvocations(state.message);
         state.partialToolCalls[part.toolCallId] = {
           text: "",
-          step: state.step,
           toolName: part.toolName,
           index: toolInvocations.length
         };
         updateToolInvocationPart(part.toolCallId, {
           state: "partial-call",
-          step: state.step,
           toolCallId: part.toolCallId,
           toolName: part.toolName,
           args: void 0
@@ -1185,7 +1285,6 @@ function processUIMessageStream({
         );
         updateToolInvocationPart(part.toolCallId, {
           state: "partial-call",
-          step: partialToolCall.step,
           toolCallId: part.toolCallId,
           toolName: partialToolCall.toolName,
           args: partialArgs
@@ -1196,7 +1295,6 @@ function processUIMessageStream({
       case "tool-call": {
         updateToolInvocationPart(part.toolCallId, {
           state: "call",
-          step: state.step,
           toolCallId: part.toolCallId,
           toolName: part.toolName,
           args: part.args
@@ -1209,7 +1307,6 @@ function processUIMessageStream({
         if (result != null) {
           updateToolInvocationPart(part.toolCallId, {
             state: "result",
-            step: state.step,
             toolCallId: part.toolCallId,
             toolName: part.toolName,
             args: part.args,
@@ -1248,7 +1345,6 @@ function processUIMessageStream({
         break;
       }
       case "finish-step": {
-        state.step += 1;
         state.activeTextPart = void 0;
         state.activeReasoningPart = void 0;
         await updateMessageMetadata(part.metadata);
@@ -1311,406 +1407,64 @@ function isObject(value) {
   return typeof value === "object" && value !== null;
 }
 
-// src/ui/transform-text-to-ui-message-stream.ts
-function transformTextToUiMessageStream({
-  stream
+// src/ui/should-resubmit-messages.ts
+function shouldResubmitMessages({
+  originalMaxToolInvocationStep,
+  originalMessageCount,
+  maxSteps,
+  messages
 }) {
-  return stream.pipeThrough(
-    new TransformStream({
-      start(controller) {
-        controller.enqueue({ type: "start" });
-        controller.enqueue({ type: "start-step" });
-      },
-      async transform(part, controller) {
-        controller.enqueue({ type: "text", text: part });
-      },
-      async flush(controller) {
-        controller.enqueue({ type: "finish-step" });
-        controller.enqueue({ type: "finish" });
-      }
-    })
+  const lastMessage = messages[messages.length - 1];
+  const lastMessageStepStartCount = lastMessage.parts.filter(
+    (part) => part.type === "step-start"
+  ).length;
+  return (
+    // check if the feature is enabled:
+    maxSteps > 1 && // ensure there is a last message:
+    lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
+    (messages.length > originalMessageCount || lastMessageStepStartCount !== originalMaxToolInvocationStep) && // check that next step is possible:
+    isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
+    lastMessageStepStartCount < maxSteps
   );
 }
+function isAssistantMessageWithCompletedToolCalls(message) {
+  if (message.role !== "assistant") {
+    return false;
+  }
+  const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
+    return part.type === "step-start" ? index : lastIndex;
+  }, -1);
+  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
+  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
+}
 
-// src/ui/call-chat-api.ts
-var getOriginalFetch = () => fetch;
-async function fetchUIMessageStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortController,
-  fetch: fetch2 = getOriginalFetch(),
-  requestType = "generate"
-}) {
-  var _a17, _b, _c;
-  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+// src/ui/chat-store.ts
+var ChatStore = class {
+  constructor({
+    chats = {},
+    generateId: generateId3,
+    transport,
+    maxSteps = 1,
+    messageMetadataSchema,
+    dataPartSchemas,
+    createChat
+  }) {
+    this.createChat = createChat;
+    this.chats = new Map(
+      Object.entries(chats).map(([id, chat]) => [
+        id,
+        this.createChat({ messages: chat.messages })
+      ])
     );
+    this.maxSteps = maxSteps;
+    this.transport = transport;
+    this.subscribers = /* @__PURE__ */ new Set();
+    this.generateId = generateId3 != null ? generateId3 : import_provider_utils4.generateId;
+    this.messageMetadataSchema = messageMetadataSchema;
+    this.dataPartSchemas = dataPartSchemas;
   }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return (0, import_provider_utils3.parseJsonEventStream)({
-    stream: response.body,
-    schema: uiMessageStreamPartSchema
-  }).pipeThrough(
-    new TransformStream({
-      async transform(part, controller) {
-        if (!part.success) {
-          throw part.error;
-        }
-        controller.enqueue(part.value);
-      }
-    })
-  );
-}
-async function fetchTextStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortController,
-  fetch: fetch2 = getOriginalFetch(),
-  requestType = "generate"
-}) {
-  var _a17, _b, _c;
-  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return transformTextToUiMessageStream({
-    stream: response.body.pipeThrough(new TextDecoderStream())
-  });
-}
-async function consumeUIMessageStream({
-  stream,
-  onUpdate,
-  onFinish,
-  onToolCall,
-  generateId: generateId3,
-  lastMessage,
-  messageMetadataSchema
-}) {
-  const state = createStreamingUIMessageState({
-    lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
-    newMessageId: generateId3()
-  });
-  const runUpdateMessageJob = async (job) => {
-    await job({
-      state,
-      write: () => {
-        onUpdate({ message: state.message });
-      }
-    });
-  };
-  await consumeStream({
-    stream: processUIMessageStream({
-      stream,
-      onToolCall,
-      messageMetadataSchema,
-      runUpdateMessageJob
-    }),
-    onError: (error) => {
-      throw error;
-    }
-  });
-  onFinish == null ? void 0 : onFinish({ message: state.message });
-}
-async function callChatApi({
-  api,
-  body,
-  streamProtocol = "ui-message",
-  credentials,
-  headers,
-  abortController,
-  onUpdate,
-  onFinish,
-  onToolCall,
-  generateId: generateId3,
-  fetch: fetch2 = getOriginalFetch(),
-  lastMessage,
-  requestType = "generate",
-  messageMetadataSchema
-}) {
-  const stream = streamProtocol === "text" ? await fetchTextStream({
-    api,
-    body,
-    credentials,
-    headers,
-    abortController,
-    fetch: fetch2,
-    requestType
-  }) : await fetchUIMessageStream({
-    api,
-    body,
-    credentials,
-    headers,
-    abortController,
-    fetch: fetch2,
-    requestType
-  });
-  await consumeUIMessageStream({
-    stream,
-    onUpdate,
-    onFinish,
-    onToolCall,
-    generateId: generateId3,
-    lastMessage,
-    messageMetadataSchema
-  });
-}
-
-// src/ui/call-completion-api.ts
-var import_provider_utils4 = require("@ai-sdk/provider-utils");
-
-// src/ui/process-text-stream.ts
-async function processTextStream({
-  stream,
-  onTextPart
-}) {
-  const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
-  while (true) {
-    const { done, value } = await reader.read();
-    if (done) {
-      break;
-    }
-    await onTextPart(value);
-  }
-}
-
-// src/ui/call-completion-api.ts
-var getOriginalFetch2 = () => fetch;
-async function callCompletionApi({
-  api,
-  prompt,
-  credentials,
-  headers,
-  body,
-  streamProtocol = "data",
-  setCompletion,
-  setLoading,
-  setError,
-  setAbortController,
-  onFinish,
-  onError,
-  fetch: fetch2 = getOriginalFetch2()
-}) {
-  var _a17;
-  try {
-    setLoading(true);
-    setError(void 0);
-    const abortController = new AbortController();
-    setAbortController(abortController);
-    setCompletion("");
-    const response = await fetch2(api, {
-      method: "POST",
-      body: JSON.stringify({
-        prompt,
-        ...body
-      }),
-      credentials,
-      headers: {
-        "Content-Type": "application/json",
-        ...headers
-      },
-      signal: abortController.signal
-    }).catch((err) => {
-      throw err;
-    });
-    if (!response.ok) {
-      throw new Error(
-        (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-      );
-    }
-    if (!response.body) {
-      throw new Error("The response body is empty.");
-    }
-    let result = "";
-    switch (streamProtocol) {
-      case "text": {
-        await processTextStream({
-          stream: response.body,
-          onTextPart: (chunk) => {
-            result += chunk;
-            setCompletion(result);
-          }
-        });
-        break;
-      }
-      case "data": {
-        await consumeStream({
-          stream: (0, import_provider_utils4.parseJsonEventStream)({
-            stream: response.body,
-            schema: uiMessageStreamPartSchema
-          }).pipeThrough(
-            new TransformStream({
-              async transform(part) {
-                if (!part.success) {
-                  throw part.error;
-                }
-                const streamPart = part.value;
-                if (streamPart.type === "text") {
-                  result += streamPart.text;
-                  setCompletion(result);
-                } else if (streamPart.type === "error") {
-                  throw new Error(streamPart.errorText);
-                }
-              }
-            })
-          ),
-          onError: (error) => {
-            throw error;
-          }
-        });
-        break;
-      }
-      default: {
-        const exhaustiveCheck = streamProtocol;
-        throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
-      }
-    }
-    if (onFinish) {
-      onFinish(prompt, result);
-    }
-    setAbortController(null);
-    return result;
-  } catch (err) {
-    if (err.name === "AbortError") {
-      setAbortController(null);
-      return null;
-    }
-    if (err instanceof Error) {
-      if (onError) {
-        onError(err);
-      }
-    }
-    setError(err);
-  } finally {
-    setLoading(false);
-  }
-}
-
-// src/ui/chat-store.ts
-var import_provider_utils5 = require("@ai-sdk/provider-utils");
-
-// src/ui/should-resubmit-messages.ts
-function shouldResubmitMessages({
-  originalMaxToolInvocationStep,
-  originalMessageCount,
-  maxSteps,
-  messages
-}) {
-  var _a17;
-  const lastMessage = messages[messages.length - 1];
-  return (
-    // check if the feature is enabled:
-    maxSteps > 1 && // ensure there is a last message:
-    lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
-    (messages.length > originalMessageCount || extractMaxToolInvocationStep(getToolInvocations(lastMessage)) !== originalMaxToolInvocationStep) && // check that next step is possible:
-    isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
-    ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps
-  );
-}
-function isAssistantMessageWithCompletedToolCalls(message) {
-  if (message.role !== "assistant") {
-    return false;
-  }
-  const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
-    return part.type === "step-start" ? index : lastIndex;
-  }, -1);
-  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
-  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
-}
-
-// src/ui/update-tool-call-result.ts
-function updateToolCallResult({
-  messages,
-  toolCallId,
-  toolResult: result
-}) {
-  const lastMessage = messages[messages.length - 1];
-  const invocationPart = lastMessage.parts.find(
-    (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
-  );
-  if (invocationPart == null) {
-    return;
-  }
-  invocationPart.toolInvocation = {
-    ...invocationPart.toolInvocation,
-    state: "result",
-    result
-  };
-}
-
-// src/ui/chat-store.ts
-var ChatStore = class {
-  constructor({
-    chats = {},
-    generateId: generateId3,
-    transport,
-    maxSteps = 1,
-    messageMetadataSchema,
-    dataPartSchemas,
-    createChat
-  }) {
-    this.createChat = createChat;
-    this.chats = new Map(
-      Object.entries(chats).map(([id, chat]) => [
-        id,
-        this.createChat({ messages: chat.messages })
-      ])
-    );
-    this.maxSteps = maxSteps;
-    this.transport = transport;
-    this.subscribers = /* @__PURE__ */ new Set();
-    this.generateId = generateId3 != null ? generateId3 : import_provider_utils5.generateId;
-    this.messageMetadataSchema = messageMetadataSchema;
-    this.dataPartSchemas = dataPartSchemas;
-  }
-  hasChat(id) {
-    return this.chats.has(id);
+  hasChat(id) {
+    return this.chats.has(id);
   }
   addChat(id, messages) {
     this.chats.set(id, this.createChat({ messages }));
@@ -1903,14 +1657,15 @@ var ChatStore = class {
     const chat = this.getChatState(chatId);
     this.setStatus({ id: chatId, status: "submitted", error: void 0 });
     const messageCount = chat.messages.length;
-    const
-
-
+    const lastMessage = chat.messages[chat.messages.length - 1];
+    const maxStep = lastMessage.parts.filter(
+      (part) => part.type === "step-start"
+    ).length;
     try {
-      const
+      const lastMessage2 = chat.messages[chat.messages.length - 1];
       const activeResponse = {
         state: createStreamingUIMessageState({
-          lastMessage: chat.snapshot ? chat.snapshot(
+          lastMessage: chat.snapshot ? chat.snapshot(lastMessage2) : lastMessage2,
           newMessageId: this.generateId()
         }),
         abortController: new AbortController()
@@ -1984,114 +1739,32 @@ var ChatStore = class {
         chatId,
         requestType,
         onError,
-        onToolCall,
-        onFinish,
-        headers,
-        body
-      });
-    }
-  }
-};
-
-// src/ui/chat-transport.ts
-var DefaultChatTransport = class {
-  constructor({
-    api,
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequestBody
-  }) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequestBody = prepareRequestBody;
-  }
-  submitMessages({
-    chatId,
-    messages,
-    abortController,
-    body,
-    headers,
-    requestType
-  }) {
-    var _a17, _b;
-    return fetchUIMessageStream({
-      api: this.api,
-      headers: {
-        ...this.headers,
-        ...headers
-      },
-      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      })) != null ? _b : {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      },
-      credentials: this.credentials,
-      abortController: () => abortController,
-      fetch: this.fetch,
-      requestType
-    });
-  }
-};
-var TextStreamChatTransport = class {
-  constructor({
-    api,
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequestBody
-  }) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequestBody = prepareRequestBody;
-  }
-  submitMessages({
-    chatId,
-    messages,
-    abortController,
-    body,
-    headers,
-    requestType
-  }) {
-    var _a17, _b;
-    return fetchTextStream({
-      api: this.api,
-      headers: {
-        ...this.headers,
-        ...headers
-      },
-      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      })) != null ? _b : {
-        chatId,
-        messages,
-        ...this.body,
-        ...body
-      },
-      credentials: this.credentials,
-      abortController: () => abortController,
-      fetch: this.fetch,
-      requestType
-    });
+        onToolCall,
+        onFinish,
+        headers,
+        body
+      });
+    }
   }
 };
+function updateToolCallResult({
+  messages,
+  toolCallId,
+  toolResult: result
+}) {
+  const lastMessage = messages[messages.length - 1];
+  const invocationPart = lastMessage.parts.find(
+    (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+  );
+  if (invocationPart == null) {
+    return;
+  }
+  invocationPart.toolInvocation = {
+    ...invocationPart.toolInvocation,
+    state: "result",
+    result
+  };
+}
 
 // src/ui/convert-file-list-to-file-ui-parts.ts
 async function convertFileListToFileUIParts(files) {
@@ -2125,7 +1798,7 @@ async function convertFileListToFileUIParts(files) {
 
 // src/ui/convert-to-model-messages.ts
 function convertToModelMessages(messages, options) {
-  var _a17
+  var _a17;
   const tools = (_a17 = options == null ? void 0 : options.tools) != null ? _a17 : {};
   const modelMessages = [];
   for (const message of messages) {
@@ -2156,6 +1829,9 @@ function convertToModelMessages(messages, options) {
       case "assistant": {
         if (message.parts != null) {
           let processBlock2 = function() {
+            if (block.length === 0) {
+              return;
+            }
             const content = [];
             for (const part of block) {
               switch (part.type) {
@@ -2230,33 +1906,20 @@ function convertToModelMessages(messages, options) {
             });
             }
             block = [];
-            blockHasToolInvocations = false;
-            currentStep++;
           };
           var processBlock = processBlock2;
-          let currentStep = 0;
-          let blockHasToolInvocations = false;
          let block = [];
          for (const part of message.parts) {
            switch (part.type) {
-              case "text":
-
-                processBlock2();
-              }
-                block.push(part);
-                break;
-              }
+              case "text":
+              case "reasoning":
               case "file":
-              case "
+              case "tool-invocation": {
                 block.push(part);
                 break;
               }
-              case "
-
-                processBlock2();
-              }
-                block.push(part);
-                blockHasToolInvocations = true;
+              case "step-start": {
+                processBlock2();
                break;
              }
            }
@@ -2281,6 +1944,111 @@ var convertToCoreMessages = convertToModelMessages;
 
 // src/ui/default-chat-store-options.ts
 var import_provider_utils6 = require("@ai-sdk/provider-utils");
+
+// src/ui/default-chat-transport.ts
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
+var getOriginalFetch2 = () => fetch;
+async function fetchUIMessageStream({
+  api,
+  body,
+  credentials,
+  headers,
+  abortController,
+  fetch: fetch2 = getOriginalFetch2(),
+  requestType = "generate"
+}) {
+  var _a17, _b, _c;
+  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
+    method: "GET",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+    credentials
+  }) : await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
+    credentials
+  });
+  if (!response.ok) {
+    throw new Error(
+      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  return (0, import_provider_utils5.parseJsonEventStream)({
+    stream: response.body,
+    schema: uiMessageStreamPartSchema
+  }).pipeThrough(
+    new TransformStream({
+      async transform(part, controller) {
+        if (!part.success) {
+          throw part.error;
+        }
+        controller.enqueue(part.value);
+      }
+    })
+  );
+}
+var DefaultChatTransport = class {
+  constructor({
+    api,
+    credentials,
+    headers,
+    body,
+    fetch: fetch2,
+    prepareRequestBody
+  }) {
+    this.api = api;
+    this.credentials = credentials;
+    this.headers = headers;
+    this.body = body;
+    this.fetch = fetch2;
+    this.prepareRequestBody = prepareRequestBody;
+  }
+  submitMessages({
+    chatId,
+    messages,
+    abortController,
+    body,
+    headers,
+    requestType
+  }) {
+    var _a17, _b;
+    return fetchUIMessageStream({
+      api: this.api,
+      headers: {
+        ...this.headers,
+        ...headers
+      },
+      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      })) != null ? _b : {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      },
+      credentials: this.credentials,
+      abortController: () => abortController,
+      fetch: this.fetch,
+      requestType
+    });
+  }
+};
+
+// src/ui/default-chat-store-options.ts
 function defaultChatStoreOptions({
   api = "/api/chat",
   fetch: fetch2,
@@ -2311,6 +2079,119 @@ function defaultChatStoreOptions({
   });
 }
 
+// src/ui/transform-text-to-ui-message-stream.ts
+function transformTextToUiMessageStream({
+  stream
+}) {
+  return stream.pipeThrough(
+    new TransformStream({
+      start(controller) {
+        controller.enqueue({ type: "start" });
+        controller.enqueue({ type: "start-step" });
+      },
+      async transform(part, controller) {
+        controller.enqueue({ type: "text", text: part });
+      },
+      async flush(controller) {
+        controller.enqueue({ type: "finish-step" });
+        controller.enqueue({ type: "finish" });
+      }
+    })
+  );
+}
+
+// src/ui/text-stream-chat-transport.ts
+var getOriginalFetch3 = () => fetch;
+async function fetchTextStream({
+  api,
+  body,
+  credentials,
+  headers,
+  abortController,
+  fetch: fetch2 = getOriginalFetch3(),
+  requestType = "generate"
+}) {
+  var _a17, _b, _c;
+  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
+    method: "GET",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+    credentials
+  }) : await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
+    credentials
+  });
+  if (!response.ok) {
+    throw new Error(
+      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  return transformTextToUiMessageStream({
+    stream: response.body.pipeThrough(new TextDecoderStream())
+  });
+}
+var TextStreamChatTransport = class {
+  constructor({
+    api,
+    credentials,
+    headers,
+    body,
+    fetch: fetch2,
+    prepareRequestBody
+  }) {
+    this.api = api;
+    this.credentials = credentials;
+    this.headers = headers;
+    this.body = body;
+    this.fetch = fetch2;
+    this.prepareRequestBody = prepareRequestBody;
+  }
+  submitMessages({
+    chatId,
+    messages,
+    abortController,
+    body,
+    headers,
+    requestType
+  }) {
+    var _a17, _b;
+    return fetchTextStream({
+      api: this.api,
+      headers: {
+        ...this.headers,
+        ...headers
+      },
+      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      })) != null ? _b : {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      },
+      credentials: this.credentials,
+      abortController: () => abortController,
+      fetch: this.fetch,
+      requestType
+    });
+  }
+};
+
 // src/ui-message-stream/handle-ui-message-stream-finish.ts
 function handleUIMessageStreamFinish({
   newMessageId,
@@ -4525,11 +4406,17 @@ function validateObjectGenerationInput({
   }
 }
 
+// core/prompt/resolve-language-model.ts
+var import_gateway = require("@ai-sdk/gateway");
+function resolveLanguageModel(model) {
+  return typeof model === "string" ? import_gateway.gateway.languageModel(model) : model;
+}
+
 // core/generate-object/generate-object.ts
 var originalGenerateId = (0, import_provider_utils15.createIdGenerator)({ prefix: "aiobj", size: 24 });
 async function generateObject(options) {
   const {
-    model,
+    model: modelArg,
     output = "object",
     system,
     prompt,
@@ -4546,6 +4433,7 @@ async function generateObject(options) {
     } = {},
     ...settings
   } = options;
+  const model = resolveLanguageModel(modelArg);
   const enumValues = "enum" in options ? options.enum : void 0;
   const {
     schema: inputSchema,
@@ -5005,7 +4893,7 @@ function streamObject(options) {
 }
 var DefaultStreamObjectResult = class {
   constructor({
-    model,
+    model: modelArg,
     headers,
     telemetry,
     settings,
@@ -5030,6 +4918,7 @@ var DefaultStreamObjectResult = class {
     this._warnings = new DelayedPromise();
     this._request = new DelayedPromise();
     this._response = new DelayedPromise();
+    const model = resolveLanguageModel(modelArg);
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg
     });
@@ -5843,7 +5732,7 @@ var originalGenerateId3 = (0, import_provider_utils19.createIdGenerator)({
   size: 24
 });
 async function generateText({
-  model,
+  model: modelArg,
   tools,
   toolChoice,
   system,
@@ -5868,6 +5757,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
+  const model = resolveLanguageModel(modelArg);
   const stopConditions = asArray(stopWhen);
   const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const callSettings = prepareCallSettings(settings);
@@ -5904,7 +5794,7 @@ async function generateText({
     }),
     tracer,
     fn: async (span) => {
-      var _a17, _b, _c, _d;
+      var _a17, _b, _c, _d, _e;
       const callSettings2 = prepareCallSettings(settings);
       let currentModelResponse;
       let currentToolCalls = [];
@@ -5923,16 +5813,18 @@ async function generateText({
        }));
        const promptMessages = await convertToLanguageModelPrompt({
          prompt: {
-            system: initialPrompt.system,
+            system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
            messages: stepInputMessages
          },
          supportedUrls: await model.supportedUrls
        });
-        const stepModel = (
+        const stepModel = resolveLanguageModel(
+          (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+        );
        const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
          tools,
-          toolChoice: (
-          activeTools: (
+          toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+          activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
        });
        currentModelResponse = await retry(
          () => {
@@ -5975,7 +5867,7 @@ async function generateText({
             }),
             tracer,
             fn: async (span2) => {
-              var _a19, _b2, _c2, _d2,
+              var _a19, _b2, _c2, _d2, _e2, _f, _g, _h;
               const result = await stepModel.doGenerate({
                 ...callSettings2,
                 tools: stepTools,
@@ -5989,7 +5881,7 @@ async function generateText({
               const responseData = {
                 id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
                 timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
-                modelId: (_f = (
+                modelId: (_f = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f : stepModel.modelId,
                 headers: (_g = result.response) == null ? void 0 : _g.headers,
                 body: (_h = result.response) == null ? void 0 : _h.body
               };
@@ -6065,7 +5957,7 @@ async function generateText({
           usage: currentModelResponse.usage,
           warnings: currentModelResponse.warnings,
           providerMetadata: currentModelResponse.providerMetadata,
-          request: (
+          request: (_e = currentModelResponse.request) != null ? _e : {},
           response: {
             ...currentModelResponse.response,
             // deep clone msgs to avoid mutating past messages in multi-step:
@@ -6657,7 +6549,7 @@ function streamText({
   ...settings
 }) {
   return new DefaultStreamTextResult({
-    model,
+    model: resolveLanguageModel(model),
     telemetry,
     headers,
     settings,
@@ -6972,7 +6864,7 @@ var DefaultStreamTextResult = class {
       responseMessages,
       usage
     }) {
-      var _a17, _b, _c;
+      var _a17, _b, _c, _d;
      stepFinish = new DelayedPromise();
      const initialPrompt = await standardizePrompt({
        system,
@@ -6990,16 +6882,18 @@ var DefaultStreamTextResult = class {
      }));
      const promptMessages = await convertToLanguageModelPrompt({
        prompt: {
-          system: initialPrompt.system,
+          system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
          messages: stepInputMessages
        },
        supportedUrls: await model.supportedUrls
      });
-      const stepModel = (
+      const stepModel = resolveLanguageModel(
+        (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+      );
      const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
        tools,
-        toolChoice: (
-        activeTools: (
+        toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+        activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
      });
      const {
        result: { stream: stream2, response, request },
@@ -7105,7 +6999,7 @@ var DefaultStreamTextResult = class {
       streamWithToolResults.pipeThrough(
         new TransformStream({
           async transform(chunk, controller) {
-            var _a18, _b2, _c2,
+            var _a18, _b2, _c2, _d2;
             if (chunk.type === "stream-start") {
               warnings = chunk.warnings;
               return;
@@ -7182,7 +7076,7 @@ var DefaultStreamTextResult = class {
               doStreamSpan.addEvent("ai.stream.finish");
               doStreamSpan.setAttributes({
                 "ai.response.msToFinish": msToFinish,
-                "ai.response.avgOutputTokensPerSecond": 1e3 * ((
+                "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
               });
               break;
             }
@@ -7439,7 +7333,7 @@ var DefaultStreamTextResult = class {
   } = {}) {
     const lastMessage = originalMessages[originalMessages.length - 1];
     const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
-    const messageId = isContinuation ? lastMessage.id : newMessageId
+    const messageId = isContinuation ? lastMessage.id : newMessageId;
     const baseStream = this.fullStream.pipeThrough(
       new TransformStream({
         transform: async (part, controller) => {
@@ -7575,7 +7469,7 @@ var DefaultStreamTextResult = class {
     );
     return handleUIMessageStreamFinish({
       stream: baseStream,
-      newMessageId: messageId,
+      newMessageId: messageId != null ? messageId : this.generateId(),
       originalMessages,
       onFinish
     });
@@ -8690,10 +8584,8 @@ var DefaultTranscriptionResult = class {
   ToolExecutionError,
   TypeValidationError,
   UnsupportedFunctionalityError,
-  appendClientMessage,
   asSchema,
   assistantModelMessageSchema,
-  callChatApi,
   callCompletionApi,
   convertFileListToFileUIParts,
   convertToCoreMessages,
@@ -8720,7 +8612,6 @@ var DefaultTranscriptionResult = class {
   experimental_generateImage,
   experimental_generateSpeech,
   experimental_transcribe,
-  extractMaxToolInvocationStep,
   extractReasoningMiddleware,
   generateId,
   generateObject,
@@ -8728,14 +8619,12 @@ var DefaultTranscriptionResult = class {
   getTextFromDataUrl,
   getToolInvocations,
   hasToolCall,
-  isAssistantMessageWithCompletedToolCalls,
   isDeepEqualData,
   jsonSchema,
   modelMessageSchema,
   parsePartialJson,
   pipeTextStreamToResponse,
   pipeUIMessageStreamToResponse,
-  shouldResubmitMessages,
   simulateReadableStream,
   simulateStreamingMiddleware,
   smoothStream,
@@ -8745,7 +8634,6 @@ var DefaultTranscriptionResult = class {
   systemModelMessageSchema,
   tool,
   toolModelMessageSchema,
-  updateToolCallResult,
   userModelMessageSchema,
   wrapLanguageModel
 });