ai 5.0.0-alpha.11 → 5.0.0-alpha.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/dist/index.d.mts +932 -942
- package/dist/index.d.ts +932 -942
- package/dist/index.js +446 -439
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +386 -378
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.js
CHANGED
@@ -54,7 +54,7 @@ __export(src_exports, {
   ToolExecutionError: () => ToolExecutionError,
   TypeValidationError: () => import_provider16.TypeValidationError,
   UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
-  asSchema: () =>
+  asSchema: () => import_provider_utils26.asSchema,
   assistantModelMessageSchema: () => assistantModelMessageSchema,
   callCompletionApi: () => callCompletionApi,
   convertFileListToFileUIParts: () => convertFileListToFileUIParts,
@@ -66,7 +66,7 @@ __export(src_exports, {
   coreToolMessageSchema: () => coreToolMessageSchema,
   coreUserMessageSchema: () => coreUserMessageSchema,
   cosineSimilarity: () => cosineSimilarity,
-  createIdGenerator: () =>
+  createIdGenerator: () => import_provider_utils26.createIdGenerator,
   createProviderRegistry: () => createProviderRegistry,
   createTextStreamResponse: () => createTextStreamResponse,
   createUIMessageStream: () => createUIMessageStream,
@@ -82,14 +82,15 @@ __export(src_exports, {
   experimental_generateSpeech: () => generateSpeech,
   experimental_transcribe: () => transcribe,
   extractReasoningMiddleware: () => extractReasoningMiddleware,
-  generateId: () =>
+  generateId: () => import_provider_utils26.generateId,
   generateObject: () => generateObject,
   generateText: () => generateText,
   getTextFromDataUrl: () => getTextFromDataUrl,
-
+  getToolName: () => getToolName,
   hasToolCall: () => hasToolCall,
   isDeepEqualData: () => isDeepEqualData,
-
+  isToolUIPart: () => isToolUIPart,
+  jsonSchema: () => import_provider_utils26.jsonSchema,
   modelMessageSchema: () => modelMessageSchema,
   parsePartialJson: () => parsePartialJson,
   pipeTextStreamToResponse: () => pipeTextStreamToResponse,
@@ -107,7 +108,7 @@ __export(src_exports, {
   wrapLanguageModel: () => wrapLanguageModel
 });
 module.exports = __toCommonJS(src_exports);
-var
+var import_provider_utils26 = require("@ai-sdk/provider-utils");
 
 // src/error/index.ts
 var import_provider16 = require("@ai-sdk/provider");
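The export map now forwards `asSchema`, `createIdGenerator`, `generateId`, and `jsonSchema` straight from `@ai-sdk/provider-utils` (imported as `import_provider_utils26`) and adds the new `getToolName`/`isToolUIPart` helpers. A minimal sketch of the consumer side — the import path stays `ai`, only the internal wiring changed (the schema below is illustrative, not from the diff):

```ts
import { generateId, jsonSchema } from "ai";

const id = generateId(); // now delegated to @ai-sdk/provider-utils
const citySchema = jsonSchema<{ city: string }>({
  type: "object",
  properties: { city: { type: "string" } },
  required: ["city"],
});
```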
@@ -540,42 +541,45 @@ var import_provider_utils = require("@ai-sdk/provider-utils");
 // src/ui-message-stream/ui-message-stream-parts.ts
 var import_zod = require("zod");
 var uiMessageStreamPartSchema = import_zod.z.union([
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("text"),
     text: import_zod.z.string()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("error"),
     errorText: import_zod.z.string()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("tool-call-streaming-start"),
     toolCallId: import_zod.z.string(),
     toolName: import_zod.z.string()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("tool-call-delta"),
     toolCallId: import_zod.z.string(),
     argsTextDelta: import_zod.z.string()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("tool-call"),
     toolCallId: import_zod.z.string(),
     toolName: import_zod.z.string(),
     args: import_zod.z.unknown()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("tool-result"),
     toolCallId: import_zod.z.string(),
     result: import_zod.z.unknown(),
     providerMetadata: import_zod.z.any().optional()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("reasoning"),
     text: import_zod.z.string(),
     providerMetadata: import_zod.z.record(import_zod.z.any()).optional()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
+    type: import_zod.z.literal("reasoning-part-finish")
+  }),
+  import_zod.z.strictObject({
     type: import_zod.z.literal("source-url"),
     sourceId: import_zod.z.string(),
     url: import_zod.z.string(),
@@ -583,7 +587,7 @@ var uiMessageStreamPartSchema = import_zod.z.union([
     providerMetadata: import_zod.z.any().optional()
     // Use z.any() for generic metadata
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("source-document"),
     sourceId: import_zod.z.string(),
     mediaType: import_zod.z.string(),
@@ -592,39 +596,34 @@ var uiMessageStreamPartSchema = import_zod.z.union([
     providerMetadata: import_zod.z.any().optional()
     // Use z.any() for generic metadata
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("file"),
     url: import_zod.z.string(),
     mediaType: import_zod.z.string()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.string().startsWith("data-"),
     id: import_zod.z.string().optional(),
     data: import_zod.z.unknown()
   }),
-  import_zod.z.
-    type: import_zod.z.literal("
-    value: import_zod.z.object({ metadata: import_zod.z.unknown() })
-  }),
-  import_zod.z.object({
-    type: import_zod.z.literal("start-step"),
-    metadata: import_zod.z.unknown().optional()
+  import_zod.z.strictObject({
+    type: import_zod.z.literal("start-step")
   }),
-  import_zod.z.
-    type: import_zod.z.literal("finish-step")
-    metadata: import_zod.z.unknown().optional()
+  import_zod.z.strictObject({
+    type: import_zod.z.literal("finish-step")
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("start"),
     messageId: import_zod.z.string().optional(),
-
+    messageMetadata: import_zod.z.unknown().optional()
   }),
-  import_zod.z.
+  import_zod.z.strictObject({
     type: import_zod.z.literal("finish"),
-
+    messageMetadata: import_zod.z.unknown().optional()
   }),
-  import_zod.z.
-    type: import_zod.z.literal("
+  import_zod.z.strictObject({
+    type: import_zod.z.literal("message-metadata"),
+    messageMetadata: import_zod.z.unknown()
   })
 ]);
 function isDataUIMessageStreamPart(part) {
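Every member of `uiMessageStreamPartSchema` moves from `z.object` to `z.strictObject`, the per-step `metadata` fields are replaced by explicit `messageMetadata` fields, and two part types are added (`reasoning-part-finish`, `message-metadata`). A minimal standalone sketch (plain zod, not code from the package) of what the `strictObject` switch changes: unknown keys now fail validation instead of being silently stripped.

```ts
import { z } from "zod";

const loose = z.object({ type: z.literal("text"), text: z.string() });
const strict = z.strictObject({ type: z.literal("text"), text: z.string() });

const part = { type: "text", text: "hi", extra: 1 };
loose.parse(part);              // ok: "extra" is stripped
strict.safeParse(part).success; // false: unknown key "extra" is rejected
```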
@@ -812,8 +811,137 @@ var SerialJobExecutor = class {
   }
 };
 
+// src/ui/convert-file-list-to-file-ui-parts.ts
+async function convertFileListToFileUIParts(files) {
+  if (files == null) {
+    return [];
+  }
+  if (!globalThis.FileList || !(files instanceof globalThis.FileList)) {
+    throw new Error("FileList is not supported in the current environment");
+  }
+  return Promise.all(
+    Array.from(files).map(async (file) => {
+      const { name: name17, type } = file;
+      const dataUrl = await new Promise((resolve, reject) => {
+        const reader = new FileReader();
+        reader.onload = (readerEvent) => {
+          var _a17;
+          resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
+        };
+        reader.onerror = (error) => reject(error);
+        reader.readAsDataURL(file);
+      });
+      return {
+        type: "file",
+        mediaType: type,
+        filename: name17,
+        url: dataUrl
+      };
+    })
+  );
+}
+
+// src/ui/default-chat-transport.ts
+var import_provider_utils2 = require("@ai-sdk/provider-utils");
+var getOriginalFetch2 = () => fetch;
+async function fetchUIMessageStream({
+  api,
+  body,
+  credentials,
+  headers,
+  abortSignal,
+  fetch: fetch2 = getOriginalFetch2(),
+  requestType = "generate"
+}) {
+  var _a17;
+  const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
+    method: "GET",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: abortSignal,
+    credentials
+  }) : await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: abortSignal,
+    credentials
+  });
+  if (!response.ok) {
+    throw new Error(
+      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  return (0, import_provider_utils2.parseJsonEventStream)({
+    stream: response.body,
+    schema: uiMessageStreamPartSchema
+  }).pipeThrough(
+    new TransformStream({
+      async transform(part, controller) {
+        if (!part.success) {
+          throw part.error;
+        }
+        controller.enqueue(part.value);
+      }
+    })
+  );
+}
+var DefaultChatTransport = class {
+  constructor({
+    api = "/api/chat",
+    credentials,
+    headers,
+    body,
+    fetch: fetch2,
+    prepareRequest
+  } = {}) {
+    this.api = api;
+    this.credentials = credentials;
+    this.headers = headers;
+    this.body = body;
+    this.fetch = fetch2;
+    this.prepareRequest = prepareRequest;
+  }
+  submitMessages({
+    chatId,
+    messages,
+    abortSignal,
+    metadata,
+    headers,
+    body,
+    requestType
+  }) {
+    var _a17, _b;
+    const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
+      id: chatId,
+      messages,
+      body: { ...this.body, ...body },
+      headers: { ...this.headers, ...headers },
+      credentials: this.credentials,
+      requestMetadata: metadata
+    });
+    return fetchUIMessageStream({
+      api: this.api,
+      body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body, id: chatId, messages },
+      headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
+      credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
+      abortSignal,
+      fetch: this.fetch,
+      requestType
+    });
+  }
+};
+
 // src/ui/process-ui-message-stream.ts
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
 
 // src/util/merge-objects.ts
 function mergeObjects(base, overrides) {
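`convertFileListToFileUIParts` and `DefaultChatTransport` are hoisted ahead of the stream-processing code, and the transport resolves `body`, `headers`, and `credentials` from an optional `prepareRequest` hook before falling back to its constructor defaults. A sketch of how the transport might be configured, assuming the alpha UI API surface shown in this hunk (the `sessionId` field is purely illustrative):

```ts
import { DefaultChatTransport } from "ai";

const transport = new DefaultChatTransport({
  api: "/api/chat",
  headers: { "x-app-version": "1.2.3" },
  // prepareRequest may override body/headers/credentials per submit;
  // anything it leaves undefined falls back to the constructor options.
  prepareRequest: ({ id, messages, body, headers, credentials }) => ({
    body: { ...body, id, messages, sessionId: "abc" },
    headers,
    credentials,
  }),
});
```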
@@ -849,7 +977,7 @@ function mergeObjects(base, overrides) {
 }
 
 // src/util/parse-partial-json.ts
-var
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
 
 // src/util/fix-json.ts
 function fixJson(input) {
@@ -1174,38 +1302,37 @@ async function parsePartialJson(jsonText) {
   if (jsonText === void 0) {
     return { value: void 0, state: "undefined-input" };
   }
-  let result = await (0,
+  let result = await (0, import_provider_utils3.safeParseJSON)({ text: jsonText });
   if (result.success) {
     return { value: result.value, state: "successful-parse" };
   }
-  result = await (0,
+  result = await (0, import_provider_utils3.safeParseJSON)({ text: fixJson(jsonText) });
   if (result.success) {
     return { value: result.value, state: "repaired-parse" };
   }
   return { value: void 0, state: "failed-parse" };
 }
 
-// src/ui/
-function
-  return
-
-
+// src/ui/ui-messages.ts
+function isToolUIPart(part) {
+  return part.type.startsWith("tool-");
+}
+function getToolName(part) {
+  return part.type.split("-")[1];
 }
 
 // src/ui/process-ui-message-stream.ts
 function createStreamingUIMessageState({
   lastMessage,
-
-}
-  const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
-  const message = isContinuation ? lastMessage : {
-    id: newMessageId,
-    metadata: {},
-    role: "assistant",
-    parts: []
-  };
+  messageId
+}) {
   return {
-    message
+    message: (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage : {
+      id: messageId,
+      metadata: void 0,
+      role: "assistant",
+      parts: []
+    },
     activeTextPart: void 0,
     activeReasoningPart: void 0,
     partialToolCalls: {}
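Tool invocations are no longer modeled as a single `"tool-invocation"` part; each call is stored as a `tool-<toolName>` part, and the new `isToolUIPart`/`getToolName` helpers recover the tool from the part type. A small self-contained sketch that mirrors the two helpers added above (local re-declarations, not the library's types):

```ts
// Mirrors the dist code above (sketch only).
const isToolUIPart = (part: { type: string }) => part.type.startsWith("tool-");
const getToolName = (part: { type: string }) => part.type.split("-")[1];

const part = { type: "tool-getWeather", toolCallId: "call_1", state: "call" as const };
isToolUIPart(part); // true  — the type starts with "tool-"
getToolName(part);  // "getWeather"
```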
@@ -1222,16 +1349,21 @@ function processUIMessageStream({
     new TransformStream({
       async transform(part, controller) {
         await runUpdateMessageJob(async ({ state, write }) => {
-          function updateToolInvocationPart(
+          function updateToolInvocationPart(options) {
             const part2 = state.message.parts.find(
-              (part3) =>
+              (part3) => isToolUIPart(part3) && part3.toolCallId === options.toolCallId
             );
             if (part2 != null) {
-              part2.
+              part2.state = options.state;
+              part2.args = options.args;
+              part2.result = options.result;
             } else {
               state.message.parts.push({
-                type:
-
+                type: `tool-${options.toolName}`,
+                toolCallId: options.toolCallId,
+                state: options.state,
+                args: options.args,
+                result: options.result
               });
             }
           }
@@ -1239,7 +1371,7 @@ function processUIMessageStream({
             if (metadata != null) {
               const mergedMetadata = state.message.metadata != null ? mergeObjects(state.message.metadata, metadata) : metadata;
               if (messageMetadataSchema != null) {
-                await (0,
+                await (0, import_provider_utils4.validateTypes)({
                   value: mergedMetadata,
                   schema: messageMetadataSchema
                 });
@@ -1315,16 +1447,16 @@ function processUIMessageStream({
               break;
             }
             case "tool-call-streaming-start": {
-              const toolInvocations =
+              const toolInvocations = state.message.parts.filter(isToolUIPart);
               state.partialToolCalls[part.toolCallId] = {
                 text: "",
                 toolName: part.toolName,
                 index: toolInvocations.length
               };
-              updateToolInvocationPart(
-                state: "partial-call",
+              updateToolInvocationPart({
                 toolCallId: part.toolCallId,
                 toolName: part.toolName,
+                state: "partial-call",
                 args: void 0
               });
               write();
@@ -1336,20 +1468,20 @@ function processUIMessageStream({
               const { value: partialArgs } = await parsePartialJson(
                 partialToolCall.text
               );
-              updateToolInvocationPart(
-                state: "partial-call",
+              updateToolInvocationPart({
                 toolCallId: part.toolCallId,
                 toolName: partialToolCall.toolName,
+                state: "partial-call",
                 args: partialArgs
               });
               write();
               break;
             }
             case "tool-call": {
-              updateToolInvocationPart(
-                state: "call",
+              updateToolInvocationPart({
                 toolCallId: part.toolCallId,
                 toolName: part.toolName,
+                state: "call",
                 args: part.args
               });
               write();
@@ -1358,10 +1490,10 @@ function processUIMessageStream({
                   toolCall: part
                 });
                 if (result != null) {
-                  updateToolInvocationPart(
-                    state: "result",
+                  updateToolInvocationPart({
                     toolCallId: part.toolCallId,
                     toolName: part.toolName,
+                    state: "result",
                     args: part.args,
                     result
                   });
@@ -1371,7 +1503,7 @@ function processUIMessageStream({
               break;
             }
             case "tool-result": {
-              const toolInvocations =
+              const toolInvocations = state.message.parts.filter(isToolUIPart);
               if (toolInvocations == null) {
                 throw new Error("tool_result must be preceded by a tool_call");
               }
@@ -1383,9 +1515,14 @@ function processUIMessageStream({
                   "tool_result must be preceded by a tool_call with the same toolCallId"
                 );
               }
-
-
+              const toolName = getToolName(
+                toolInvocations[toolInvocationIndex]
+              );
+              updateToolInvocationPart({
+                toolCallId: part.toolCallId,
+                toolName,
                 state: "result",
+                args: toolInvocations[toolInvocationIndex].args,
                 result: part.result
               });
               write();
@@ -1393,39 +1530,33 @@ function processUIMessageStream({
             }
             case "start-step": {
               state.message.parts.push({ type: "step-start" });
-              await updateMessageMetadata(part.metadata);
-              write();
               break;
             }
             case "finish-step": {
               state.activeTextPart = void 0;
               state.activeReasoningPart = void 0;
-              await updateMessageMetadata(part.metadata);
-              if (part.metadata != null) {
-                write();
-              }
               break;
             }
             case "start": {
               if (part.messageId != null) {
                 state.message.id = part.messageId;
               }
-              await updateMessageMetadata(part.
-              if (part.messageId != null || part.
+              await updateMessageMetadata(part.messageMetadata);
+              if (part.messageId != null || part.messageMetadata != null) {
                 write();
               }
               break;
             }
             case "finish": {
-              await updateMessageMetadata(part.
-              if (part.
+              await updateMessageMetadata(part.messageMetadata);
+              if (part.messageMetadata != null) {
                 write();
               }
               break;
             }
-            case "metadata": {
-              await updateMessageMetadata(part.
-              if (part.
+            case "message-metadata": {
+              await updateMessageMetadata(part.messageMetadata);
+              if (part.messageMetadata != null) {
                 write();
               }
               break;
@@ -1453,9 +1584,6 @@ function processUIMessageStream({
     })
   );
 }
-function isToolInvocationUIPart(part) {
-  return part.type === "tool-invocation";
-}
 function isObject(value) {
   return typeof value === "object" && value !== null;
 }
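In `processUIMessageStream`, the `start`, `finish`, and new `message-metadata` parts now read a `messageMetadata` field, and the old per-step metadata handling on `start-step`/`finish-step` is removed. An illustrative part sequence under the renamed schema (all values made up):

```ts
const parts = [
  { type: "start", messageId: "msg_1", messageMetadata: { model: "gpt-4o" } },
  { type: "start-step" },
  { type: "text", text: "Hello" },
  { type: "message-metadata", messageMetadata: { usage: { outputTokens: 5 } } },
  { type: "finish-step" },
  { type: "finish", messageMetadata: { finishReason: "stop" } },
];
```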
@@ -1490,137 +1618,8 @@ function isAssistantMessageWithCompletedToolCalls(message) {
   const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
     return part.type === "step-start" ? index : lastIndex;
   }, -1);
-  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter(
-  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result"
-}
-
-// src/ui/default-chat-transport.ts
-var import_provider_utils4 = require("@ai-sdk/provider-utils");
-var getOriginalFetch2 = () => fetch;
-async function fetchUIMessageStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortSignal,
-  fetch: fetch2 = getOriginalFetch2(),
-  requestType = "generate"
-}) {
-  var _a17;
-  const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return (0, import_provider_utils4.parseJsonEventStream)({
-    stream: response.body,
-    schema: uiMessageStreamPartSchema
-  }).pipeThrough(
-    new TransformStream({
-      async transform(part, controller) {
-        if (!part.success) {
-          throw part.error;
-        }
-        controller.enqueue(part.value);
-      }
-    })
-  );
-}
-var DefaultChatTransport = class {
-  constructor({
-    api = "/api/chat",
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequest
-  } = {}) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequest = prepareRequest;
-  }
-  submitMessages({
-    chatId,
-    messages,
-    abortSignal,
-    metadata,
-    headers,
-    body,
-    requestType
-  }) {
-    var _a17, _b;
-    const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
-      id: chatId,
-      messages,
-      body: { ...this.body, ...body },
-      headers: { ...this.headers, ...headers },
-      credentials: this.credentials,
-      requestMetadata: metadata
-    });
-    return fetchUIMessageStream({
-      api: this.api,
-      body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body, id: chatId, messages },
-      headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
-      credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
-      abortSignal,
-      fetch: this.fetch,
-      requestType
-    });
-  }
-};
-
-// src/ui/convert-file-list-to-file-ui-parts.ts
-async function convertFileListToFileUIParts(files) {
-  if (files == null) {
-    return [];
-  }
-  if (!globalThis.FileList || !(files instanceof globalThis.FileList)) {
-    throw new Error("FileList is not supported in the current environment");
-  }
-  return Promise.all(
-    Array.from(files).map(async (file) => {
-      const { name: name17, type } = file;
-      const dataUrl = await new Promise((resolve, reject) => {
-        const reader = new FileReader();
-        reader.onload = (readerEvent) => {
-          var _a17;
-          resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
-        };
-        reader.onerror = (error) => reject(error);
-        reader.readAsDataURL(file);
-      });
-      return {
-        type: "file",
-        mediaType: type,
-        filename: name17,
-        url: dataUrl
-      };
-    })
-  );
+  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter(isToolUIPart);
+  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => part.state === "result");
 }
 
 // src/ui/chat.ts
@@ -1637,7 +1636,6 @@ var AbstractChat = class {
     onToolCall,
     onFinish
   }) {
-    this.subscribers = /* @__PURE__ */ new Set();
     this.activeResponse = void 0;
     this.jobExecutor = new SerialJobExecutor();
     this.removeAssistantResponse = () => {
@@ -1649,7 +1647,6 @@ var AbstractChat = class {
         throw new Error("Last message is not an assistant message");
       }
       this.state.popMessage();
-      this.emit({ type: "messages-changed" });
     };
     /**
      * Append a user message to the chat list. This triggers the API call to fetch
@@ -1674,7 +1671,6 @@ var AbstractChat = class {
         id: (_a17 = uiMessage.id) != null ? _a17 : this.generateId(),
         role: (_b = uiMessage.role) != null ? _b : "user"
       });
-      this.emit({ type: "messages-changed" });
       await this.triggerRequest({ requestType: "generate", ...options });
     };
     /**
@@ -1686,7 +1682,6 @@ var AbstractChat = class {
       }
       if (this.lastMessage.role === "assistant") {
        this.state.popMessage();
-        this.emit({ type: "messages-changed" });
       }
       await this.triggerRequest({ requestType: "generate", ...options });
     };
@@ -1701,7 +1696,7 @@ var AbstractChat = class {
       result
     }) => {
       this.jobExecutor.run(async () => {
-
+        updateToolResult({
          messages: this.state.messages,
          toolCallId,
          toolResult: result
@@ -1727,7 +1722,6 @@ var AbstractChat = class {
         return;
       if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
         this.activeResponse.abortController.abort();
-        this.activeResponse.abortController = void 0;
       }
     };
     this.id = id;
@@ -1760,7 +1754,6 @@ var AbstractChat = class {
       return;
     this.state.status = status;
    this.state.error = error;
-    this.emit({ type: "status-changed" });
   }
   get error() {
     return this.state.error;
@@ -1771,18 +1764,8 @@ var AbstractChat = class {
   get lastMessage() {
     return this.state.messages[this.state.messages.length - 1];
   }
-  subscribe(subscriber) {
-    this.subscribers.add(subscriber);
-    return () => this.subscribers.delete(subscriber);
-  }
   set messages(messages) {
     this.state.messages = messages;
-    this.emit({ type: "messages-changed" });
-  }
-  emit(event) {
-    for (const subscriber of this.subscribers) {
-      subscriber.onChange(event);
-    }
   }
   async triggerRequest({
     requestType,
@@ -1799,7 +1782,7 @@ var AbstractChat = class {
       const activeResponse = {
         state: createStreamingUIMessageState({
           lastMessage: this.state.snapshot(lastMessage),
-
+          messageId: this.generateId()
         }),
         abortController: new AbortController()
       };
@@ -1830,9 +1813,6 @@ var AbstractChat = class {
             } else {
              this.state.pushMessage(activeResponse.state.message);
             }
-            this.emit({
-              type: "messages-changed"
-            });
           }
         })
       )
@@ -1879,23 +1859,20 @@ var AbstractChat = class {
       }
     }
   };
-function
+function updateToolResult({
   messages,
   toolCallId,
   toolResult: result
 }) {
   const lastMessage = messages[messages.length - 1];
-  const
-  (part) => part
+  const toolPart = lastMessage.parts.find(
+    (part) => isToolUIPart(part) && part.toolCallId === toolCallId
   );
-  if (
+  if (toolPart == null) {
     return;
   }
-
-
-    state: "result",
-    result
-  };
+  toolPart.state = "result";
+  toolPart.result = result;
 }
 
 // src/ui/convert-to-model-messages.ts
@@ -1936,75 +1913,71 @@ function convertToModelMessages(messages, options) {
         }
         const content = [];
         for (const part of block) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+          if (part.type === "text") {
+            content.push(part);
+          } else if (part.type === "file") {
+            content.push({
+              type: "file",
+              mediaType: part.mediaType,
+              data: part.url
+            });
+          } else if (part.type === "reasoning") {
+            content.push({
+              type: "reasoning",
+              text: part.text,
+              providerOptions: part.providerMetadata
+            });
+          } else if (isToolUIPart(part)) {
+            const toolName = getToolName(part);
+            if (part.state === "partial-call") {
+              throw new MessageConversionError({
+                originalMessage: message,
+                message: `Partial tool call is not supported: ${part.toolCallId}`
               });
-
-            }
-            case "tool-invocation":
+            } else {
               content.push({
                 type: "tool-call",
-                toolCallId: part.
-                toolName
-                args: part.
+                toolCallId: part.toolCallId,
+                toolName,
+                args: part.args
               });
-              break;
-            default: {
-              const _exhaustiveCheck = part;
-              throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
             }
+          } else {
+            const _exhaustiveCheck = part;
+            throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
           }
         }
         modelMessages.push({
           role: "assistant",
          content
         });
-        const
-
-        ).map((part) => part.toolInvocation);
-        if (stepInvocations.length > 0) {
+        const toolParts = block.filter(isToolUIPart);
+        if (toolParts.length > 0) {
           modelMessages.push({
             role: "tool",
-            content:
-            (
-
-
-
-
-            });
-          }
-          const { toolCallId, toolName, result } = toolInvocation;
-          const tool2 = tools[toolName];
-          return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
-            type: "tool-result",
-            toolCallId,
-            toolName,
-            result: tool2.experimental_toToolResultContent(result),
-            experimental_content: tool2.experimental_toToolResultContent(result)
-          } : {
-            type: "tool-result",
-            toolCallId,
-            toolName,
-            result
-          };
+            content: toolParts.map((toolPart) => {
+              if (toolPart.state !== "result") {
+                throw new MessageConversionError({
+                  originalMessage: message,
+                  message: "ToolInvocation must have a result: " + JSON.stringify(toolPart)
+                });
              }
-
+              const toolName = getToolName(toolPart);
+              const { toolCallId, result } = toolPart;
+              const tool2 = tools[toolName];
+              return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
+                type: "tool-result",
+                toolCallId,
+                toolName,
+                result: tool2.experimental_toToolResultContent(result),
+                experimental_content: tool2.experimental_toToolResultContent(result)
+              } : {
+                type: "tool-result",
+                toolCallId,
+                toolName,
+                result
+              };
+            })
           });
         }
         block = [];
@@ -2012,18 +1985,10 @@ function convertToModelMessages(messages, options) {
       var processBlock = processBlock2;
       let block = [];
       for (const part of message.parts) {
-
-
-
-
-        case "tool-invocation": {
-          block.push(part);
-          break;
-        }
-        case "step-start": {
-          processBlock2();
-          break;
-        }
+        if (part.type === "text" || part.type === "reasoning" || part.type === "file" || isToolUIPart(part)) {
+          block.push(part);
+        } else if (part.type === "step-start") {
+          processBlock2();
         }
       }
       processBlock2();
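`convertToModelMessages` now walks typed `tool-<name>` parts instead of the removed `"tool-invocation"` parts, reading `state`, `args`, and `result` directly off each part; parts still in the `"partial-call"` state throw a `MessageConversionError`. A hedged sketch of the conversion (the exact UIMessage typing in this alpha is assumed, hence the cast):

```ts
import { convertToModelMessages, type UIMessage } from "ai";

const uiMessages = [
  {
    role: "assistant",
    parts: [
      { type: "step-start" },
      {
        type: "tool-getWeather",   // tool name is encoded in the part type
        toolCallId: "call_1",
        state: "result",           // must be "result"; "partial-call" throws
        args: { city: "Berlin" },
        result: { tempC: 18 },
      },
    ],
  },
] as unknown as UIMessage[]; // cast: exact typing in this alpha is assumed

const modelMessages = convertToModelMessages(uiMessages);
// → an assistant message with a "tool-call" part, followed by a "tool" role
//   message carrying the matching "tool-result".
```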
@@ -2153,9 +2118,12 @@ var TextStreamChatTransport = class {
   }
 };
 
+// src/ui-message-stream/create-ui-message-stream.ts
+var import_provider_utils6 = require("@ai-sdk/provider-utils");
+
 // src/ui-message-stream/handle-ui-message-stream-finish.ts
 function handleUIMessageStreamFinish({
-
+  messageId,
   originalMessages = [],
   onFinish,
   stream
@@ -2163,19 +2131,30 @@ function handleUIMessageStreamFinish({
   if (onFinish == null) {
     return stream;
   }
-  const lastMessage = originalMessages[originalMessages.length - 1];
-  const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
-  const messageId = isContinuation ? lastMessage.id : newMessageId;
+  const lastMessage = originalMessages == null ? void 0 : originalMessages[originalMessages.length - 1];
   const state = createStreamingUIMessageState({
-    lastMessage: structuredClone(lastMessage),
-
+    lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
+    messageId
+    // will be overridden by the stream
   });
   const runUpdateMessageJob = async (job) => {
     await job({ state, write: () => {
     } });
   };
   return processUIMessageStream({
-    stream
+    stream: stream.pipeThrough(
+      new TransformStream({
+        transform(chunk, controller) {
+          if (chunk.type === "start") {
+            const startChunk = chunk;
+            if (startChunk.messageId == null) {
+              startChunk.messageId = messageId;
+            }
+          }
+          controller.enqueue(chunk);
+        }
+      })
+    ),
     runUpdateMessageJob
   }).pipeThrough(
     new TransformStream({
@@ -2183,12 +2162,12 @@ function handleUIMessageStreamFinish({
         controller.enqueue(chunk);
       },
       flush() {
-        const
+        const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
         onFinish({
-          isContinuation
+          isContinuation,
           responseMessage: state.message,
           messages: [
-          ...
+            ...isContinuation ? originalMessages.slice(0, -1) : originalMessages,
            state.message
          ]
        });
@@ -2203,7 +2182,8 @@ function createUIMessageStream({
   onError = () => "An error occurred.",
   // mask error messages for safety by default
   originalMessages,
-  onFinish
+  onFinish,
+  generateId: generateId3 = import_provider_utils6.generateId
 }) {
   let controller;
   const ongoingStreamPromises = [];
@@ -2235,7 +2215,10 @@ function createUIMessageStream({
           safeEnqueue(value);
         }
       })().catch((error) => {
-        safeEnqueue({
+        safeEnqueue({
+          type: "error",
+          errorText: onError(error)
+        });
       })
     );
   },
@@ -2245,12 +2228,18 @@ function createUIMessageStream({
     if (result) {
       ongoingStreamPromises.push(
         result.catch((error) => {
-          safeEnqueue({
+          safeEnqueue({
+            type: "error",
+            errorText: onError(error)
+          });
        })
      );
    }
   } catch (error) {
-    safeEnqueue({
+    safeEnqueue({
+      type: "error",
+      errorText: onError(error)
+    });
   }
   const waitForStreams = new Promise(async (resolve) => {
     while (ongoingStreamPromises.length > 0) {
@@ -2266,22 +2255,12 @@ function createUIMessageStream({
   });
   return handleUIMessageStreamFinish({
     stream,
-
+    messageId: generateId3(),
     originalMessages,
     onFinish
   });
 }
 
-// src/ui-message-stream/ui-message-stream-headers.ts
-var uiMessageStreamHeaders = {
-  "content-type": "text/event-stream",
-  "cache-control": "no-cache",
-  connection: "keep-alive",
-  "x-vercel-ai-ui-message-stream": "v1",
-  "x-accel-buffering": "no"
-  // disable nginx buffering
-};
-
 // src/ui-message-stream/json-to-sse-transform-stream.ts
 var JsonToSseTransformStream = class extends TransformStream {
   constructor() {
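`createUIMessageStream` gains a `generateId` option (defaulting to the provider-utils generator) and uses it to stamp a `messageId` onto the `start` part when the writer did not set one; errors are surfaced as `error` parts through `onError`. A sketch of the new option — the `execute`/`writer` callback shape is assumed from the surrounding UI-stream API and may differ in this alpha:

```ts
import { createUIMessageStream, createIdGenerator } from "ai";

const stream = createUIMessageStream({
  generateId: createIdGenerator({ prefix: "msg", size: 16 }),
  onError: (error) => `Something went wrong: ${String(error)}`,
  // assumed callback shape; not taken verbatim from this diff
  execute: ({ writer }) => {
    writer.write({ type: "start" }); // messageId is filled in from generateId()
    writer.write({ type: "text", text: "Hi!" });
    writer.write({ type: "finish" });
  },
});
```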
@@ -2298,6 +2277,16 @@ var JsonToSseTransformStream = class extends TransformStream {
   }
 };
 
+// src/ui-message-stream/ui-message-stream-headers.ts
+var uiMessageStreamHeaders = {
+  "content-type": "text/event-stream",
+  "cache-control": "no-cache",
+  connection: "keep-alive",
+  "x-vercel-ai-ui-message-stream": "v1",
+  "x-accel-buffering": "no"
+  // disable nginx buffering
+};
+
 // src/ui-message-stream/create-ui-message-stream-response.ts
 function createUIMessageStreamResponse({
   status,
@@ -2410,7 +2399,7 @@ function isDeepEqualData(obj1, obj2) {
 }
 
 // src/util/simulate-readable-stream.ts
-var
+var import_provider_utils7 = require("@ai-sdk/provider-utils");
 function simulateReadableStream({
   chunks,
   initialDelayInMs = 0,
@@ -2418,7 +2407,7 @@ function simulateReadableStream({
   _internal
 }) {
   var _a17;
-  const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 :
+  const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils7.delay;
   let index = 0;
   return new ReadableStream({
     async pull(controller) {
@@ -2434,7 +2423,7 @@ function simulateReadableStream({
 
 // src/util/retry-with-exponential-backoff.ts
 var import_provider17 = require("@ai-sdk/provider");
-var
+var import_provider_utils8 = require("@ai-sdk/provider-utils");
 var retryWithExponentialBackoff = ({
   maxRetries = 2,
   initialDelayInMs = 2e3,
@@ -2452,13 +2441,13 @@ async function _retryWithExponentialBackoff(f, {
   try {
     return await f();
   } catch (error) {
-    if ((0,
+    if ((0, import_provider_utils8.isAbortError)(error)) {
       throw error;
     }
     if (maxRetries === 0) {
       throw error;
     }
-    const errorMessage = (0,
+    const errorMessage = (0, import_provider_utils8.getErrorMessage)(error);
     const newErrors = [...errors, error];
     const tryNumber = newErrors.length;
     if (tryNumber > maxRetries) {
@@ -2469,7 +2458,7 @@ async function _retryWithExponentialBackoff(f, {
       });
     }
     if (error instanceof Error && import_provider17.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
-      await (0,
+      await (0, import_provider_utils8.delay)(delayInMs);
       return _retryWithExponentialBackoff(
         f,
         { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
@@ -3028,7 +3017,7 @@ var DefaultEmbedManyResult = class {
 };
 
 // src/util/detect-media-type.ts
-var
+var import_provider_utils9 = require("@ai-sdk/provider-utils");
 var imageMediaTypeSignatures = [
   {
     mediaType: "image/gif",
@@ -3135,7 +3124,7 @@ var audioMediaTypeSignatures = [
   }
 ];
 var stripID3 = (data) => {
-  const bytes = typeof data === "string" ? (0,
+  const bytes = typeof data === "string" ? (0, import_provider_utils9.convertBase64ToUint8Array)(data) : data;
   const id3Size = (bytes[6] & 127) << 21 | (bytes[7] & 127) << 14 | (bytes[8] & 127) << 7 | bytes[9] & 127;
   return bytes.slice(id3Size + 10);
 };
@@ -3161,7 +3150,7 @@ function detectMediaType({
 }
 
 // core/generate-text/generated-file.ts
-var
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
 var DefaultGeneratedFile = class {
   constructor({
     data,
@@ -3175,14 +3164,14 @@ var DefaultGeneratedFile = class {
   // lazy conversion with caching to avoid unnecessary conversion overhead:
   get base64() {
     if (this.base64Data == null) {
-      this.base64Data = (0,
+      this.base64Data = (0, import_provider_utils10.convertUint8ArrayToBase64)(this.uint8ArrayData);
     }
     return this.base64Data;
   }
   // lazy conversion with caching to avoid unnecessary conversion overhead:
   get uint8Array() {
     if (this.uint8ArrayData == null) {
-      this.uint8ArrayData = (0,
+      this.uint8ArrayData = (0, import_provider_utils10.convertBase64ToUint8Array)(this.base64Data);
     }
     return this.uint8ArrayData;
   }
@@ -3298,7 +3287,7 @@ async function invokeModelMaxImagesPerCall(model) {
 
 // core/generate-object/generate-object.ts
 var import_provider22 = require("@ai-sdk/provider");
-var
+var import_provider_utils15 = require("@ai-sdk/provider-utils");
 
 // core/generate-text/extract-content-text.ts
 function extractContentText(content) {
@@ -3312,7 +3301,7 @@ function extractContentText(content) {
 }
 
 // core/prompt/convert-to-language-model-prompt.ts
-var
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
 
 // src/util/download.ts
 async function download({ url }) {
@@ -3341,7 +3330,7 @@ async function download({ url }) {
 
 // core/prompt/data-content.ts
 var import_provider18 = require("@ai-sdk/provider");
-var
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
 var import_zod2 = require("zod");
 
 // core/prompt/split-data-url.ts
@@ -3406,9 +3395,9 @@ function convertDataContentToBase64String(content) {
     return content;
   }
   if (content instanceof ArrayBuffer) {
-    return (0,
+    return (0, import_provider_utils11.convertUint8ArrayToBase64)(new Uint8Array(content));
   }
-  return (0,
+  return (0, import_provider_utils11.convertUint8ArrayToBase64)(content);
 }
 function convertDataContentToUint8Array(content) {
   if (content instanceof Uint8Array) {
@@ -3416,7 +3405,7 @@ function convertDataContentToUint8Array(content) {
   }
   if (typeof content === "string") {
     try {
-      return (0,
+      return (0, import_provider_utils11.convertBase64ToUint8Array)(content);
     } catch (error) {
       throw new InvalidDataContentError({
         message: "Invalid data content. Content string is not a base64-encoded media.",
@@ -3567,7 +3556,7 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
     }
     return { mediaType, data };
   }).filter(
-    (part) => part.data instanceof URL && part.mediaType != null && !(0,
+    (part) => part.data instanceof URL && part.mediaType != null && !(0, import_provider_utils12.isUrlSupported)({
       url: part.data.toString(),
       mediaType: part.mediaType,
       supportedUrls
@@ -3751,7 +3740,7 @@ function resolveLanguageModel(model) {
 
 // core/prompt/standardize-prompt.ts
 var import_provider19 = require("@ai-sdk/provider");
-var
+var import_provider_utils13 = require("@ai-sdk/provider-utils");
 var import_zod8 = require("zod");
 
 // core/prompt/message.ts
@@ -3923,7 +3912,7 @@ async function standardizePrompt(prompt) {
       message: "messages must not be empty"
     });
   }
-  const validationResult = await (0,
+  const validationResult = await (0, import_provider_utils13.safeValidateTypes)({
     value: messages,
     schema: import_zod8.z.array(modelMessageSchema)
   });
@@ -3971,7 +3960,7 @@ function stringifyForTelemetry(prompt) {
 
 // core/generate-object/output-strategy.ts
 var import_provider21 = require("@ai-sdk/provider");
-var
+var import_provider_utils14 = require("@ai-sdk/provider-utils");
 
 // src/util/async-iterable-stream.ts
 function createAsyncIterableStream(source) {
@@ -4027,7 +4016,7 @@ var objectOutputStrategy = (schema) => ({
     };
   },
   async validateFinalResult(value) {
-    return (0,
+    return (0, import_provider_utils14.safeValidateTypes)({ value, schema });
   },
   createElementStream() {
     throw new import_provider21.UnsupportedFunctionalityError({
@@ -4071,7 +4060,7 @@ var arrayOutputStrategy = (schema) => {
     const resultArray = [];
     for (let i = 0; i < inputArray.length; i++) {
       const element = inputArray[i];
-      const result = await (0,
+      const result = await (0, import_provider_utils14.safeValidateTypes)({ value: element, schema });
       if (i === inputArray.length - 1 && !isFinalDelta) {
         continue;
       }
@@ -4112,7 +4101,7 @@ var arrayOutputStrategy = (schema) => {
     }
     const inputArray = value.elements;
     for (const element of inputArray) {
-      const result = await (0,
+      const result = await (0, import_provider_utils14.safeValidateTypes)({ value: element, schema });
      if (!result.success) {
        return result;
      }
@@ -4230,9 +4219,9 @@ function getOutputStrategy({
 }) {
   switch (output) {
     case "object":
-      return objectOutputStrategy((0,
+      return objectOutputStrategy((0, import_provider_utils14.asSchema)(schema));
     case "array":
-      return arrayOutputStrategy((0,
+      return arrayOutputStrategy((0, import_provider_utils14.asSchema)(schema));
     case "enum":
       return enumOutputStrategy(enumValues);
     case "no-schema":
@@ -4363,7 +4352,7 @@ function validateObjectGenerationInput({
 }
 
 // core/generate-object/generate-object.ts
-var originalGenerateId = (0,
+var originalGenerateId = (0, import_provider_utils15.createIdGenerator)({ prefix: "aiobj", size: 24 });
 async function generateObject(options) {
   const {
     model: modelArg,
@@ -4541,7 +4530,7 @@ async function generateObject(options) {
     request = (_a17 = generateResult.request) != null ? _a17 : {};
     response = generateResult.responseData;
     async function processResult(result2) {
-      const parseResult = await (0,
+      const parseResult = await (0, import_provider_utils15.safeParseJSON)({ text: result2 });
       if (!parseResult.success) {
         throw new NoObjectGeneratedError({
           message: "No object generated: could not parse the response.",
@@ -4640,7 +4629,7 @@ var DefaultGenerateObjectResult = class {
 };
 
 // core/generate-object/stream-object.ts
-var
+var import_provider_utils16 = require("@ai-sdk/provider-utils");
 
 // src/util/create-resolvable-promise.ts
 function createResolvablePromise() {
@@ -4784,7 +4773,7 @@ function now() {
 }
 
 // core/generate-object/stream-object.ts
-var originalGenerateId2 = (0,
+var originalGenerateId2 = (0, import_provider_utils16.createIdGenerator)({ prefix: "aiobj", size: 24 });
 function streamObject(options) {
   const {
     model,
@@ -4939,7 +4928,8 @@ var DefaultStreamObjectResult = class {
       }),
       providerOptions,
       abortSignal,
-      headers
+      headers,
+      includeRawChunks: false
     };
     const transformer = {
       transform: (chunk, controller) => {
@@ -5382,7 +5372,7 @@ var DefaultSpeechResult = class {
 };
 
 // core/generate-text/generate-text.ts
-var
+var import_provider_utils19 = require("@ai-sdk/provider-utils");
 
 // src/util/as-array.ts
 function asArray(value) {
@@ -5390,7 +5380,7 @@ function asArray(value) {
 }
 
 // core/prompt/prepare-tools-and-tool-choice.ts
-var
+var import_provider_utils17 = require("@ai-sdk/provider-utils");
 
 // src/util/is-non-empty-object.ts
 function isNonEmptyObject(object2) {
@@ -5422,7 +5412,7 @@ function prepareToolsAndToolChoice({
         type: "function",
         name: name17,
         description: tool2.description,
-        parameters: (0,
+        parameters: (0, import_provider_utils17.asSchema)(tool2.parameters).jsonSchema
       };
     case "provider-defined":
       return {
@@ -5492,7 +5482,7 @@ function asContent({
 }
 
 // core/generate-text/parse-tool-call.ts
-var
+var import_provider_utils18 = require("@ai-sdk/provider-utils");
 async function parseToolCall({
   toolCall,
   tools,
@@ -5516,7 +5506,7 @@ async function parseToolCall({
     tools,
     parameterSchema: ({ toolName }) => {
       const { parameters } = tools[toolName];
-      return (0,
+      return (0, import_provider_utils18.asSchema)(parameters).jsonSchema;
     },
     system,
     messages,
@@ -5546,8 +5536,8 @@ async function doParseToolCall({
       availableTools: Object.keys(tools)
     });
  }
-  const schema = (0,
-  const parseResult = toolCall.args.trim() === "" ? await (0,
+  const schema = (0, import_provider_utils18.asSchema)(tool2.parameters);
+  const parseResult = toolCall.args.trim() === "" ? await (0, import_provider_utils18.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils18.safeParseJSON)({ text: toolCall.args, schema });
   if (parseResult.success === false) {
     throw new InvalidToolArgumentsError({
       toolName,
@@ -5683,7 +5673,7 @@ function toResponseMessages({
 }
 
 // core/generate-text/generate-text.ts
-var originalGenerateId3 = (0,
+var originalGenerateId3 = (0, import_provider_utils19.createIdGenerator)({
   prefix: "aitxt",
   size: 24
 });
@@ -6142,7 +6132,7 @@ __export(output_exports, {
  object: () => object,
  text: () => text
  });
- var
+ var import_provider_utils20 = require("@ai-sdk/provider-utils");
  var text = () => ({
  type: "text",
  responseFormat: { type: "text" },
@@ -6156,7 +6146,7 @@ var text = () => ({
  var object = ({
  schema: inputSchema
  }) => {
- const schema = (0,
+ const schema = (0, import_provider_utils20.asSchema)(inputSchema);
  return {
  type: "object",
  responseFormat: {
@@ -6182,7 +6172,7 @@ var object = ({
  }
  },
  async parseOutput({ text: text2 }, context) {
- const parseResult = await (0,
+ const parseResult = await (0, import_provider_utils20.safeParseJSON)({ text: text2 });
  if (!parseResult.success) {
  throw new NoObjectGeneratedError({
  message: "No object generated: could not parse the response.",
@@ -6193,7 +6183,7 @@ var object = ({
  finishReason: context.finishReason
  });
  }
- const validationResult = await (0,
+ const validationResult = await (0, import_provider_utils20.safeValidateTypes)({
  value: parseResult.value,
  schema
  });
@@ -6213,7 +6203,7 @@ var object = ({
  };

  // core/generate-text/smooth-stream.ts
- var
+ var import_provider_utils21 = require("@ai-sdk/provider-utils");
  var import_provider24 = require("@ai-sdk/provider");
  var CHUNKING_REGEXPS = {
  word: /\S+\s+/m,
@@ -6222,7 +6212,7 @@ var CHUNKING_REGEXPS = {
  function smoothStream({
  delayInMs = 10,
  chunking = "word",
- _internal: { delay: delay2 =
+ _internal: { delay: delay2 = import_provider_utils21.delay } = {}
  } = {}) {
  let detectChunk;
  if (typeof chunking === "function") {
@@ -6282,14 +6272,13 @@ function smoothStream({
  }

  // core/generate-text/stream-text.ts
- var
+ var import_provider_utils23 = require("@ai-sdk/provider-utils");

  // core/generate-text/run-tools-transformation.ts
- var
+ var import_provider_utils22 = require("@ai-sdk/provider-utils");
  function runToolsTransformation({
  tools,
  generatorStream,
- toolCallStreaming,
  tracer,
  telemetry,
  system,
@@ -6330,6 +6319,10 @@ function runToolsTransformation({
  controller.enqueue(chunk);
  break;
  }
+ case "raw": {
+ controller.enqueue(chunk);
+ break;
+ }
  case "file": {
  controller.enqueue({
  type: "file",
@@ -6341,22 +6334,20 @@ function runToolsTransformation({
  break;
  }
  case "tool-call-delta": {
- if (
- if (!activeToolCalls[chunk.toolCallId]) {
- controller.enqueue({
- type: "tool-call-streaming-start",
- toolCallId: chunk.toolCallId,
- toolName: chunk.toolName
- });
- activeToolCalls[chunk.toolCallId] = true;
- }
+ if (!activeToolCalls[chunk.toolCallId]) {
  controller.enqueue({
- type: "tool-call-
+ type: "tool-call-streaming-start",
  toolCallId: chunk.toolCallId,
- toolName: chunk.toolName
- argsTextDelta: chunk.argsTextDelta
+ toolName: chunk.toolName
  });
+ activeToolCalls[chunk.toolCallId] = true;
  }
+ controller.enqueue({
+ type: "tool-call-delta",
+ toolCallId: chunk.toolCallId,
+ toolName: chunk.toolName,
+ argsTextDelta: chunk.argsTextDelta
+ });
  break;
  }
  case "tool-call": {
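The hunk above removes the `toolCallStreaming` flag from `runToolsTransformation`, so `tool-call-streaming-start` and `tool-call-delta` chunks are now emitted unconditionally while a tool call's arguments stream in. A minimal sketch of consuming those parts from `fullStream`; the model and the tool definition are illustrative placeholders, not taken from this diff:

import { streamText, tool } from "ai";
import { z } from "zod";

const result = streamText({
  model: myModel, // assumption: any configured LanguageModel
  prompt: "What is the weather in Berlin?",
  tools: {
    weather: tool({
      description: "Get the weather for a city",
      parameters: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, tempC: 21 }),
    }),
  },
});

for await (const part of result.fullStream) {
  // No toolCallStreaming option anymore: start/delta parts always arrive.
  if (part.type === "tool-call-streaming-start") {
    console.log("tool call started:", part.toolName);
  } else if (part.type === "tool-call-delta") {
    process.stdout.write(part.argsTextDelta);
  }
}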
@@ -6379,7 +6370,7 @@ function runToolsTransformation({
  });
  }
  if (tool2.execute != null) {
- const toolExecutionId = (0,
+ const toolExecutionId = (0, import_provider_utils22.generateId)();
  outstandingToolResults.add(toolExecutionId);
  recordSpan({
  name: "ai.toolCall",
@@ -6487,8 +6478,20 @@ function runToolsTransformation({
  });
  }

+ // src/ui-message-stream/get-response-ui-message-id.ts
+ function getResponseUIMessageId({
+ originalMessages,
+ responseMessageId
+ }) {
+ if (originalMessages == null) {
+ return void 0;
+ }
+ const lastMessage = originalMessages[originalMessages.length - 1];
+ return (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
+ }
+
  // core/generate-text/stream-text.ts
- var originalGenerateId4 = (0,
+ var originalGenerateId4 = (0, import_provider_utils23.createIdGenerator)({
  prefix: "aitxt",
  size: 24
  });
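The new `getResponseUIMessageId` helper decides which UI message id a response stream carries: when the last original message is an assistant message its id is reused (for example when regenerating it), otherwise a fresh id is produced, and without any original messages no id is resolved at all. A small illustration of that rule with made-up ids; the function below only mirrors the decision logic shown above and is not part of the package:

// Illustrative re-statement of the rule, not the shipped implementation.
type MinimalUIMessage = { id: string; role: "user" | "assistant" };

function resolveResponseMessageId(
  originalMessages: MinimalUIMessage[] | undefined,
  generateId: () => string,
): string | undefined {
  if (originalMessages == null) return undefined; // no persistence context
  const last = originalMessages[originalMessages.length - 1];
  // Reuse the trailing assistant message id when continuing/regenerating it.
  return last?.role === "assistant" ? last.id : generateId();
}

// Trailing assistant message: its id is kept.
resolveResponseMessageId(
  [{ id: "u1", role: "user" }, { id: "a1", role: "assistant" }],
  () => "fresh-id",
); // -> "a1"

// Otherwise a new id is generated.
resolveResponseMessageId([{ id: "u1", role: "user" }], () => "fresh-id"); // -> "fresh-id"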
@@ -6507,12 +6510,11 @@ function streamText({
  experimental_telemetry: telemetry,
  prepareStep,
  providerOptions,
- experimental_toolCallStreaming = false,
- toolCallStreaming = experimental_toolCallStreaming,
  experimental_activeTools,
  activeTools = experimental_activeTools,
  experimental_repairToolCall: repairToolCall,
  experimental_transform: transform,
+ includeRawChunks = false,
  onChunk,
  onError = ({ error }) => {
  console.error(error);
@@ -6538,7 +6540,6 @@ function streamText({
  messages,
  tools,
  toolChoice,
- toolCallStreaming,
  transforms: asArray(transform),
  activeTools,
  repairToolCall,
@@ -6546,6 +6547,7 @@ function streamText({
  output,
  providerOptions,
  prepareStep,
+ includeRawChunks,
  onChunk,
  onError,
  onFinish,
@@ -6616,7 +6618,6 @@ var DefaultStreamTextResult = class {
  messages,
  tools,
  toolChoice,
- toolCallStreaming,
  transforms,
  activeTools,
  repairToolCall,
@@ -6624,6 +6625,7 @@ var DefaultStreamTextResult = class {
  output,
  providerOptions,
  prepareStep,
+ includeRawChunks,
  now: now2,
  currentDate,
  generateId: generateId3,
@@ -6636,6 +6638,7 @@ var DefaultStreamTextResult = class {
  this._finishReason = new DelayedPromise();
  this._steps = new DelayedPromise();
  this.output = output;
+ this.includeRawChunks = includeRawChunks;
  this.generateId = generateId3;
  let stepFinish;
  let activeReasoningPart = void 0;
@@ -6651,7 +6654,7 @@ var DefaultStreamTextResult = class {
  async transform(chunk, controller) {
  controller.enqueue(chunk);
  const { part } = chunk;
- if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
+ if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta" || part.type === "raw") {
  await (onChunk == null ? void 0 : onChunk({ chunk: part }));
  }
  if (part.type === "error") {
@@ -6843,6 +6846,7 @@ var DefaultStreamTextResult = class {
  usage
  }) {
  var _a17, _b, _c, _d;
+ const includeRawChunks2 = self.includeRawChunks;
  stepFinish = new DelayedPromise();
  const initialPrompt = await standardizePrompt({
  system,
@@ -6929,7 +6933,8 @@ var DefaultStreamTextResult = class {
  prompt: promptMessages,
  providerOptions,
  abortSignal,
- headers
+ headers,
+ includeRawChunks: includeRawChunks2
  })
  };
  }
@@ -6938,7 +6943,6 @@ var DefaultStreamTextResult = class {
  const streamWithToolResults = runToolsTransformation({
  tools,
  generatorStream: stream2,
- toolCallStreaming,
  tracer,
  telemetry,
  system,
@@ -7098,6 +7102,12 @@ var DefaultStreamTextResult = class {
  stepFinishReason = "error";
  break;
  }
+ case "raw": {
+ if (includeRawChunks2) {
+ controller.enqueue(chunk);
+ }
+ break;
+ }
  default: {
  const exhaustiveCheck = chunkType;
  throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
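Taken together, the `includeRawChunks` hunks add an opt-in flag on `streamText`: the flag is passed through to the model call, and only when it is enabled are raw provider chunks forwarded into the stream (and on to `onChunk`, which now also accepts `raw` parts). A minimal sketch of opting in; the model and the shape of the raw payload are assumptions, not something this diff specifies:

import { streamText } from "ai";

const result = streamText({
  model: myModel, // assumption: any configured LanguageModel
  prompt: "Hello!",
  // New in this release: forward untouched provider chunks into the stream.
  includeRawChunks: true,
});

for await (const part of result.fullStream) {
  if (part.type === "raw") {
    // The payload is provider-specific; logging it is enough for this sketch.
    console.log("raw provider chunk:", part);
  }
}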
@@ -7318,23 +7328,24 @@ var DefaultStreamTextResult = class {
  );
  }
  toUIMessageStream({
-
- originalMessages = [],
+ originalMessages,
  onFinish,
  messageMetadata,
- sendReasoning =
+ sendReasoning = true,
  sendSources = false,
  sendStart = true,
  sendFinish = true,
  onError = () => "An error occurred."
  // mask error messages for safety by default
  } = {}) {
- const
-
-
+ const responseMessageId = getResponseUIMessageId({
+ originalMessages,
+ responseMessageId: this.generateId
+ });
  const baseStream = this.fullStream.pipeThrough(
  new TransformStream({
  transform: async (part, controller) => {
+ const messageMetadataValue = messageMetadata == null ? void 0 : messageMetadata({ part });
  const partType = part.type;
  switch (partType) {
  case "text": {
@@ -7431,59 +7442,57 @@ var DefaultStreamTextResult = class {
  break;
  }
  case "start-step": {
-
- controller.enqueue({
- type: "start-step",
- metadata
- });
+ controller.enqueue({ type: "start-step" });
  break;
  }
  case "finish-step": {
-
- controller.enqueue({
- type: "finish-step",
- metadata
- });
+ controller.enqueue({ type: "finish-step" });
  break;
  }
  case "start": {
  if (sendStart) {
- const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "start",
- messageId,
-
+ messageId: responseMessageId,
+ messageMetadata: messageMetadataValue
  });
  }
  break;
  }
  case "finish": {
  if (sendFinish) {
- const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "finish",
-
+ messageMetadata: messageMetadataValue
  });
  }
  break;
  }
+ case "raw": {
+ break;
+ }
  default: {
  const exhaustiveCheck = partType;
  throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
  }
  }
+ if (messageMetadataValue != null && partType !== "start" && partType !== "finish") {
+ controller.enqueue({
+ type: "message-metadata",
+ messageMetadata: messageMetadataValue
+ });
+ }
  }
  })
  );
  return handleUIMessageStreamFinish({
  stream: baseStream,
-
+ messageId: responseMessageId != null ? responseMessageId : this.generateId(),
  originalMessages,
  onFinish
  });
  }
  pipeUIMessageStreamToResponse(response, {
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
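These hunks change the UI message stream surface: `newMessageId` is removed, `sendReasoning` now defaults to `true`, the response message id is resolved from `originalMessages` via `getResponseUIMessageId`, and a single `messageMetadata({ part })` callback now feeds the `start` and `finish` parts plus dedicated `message-metadata` parts. A hedged sketch of a route handler using the new options; the request parsing, the model, and the metadata fields are illustrative and not taken from this diff:

import { streamText, type UIMessage } from "ai";

// Assumption: the client posts its current UIMessage history and a prompt.
export async function POST(req: Request) {
  const { messages, prompt }: { messages: UIMessage[]; prompt: string } =
    await req.json();

  const result = streamText({
    model: myModel, // assumption: any configured LanguageModel
    prompt,
  });

  return result.toUIMessageStreamResponse({
    // With originalMessages set, a trailing assistant message keeps its id
    // (via getResponseUIMessageId); the old newMessageId option is gone.
    originalMessages: messages,
    // Called once per streamed part; the returned value is attached to the
    // "start"/"finish" parts and emitted as "message-metadata" parts otherwise.
    messageMetadata: ({ part }) =>
      part.type === "finish" ? { finishedAt: Date.now() } : undefined,
  });
}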
@@ -7497,7 +7506,6 @@ var DefaultStreamTextResult = class {
  pipeUIMessageStreamToResponse({
  response,
  stream: this.toUIMessageStream({
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
@@ -7518,7 +7526,6 @@ var DefaultStreamTextResult = class {
  });
  }
  toUIMessageStreamResponse({
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
@@ -7531,7 +7538,6 @@ var DefaultStreamTextResult = class {
  } = {}) {
  return createUIMessageStreamResponse({
  stream: this.toUIMessageStream({
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
@@ -7928,7 +7934,7 @@ var DefaultProviderRegistry = class {
  };

  // core/tool/mcp/mcp-client.ts
- var
+ var import_provider_utils25 = require("@ai-sdk/provider-utils");

  // core/tool/tool.ts
  function tool(tool2) {
@@ -7936,7 +7942,7 @@ function tool(tool2) {
  }

  // core/tool/mcp/mcp-sse-transport.ts
- var
+ var import_provider_utils24 = require("@ai-sdk/provider-utils");

  // core/tool/mcp/json-rpc-message.ts
  var import_zod10 = require("zod");
@@ -8107,7 +8113,7 @@ var SseMCPTransport = class {
  (_b = this.onerror) == null ? void 0 : _b.call(this, error);
  return reject(error);
  }
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0,
+ const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils24.createEventSourceParserStream)());
  const reader = stream.getReader();
  const processEvents = async () => {
  var _a18, _b2, _c2;
@@ -8431,7 +8437,7 @@ var MCPClient = class {
  if (schemas !== "automatic" && !(name17 in schemas)) {
  continue;
  }
- const parameters = schemas === "automatic" ? (0,
+ const parameters = schemas === "automatic" ? (0, import_provider_utils25.jsonSchema)({
  ...inputSchema,
  properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
  additionalProperties: false
@@ -8625,9 +8631,10 @@ var DefaultTranscriptionResult = class {
  generateObject,
  generateText,
  getTextFromDataUrl,
-
+ getToolName,
  hasToolCall,
  isDeepEqualData,
+ isToolUIPart,
  jsonSchema,
  modelMessageSchema,
  parsePartialJson,
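The export list now also includes `getToolName` and `isToolUIPart`. Their implementations are not part of these hunks; the sketch below shows how such helpers are typically used on UI message parts, assuming tool parts carry a `tool-<name>` style type, which is an assumption about the API rather than something this diff states:

import { getToolName, isToolUIPart, type UIMessage } from "ai";

// Assumption: `message` is a UIMessage whose parts may include tool invocations.
function summarizeToolUsage(message: UIMessage): string[] {
  return message.parts
    .filter((part) => isToolUIPart(part)) // keep only tool parts
    .map((part) => `used tool: ${getToolName(part)}`);
}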