ai 5.0.0-canary.15 → 5.0.0-canary.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -52,9 +52,12 @@ __export(ai_exports, {
  appendClientMessage: () => appendClientMessage,
  appendResponseMessages: () => appendResponseMessages,
  asSchema: () => import_provider_utils4.asSchema,
+ assistantModelMessageSchema: () => assistantModelMessageSchema,
  callChatApi: () => callChatApi,
  callCompletionApi: () => callCompletionApi,
+ convertFileListToFileUIParts: () => convertFileListToFileUIParts,
  convertToCoreMessages: () => convertToCoreMessages,
+ convertToModelMessages: () => convertToModelMessages,
  coreAssistantMessageSchema: () => coreAssistantMessageSchema,
  coreMessageSchema: () => coreMessageSchema,
  coreSystemMessageSchema: () => coreSystemMessageSchema,
@@ -77,20 +80,19 @@ __export(ai_exports, {
  experimental_transcribe: () => transcribe,
  extractMaxToolInvocationStep: () => extractMaxToolInvocationStep,
  extractReasoningMiddleware: () => extractReasoningMiddleware,
- fillMessageParts: () => fillMessageParts,
  formatDataStreamPart: () => formatDataStreamPart,
  generateId: () => import_provider_utils21.generateId,
  generateObject: () => generateObject,
  generateText: () => generateText,
- getMessageParts: () => getMessageParts,
  getTextFromDataUrl: () => getTextFromDataUrl,
+ getToolInvocations: () => getToolInvocations,
  isAssistantMessageWithCompletedToolCalls: () => isAssistantMessageWithCompletedToolCalls,
  isDeepEqualData: () => isDeepEqualData,
  jsonSchema: () => import_provider_utils4.jsonSchema,
+ modelMessageSchema: () => modelMessageSchema,
  parseDataStreamPart: () => parseDataStreamPart,
  parsePartialJson: () => parsePartialJson,
  pipeDataStreamToResponse: () => pipeDataStreamToResponse,
- prepareAttachmentsForRequest: () => prepareAttachmentsForRequest,
  processDataStream: () => processDataStream,
  processTextStream: () => processTextStream,
  shouldResubmitMessages: () => shouldResubmitMessages,
@@ -99,8 +101,11 @@ __export(ai_exports, {
  smoothStream: () => smoothStream,
  streamObject: () => streamObject,
  streamText: () => streamText,
+ systemModelMessageSchema: () => systemModelMessageSchema,
  tool: () => tool,
+ toolModelMessageSchema: () => toolModelMessageSchema,
  updateToolCallResult: () => updateToolCallResult,
+ userModelMessageSchema: () => userModelMessageSchema,
  wrapLanguageModel: () => wrapLanguageModel
  });
  module.exports = __toCommonJS(ai_exports);
@@ -126,6 +131,21 @@ function calculateLanguageModelUsage({
  };
  }

+ // core/ui/get-tool-invocations.ts
+ function getToolInvocations(message) {
+ return message.parts.filter(
+ (part) => part.type === "tool-invocation"
+ ).map((part) => part.toolInvocation);
+ }
+
+ // core/util/extract-max-tool-invocation-step.ts
+ function extractMaxToolInvocationStep(toolInvocations) {
+ return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
+ var _a17;
+ return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
+ }, 0);
+ }
+
  // core/util/parse-partial-json.ts
  var import_provider_utils = require("@ai-sdk/provider-utils");

@@ -668,9 +688,9 @@ var fileStreamPart = {
  code: "k",
  name: "file",
  parse: (value) => {
- if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string" || !("mimeType" in value) || typeof value.mimeType !== "string") {
+ if (value == null || typeof value !== "object" || !("url" in value) || typeof value.url !== "string" || !("mediaType" in value) || typeof value.mediaType !== "string") {
  throw new Error(
- '"file" parts expect an object with a "data" and "mimeType" property.'
+ '"file" parts expect an object with a "url" and "mediaType" property.'
  );
  }
  return { type: "file", value };
@@ -848,13 +868,9 @@ async function processChatResponse({
  getCurrentDate = () => /* @__PURE__ */ new Date(),
  lastMessage
  }) {
- var _a17, _b;
+ var _a17;
  const replaceLastMessage = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
- let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
- ((_b = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.reduce((max, toolInvocation) => {
- var _a18;
- return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);
- }, 0)) != null ? _b : 0) : 0;
+ let step = replaceLastMessage ? 1 + ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) : 0;
  const message = replaceLastMessage ? structuredClone(lastMessage) : {
  id: generateId3(),
  createdAt: getCurrentDate(),
@@ -927,12 +943,12 @@ async function processChatResponse({
  if (currentReasoningPart == null) {
  currentReasoningPart = {
  type: "reasoning",
- reasoning: value.text,
+ text: value.text,
  providerMetadata: value.providerMetadata
  };
  message.parts.push(currentReasoningPart);
  } else {
- currentReasoningPart.reasoning += value.text;
+ currentReasoningPart.text += value.text;
  currentReasoningPart.providerMetadata = value.providerMetadata;
  }
  execUpdate();
@@ -945,8 +961,8 @@ async function processChatResponse({
  onFilePart(value) {
  message.parts.push({
  type: "file",
- mediaType: value.mimeType,
- data: value.data
+ mediaType: value.mediaType,
+ url: value.url
  });
  execUpdate();
  },
@@ -958,24 +974,20 @@ async function processChatResponse({
  execUpdate();
  },
  onToolCallStreamingStartPart(value) {
- if (message.toolInvocations == null) {
- message.toolInvocations = [];
- }
+ const toolInvocations = getToolInvocations(message);
  partialToolCalls[value.toolCallId] = {
  text: "",
  step,
  toolName: value.toolName,
- index: message.toolInvocations.length
+ index: toolInvocations.length
  };
- const invocation = {
+ updateToolInvocationPart(value.toolCallId, {
  state: "partial-call",
  step,
  toolCallId: value.toolCallId,
  toolName: value.toolName,
  args: void 0
- };
- message.toolInvocations.push(invocation);
- updateToolInvocationPart(value.toolCallId, invocation);
+ });
  execUpdate();
  },
  async onToolCallDeltaPart(value) {
@@ -984,68 +996,53 @@ async function processChatResponse({
  const { value: partialArgs } = await parsePartialJson(
  partialToolCall.text
  );
- const invocation = {
+ updateToolInvocationPart(value.toolCallId, {
  state: "partial-call",
  step: partialToolCall.step,
  toolCallId: value.toolCallId,
  toolName: partialToolCall.toolName,
  args: partialArgs
- };
- message.toolInvocations[partialToolCall.index] = invocation;
- updateToolInvocationPart(value.toolCallId, invocation);
+ });
  execUpdate();
  },
  async onToolCallPart(value) {
- const invocation = {
+ updateToolInvocationPart(value.toolCallId, {
  state: "call",
  step,
  ...value
- };
- if (partialToolCalls[value.toolCallId] != null) {
- message.toolInvocations[partialToolCalls[value.toolCallId].index] = invocation;
- } else {
- if (message.toolInvocations == null) {
- message.toolInvocations = [];
- }
- message.toolInvocations.push(invocation);
- }
- updateToolInvocationPart(value.toolCallId, invocation);
+ });
  execUpdate();
  if (onToolCall) {
  const result = await onToolCall({ toolCall: value });
  if (result != null) {
- const invocation2 = {
+ updateToolInvocationPart(value.toolCallId, {
  state: "result",
  step,
  ...value,
  result
- };
- message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
- updateToolInvocationPart(value.toolCallId, invocation2);
+ });
  execUpdate();
  }
  }
  },
  onToolResultPart(value) {
- const toolInvocations = message.toolInvocations;
+ const toolInvocations = getToolInvocations(message);
  if (toolInvocations == null) {
  throw new Error("tool_result must be preceded by a tool_call");
  }
  const toolInvocationIndex = toolInvocations.findIndex(
- (invocation2) => invocation2.toolCallId === value.toolCallId
+ (invocation) => invocation.toolCallId === value.toolCallId
  );
  if (toolInvocationIndex === -1) {
  throw new Error(
  "tool_result must be preceded by a tool_call with the same toolCallId"
  );
  }
- const invocation = {
+ updateToolInvocationPart(value.toolCallId, {
  ...toolInvocations[toolInvocationIndex],
  state: "result",
  ...value
- };
- toolInvocations[toolInvocationIndex] = invocation;
- updateToolInvocationPart(value.toolCallId, invocation);
+ });
  execUpdate();
  },
  onDataPart(value) {
@@ -1153,19 +1150,30 @@ async function callChatApi({
  onToolCall,
  generateId: generateId3,
  fetch: fetch2 = getOriginalFetch(),
- lastMessage
+ lastMessage,
+ getCurrentDate,
+ requestType = "generate"
  }) {
- var _a17, _b;
- const response = await fetch2(api, {
+ var _a17, _b, _c;
+ const request = requestType === "resume" ? fetch2(`${api}?chatId=${body.id}`, {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+ credentials
+ }) : fetch2(api, {
  method: "POST",
  body: JSON.stringify(body),
  headers: {
  "Content-Type": "application/json",
  ...headers
  },
- signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+ signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
  credentials
- }).catch((err) => {
+ });
+ const response = await request.catch((err) => {
  restoreMessagesOnFailure();
  throw err;
  });
@@ -1179,7 +1187,7 @@ async function callChatApi({
  if (!response.ok) {
  restoreMessagesOnFailure();
  throw new Error(
- (_b = await response.text()) != null ? _b : "Failed to fetch the chat response."
+ (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
  );
  }
  if (!response.body) {
@@ -1206,7 +1214,8 @@ async function callChatApi({
  onFinish(message, { usage, finishReason });
  }
  },
- generateId: generateId3
+ generateId: generateId3,
+ getCurrentDate
  });
  return;
  }
@@ -1327,6 +1336,36 @@ async function callCompletionApi({
  }
  }

+ // core/util/convert-file-list-to-file-ui-parts.ts
+ async function convertFileListToFileUIParts(files) {
+ if (files == null) {
+ return [];
+ }
+ if (!globalThis.FileList || !(files instanceof globalThis.FileList)) {
+ throw new Error("FileList is not supported in the current environment");
+ }
+ return Promise.all(
+ Array.from(files).map(async (file) => {
+ const { name: name17, type } = file;
+ const dataUrl = await new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.onload = (readerEvent) => {
+ var _a17;
+ resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
+ };
+ reader.onerror = (error) => reject(error);
+ reader.readAsDataURL(file);
+ });
+ return {
+ type: "file",
+ mediaType: type,
+ filename: name17,
+ url: dataUrl
+ };
+ })
+ );
+ }
+
  // core/util/data-url.ts
  function getTextFromDataUrl(dataUrl) {
  const [header, base64Content] = dataUrl.split(",");
@@ -1341,34 +1380,6 @@ function getTextFromDataUrl(dataUrl) {
  }
  }

- // core/util/extract-max-tool-invocation-step.ts
- function extractMaxToolInvocationStep(toolInvocations) {
- return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
- var _a17;
- return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
- }, 0);
- }
-
- // core/util/get-message-parts.ts
- function getMessageParts(message) {
- var _a17;
- return (_a17 = message.parts) != null ? _a17 : [
- ...message.toolInvocations ? message.toolInvocations.map((toolInvocation) => ({
- type: "tool-invocation",
- toolInvocation
- })) : [],
- ...message.content ? [{ type: "text", text: message.content }] : []
- ];
- }
-
- // core/util/fill-message-parts.ts
- function fillMessageParts(messages) {
- return messages.map((message) => ({
- ...message,
- parts: getMessageParts(message)
- }));
- }
-
  // core/util/is-deep-equal-data.ts
  function isDeepEqualData(obj1, obj2) {
  if (obj1 === obj2)
@@ -1404,63 +1415,6 @@ function isDeepEqualData(obj1, obj2) {
  return true;
  }

- // core/util/prepare-attachments-for-request.ts
- async function prepareAttachmentsForRequest(attachmentsFromOptions) {
- if (!attachmentsFromOptions) {
- return [];
- }
- if (attachmentsFromOptions instanceof FileList) {
- return Promise.all(
- Array.from(attachmentsFromOptions).map(async (attachment) => {
- const { name: name17, type } = attachment;
- const dataUrl = await new Promise((resolve, reject) => {
- const reader = new FileReader();
- reader.onload = (readerEvent) => {
- var _a17;
- resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
- };
- reader.onerror = (error) => reject(error);
- reader.readAsDataURL(attachment);
- });
- return {
- name: name17,
- contentType: type,
- url: dataUrl
- };
- })
- );
- }
- if (Array.isArray(attachmentsFromOptions)) {
- return attachmentsFromOptions;
- }
- throw new Error("Invalid attachments type");
- }
-
- // core/util/update-tool-call-result.ts
- function updateToolCallResult({
- messages,
- toolCallId,
- toolResult: result
- }) {
- var _a17;
- const lastMessage = messages[messages.length - 1];
- const invocationPart = lastMessage.parts.find(
- (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
- );
- if (invocationPart == null) {
- return;
- }
- const toolResult = {
- ...invocationPart.toolInvocation,
- state: "result",
- result
- };
- invocationPart.toolInvocation = toolResult;
- lastMessage.toolInvocations = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.map(
- (toolInvocation) => toolInvocation.toolCallId === toolCallId ? toolResult : toolInvocation
- );
- }
-
  // core/util/should-resubmit-messages.ts
  function shouldResubmitMessages({
  originalMaxToolInvocationStep,
@@ -1474,9 +1428,9 @@ function shouldResubmitMessages({
  // check if the feature is enabled:
  maxSteps > 1 && // ensure there is a last message:
  lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
- (messages.length > originalMessageCount || extractMaxToolInvocationStep(lastMessage.toolInvocations) !== originalMaxToolInvocationStep) && // check that next step is possible:
+ (messages.length > originalMessageCount || extractMaxToolInvocationStep(getToolInvocations(lastMessage)) !== originalMaxToolInvocationStep) && // check that next step is possible:
  isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
- ((_a17 = extractMaxToolInvocationStep(lastMessage.toolInvocations)) != null ? _a17 : 0) < maxSteps
+ ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps
  );
  }
  function isAssistantMessageWithCompletedToolCalls(message) {
@@ -1490,6 +1444,26 @@ function isAssistantMessageWithCompletedToolCalls(message) {
  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
  }

+ // core/util/update-tool-call-result.ts
+ function updateToolCallResult({
+ messages,
+ toolCallId,
+ toolResult: result
+ }) {
+ const lastMessage = messages[messages.length - 1];
+ const invocationPart = lastMessage.parts.find(
+ (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+ );
+ if (invocationPart == null) {
+ return;
+ }
+ invocationPart.toolInvocation = {
+ ...invocationPart.toolInvocation,
+ state: "result",
+ result
+ };
+ }
+
  // core/data-stream/create-data-stream.ts
  function createDataStream({
  execute,
@@ -2507,7 +2481,7 @@ async function generateImage({
  abortSignal,
  headers
  }) {
- var _a17;
+ var _a17, _b;
  const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
  const maxImagesPerCall = (_a17 = model.maxImagesPerCall) != null ? _a17 : 1;
  const callCount = Math.ceil(n / maxImagesPerCall);
@@ -2537,6 +2511,7 @@ async function generateImage({
  const images = [];
  const warnings = [];
  const responses = [];
+ const providerMetadata = {};
  for (const result of results) {
  images.push(
  ...result.images.map(
@@ -2553,18 +2528,32 @@ async function generateImage({
  )
  );
  warnings.push(...result.warnings);
+ if (result.providerMetadata) {
+ for (const [providerName, metadata] of Object.entries(result.providerMetadata)) {
+ (_b = providerMetadata[providerName]) != null ? _b : providerMetadata[providerName] = { images: [] };
+ providerMetadata[providerName].images.push(
+ ...result.providerMetadata[providerName].images
+ );
+ }
+ }
  responses.push(result.response);
  }
  if (!images.length) {
  throw new NoImageGeneratedError({ responses });
  }
- return new DefaultGenerateImageResult({ images, warnings, responses });
+ return new DefaultGenerateImageResult({
+ images,
+ warnings,
+ responses,
+ providerMetadata
+ });
  }
  var DefaultGenerateImageResult = class {
  constructor(options) {
  this.images = options.images;
  this.warnings = options.warnings;
  this.responses = options.responses;
+ this.providerMetadata = options.providerMetadata;
  }
  get image() {
  return this.images[0];
@@ -2614,6 +2603,9 @@ function extractContentText(content) {
  return parts.map((content2) => content2.text).join("");
  }

+ // core/prompt/convert-to-language-model-prompt.ts
+ var import_provider_utils9 = require("@ai-sdk/provider-utils");
+
  // util/download-error.ts
  var import_provider6 = require("@ai-sdk/provider");
  var name5 = "AI_DownloadError";
@@ -2778,13 +2770,6 @@ function convertDataContentToUint8Array(content) {
  }
  throw new InvalidDataContentError({ content });
  }
- function convertUint8ArrayToText(uint8Array) {
- try {
- return new TextDecoder().decode(uint8Array);
- } catch (error) {
- throw new Error("Error decoding Uint8Array to text");
- }
- }

  // core/prompt/invalid-message-role-error.ts
  var import_provider9 = require("@ai-sdk/provider");
@@ -2808,7 +2793,6 @@ var InvalidMessageRoleError = class extends import_provider9.AISDKError {
  _a7 = symbol7;

  // core/prompt/convert-to-language-model-prompt.ts
- var import_provider_utils9 = require("@ai-sdk/provider-utils");
  async function convertToLanguageModelPrompt({
  prompt,
  supportedUrls,
@@ -2864,7 +2848,6 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
  // remove empty text parts:
  (part) => part.type !== "text" || part.text !== ""
  ).map((part) => {
- var _a17;
  const providerOptions = part.providerOptions;
  switch (part.type) {
  case "file": {
@@ -2875,7 +2858,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
  type: "file",
  data,
  filename: part.filename,
- mediaType: (_a17 = mediaType != null ? mediaType : part.mediaType) != null ? _a17 : part.mimeType,
+ mediaType: mediaType != null ? mediaType : part.mediaType,
  providerOptions
  };
  }
@@ -2934,8 +2917,8 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
  ).flat().filter(
  (part) => part.type === "image" || part.type === "file"
  ).map((part) => {
- var _a17, _b;
- const mediaType = (_b = (_a17 = part.mediaType) != null ? _a17 : part.mimeType) != null ? _b : part.type === "image" ? "image/*" : void 0;
+ var _a17;
+ const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
  let data = part.type === "image" ? part.image : part.data;
  if (typeof data === "string") {
  try {
@@ -2962,7 +2945,7 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
  );
  }
  function convertPartToLanguageModelPart(part, downloadedAssets) {
- var _a17, _b, _c;
+ var _a17, _b;
  if (part.type === "text") {
  return {
  type: "text",
@@ -2983,19 +2966,19 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
  throw new Error(`Unsupported part type: ${type}`);
  }
  const { data: convertedData, mediaType: convertedMediaType } = convertToLanguageModelV2DataContent(originalData);
- let mediaType = (_a17 = convertedMediaType != null ? convertedMediaType : part.mediaType) != null ? _a17 : part.mimeType;
+ let mediaType = convertedMediaType != null ? convertedMediaType : part.mediaType;
  let data = convertedData;
  if (data instanceof URL) {
  const downloadedFile = downloadedAssets[data.toString()];
  if (downloadedFile) {
  data = downloadedFile.data;
- mediaType = (_b = downloadedFile.mediaType) != null ? _b : mediaType;
+ mediaType = (_a17 = downloadedFile.mediaType) != null ? _a17 : mediaType;
  }
  }
  switch (type) {
  case "image": {
  if (data instanceof Uint8Array || typeof data === "string") {
- mediaType = (_c = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _c : mediaType;
+ mediaType = (_b = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _b : mediaType;
  }
  return {
  type: "file",
@@ -3119,83 +3102,6 @@ var import_provider11 = require("@ai-sdk/provider");
  var import_provider_utils10 = require("@ai-sdk/provider-utils");
  var import_zod7 = require("zod");

- // core/prompt/attachments-to-parts.ts
- function attachmentsToParts(attachments) {
- var _a17, _b, _c;
- const parts = [];
- for (const attachment of attachments) {
- let url;
- try {
- url = new URL(attachment.url);
- } catch (error) {
- throw new Error(`Invalid URL: ${attachment.url}`);
- }
- switch (url.protocol) {
- case "http:":
- case "https:": {
- if ((_a17 = attachment.contentType) == null ? void 0 : _a17.startsWith("image/")) {
- parts.push({ type: "image", image: url });
- } else {
- if (!attachment.contentType) {
- throw new Error(
- "If the attachment is not an image, it must specify a content type"
- );
- }
- parts.push({
- type: "file",
- data: url,
- mediaType: attachment.contentType
- });
- }
- break;
- }
- case "data:": {
- let header;
- let base64Content;
- let mediaType;
- try {
- [header, base64Content] = attachment.url.split(",");
- mediaType = header.split(";")[0].split(":")[1];
- } catch (error) {
- throw new Error(`Error processing data URL: ${attachment.url}`);
- }
- if (mediaType == null || base64Content == null) {
- throw new Error(`Invalid data URL format: ${attachment.url}`);
- }
- if ((_b = attachment.contentType) == null ? void 0 : _b.startsWith("image/")) {
- parts.push({
- type: "image",
- image: convertDataContentToUint8Array(base64Content)
- });
- } else if ((_c = attachment.contentType) == null ? void 0 : _c.startsWith("text/")) {
- parts.push({
- type: "text",
- text: convertUint8ArrayToText(
- convertDataContentToUint8Array(base64Content)
- )
- });
- } else {
- if (!attachment.contentType) {
- throw new Error(
- "If the attachment is not an image or text, it must specify a content type"
- );
- }
- parts.push({
- type: "file",
- data: base64Content,
- mediaType: attachment.contentType
- });
- }
- break;
- }
- default: {
- throw new Error(`Unsupported URL protocol: ${url.protocol}`);
- }
- }
- }
- return parts;
- }
-
  // core/prompt/message-conversion-error.ts
  var import_provider10 = require("@ai-sdk/provider");
  var name8 = "AI_MessageConversionError";
@@ -3217,15 +3123,15 @@ var MessageConversionError = class extends import_provider10.AISDKError {
  };
  _a8 = symbol8;

- // core/prompt/convert-to-core-messages.ts
- function convertToCoreMessages(messages, options) {
+ // core/prompt/convert-to-model-messages.ts
+ function convertToModelMessages(messages, options) {
  var _a17, _b;
  const tools = (_a17 = options == null ? void 0 : options.tools) != null ? _a17 : {};
  const coreMessages = [];
  for (let i = 0; i < messages.length; i++) {
  const message = messages[i];
  const isLastMessage = i === messages.length - 1;
- const { role, content, experimental_attachments } = message;
+ const { role, content } = message;
  switch (role) {
  case "system": {
  coreMessages.push({
@@ -3235,30 +3141,24 @@ function convertToCoreMessages(messages, options) {
  break;
  }
  case "user": {
- if (message.parts == null) {
- coreMessages.push({
- role: "user",
- content: experimental_attachments ? [
- { type: "text", text: content },
- ...attachmentsToParts(experimental_attachments)
- ] : content
- });
- } else {
- const textParts = message.parts.filter((part) => part.type === "text").map((part) => ({
- type: "text",
- text: part.text
- }));
- coreMessages.push({
- role: "user",
- content: experimental_attachments ? [...textParts, ...attachmentsToParts(experimental_attachments)] : textParts
- });
- }
+ coreMessages.push({
+ role: "user",
+ content: message.parts.filter(
+ (part) => part.type === "text" || part.type === "file"
+ ).map(
+ (part) => part.type === "file" ? {
+ type: "file",
+ mediaType: part.mediaType,
+ filename: part.filename,
+ data: part.url
+ } : part
+ )
+ });
  break;
  }
  case "assistant": {
  if (message.parts != null) {
  let processBlock2 = function() {
- var _a18;
  const content2 = [];
  for (const part of block) {
  switch (part.type) {
@@ -3269,16 +3169,15 @@ function convertToCoreMessages(messages, options) {
  case "file": {
  content2.push({
  type: "file",
- data: part.data,
- mediaType: (_a18 = part.mediaType) != null ? _a18 : part.mimeType
- // TODO migration, remove
+ mediaType: part.mediaType,
+ data: part.url
  });
  break;
  }
  case "reasoning": {
  content2.push({
  type: "reasoning",
- text: part.reasoning,
+ text: part.text,
  providerOptions: part.providerMetadata
  });
  break;
@@ -3368,73 +3267,11 @@ function convertToCoreMessages(messages, options) {
  processBlock2();
  break;
  }
- const toolInvocations = message.toolInvocations;
- if (toolInvocations == null || toolInvocations.length === 0) {
- coreMessages.push({ role: "assistant", content });
- break;
- }
- const maxStep = toolInvocations.reduce((max, toolInvocation) => {
- var _a18;
- return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);
- }, 0);
- for (let i2 = 0; i2 <= maxStep; i2++) {
- const stepInvocations = toolInvocations.filter(
- (toolInvocation) => {
- var _a18;
- return ((_a18 = toolInvocation.step) != null ? _a18 : 0) === i2;
- }
- );
- if (stepInvocations.length === 0) {
- continue;
- }
- coreMessages.push({
- role: "assistant",
- content: [
- ...isLastMessage && content && i2 === 0 ? [{ type: "text", text: content }] : [],
- ...stepInvocations.map(
- ({ toolCallId, toolName, args }) => ({
- type: "tool-call",
- toolCallId,
- toolName,
- args
- })
- )
- ]
- });
- coreMessages.push({
- role: "tool",
- content: stepInvocations.map((toolInvocation) => {
- if (!("result" in toolInvocation)) {
- throw new MessageConversionError({
- originalMessage: message,
- message: "ToolInvocation must have a result: " + JSON.stringify(toolInvocation)
- });
- }
- const { toolCallId, toolName, result } = toolInvocation;
- const tool2 = tools[toolName];
- return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
- type: "tool-result",
- toolCallId,
- toolName,
- result: tool2.experimental_toToolResultContent(result),
- experimental_content: tool2.experimental_toToolResultContent(result)
- } : {
- type: "tool-result",
- toolCallId,
- toolName,
- result
- };
- })
- });
- }
  if (content && !isLastMessage) {
  coreMessages.push({ role: "assistant", content });
  }
  break;
  }
- case "data": {
- break;
- }
  default: {
  const _exhaustiveCheck = role;
  throw new MessageConversionError({
@@ -3446,6 +3283,7 @@ function convertToCoreMessages(messages, options) {
  }
  return coreMessages;
  }
+ var convertToCoreMessages = convertToModelMessages;

  // core/prompt/detect-prompt-type.ts
  function detectPromptType(prompt) {
@@ -3534,7 +3372,6 @@ var imagePartSchema = import_zod5.z.object({
  type: import_zod5.z.literal("image"),
  image: import_zod5.z.union([dataContentSchema, import_zod5.z.instanceof(URL)]),
  mediaType: import_zod5.z.string().optional(),
- mimeType: import_zod5.z.string().optional(),
  providerOptions: providerMetadataSchema.optional()
  });
  var filePartSchema = import_zod5.z.object({
@@ -3542,7 +3379,6 @@ var filePartSchema = import_zod5.z.object({
  data: import_zod5.z.union([dataContentSchema, import_zod5.z.instanceof(URL)]),
  filename: import_zod5.z.string().optional(),
  mediaType: import_zod5.z.string(),
- mimeType: import_zod5.z.string().optional(),
  providerOptions: providerMetadataSchema.optional()
  });
  var reasoningPartSchema = import_zod5.z.object({
@@ -3568,12 +3404,15 @@ var toolResultPartSchema = import_zod5.z.object({
  });

  // core/prompt/message.ts
- var coreSystemMessageSchema = import_zod6.z.object({
- role: import_zod6.z.literal("system"),
- content: import_zod6.z.string(),
- providerOptions: providerMetadataSchema.optional()
- });
- var coreUserMessageSchema = import_zod6.z.object({
+ var systemModelMessageSchema = import_zod6.z.object(
+ {
+ role: import_zod6.z.literal("system"),
+ content: import_zod6.z.string(),
+ providerOptions: providerMetadataSchema.optional()
+ }
+ );
+ var coreSystemMessageSchema = systemModelMessageSchema;
+ var userModelMessageSchema = import_zod6.z.object({
  role: import_zod6.z.literal("user"),
  content: import_zod6.z.union([
  import_zod6.z.string(),
@@ -3581,7 +3420,8 @@ var coreUserMessageSchema = import_zod6.z.object({
  ]),
  providerOptions: providerMetadataSchema.optional()
  });
- var coreAssistantMessageSchema = import_zod6.z.object({
+ var coreUserMessageSchema = userModelMessageSchema;
+ var assistantModelMessageSchema = import_zod6.z.object({
  role: import_zod6.z.literal("assistant"),
  content: import_zod6.z.union([
  import_zod6.z.string(),
@@ -3596,17 +3436,20 @@ var coreAssistantMessageSchema = import_zod6.z.object({
  ]),
  providerOptions: providerMetadataSchema.optional()
  });
- var coreToolMessageSchema = import_zod6.z.object({
+ var coreAssistantMessageSchema = assistantModelMessageSchema;
+ var toolModelMessageSchema = import_zod6.z.object({
  role: import_zod6.z.literal("tool"),
  content: import_zod6.z.array(toolResultPartSchema),
  providerOptions: providerMetadataSchema.optional()
  });
- var coreMessageSchema = import_zod6.z.union([
- coreSystemMessageSchema,
- coreUserMessageSchema,
- coreAssistantMessageSchema,
- coreToolMessageSchema
+ var coreToolMessageSchema = toolModelMessageSchema;
+ var modelMessageSchema = import_zod6.z.union([
+ systemModelMessageSchema,
+ userModelMessageSchema,
+ assistantModelMessageSchema,
+ toolModelMessageSchema
  ]);
+ var coreMessageSchema = modelMessageSchema;

  // core/prompt/standardize-prompt.ts
  async function standardizePrompt({
@@ -3653,10 +3496,10 @@ async function standardizePrompt({
  if (promptType === "other") {
  throw new import_provider11.InvalidPromptError({
  prompt,
- message: "messages must be an array of CoreMessage or UIMessage"
+ message: "messages must be an array of ModelMessage or UIMessage"
  });
  }
- const messages = promptType === "ui-messages" ? convertToCoreMessages(prompt.messages, {
+ const messages = promptType === "ui-messages" ? convertToModelMessages(prompt.messages, {
  tools
  }) : prompt.messages;
  if (messages.length === 0) {
@@ -3667,12 +3510,12 @@ async function standardizePrompt({
  }
  const validationResult = await (0, import_provider_utils10.safeValidateTypes)({
  value: messages,
- schema: import_zod7.z.array(coreMessageSchema)
+ schema: import_zod7.z.array(modelMessageSchema)
  });
  if (!validationResult.success) {
  throw new import_provider11.InvalidPromptError({
  prompt,
- message: "messages must be an array of CoreMessage or UIMessage",
+ message: "messages must be an array of ModelMessage or UIMessage",
  cause: validationResult.error
  });
  }
@@ -3919,10 +3762,36 @@ var enumOutputStrategy = (enumValues) => {
  })
  };
  },
- validatePartialResult() {
- throw new import_provider12.UnsupportedFunctionalityError({
- functionality: "partial results in enum mode"
- });
+ async validatePartialResult({ value, textDelta }) {
+ if (!(0, import_provider12.isJSONObject)(value) || typeof value.result !== "string") {
+ return {
+ success: false,
+ error: new import_provider12.TypeValidationError({
+ value,
+ cause: 'value must be an object that contains a string in the "result" property.'
+ })
+ };
+ }
+ const result = value.result;
+ const possibleEnumValues = enumValues.filter(
+ (enumValue) => enumValue.startsWith(result)
+ );
+ if (value.result.length === 0 || possibleEnumValues.length === 0) {
+ return {
+ success: false,
+ error: new import_provider12.TypeValidationError({
+ value,
+ cause: "value must be a string in the enum"
+ })
+ };
+ }
+ return {
+ success: true,
+ value: {
+ partial: possibleEnumValues.length > 1 ? result : possibleEnumValues[0],
+ textDelta
+ }
+ };
  },
  createElementStream() {
  throw new import_provider12.UnsupportedFunctionalityError({
@@ -4508,6 +4377,7 @@ function streamObject(options) {
  } = {},
  ...settings
  } = options;
+ const enumValues = "enum" in options && options.enum ? options.enum : void 0;
  const {
  schema: inputSchema,
  schemaDescription,
@@ -4517,9 +4387,14 @@ function streamObject(options) {
  output,
  schema: inputSchema,
  schemaName,
- schemaDescription
+ schemaDescription,
+ enumValues
+ });
+ const outputStrategy = getOutputStrategy({
+ output,
+ schema: inputSchema,
+ enumValues
  });
- const outputStrategy = getOutputStrategy({ output, schema: inputSchema });
  return new DefaultStreamObjectResult({
  model,
  telemetry,
@@ -7274,8 +7149,8 @@ var DefaultStreamTextResult = class {
  controller.enqueue(
  // TODO update protocol to v2 or replace with event stream
  formatDataStreamPart("file", {
- mimeType: chunk.file.mediaType,
- data: chunk.file.base64
+ mediaType: chunk.file.mediaType,
+ url: `data:${chunk.file.mediaType};base64,${chunk.file.base64}`
  })
  );
  break;
@@ -7497,16 +7372,16 @@ var DefaultGeneratedAudioFile = class extends DefaultGeneratedFile {
  super({ data, mediaType });
  let format = "mp3";
  if (mediaType) {
- const mimeTypeParts = mediaType.split("/");
- if (mimeTypeParts.length === 2) {
+ const mediaTypeParts = mediaType.split("/");
+ if (mediaTypeParts.length === 2) {
  if (mediaType !== "audio/mpeg") {
- format = mimeTypeParts[1];
+ format = mediaTypeParts[1];
  }
  }
  }
  if (!format) {
  throw new Error(
- "Audio format must be provided or determinable from mimeType"
+ "Audio format must be provided or determinable from media type"
  );
  }
  this.format = format;
@@ -7935,7 +7810,7 @@ function appendResponseMessages({
  responseMessages,
  _internal: { currentDate = () => /* @__PURE__ */ new Date() } = {}
  }) {
- var _a17, _b, _c, _d, _e;
+ var _a17, _b;
  const clonedMessages = structuredClone(messages);
  for (const message of responseMessages) {
  const role = message.role;
@@ -7943,7 +7818,7 @@ function appendResponseMessages({
  const isLastMessageAssistant = lastMessage.role === "assistant";
  switch (role) {
  case "assistant": {
- let getToolInvocations2 = function(step) {
+ let getToolInvocationsForStep2 = function(step) {
  return (typeof message.content === "string" ? [] : message.content.filter((part) => part.type === "tool-call")).map((call) => ({
  state: "call",
  step,
@@ -7952,7 +7827,7 @@ function appendResponseMessages({
  toolCallId: call.toolCallId,
  toolName: call.toolName
  }));
- var getToolInvocations = getToolInvocations2;
+ var getToolInvocationsForStep = getToolInvocationsForStep2;
  const parts = [{ type: "step-start" }];
  let textContent = "";
  let reasoningTextContent = void 0;
@@ -7979,12 +7854,12 @@ function appendResponseMessages({
  if (reasoningPart == null) {
  reasoningPart = {
  type: "reasoning",
- reasoning: ""
+ text: ""
  };
  parts.push(reasoningPart);
  }
  reasoningTextContent = (reasoningTextContent != null ? reasoningTextContent : "") + part.text;
- reasoningPart.reasoning += part.text;
+ reasoningPart.text += part.text;
  reasoningPart.providerMetadata = part.providerOptions;
  break;
  }
@@ -7999,8 +7874,8 @@ function appendResponseMessages({
  }
  parts.push({
  type: "file",
- mediaType: (_a17 = part.mediaType) != null ? _a17 : part.mimeType,
- data: convertDataContentToBase64String(part.data)
+ mediaType: part.mediaType,
+ url: `data:${part.mediaType};base64,${convertDataContentToBase64String(part.data)}`
  });
  break;
  }
@@ -8008,16 +7883,13 @@ function appendResponseMessages({
  }
  if (isLastMessageAssistant) {
  const maxStep = extractMaxToolInvocationStep(
- lastMessage.toolInvocations
+ getToolInvocations(lastMessage)
+ // TODO remove once Message is removed
  );
- (_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
+ (_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];
  lastMessage.content = textContent;
  lastMessage.parts.push(...parts);
- lastMessage.toolInvocations = [
- ...(_c = lastMessage.toolInvocations) != null ? _c : [],
- ...getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1)
- ];
- getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({
+ getToolInvocationsForStep2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({
  type: "tool-invocation",
  toolInvocation: call
  })).forEach((part) => {
@@ -8030,10 +7902,9 @@ function appendResponseMessages({
  createdAt: currentDate(),
  // generate a createdAt date for the message, will be overridden by the client
  content: textContent,
- toolInvocations: getToolInvocations2(0),
  parts: [
  ...parts,
- ...getToolInvocations2(0).map((call) => ({
+ ...getToolInvocationsForStep2(0).map((call) => ({
  type: "tool-invocation",
  toolInvocation: call
  }))
@@ -8043,17 +7914,17 @@ function appendResponseMessages({
  break;
  }
  case "tool": {
- (_d = lastMessage.toolInvocations) != null ? _d : lastMessage.toolInvocations = [];
  if (lastMessage.role !== "assistant") {
  throw new Error(
  `Tool result must follow an assistant message: ${lastMessage.role}`
  );
  }
- (_e = lastMessage.parts) != null ? _e : lastMessage.parts = [];
+ (_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
  for (const contentPart of message.content) {
- const toolCall = lastMessage.toolInvocations.find(
- (call) => call.toolCallId === contentPart.toolCallId
- );
+ const toolCall = getToolInvocations(
+ lastMessage
+ // TODO remove once Message is removed
+ ).find((call) => call.toolCallId === contentPart.toolCallId);
  const toolCallPart = lastMessage.parts.find(
  (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === contentPart.toolCallId
  );
@@ -8606,6 +8477,24 @@ var MCPClient = class {
  await ((_a17 = this.transport) == null ? void 0 : _a17.close());
  this.onClose();
  }
+ assertCapability(method) {
+ switch (method) {
+ case "initialize":
+ break;
+ case "tools/list":
+ case "tools/call":
+ if (!this.serverCapabilities.tools) {
+ throw new MCPClientError({
+ message: `Server does not support tools`
+ });
+ }
+ break;
+ default:
+ throw new MCPClientError({
+ message: `Unsupported method: ${method}`
+ });
+ }
+ }
  async request({
  request,
  resultSchema,
@@ -8619,6 +8508,7 @@ var MCPClient = class {
  })
  );
  }
+ this.assertCapability(request.method);
  const signal = options == null ? void 0 : options.signal;
  signal == null ? void 0 : signal.throwIfAborted();
  const messageId = this.requestMessageId++;
@@ -8647,7 +8537,7 @@ var MCPClient = class {
  resolve(result);
  } catch (error) {
  const parseError = new MCPClientError({
- message: "Failed to parse server initialization result",
+ message: "Failed to parse server response",
  cause: error
  });
  reject(parseError);
@@ -8663,11 +8553,6 @@ var MCPClient = class {
  params,
  options
  } = {}) {
- if (!this.serverCapabilities.tools) {
- throw new MCPClientError({
- message: `Server does not support tools`
- });
- }
  try {
  return this.request({
  request: { method: "tools/list", params },
@@ -8683,11 +8568,6 @@ var MCPClient = class {
  args,
  options
  }) {
- if (!this.serverCapabilities.tools) {
- throw new MCPClientError({
- message: `Server does not support tools`
- });
- }
  try {
  return this.request({
  request: { method: "tools/call", params: { name: name17, arguments: args } },
@@ -8933,9 +8813,12 @@ var StreamData = class {
  appendClientMessage,
  appendResponseMessages,
  asSchema,
+ assistantModelMessageSchema,
  callChatApi,
  callCompletionApi,
+ convertFileListToFileUIParts,
  convertToCoreMessages,
+ convertToModelMessages,
  coreAssistantMessageSchema,
  coreMessageSchema,
  coreSystemMessageSchema,
@@ -8958,20 +8841,19 @@ var StreamData = class {
  experimental_transcribe,
  extractMaxToolInvocationStep,
  extractReasoningMiddleware,
- fillMessageParts,
  formatDataStreamPart,
  generateId,
  generateObject,
  generateText,
- getMessageParts,
  getTextFromDataUrl,
+ getToolInvocations,
  isAssistantMessageWithCompletedToolCalls,
  isDeepEqualData,
  jsonSchema,
+ modelMessageSchema,
  parseDataStreamPart,
  parsePartialJson,
  pipeDataStreamToResponse,
- prepareAttachmentsForRequest,
  processDataStream,
  processTextStream,
  shouldResubmitMessages,
@@ -8980,8 +8862,11 @@ var StreamData = class {
  smoothStream,
  streamObject,
  streamText,
+ systemModelMessageSchema,
  tool,
+ toolModelMessageSchema,
  updateToolCallResult,
+ userModelMessageSchema,
  wrapLanguageModel
  });
  //# sourceMappingURL=index.js.map