ai 5.0.0-canary.16 → 5.0.0-canary.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -9,9 +9,9 @@ import { createIdGenerator as createIdGenerator5, generateId as generateId2 } fr
 
  // core/util/index.ts
  import {
+ asSchema,
  generateId,
- jsonSchema,
- asSchema
+ jsonSchema
  } from "@ai-sdk/provider-utils";
 
  // core/util/process-chat-response.ts
@@ -586,9 +586,9 @@ var fileStreamPart = {
  code: "k",
  name: "file",
  parse: (value) => {
- if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string" || !("mimeType" in value) || typeof value.mimeType !== "string") {
+ if (value == null || typeof value !== "object" || !("url" in value) || typeof value.url !== "string" || !("mediaType" in value) || typeof value.mediaType !== "string") {
  throw new Error(
- '"file" parts expect an object with a "data" and "mimeType" property.'
+ '"file" parts expect an object with a "url" and "mediaType" property.'
  );
  }
  return { type: "file", value };
@@ -859,8 +859,8 @@ async function processChatResponse({
  onFilePart(value) {
  message.parts.push({
  type: "file",
- mediaType: value.mimeType,
- data: value.data
+ mediaType: value.mediaType,
+ url: value.url
  });
  execUpdate();
  },
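
Note: the two hunks above switch "file" parts from a data/mimeType pair to a url/mediaType pair, both in the stream-part parser and in the UI message part pushed by onFilePart. A minimal sketch of the new shape (values are illustrative, not taken from the package):

    // a "file" part now carries a media type plus a URL (typically a data URL)
    const filePart = {
      type: "file",
      mediaType: "image/png",
      url: "data:image/png;base64,AAAA"
    };
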
@@ -1048,19 +1048,30 @@ async function callChatApi({
  onToolCall,
  generateId: generateId3,
  fetch: fetch2 = getOriginalFetch(),
- lastMessage
+ lastMessage,
+ getCurrentDate,
+ requestType = "generate"
  }) {
- var _a17, _b;
- const response = await fetch2(api, {
+ var _a17, _b, _c;
+ const request = requestType === "resume" ? fetch2(`${api}?chatId=${body.id}`, {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+ credentials
+ }) : fetch2(api, {
  method: "POST",
  body: JSON.stringify(body),
  headers: {
  "Content-Type": "application/json",
  ...headers
  },
- signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+ signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
  credentials
- }).catch((err) => {
+ });
+ const response = await request.catch((err) => {
  restoreMessagesOnFailure();
  throw err;
  });
@@ -1074,7 +1085,7 @@ async function callChatApi({
  if (!response.ok) {
  restoreMessagesOnFailure();
  throw new Error(
- (_b = await response.text()) != null ? _b : "Failed to fetch the chat response."
+ (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
  );
  }
  if (!response.body) {
@@ -1101,7 +1112,8 @@ async function callChatApi({
  onFinish(message, { usage, finishReason });
  }
  },
- generateId: generateId3
+ generateId: generateId3,
+ getCurrentDate
  });
  return;
  }
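
Note: the callChatApi hunks above add a requestType option: the default "generate" path still POSTs the JSON body, while "resume" issues a GET to the same endpoint with the chat id in the query string. A rough sketch of the two request shapes implied by the diff (endpoint, chat id, and headers are illustrative):

    const api = "/api/chat";
    const body = { id: "chat-123", messages: [] };
    // requestType === "resume": GET with the chat id as a query parameter, no request body
    const resumeRequest = fetch(`${api}?chatId=${body.id}`, {
      method: "GET",
      headers: { "Content-Type": "application/json" }
    });
    // requestType === "generate" (default): POST with the serialized body
    const generateRequest = fetch(api, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(body)
    });
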
@@ -1222,6 +1234,36 @@ async function callCompletionApi({
  }
  }
 
+ // core/util/convert-file-list-to-file-ui-parts.ts
+ async function convertFileListToFileUIParts(files) {
+ if (files == null) {
+ return [];
+ }
+ if (!globalThis.FileList || !(files instanceof globalThis.FileList)) {
+ throw new Error("FileList is not supported in the current environment");
+ }
+ return Promise.all(
+ Array.from(files).map(async (file) => {
+ const { name: name17, type } = file;
+ const dataUrl = await new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.onload = (readerEvent) => {
+ var _a17;
+ resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
+ };
+ reader.onerror = (error) => reject(error);
+ reader.readAsDataURL(file);
+ });
+ return {
+ type: "file",
+ mediaType: type,
+ filename: name17,
+ url: dataUrl
+ };
+ })
+ );
+ }
+
  // core/util/data-url.ts
  function getTextFromDataUrl(dataUrl) {
  const [header, base64Content] = dataUrl.split(",");
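
Note: convertFileListToFileUIParts added above is also added to the package export list at the end of this diff. A minimal usage sketch, assuming a browser environment with a file input (the selector is illustrative):

    import { convertFileListToFileUIParts } from "ai";

    const input = document.querySelector('input[type="file"]');
    // resolves to an array of { type: "file", mediaType, filename, url } parts,
    // where url is a data URL produced by FileReader.readAsDataURL
    const fileParts = await convertFileListToFileUIParts(input.files);
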
@@ -1271,58 +1313,6 @@ function isDeepEqualData(obj1, obj2) {
  return true;
  }
 
- // core/util/prepare-attachments-for-request.ts
- async function prepareAttachmentsForRequest(attachmentsFromOptions) {
- if (!attachmentsFromOptions) {
- return [];
- }
- if (globalThis.FileList && attachmentsFromOptions instanceof globalThis.FileList) {
- return Promise.all(
- Array.from(attachmentsFromOptions).map(async (attachment) => {
- const { name: name17, type } = attachment;
- const dataUrl = await new Promise((resolve, reject) => {
- const reader = new FileReader();
- reader.onload = (readerEvent) => {
- var _a17;
- resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
- };
- reader.onerror = (error) => reject(error);
- reader.readAsDataURL(attachment);
- });
- return {
- name: name17,
- contentType: type,
- url: dataUrl
- };
- })
- );
- }
- if (Array.isArray(attachmentsFromOptions)) {
- return attachmentsFromOptions;
- }
- throw new Error("Invalid attachments type");
- }
-
- // core/util/update-tool-call-result.ts
- function updateToolCallResult({
- messages,
- toolCallId,
- toolResult: result
- }) {
- const lastMessage = messages[messages.length - 1];
- const invocationPart = lastMessage.parts.find(
- (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
- );
- if (invocationPart == null) {
- return;
- }
- invocationPart.toolInvocation = {
- ...invocationPart.toolInvocation,
- state: "result",
- result
- };
- }
-
  // core/util/should-resubmit-messages.ts
  function shouldResubmitMessages({
  originalMaxToolInvocationStep,
@@ -1352,6 +1342,26 @@ function isAssistantMessageWithCompletedToolCalls(message) {
  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
  }
 
+ // core/util/update-tool-call-result.ts
+ function updateToolCallResult({
+ messages,
+ toolCallId,
+ toolResult: result
+ }) {
+ const lastMessage = messages[messages.length - 1];
+ const invocationPart = lastMessage.parts.find(
+ (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+ );
+ if (invocationPart == null) {
+ return;
+ }
+ invocationPart.toolInvocation = {
+ ...invocationPart.toolInvocation,
+ state: "result",
+ result
+ };
+ }
+
  // core/data-stream/create-data-stream.ts
  function createDataStream({
  execute,
@@ -2372,7 +2382,7 @@ async function generateImage({
  abortSignal,
  headers
  }) {
- var _a17;
+ var _a17, _b;
  const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
  const maxImagesPerCall = (_a17 = model.maxImagesPerCall) != null ? _a17 : 1;
  const callCount = Math.ceil(n / maxImagesPerCall);
@@ -2402,6 +2412,7 @@ async function generateImage({
  const images = [];
  const warnings = [];
  const responses = [];
+ const providerMetadata = {};
  for (const result of results) {
  images.push(
  ...result.images.map(
@@ -2418,18 +2429,32 @@ async function generateImage({
  )
  );
  warnings.push(...result.warnings);
+ if (result.providerMetadata) {
+ for (const [providerName, metadata] of Object.entries(result.providerMetadata)) {
+ (_b = providerMetadata[providerName]) != null ? _b : providerMetadata[providerName] = { images: [] };
+ providerMetadata[providerName].images.push(
+ ...result.providerMetadata[providerName].images
+ );
+ }
+ }
  responses.push(result.response);
  }
  if (!images.length) {
  throw new NoImageGeneratedError({ responses });
  }
- return new DefaultGenerateImageResult({ images, warnings, responses });
+ return new DefaultGenerateImageResult({
+ images,
+ warnings,
+ responses,
+ providerMetadata
+ });
  }
  var DefaultGenerateImageResult = class {
  constructor(options) {
  this.images = options.images;
  this.warnings = options.warnings;
  this.responses = options.responses;
+ this.providerMetadata = options.providerMetadata;
  }
  get image() {
  return this.images[0];
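
Note: generateImage results now aggregate provider metadata across calls into a providerMetadata object, keyed by provider name with an images array per provider. A shape-only sketch (the provider name and metadata contents are placeholders, not real output):

    // result.providerMetadata after generateImage resolves (placeholder values)
    const providerMetadata = {
      "example-provider": {
        images: [{ /* provider-specific metadata for the first generated image */ }]
      }
    };
    const firstImageMetadata = providerMetadata["example-provider"].images[0];
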
@@ -2482,6 +2507,9 @@ function extractContentText(content) {
  return parts.map((content2) => content2.text).join("");
  }
 
+ // core/prompt/convert-to-language-model-prompt.ts
+ import { isUrlSupported } from "@ai-sdk/provider-utils";
+
  // util/download-error.ts
  import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
  var name5 = "AI_DownloadError";
@@ -2649,13 +2677,6 @@ function convertDataContentToUint8Array(content) {
  }
  throw new InvalidDataContentError({ content });
  }
- function convertUint8ArrayToText(uint8Array) {
- try {
- return new TextDecoder().decode(uint8Array);
- } catch (error) {
- throw new Error("Error decoding Uint8Array to text");
- }
- }
 
  // core/prompt/invalid-message-role-error.ts
  import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
@@ -2679,7 +2700,6 @@ var InvalidMessageRoleError = class extends AISDKError8 {
  _a7 = symbol7;
 
  // core/prompt/convert-to-language-model-prompt.ts
- import { isUrlSupported } from "@ai-sdk/provider-utils";
  async function convertToLanguageModelPrompt({
  prompt,
  supportedUrls,
@@ -2735,7 +2755,6 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
  // remove empty text parts:
  (part) => part.type !== "text" || part.text !== ""
  ).map((part) => {
- var _a17;
  const providerOptions = part.providerOptions;
  switch (part.type) {
  case "file": {
@@ -2746,7 +2765,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
  type: "file",
  data,
  filename: part.filename,
- mediaType: (_a17 = mediaType != null ? mediaType : part.mediaType) != null ? _a17 : part.mimeType,
+ mediaType: mediaType != null ? mediaType : part.mediaType,
  providerOptions
  };
  }
@@ -2805,8 +2824,8 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
  ).flat().filter(
  (part) => part.type === "image" || part.type === "file"
  ).map((part) => {
- var _a17, _b;
- const mediaType = (_b = (_a17 = part.mediaType) != null ? _a17 : part.mimeType) != null ? _b : part.type === "image" ? "image/*" : void 0;
+ var _a17;
+ const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
  let data = part.type === "image" ? part.image : part.data;
  if (typeof data === "string") {
  try {
@@ -2833,7 +2852,7 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
  );
  }
  function convertPartToLanguageModelPart(part, downloadedAssets) {
- var _a17, _b, _c;
+ var _a17, _b;
  if (part.type === "text") {
  return {
  type: "text",
@@ -2854,19 +2873,19 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
  throw new Error(`Unsupported part type: ${type}`);
  }
  const { data: convertedData, mediaType: convertedMediaType } = convertToLanguageModelV2DataContent(originalData);
- let mediaType = (_a17 = convertedMediaType != null ? convertedMediaType : part.mediaType) != null ? _a17 : part.mimeType;
+ let mediaType = convertedMediaType != null ? convertedMediaType : part.mediaType;
  let data = convertedData;
  if (data instanceof URL) {
  const downloadedFile = downloadedAssets[data.toString()];
  if (downloadedFile) {
  data = downloadedFile.data;
- mediaType = (_b = downloadedFile.mediaType) != null ? _b : mediaType;
+ mediaType = (_a17 = downloadedFile.mediaType) != null ? _a17 : mediaType;
  }
  }
  switch (type) {
  case "image": {
  if (data instanceof Uint8Array || typeof data === "string") {
- mediaType = (_c = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _c : mediaType;
+ mediaType = (_b = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _b : mediaType;
  }
  return {
  type: "file",
@@ -2990,83 +3009,6 @@ import { InvalidPromptError } from "@ai-sdk/provider";
  import { safeValidateTypes } from "@ai-sdk/provider-utils";
  import { z as z7 } from "zod";
 
- // core/prompt/attachments-to-parts.ts
- function attachmentsToParts(attachments) {
- var _a17, _b, _c;
- const parts = [];
- for (const attachment of attachments) {
- let url;
- try {
- url = new URL(attachment.url);
- } catch (error) {
- throw new Error(`Invalid URL: ${attachment.url}`);
- }
- switch (url.protocol) {
- case "http:":
- case "https:": {
- if ((_a17 = attachment.contentType) == null ? void 0 : _a17.startsWith("image/")) {
- parts.push({ type: "image", image: url });
- } else {
- if (!attachment.contentType) {
- throw new Error(
- "If the attachment is not an image, it must specify a content type"
- );
- }
- parts.push({
- type: "file",
- data: url,
- mediaType: attachment.contentType
- });
- }
- break;
- }
- case "data:": {
- let header;
- let base64Content;
- let mediaType;
- try {
- [header, base64Content] = attachment.url.split(",");
- mediaType = header.split(";")[0].split(":")[1];
- } catch (error) {
- throw new Error(`Error processing data URL: ${attachment.url}`);
- }
- if (mediaType == null || base64Content == null) {
- throw new Error(`Invalid data URL format: ${attachment.url}`);
- }
- if ((_b = attachment.contentType) == null ? void 0 : _b.startsWith("image/")) {
- parts.push({
- type: "image",
- image: convertDataContentToUint8Array(base64Content)
- });
- } else if ((_c = attachment.contentType) == null ? void 0 : _c.startsWith("text/")) {
- parts.push({
- type: "text",
- text: convertUint8ArrayToText(
- convertDataContentToUint8Array(base64Content)
- )
- });
- } else {
- if (!attachment.contentType) {
- throw new Error(
- "If the attachment is not an image or text, it must specify a content type"
- );
- }
- parts.push({
- type: "file",
- data: base64Content,
- mediaType: attachment.contentType
- });
- }
- break;
- }
- default: {
- throw new Error(`Unsupported URL protocol: ${url.protocol}`);
- }
- }
- }
- return parts;
- }
-
  // core/prompt/message-conversion-error.ts
  import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
  var name8 = "AI_MessageConversionError";
@@ -3088,15 +3030,15 @@ var MessageConversionError = class extends AISDKError9 {
  };
  _a8 = symbol8;
 
- // core/prompt/convert-to-core-messages.ts
- function convertToCoreMessages(messages, options) {
+ // core/prompt/convert-to-model-messages.ts
+ function convertToModelMessages(messages, options) {
  var _a17, _b;
  const tools = (_a17 = options == null ? void 0 : options.tools) != null ? _a17 : {};
  const coreMessages = [];
  for (let i = 0; i < messages.length; i++) {
  const message = messages[i];
  const isLastMessage = i === messages.length - 1;
- const { role, content, experimental_attachments } = message;
+ const { role, content } = message;
  switch (role) {
  case "system": {
  coreMessages.push({
@@ -3106,30 +3048,24 @@ function convertToCoreMessages(messages, options) {
  break;
  }
  case "user": {
- if (message.parts == null) {
- coreMessages.push({
- role: "user",
- content: experimental_attachments ? [
- { type: "text", text: content },
- ...attachmentsToParts(experimental_attachments)
- ] : content
- });
- } else {
- const textParts = message.parts.filter((part) => part.type === "text").map((part) => ({
- type: "text",
- text: part.text
- }));
- coreMessages.push({
- role: "user",
- content: experimental_attachments ? [...textParts, ...attachmentsToParts(experimental_attachments)] : textParts
- });
- }
+ coreMessages.push({
+ role: "user",
+ content: message.parts.filter(
+ (part) => part.type === "text" || part.type === "file"
+ ).map(
+ (part) => part.type === "file" ? {
+ type: "file",
+ mediaType: part.mediaType,
+ filename: part.filename,
+ data: part.url
+ } : part
+ )
+ });
  break;
  }
  case "assistant": {
  if (message.parts != null) {
  let processBlock2 = function() {
- var _a18;
  const content2 = [];
  for (const part of block) {
  switch (part.type) {
@@ -3140,9 +3076,8 @@ function convertToCoreMessages(messages, options) {
  case "file": {
  content2.push({
  type: "file",
- data: part.data,
- mediaType: (_a18 = part.mediaType) != null ? _a18 : part.mimeType
- // TODO migration, remove
+ mediaType: part.mediaType,
+ data: part.url
  });
  break;
  }
@@ -3255,6 +3190,7 @@ function convertToCoreMessages(messages, options) {
  }
  return coreMessages;
  }
+ var convertToCoreMessages = convertToModelMessages;
 
  // core/prompt/detect-prompt-type.ts
  function detectPromptType(prompt) {
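
Note: convertToCoreMessages is kept as an alias for the renamed convertToModelMessages, and user messages are now read exclusively from message.parts (text and file parts, with file parts using url/mediaType). A small migration sketch with illustrative message content:

    import { convertToModelMessages } from "ai";

    const uiMessages = [
      {
        role: "user",
        parts: [
          { type: "text", text: "Summarize this file" },
          {
            type: "file",
            mediaType: "text/plain",
            filename: "notes.txt",
            url: "data:text/plain;base64,aGVsbG8="
          }
        ]
      }
    ];

    // previously convertToCoreMessages(uiMessages); the old name still works as an alias
    const modelMessages = convertToModelMessages(uiMessages);
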
@@ -3343,7 +3279,6 @@ var imagePartSchema = z5.object({
  type: z5.literal("image"),
  image: z5.union([dataContentSchema, z5.instanceof(URL)]),
  mediaType: z5.string().optional(),
- mimeType: z5.string().optional(),
  providerOptions: providerMetadataSchema.optional()
  });
  var filePartSchema = z5.object({
@@ -3351,7 +3286,6 @@ var filePartSchema = z5.object({
  data: z5.union([dataContentSchema, z5.instanceof(URL)]),
  filename: z5.string().optional(),
  mediaType: z5.string(),
- mimeType: z5.string().optional(),
  providerOptions: providerMetadataSchema.optional()
  });
  var reasoningPartSchema = z5.object({
@@ -3377,12 +3311,15 @@ var toolResultPartSchema = z5.object({
  });
 
  // core/prompt/message.ts
- var coreSystemMessageSchema = z6.object({
- role: z6.literal("system"),
- content: z6.string(),
- providerOptions: providerMetadataSchema.optional()
- });
- var coreUserMessageSchema = z6.object({
+ var systemModelMessageSchema = z6.object(
+ {
+ role: z6.literal("system"),
+ content: z6.string(),
+ providerOptions: providerMetadataSchema.optional()
+ }
+ );
+ var coreSystemMessageSchema = systemModelMessageSchema;
+ var userModelMessageSchema = z6.object({
  role: z6.literal("user"),
  content: z6.union([
  z6.string(),
@@ -3390,7 +3327,8 @@ var coreUserMessageSchema = z6.object({
  ]),
  providerOptions: providerMetadataSchema.optional()
  });
- var coreAssistantMessageSchema = z6.object({
+ var coreUserMessageSchema = userModelMessageSchema;
+ var assistantModelMessageSchema = z6.object({
  role: z6.literal("assistant"),
  content: z6.union([
  z6.string(),
@@ -3405,17 +3343,20 @@ var coreAssistantMessageSchema = z6.object({
  ]),
  providerOptions: providerMetadataSchema.optional()
  });
- var coreToolMessageSchema = z6.object({
+ var coreAssistantMessageSchema = assistantModelMessageSchema;
+ var toolModelMessageSchema = z6.object({
  role: z6.literal("tool"),
  content: z6.array(toolResultPartSchema),
  providerOptions: providerMetadataSchema.optional()
  });
- var coreMessageSchema = z6.union([
- coreSystemMessageSchema,
- coreUserMessageSchema,
- coreAssistantMessageSchema,
- coreToolMessageSchema
+ var coreToolMessageSchema = toolModelMessageSchema;
+ var modelMessageSchema = z6.union([
+ systemModelMessageSchema,
+ userModelMessageSchema,
+ assistantModelMessageSchema,
+ toolModelMessageSchema
  ]);
+ var coreMessageSchema = modelMessageSchema;
 
  // core/prompt/standardize-prompt.ts
  async function standardizePrompt({
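
Note: the message schemas are renamed from core*MessageSchema to *ModelMessageSchema, with the old names kept as aliases, and modelMessageSchema is the union of the four role schemas. A small validation sketch using the new names (message content is illustrative):

    import { z } from "zod";
    import { modelMessageSchema } from "ai";

    const messages = z.array(modelMessageSchema).parse([
      { role: "system", content: "You are a helpful assistant." },
      { role: "user", content: "Hello!" }
    ]);
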
@@ -3462,10 +3403,10 @@ async function standardizePrompt({
  if (promptType === "other") {
  throw new InvalidPromptError({
  prompt,
- message: "messages must be an array of CoreMessage or UIMessage"
+ message: "messages must be an array of ModelMessage or UIMessage"
  });
  }
- const messages = promptType === "ui-messages" ? convertToCoreMessages(prompt.messages, {
+ const messages = promptType === "ui-messages" ? convertToModelMessages(prompt.messages, {
  tools
  }) : prompt.messages;
  if (messages.length === 0) {
@@ -3476,12 +3417,12 @@ async function standardizePrompt({
  }
  const validationResult = await safeValidateTypes({
  value: messages,
- schema: z7.array(coreMessageSchema)
+ schema: z7.array(modelMessageSchema)
  });
  if (!validationResult.success) {
  throw new InvalidPromptError({
  prompt,
- message: "messages must be an array of CoreMessage or UIMessage",
+ message: "messages must be an array of ModelMessage or UIMessage",
  cause: validationResult.error
  });
  }
@@ -7132,8 +7073,8 @@ var DefaultStreamTextResult = class {
  controller.enqueue(
  // TODO update protocol to v2 or replace with event stream
  formatDataStreamPart("file", {
- mimeType: chunk.file.mediaType,
- data: chunk.file.base64
+ mediaType: chunk.file.mediaType,
+ url: `data:${chunk.file.mediaType};base64,${chunk.file.base64}`
  })
  );
  break;
@@ -7355,16 +7296,16 @@ var DefaultGeneratedAudioFile = class extends DefaultGeneratedFile {
  super({ data, mediaType });
  let format = "mp3";
  if (mediaType) {
- const mimeTypeParts = mediaType.split("/");
- if (mimeTypeParts.length === 2) {
+ const mediaTypeParts = mediaType.split("/");
+ if (mediaTypeParts.length === 2) {
  if (mediaType !== "audio/mpeg") {
- format = mimeTypeParts[1];
+ format = mediaTypeParts[1];
  }
  }
  }
  if (!format) {
  throw new Error(
- "Audio format must be provided or determinable from mimeType"
+ "Audio format must be provided or determinable from media type"
  );
  }
  this.format = format;
@@ -7793,7 +7734,7 @@ function appendResponseMessages({
  responseMessages,
  _internal: { currentDate = () => /* @__PURE__ */ new Date() } = {}
  }) {
- var _a17, _b, _c;
+ var _a17, _b;
  const clonedMessages = structuredClone(messages);
  for (const message of responseMessages) {
  const role = message.role;
@@ -7857,8 +7798,8 @@ function appendResponseMessages({
  }
  parts.push({
  type: "file",
- mediaType: (_a17 = part.mediaType) != null ? _a17 : part.mimeType,
- data: convertDataContentToBase64String(part.data)
+ mediaType: part.mediaType,
+ url: `data:${part.mediaType};base64,${convertDataContentToBase64String(part.data)}`
  });
  break;
  }
@@ -7869,7 +7810,7 @@ function appendResponseMessages({
  getToolInvocations(lastMessage)
  // TODO remove once Message is removed
  );
- (_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
+ (_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];
  lastMessage.content = textContent;
  lastMessage.parts.push(...parts);
  getToolInvocationsForStep2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({
@@ -7902,7 +7843,7 @@ function appendResponseMessages({
  `Tool result must follow an assistant message: ${lastMessage.role}`
  );
  }
- (_c = lastMessage.parts) != null ? _c : lastMessage.parts = [];
+ (_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
  for (const contentPart of message.content) {
  const toolCall = getToolInvocations(
  lastMessage
@@ -8795,9 +8736,12 @@ export {
  appendClientMessage,
  appendResponseMessages,
  asSchema,
+ assistantModelMessageSchema,
  callChatApi,
  callCompletionApi,
+ convertFileListToFileUIParts,
  convertToCoreMessages,
+ convertToModelMessages,
  coreAssistantMessageSchema,
  coreMessageSchema,
  coreSystemMessageSchema,
@@ -8829,10 +8773,10 @@ export {
  isAssistantMessageWithCompletedToolCalls,
  isDeepEqualData,
  jsonSchema,
+ modelMessageSchema,
  parseDataStreamPart,
  parsePartialJson,
  pipeDataStreamToResponse,
- prepareAttachmentsForRequest,
  processDataStream,
  processTextStream,
  shouldResubmitMessages,
@@ -8841,8 +8785,11 @@ export {
  smoothStream,
  streamObject,
  streamText,
+ systemModelMessageSchema,
  tool,
+ toolModelMessageSchema,
  updateToolCallResult,
+ userModelMessageSchema,
  wrapLanguageModel
  };
  //# sourceMappingURL=index.mjs.map