ai 5.0.26 → 5.0.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -40,6 +40,56 @@ var NoOutputSpecifiedError = class extends AISDKError {
  };
  _a = symbol;

+ // src/logger/log-warnings.ts
+ function formatWarning(warning) {
+   const prefix = "AI SDK Warning:";
+   switch (warning.type) {
+     case "unsupported-setting": {
+       let message = `${prefix} The "${warning.setting}" setting is not supported by this model`;
+       if (warning.details) {
+         message += ` - ${warning.details}`;
+       }
+       return message;
+     }
+     case "unsupported-tool": {
+       const toolName = "name" in warning.tool ? warning.tool.name : "unknown tool";
+       let message = `${prefix} The tool "${toolName}" is not supported by this model`;
+       if (warning.details) {
+         message += ` - ${warning.details}`;
+       }
+       return message;
+     }
+     case "other": {
+       return `${prefix} ${warning.message}`;
+     }
+     default: {
+       return `${prefix} ${JSON.stringify(warning, null, 2)}`;
+     }
+   }
+ }
+ var FIRST_WARNING_INFO_MESSAGE = "AI SDK Warning System: To turn off warning logging, set the AI_SDK_LOG_WARNINGS global to false.";
+ var hasLoggedBefore = false;
+ var logWarnings = (warnings) => {
+   if (warnings.length === 0) {
+     return;
+   }
+   const logger = globalThis.AI_SDK_LOG_WARNINGS;
+   if (logger === false) {
+     return;
+   }
+   if (typeof logger === "function") {
+     logger(warnings);
+     return;
+   }
+   if (!hasLoggedBefore) {
+     hasLoggedBefore = true;
+     console.info(FIRST_WARNING_INFO_MESSAGE);
+   }
+   for (const warning of warnings) {
+     console.warn(formatWarning(warning));
+   }
+ };
+
  // src/model/resolve-model.ts
  import { gateway } from "@ai-sdk/gateway";

@@ -344,7 +394,7 @@ var MessageConversionError = class extends AISDKError14 {
  };
  _a13 = symbol13;

- // src/util/download-error.ts
+ // src/util/download/download-error.ts
  import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
  var name14 = "AI_DownloadError";
  var marker14 = `vercel.ai.error.${name14}`;
@@ -596,8 +646,8 @@ function detectMediaType({
    return void 0;
  }

- // src/util/download.ts
- async function download({ url }) {
+ // src/util/download/download.ts
+ var download = async ({ url }) => {
    var _a17;
    const urlText = url.toString();
    try {
@@ -619,7 +669,14 @@ async function download({ url }) {
      }
      throw new DownloadError({ url: urlText, cause: error });
    }
- }
+ };
+
+ // src/util/download/download-function.ts
+ var createDefaultDownloadFunction = (download2 = download) => (requestedDownloads) => Promise.all(
+   requestedDownloads.map(
+     async (requestedDownload) => requestedDownload.isUrlSupportedByModel ? null : download2(requestedDownload)
+   )
+ );

  // src/prompt/data-content.ts
  import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
@@ -720,11 +777,11 @@ function convertDataContentToUint8Array(content) {
  async function convertToLanguageModelPrompt({
    prompt,
    supportedUrls,
-   downloadImplementation = download
+   download: download2 = createDefaultDownloadFunction()
  }) {
    const downloadedAssets = await downloadAssets(
      prompt.messages,
-     downloadImplementation,
+     download2,
      supportedUrls
    );
    return [
@@ -846,8 +903,8 @@ function convertToLanguageModelMessage({
      }
    }
  }
- async function downloadAssets(messages, downloadImplementation, supportedUrls) {
-   const urls = messages.filter((message) => message.role === "user").map((message) => message.content).filter(
+ async function downloadAssets(messages, download2, supportedUrls) {
+   const plannedDownloads = messages.filter((message) => message.role === "user").map((message) => message.content).filter(
      (content) => Array.isArray(content)
    ).flat().filter(
      (part) => part.type === "image" || part.type === "file"
@@ -863,20 +920,23 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
      }
      return { mediaType, data };
    }).filter(
-     (part) => part.data instanceof URL && part.mediaType != null && !isUrlSupported({
+     (part) => part.data instanceof URL
+   ).map((part) => ({
+     url: part.data,
+     isUrlSupportedByModel: part.mediaType != null && isUrlSupported({
        url: part.data.toString(),
        mediaType: part.mediaType,
        supportedUrls
      })
-   ).map((part) => part.data);
-   const downloadedImages = await Promise.all(
-     urls.map(async (url) => ({
-       url,
-       data: await downloadImplementation({ url })
-     }))
-   );
+   }));
+   const downloadedFiles = await download2(plannedDownloads);
    return Object.fromEntries(
-     downloadedImages.map(({ url, data }) => [url.toString(), data])
+     downloadedFiles.filter(
+       (downloadedFile) => (downloadedFile == null ? void 0 : downloadedFile.data) != null
+     ).map(({ data, mediaType }, index) => [
+       plannedDownloads[index].url.toString(),
+       { data, mediaType }
+     ])
    );
  }
  function convertPartToLanguageModelPart(part, downloadedAssets) {
@@ -2044,6 +2104,7 @@ async function generateText({
    experimental_prepareStep,
    prepareStep = experimental_prepareStep,
    experimental_repairToolCall: repairToolCall,
+   experimental_download: download2,
    experimental_context,
    _internal: {
      generateId: generateId3 = originalGenerateId,
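Note on the hunk above: generateText now accepts an experimental_download option (also added to streamText, generateObject, and streamObject further down) and forwards it to convertToLanguageModelPrompt, which otherwise falls back to createDefaultDownloadFunction(). A minimal sketch of what a caller might pass, inferred from the createDefaultDownloadFunction and downloadAssets hunks above: the function receives the planned downloads as { url, isUrlSupportedByModel } entries and returns one { data, mediaType } object (or null) per entry, in order. The provider import, model id, and message content below are illustrative assumptions, not part of this diff.

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai"; // illustrative provider choice

const result = await generateText({
  model: openai("gpt-4o"), // placeholder model id
  // Custom downloader: return null for URLs the model can fetch itself,
  // or { data, mediaType } for assets that must be downloaded up front.
  experimental_download: async (requestedDownloads) =>
    Promise.all(
      requestedDownloads.map(async ({ url, isUrlSupportedByModel }) => {
        if (isUrlSupportedByModel) {
          return null; // let the provider resolve the URL directly
        }
        const response = await fetch(url);
        return {
          data: new Uint8Array(await response.arrayBuffer()),
          mediaType: response.headers.get("content-type") ?? undefined,
        };
      })
    ),
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "Describe this image." },
        { type: "image", image: new URL("https://example.com/cat.png") },
      ],
    },
  ],
});

console.log(result.text);

The default path does the same filtering on isUrlSupportedByModel before delegating to the built-in download helper.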
@@ -2093,7 +2154,7 @@ async function generateText({
    }),
    tracer,
    fn: async (span) => {
-     var _a17, _b, _c, _d, _e, _f;
+     var _a17, _b, _c, _d, _e, _f, _g;
      const callSettings2 = prepareCallSettings(settings);
      let currentModelResponse;
      let clientToolCalls = [];
@@ -2116,7 +2177,8 @@ async function generateText({
        system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
        messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
      },
-     supportedUrls: await model.supportedUrls
+     supportedUrls: await model.supportedUrls,
+     download: download2
    });
    const stepModel = resolveLanguageModel(
      (_c = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _c : model
@@ -2167,7 +2229,7 @@ async function generateText({
    }),
    tracer,
    fn: async (span2) => {
-     var _a19, _b2, _c2, _d2, _e2, _f2, _g, _h;
+     var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
      const result = await stepModel.doGenerate({
        ...callSettings2,
        tools: stepTools,
@@ -2182,7 +2244,7 @@ async function generateText({
      id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
      timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
      modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
-     headers: (_g = result.response) == null ? void 0 : _g.headers,
+     headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
      body: (_h = result.response) == null ? void 0 : _h.body
    };
    span2.setAttributes(
@@ -2306,6 +2368,7 @@ async function generateText({
        messages: structuredClone(responseMessages)
      }
    });
+   logWarnings((_g = currentModelResponse.warnings) != null ? _g : []);
    steps.push(currentStepResult);
    await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
  } while (
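With the change above, each generateText step now routes its model warnings through the logWarnings helper from src/logger/log-warnings.ts (the same call is added in the streamText, generateObject, streamObject, generateImage, generateSpeech, and transcribe hunks below). Per the log-warnings hunk at the top of this diff, the output is controlled by a global read off globalThis. A minimal sketch of the two supported overrides; the `as any` cast and the logging format are assumptions for illustration, since the global is untyped here:

// Disable AI SDK warning logging entirely; logWarnings() returns early on `false`.
(globalThis as any).AI_SDK_LOG_WARNINGS = false;

// Or intercept warnings yourself: a function value receives the raw warnings
// array instead of the default console.warn(formatWarning(...)) output.
(globalThis as any).AI_SDK_LOG_WARNINGS = (warnings: unknown[]) => {
  for (const warning of warnings) {
    console.error("[ai warning]", JSON.stringify(warning, null, 2));
  }
};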
@@ -4325,6 +4388,7 @@ function streamText({
    activeTools = experimental_activeTools,
    experimental_repairToolCall: repairToolCall,
    experimental_transform: transform,
+   experimental_download: download2,
    includeRawChunks = false,
    onChunk,
    onError = ({ error }) => {
@@ -4369,7 +4433,8 @@ function streamText({
      now: now2,
      currentDate,
      generateId: generateId3,
-     experimental_context
+     experimental_context,
+     download: download2
    });
  }
  function createOutputTransformStream(output) {
@@ -4466,7 +4531,8 @@ var DefaultStreamTextResult = class {
    onFinish,
    onAbort,
    onStepFinish,
-   experimental_context
+   experimental_context,
+   download: download2
  }) {
    this._totalUsage = new DelayedPromise();
    this._finishReason = new DelayedPromise();
@@ -4597,6 +4663,7 @@ var DefaultStreamTextResult = class {
      providerMetadata: part.providerMetadata
    });
    await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
+   logWarnings(recordedWarnings);
    recordedSteps.push(currentStepResult);
    recordedContent = [];
    activeReasoningContent = {};
@@ -4788,7 +4855,8 @@ var DefaultStreamTextResult = class {
      system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
      messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
    },
-   supportedUrls: await model.supportedUrls
+   supportedUrls: await model.supportedUrls,
+   download: download2
  });
  const stepModel = resolveLanguageModel(
    (_c = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _c : model
@@ -6063,6 +6131,7 @@ async function generateImage({
    }
    responses.push(result.response);
  }
+ logWarnings(warnings);
  if (!images.length) {
    throw new NoImageGeneratedError({ responses });
  }
@@ -6562,6 +6631,7 @@ async function generateObject(options) {
    headers,
    experimental_repairText: repairText,
    experimental_telemetry: telemetry,
+   experimental_download: download2,
    providerOptions,
    _internal: {
      generateId: generateId3 = originalGenerateId3,
@@ -6639,7 +6709,8 @@ async function generateObject(options) {
  });
  const promptMessages = await convertToLanguageModelPrompt({
    prompt: standardizedPrompt,
-   supportedUrls: await model.supportedUrls
+   supportedUrls: await model.supportedUrls,
+   download: download2
  });
  const generateResult = await retry(
    () => recordSpan({
@@ -6740,6 +6811,7 @@ async function generateObject(options) {
  request = (_a17 = generateResult.request) != null ? _a17 : {};
  response = generateResult.responseData;
  reasoning = generateResult.reasoning;
+ logWarnings(warnings);
  const object2 = await parseAndValidateObjectResultWithRepair(
    result,
    outputStrategy,
@@ -6954,6 +7026,7 @@ function streamObject(options) {
    headers,
    experimental_repairText: repairText,
    experimental_telemetry: telemetry,
+   experimental_download: download2,
    providerOptions,
    onError = ({ error }) => {
      console.error(error);
@@ -7001,6 +7074,7 @@ function streamObject(options) {
    repairText,
    onError,
    onFinish,
+   download: download2,
    generateId: generateId3,
    currentDate,
    now: now2
@@ -7024,6 +7098,7 @@ var DefaultStreamObjectResult = class {
    repairText,
    onError,
    onFinish,
+   download: download2,
    generateId: generateId3,
    currentDate,
    now: now2
@@ -7097,7 +7172,8 @@ var DefaultStreamObjectResult = class {
    ...prepareCallSettings(settings),
    prompt: await convertToLanguageModelPrompt({
      prompt: standardizedPrompt,
-     supportedUrls: await model.supportedUrls
+     supportedUrls: await model.supportedUrls,
+     download: download2
    }),
    providerOptions,
    abortSignal,
@@ -7250,6 +7326,7 @@ var DefaultStreamObjectResult = class {
    usage,
    response: fullResponse
  });
+ logWarnings(warnings != null ? warnings : []);
  self._usage.resolve(usage);
  self._providerMetadata.resolve(providerMetadata);
  self._warnings.resolve(warnings);
@@ -7535,6 +7612,7 @@ async function generateSpeech({
  if (!result.audio || result.audio.length === 0) {
    throw new NoSpeechGeneratedError({ responses: [result.response] });
  }
+ logWarnings(result.warnings);
  return new DefaultSpeechResult({
    audio: new DefaultGeneratedAudioFile({
      data: result.audio,
@@ -8865,6 +8943,7 @@ async function transcribe({
      });
    }
  );
+ logWarnings(result.warnings);
  if (!result.text) {
    throw new NoTranscriptGeneratedError({ responses: [result.response] });
  }