ai 4.1.1 → 4.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
- for (var name14 in all)
- __defProp(target, name14, { get: all[name14], enumerable: true });
+ for (var name15 in all)
+ __defProp(target, name15, { get: all[name15], enumerable: true });
  };
  var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
@@ -20,26 +20,27 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // streams/index.ts
  var streams_exports = {};
  __export(streams_exports, {
- AISDKError: () => import_provider16.AISDKError,
- APICallError: () => import_provider16.APICallError,
+ AISDKError: () => import_provider17.AISDKError,
+ APICallError: () => import_provider17.APICallError,
  AssistantResponse: () => AssistantResponse,
  DownloadError: () => DownloadError,
- EmptyResponseBodyError: () => import_provider16.EmptyResponseBodyError,
+ EmptyResponseBodyError: () => import_provider17.EmptyResponseBodyError,
  InvalidArgumentError: () => InvalidArgumentError,
  InvalidDataContentError: () => InvalidDataContentError,
  InvalidMessageRoleError: () => InvalidMessageRoleError,
- InvalidPromptError: () => import_provider16.InvalidPromptError,
- InvalidResponseDataError: () => import_provider16.InvalidResponseDataError,
+ InvalidPromptError: () => import_provider17.InvalidPromptError,
+ InvalidResponseDataError: () => import_provider17.InvalidResponseDataError,
  InvalidToolArgumentsError: () => InvalidToolArgumentsError,
- JSONParseError: () => import_provider16.JSONParseError,
+ JSONParseError: () => import_provider17.JSONParseError,
  LangChainAdapter: () => langchain_adapter_exports,
  LlamaIndexAdapter: () => llamaindex_adapter_exports,
- LoadAPIKeyError: () => import_provider16.LoadAPIKeyError,
+ LoadAPIKeyError: () => import_provider17.LoadAPIKeyError,
  MessageConversionError: () => MessageConversionError,
- NoContentGeneratedError: () => import_provider16.NoContentGeneratedError,
+ NoContentGeneratedError: () => import_provider17.NoContentGeneratedError,
+ NoImageGeneratedError: () => NoImageGeneratedError,
  NoObjectGeneratedError: () => NoObjectGeneratedError,
  NoOutputSpecifiedError: () => NoOutputSpecifiedError,
- NoSuchModelError: () => import_provider16.NoSuchModelError,
+ NoSuchModelError: () => import_provider17.NoSuchModelError,
  NoSuchProviderError: () => NoSuchProviderError,
  NoSuchToolError: () => NoSuchToolError,
  Output: () => output_exports,
@@ -47,8 +48,8 @@ __export(streams_exports, {
  StreamData: () => StreamData,
  ToolCallRepairError: () => ToolCallRepairError,
  ToolExecutionError: () => ToolExecutionError,
- TypeValidationError: () => import_provider16.TypeValidationError,
- UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
+ TypeValidationError: () => import_provider17.TypeValidationError,
+ UnsupportedFunctionalityError: () => import_provider17.UnsupportedFunctionalityError,
  appendResponseMessages: () => appendResponseMessages,
  convertToCoreMessages: () => convertToCoreMessages,
  cosineSimilarity: () => cosineSimilarity,
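The only new entry in this export map is `NoImageGeneratedError`; the `import_provider16` → `import_provider17` edits are just the bundler renumbering the inlined `@ai-sdk/provider` import and do not change behavior. A minimal sketch of the new import surface, assuming a TypeScript consumer of the package:

```ts
// New in 4.1.2: the error class is re-exported from the package root.
import { NoImageGeneratedError } from "ai";
```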
@@ -420,7 +421,7 @@ function getBaseTelemetryAttributes({
420
421
  telemetry,
421
422
  headers
422
423
  }) {
423
- var _a14;
424
+ var _a15;
424
425
  return {
425
426
  "ai.model.provider": model.provider,
426
427
  "ai.model.id": model.modelId,
@@ -430,7 +431,7 @@ function getBaseTelemetryAttributes({
430
431
  return attributes;
431
432
  }, {}),
432
433
  // add metadata as attributes:
433
- ...Object.entries((_a14 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a14 : {}).reduce(
434
+ ...Object.entries((_a15 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a15 : {}).reduce(
434
435
  (attributes, [key, value]) => {
435
436
  attributes[`ai.telemetry.metadata.${key}`] = value;
436
437
  return attributes;
@@ -455,7 +456,7 @@ var noopTracer = {
455
456
  startSpan() {
456
457
  return noopSpan;
457
458
  },
458
- startActiveSpan(name14, arg1, arg2, arg3) {
459
+ startActiveSpan(name15, arg1, arg2, arg3) {
459
460
  if (typeof arg1 === "function") {
460
461
  return arg1(noopSpan);
461
462
  }
@@ -525,13 +526,13 @@ function getTracer({
525
526
  // core/telemetry/record-span.ts
526
527
  var import_api2 = require("@opentelemetry/api");
527
528
  function recordSpan({
528
- name: name14,
529
+ name: name15,
529
530
  tracer,
530
531
  attributes,
531
532
  fn,
532
533
  endWhenDone = true
533
534
  }) {
534
- return tracer.startActiveSpan(name14, { attributes }, async (span) => {
535
+ return tracer.startActiveSpan(name15, { attributes }, async (span) => {
535
536
  try {
536
537
  const result = await fn(span);
537
538
  if (endWhenDone) {
@@ -639,14 +640,14 @@ async function embed({
639
640
  }),
640
641
  tracer,
641
642
  fn: async (doEmbedSpan) => {
642
- var _a14;
643
+ var _a15;
643
644
  const modelResponse = await model.doEmbed({
644
645
  values: [value],
645
646
  abortSignal,
646
647
  headers
647
648
  });
648
649
  const embedding2 = modelResponse.embeddings[0];
649
- const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
650
+ const usage2 = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
650
651
  doEmbedSpan.setAttributes(
651
652
  selectTelemetryAttributes({
652
653
  telemetry,
@@ -756,14 +757,14 @@ async function embedMany({
756
757
  }),
757
758
  tracer,
758
759
  fn: async (doEmbedSpan) => {
759
- var _a14;
760
+ var _a15;
760
761
  const modelResponse = await model.doEmbed({
761
762
  values,
762
763
  abortSignal,
763
764
  headers
764
765
  });
765
766
  const embeddings3 = modelResponse.embeddings;
766
- const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
767
+ const usage2 = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
767
768
  doEmbedSpan.setAttributes(
768
769
  selectTelemetryAttributes({
769
770
  telemetry,
@@ -815,14 +816,14 @@ async function embedMany({
815
816
  }),
816
817
  tracer,
817
818
  fn: async (doEmbedSpan) => {
818
- var _a14;
819
+ var _a15;
819
820
  const modelResponse = await model.doEmbed({
820
821
  values: chunk,
821
822
  abortSignal,
822
823
  headers
823
824
  });
824
825
  const embeddings2 = modelResponse.embeddings;
825
- const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
826
+ const usage2 = (_a15 = modelResponse.usage) != null ? _a15 : { tokens: NaN };
826
827
  doEmbedSpan.setAttributes(
827
828
  selectTelemetryAttributes({
828
829
  telemetry,
@@ -870,6 +871,30 @@ var DefaultEmbedManyResult = class {

  // core/generate-image/generate-image.ts
  var import_provider_utils2 = require("@ai-sdk/provider-utils");
+
+ // errors/no-image-generated-error.ts
+ var import_provider4 = require("@ai-sdk/provider");
+ var name3 = "AI_NoImageGeneratedError";
+ var marker3 = `vercel.ai.error.${name3}`;
+ var symbol3 = Symbol.for(marker3);
+ var _a3;
+ var NoImageGeneratedError = class extends import_provider4.AISDKError {
+ constructor({
+ message = "No image generated.",
+ cause,
+ responses
+ }) {
+ super({ name: name3, message, cause });
+ this[_a3] = true;
+ this.responses = responses;
+ }
+ static isInstance(error) {
+ return import_provider4.AISDKError.hasMarker(error, marker3);
+ }
+ };
+ _a3 = symbol3;
+
+ // core/generate-image/generate-image.ts
  async function generateImage({
  model,
  prompt,
@@ -880,11 +905,14 @@ async function generateImage({
  providerOptions,
  maxRetries: maxRetriesArg,
  abortSignal,
- headers
+ headers,
+ _internal = {
+ currentDate: () => /* @__PURE__ */ new Date()
+ }
  }) {
- var _a14;
+ var _a15;
  const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
- const maxImagesPerCall = (_a14 = model.maxImagesPerCall) != null ? _a14 : 1;
+ const maxImagesPerCall = (_a15 = model.maxImagesPerCall) != null ? _a15 : 1;
  const callCount = Math.ceil(n / maxImagesPerCall);
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
  if (i < callCount - 1) {
@@ -911,18 +939,24 @@ async function generateImage({
  );
  const images = [];
  const warnings = [];
+ const responses = [];
  for (const result of results) {
  images.push(
  ...result.images.map((image) => new DefaultGeneratedImage({ image }))
  );
  warnings.push(...result.warnings);
+ responses.push(result.response);
  }
- return new DefaultGenerateImageResult({ images, warnings });
+ if (!images.length) {
+ throw new NoImageGeneratedError({ responses });
+ }
+ return new DefaultGenerateImageResult({ images, warnings, responses });
  }
  var DefaultGenerateImageResult = class {
  constructor(options) {
  this.images = options.images;
  this.warnings = options.warnings;
+ this.responses = options.responses;
  }
  get image() {
  return this.images[0];
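Taken together, these hunks are the substantive change in 4.1.2: `generateImage` now collects per-call response metadata, exposes it as `responses` on the result, and throws the new `NoImageGeneratedError` (carrying those `responses`) when every provider call returns zero images. A minimal sketch of how a caller might react to the new error, assuming the public `experimental_generateImage` export and an image model from an `@ai-sdk/*` provider (model id here is purely illustrative):

```ts
import { experimental_generateImage as generateImage, NoImageGeneratedError } from "ai";
import { openai } from "@ai-sdk/openai"; // assumption: any provider with an image model works

try {
  const { images, responses } = await generateImage({
    model: openai.image("dall-e-3"), // hypothetical model choice for illustration
    prompt: "A watercolor lighthouse at dusk",
    n: 2,
  });
  console.log(`${images.length} image(s) across ${responses.length} provider call(s)`);
} catch (error) {
  if (NoImageGeneratedError.isInstance(error)) {
    // responses holds the metadata of the provider calls made before the failure
    console.error("No image generated:", error.responses);
  } else {
    throw error;
  }
}
```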
@@ -954,12 +988,12 @@ var DefaultGeneratedImage = class {
954
988
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
955
989
 
956
990
  // errors/no-object-generated-error.ts
957
- var import_provider4 = require("@ai-sdk/provider");
958
- var name3 = "AI_NoObjectGeneratedError";
959
- var marker3 = `vercel.ai.error.${name3}`;
960
- var symbol3 = Symbol.for(marker3);
961
- var _a3;
962
- var NoObjectGeneratedError = class extends import_provider4.AISDKError {
991
+ var import_provider5 = require("@ai-sdk/provider");
992
+ var name4 = "AI_NoObjectGeneratedError";
993
+ var marker4 = `vercel.ai.error.${name4}`;
994
+ var symbol4 = Symbol.for(marker4);
995
+ var _a4;
996
+ var NoObjectGeneratedError = class extends import_provider5.AISDKError {
963
997
  constructor({
964
998
  message = "No object generated.",
965
999
  cause,
@@ -967,25 +1001,25 @@ var NoObjectGeneratedError = class extends import_provider4.AISDKError {
967
1001
  response,
968
1002
  usage
969
1003
  }) {
970
- super({ name: name3, message, cause });
971
- this[_a3] = true;
1004
+ super({ name: name4, message, cause });
1005
+ this[_a4] = true;
972
1006
  this.text = text2;
973
1007
  this.response = response;
974
1008
  this.usage = usage;
975
1009
  }
976
1010
  static isInstance(error) {
977
- return import_provider4.AISDKError.hasMarker(error, marker3);
1011
+ return import_provider5.AISDKError.hasMarker(error, marker4);
978
1012
  }
979
1013
  };
980
- _a3 = symbol3;
1014
+ _a4 = symbol4;
981
1015
 
982
1016
  // util/download-error.ts
983
- var import_provider5 = require("@ai-sdk/provider");
984
- var name4 = "AI_DownloadError";
985
- var marker4 = `vercel.ai.error.${name4}`;
986
- var symbol4 = Symbol.for(marker4);
987
- var _a4;
988
- var DownloadError = class extends import_provider5.AISDKError {
1017
+ var import_provider6 = require("@ai-sdk/provider");
1018
+ var name5 = "AI_DownloadError";
1019
+ var marker5 = `vercel.ai.error.${name5}`;
1020
+ var symbol5 = Symbol.for(marker5);
1021
+ var _a5;
1022
+ var DownloadError = class extends import_provider6.AISDKError {
989
1023
  constructor({
990
1024
  url,
991
1025
  statusCode,
@@ -993,24 +1027,24 @@ var DownloadError = class extends import_provider5.AISDKError {
993
1027
  cause,
994
1028
  message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
995
1029
  }) {
996
- super({ name: name4, message, cause });
997
- this[_a4] = true;
1030
+ super({ name: name5, message, cause });
1031
+ this[_a5] = true;
998
1032
  this.url = url;
999
1033
  this.statusCode = statusCode;
1000
1034
  this.statusText = statusText;
1001
1035
  }
1002
1036
  static isInstance(error) {
1003
- return import_provider5.AISDKError.hasMarker(error, marker4);
1037
+ return import_provider6.AISDKError.hasMarker(error, marker5);
1004
1038
  }
1005
1039
  };
1006
- _a4 = symbol4;
1040
+ _a5 = symbol5;
1007
1041
 
1008
1042
  // util/download.ts
1009
1043
  async function download({
1010
1044
  url,
1011
1045
  fetchImplementation = fetch
1012
1046
  }) {
1013
- var _a14;
1047
+ var _a15;
1014
1048
  const urlText = url.toString();
1015
1049
  try {
1016
1050
  const response = await fetchImplementation(urlText);
@@ -1023,7 +1057,7 @@ async function download({
1023
1057
  }
1024
1058
  return {
1025
1059
  data: new Uint8Array(await response.arrayBuffer()),
1026
- mimeType: (_a14 = response.headers.get("content-type")) != null ? _a14 : void 0
1060
+ mimeType: (_a15 = response.headers.get("content-type")) != null ? _a15 : void 0
1027
1061
  };
1028
1062
  } catch (error) {
1029
1063
  if (DownloadError.isInstance(error)) {
@@ -1053,26 +1087,26 @@ function detectImageMimeType(image) {
1053
1087
  var import_provider_utils3 = require("@ai-sdk/provider-utils");
1054
1088
 
1055
1089
  // core/prompt/invalid-data-content-error.ts
1056
- var import_provider6 = require("@ai-sdk/provider");
1057
- var name5 = "AI_InvalidDataContentError";
1058
- var marker5 = `vercel.ai.error.${name5}`;
1059
- var symbol5 = Symbol.for(marker5);
1060
- var _a5;
1061
- var InvalidDataContentError = class extends import_provider6.AISDKError {
1090
+ var import_provider7 = require("@ai-sdk/provider");
1091
+ var name6 = "AI_InvalidDataContentError";
1092
+ var marker6 = `vercel.ai.error.${name6}`;
1093
+ var symbol6 = Symbol.for(marker6);
1094
+ var _a6;
1095
+ var InvalidDataContentError = class extends import_provider7.AISDKError {
1062
1096
  constructor({
1063
1097
  content,
1064
1098
  cause,
1065
1099
  message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
1066
1100
  }) {
1067
- super({ name: name5, message, cause });
1068
- this[_a5] = true;
1101
+ super({ name: name6, message, cause });
1102
+ this[_a6] = true;
1069
1103
  this.content = content;
1070
1104
  }
1071
1105
  static isInstance(error) {
1072
- return import_provider6.AISDKError.hasMarker(error, marker5);
1106
+ return import_provider7.AISDKError.hasMarker(error, marker6);
1073
1107
  }
1074
1108
  };
1075
- _a5 = symbol5;
1109
+ _a6 = symbol6;
1076
1110
 
1077
1111
  // core/prompt/data-content.ts
1078
1112
  var import_zod = require("zod");
@@ -1083,8 +1117,8 @@ var dataContentSchema = import_zod.z.union([
1083
1117
  import_zod.z.custom(
1084
1118
  // Buffer might not be available in some environments such as CloudFlare:
1085
1119
  (value) => {
1086
- var _a14, _b;
1087
- return (_b = (_a14 = globalThis.Buffer) == null ? void 0 : _a14.isBuffer(value)) != null ? _b : false;
1120
+ var _a15, _b;
1121
+ return (_b = (_a15 = globalThis.Buffer) == null ? void 0 : _a15.isBuffer(value)) != null ? _b : false;
1088
1122
  },
1089
1123
  { message: "Must be a Buffer" }
1090
1124
  )
@@ -1127,25 +1161,25 @@ function convertUint8ArrayToText(uint8Array) {
1127
1161
  }
1128
1162
 
1129
1163
  // core/prompt/invalid-message-role-error.ts
1130
- var import_provider7 = require("@ai-sdk/provider");
1131
- var name6 = "AI_InvalidMessageRoleError";
1132
- var marker6 = `vercel.ai.error.${name6}`;
1133
- var symbol6 = Symbol.for(marker6);
1134
- var _a6;
1135
- var InvalidMessageRoleError = class extends import_provider7.AISDKError {
1164
+ var import_provider8 = require("@ai-sdk/provider");
1165
+ var name7 = "AI_InvalidMessageRoleError";
1166
+ var marker7 = `vercel.ai.error.${name7}`;
1167
+ var symbol7 = Symbol.for(marker7);
1168
+ var _a7;
1169
+ var InvalidMessageRoleError = class extends import_provider8.AISDKError {
1136
1170
  constructor({
1137
1171
  role,
1138
1172
  message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
1139
1173
  }) {
1140
- super({ name: name6, message });
1141
- this[_a6] = true;
1174
+ super({ name: name7, message });
1175
+ this[_a7] = true;
1142
1176
  this.role = role;
1143
1177
  }
1144
1178
  static isInstance(error) {
1145
- return import_provider7.AISDKError.hasMarker(error, marker6);
1179
+ return import_provider8.AISDKError.hasMarker(error, marker7);
1146
1180
  }
1147
1181
  };
1148
- _a6 = symbol6;
1182
+ _a7 = symbol7;
1149
1183
 
1150
1184
  // core/prompt/split-data-url.ts
1151
1185
  function splitDataUrl(dataUrl) {
@@ -1275,7 +1309,7 @@ async function downloadAssets(messages, downloadImplementation, modelSupportsIma
1275
1309
  );
1276
1310
  }
1277
1311
  function convertPartToLanguageModelPart(part, downloadedAssets) {
1278
- var _a14;
1312
+ var _a15;
1279
1313
  if (part.type === "text") {
1280
1314
  return {
1281
1315
  type: "text",
@@ -1328,7 +1362,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1328
1362
  switch (type) {
1329
1363
  case "image": {
1330
1364
  if (normalizedData instanceof Uint8Array) {
1331
- mimeType = (_a14 = detectImageMimeType(normalizedData)) != null ? _a14 : mimeType;
1365
+ mimeType = (_a15 = detectImageMimeType(normalizedData)) != null ? _a15 : mimeType;
1332
1366
  }
1333
1367
  return {
1334
1368
  type: "image",
@@ -1445,7 +1479,7 @@ function prepareCallSettings({
1445
1479
  }
1446
1480
 
1447
1481
  // core/prompt/standardize-prompt.ts
1448
- var import_provider9 = require("@ai-sdk/provider");
1482
+ var import_provider10 = require("@ai-sdk/provider");
1449
1483
  var import_provider_utils4 = require("@ai-sdk/provider-utils");
1450
1484
  var import_zod7 = require("zod");
1451
1485
 
@@ -1595,7 +1629,7 @@ function detectSingleMessageCharacteristics(message) {
1595
1629
 
1596
1630
  // core/prompt/attachments-to-parts.ts
1597
1631
  function attachmentsToParts(attachments) {
1598
- var _a14, _b, _c;
1632
+ var _a15, _b, _c;
1599
1633
  const parts = [];
1600
1634
  for (const attachment of attachments) {
1601
1635
  let url;
@@ -1607,7 +1641,7 @@ function attachmentsToParts(attachments) {
1607
1641
  switch (url.protocol) {
1608
1642
  case "http:":
1609
1643
  case "https:": {
1610
- if ((_a14 = attachment.contentType) == null ? void 0 : _a14.startsWith("image/")) {
1644
+ if ((_a15 = attachment.contentType) == null ? void 0 : _a15.startsWith("image/")) {
1611
1645
  parts.push({ type: "image", image: url });
1612
1646
  } else {
1613
1647
  if (!attachment.contentType) {
@@ -1671,30 +1705,30 @@ function attachmentsToParts(attachments) {
1671
1705
  }
1672
1706
 
1673
1707
  // core/prompt/message-conversion-error.ts
1674
- var import_provider8 = require("@ai-sdk/provider");
1675
- var name7 = "AI_MessageConversionError";
1676
- var marker7 = `vercel.ai.error.${name7}`;
1677
- var symbol7 = Symbol.for(marker7);
1678
- var _a7;
1679
- var MessageConversionError = class extends import_provider8.AISDKError {
1708
+ var import_provider9 = require("@ai-sdk/provider");
1709
+ var name8 = "AI_MessageConversionError";
1710
+ var marker8 = `vercel.ai.error.${name8}`;
1711
+ var symbol8 = Symbol.for(marker8);
1712
+ var _a8;
1713
+ var MessageConversionError = class extends import_provider9.AISDKError {
1680
1714
  constructor({
1681
1715
  originalMessage,
1682
1716
  message
1683
1717
  }) {
1684
- super({ name: name7, message });
1685
- this[_a7] = true;
1718
+ super({ name: name8, message });
1719
+ this[_a8] = true;
1686
1720
  this.originalMessage = originalMessage;
1687
1721
  }
1688
1722
  static isInstance(error) {
1689
- return import_provider8.AISDKError.hasMarker(error, marker7);
1723
+ return import_provider9.AISDKError.hasMarker(error, marker8);
1690
1724
  }
1691
1725
  };
1692
- _a7 = symbol7;
1726
+ _a8 = symbol8;
1693
1727
 
1694
1728
  // core/prompt/convert-to-core-messages.ts
1695
1729
  function convertToCoreMessages(messages, options) {
1696
- var _a14;
1697
- const tools = (_a14 = options == null ? void 0 : options.tools) != null ? _a14 : {};
1730
+ var _a15;
1731
+ const tools = (_a15 = options == null ? void 0 : options.tools) != null ? _a15 : {};
1698
1732
  const coreMessages = [];
1699
1733
  for (const message of messages) {
1700
1734
  const { role, content, toolInvocations, experimental_attachments } = message;
@@ -1783,26 +1817,26 @@ function standardizePrompt({
1783
1817
  tools
1784
1818
  }) {
1785
1819
  if (prompt.prompt == null && prompt.messages == null) {
1786
- throw new import_provider9.InvalidPromptError({
1820
+ throw new import_provider10.InvalidPromptError({
1787
1821
  prompt,
1788
1822
  message: "prompt or messages must be defined"
1789
1823
  });
1790
1824
  }
1791
1825
  if (prompt.prompt != null && prompt.messages != null) {
1792
- throw new import_provider9.InvalidPromptError({
1826
+ throw new import_provider10.InvalidPromptError({
1793
1827
  prompt,
1794
1828
  message: "prompt and messages cannot be defined at the same time"
1795
1829
  });
1796
1830
  }
1797
1831
  if (prompt.system != null && typeof prompt.system !== "string") {
1798
- throw new import_provider9.InvalidPromptError({
1832
+ throw new import_provider10.InvalidPromptError({
1799
1833
  prompt,
1800
1834
  message: "system must be a string"
1801
1835
  });
1802
1836
  }
1803
1837
  if (prompt.prompt != null) {
1804
1838
  if (typeof prompt.prompt !== "string") {
1805
- throw new import_provider9.InvalidPromptError({
1839
+ throw new import_provider10.InvalidPromptError({
1806
1840
  prompt,
1807
1841
  message: "prompt must be a string"
1808
1842
  });
@@ -1821,7 +1855,7 @@ function standardizePrompt({
1821
1855
  if (prompt.messages != null) {
1822
1856
  const promptType = detectPromptType(prompt.messages);
1823
1857
  if (promptType === "other") {
1824
- throw new import_provider9.InvalidPromptError({
1858
+ throw new import_provider10.InvalidPromptError({
1825
1859
  prompt,
1826
1860
  message: "messages must be an array of CoreMessage or UIMessage"
1827
1861
  });
@@ -1834,7 +1868,7 @@ function standardizePrompt({
1834
1868
  schema: import_zod7.z.array(coreMessageSchema)
1835
1869
  });
1836
1870
  if (!validationResult.success) {
1837
- throw new import_provider9.InvalidPromptError({
1871
+ throw new import_provider10.InvalidPromptError({
1838
1872
  prompt,
1839
1873
  message: "messages must be an array of CoreMessage or UIMessage",
1840
1874
  cause: validationResult.error
@@ -1889,7 +1923,7 @@ function injectJsonInstruction({
1889
1923
  }
1890
1924
 
1891
1925
  // core/generate-object/output-strategy.ts
1892
- var import_provider10 = require("@ai-sdk/provider");
1926
+ var import_provider11 = require("@ai-sdk/provider");
1893
1927
  var import_provider_utils5 = require("@ai-sdk/provider-utils");
1894
1928
  var import_ui_utils2 = require("@ai-sdk/ui-utils");
1895
1929
 
@@ -1927,7 +1961,7 @@ var noSchemaOutputStrategy = {
1927
1961
  } : { success: true, value };
1928
1962
  },
1929
1963
  createElementStream() {
1930
- throw new import_provider10.UnsupportedFunctionalityError({
1964
+ throw new import_provider11.UnsupportedFunctionalityError({
1931
1965
  functionality: "element streams in no-schema mode"
1932
1966
  });
1933
1967
  }
@@ -1949,7 +1983,7 @@ var objectOutputStrategy = (schema) => ({
1949
1983
  return (0, import_provider_utils5.safeValidateTypes)({ value, schema });
1950
1984
  },
1951
1985
  createElementStream() {
1952
- throw new import_provider10.UnsupportedFunctionalityError({
1986
+ throw new import_provider11.UnsupportedFunctionalityError({
1953
1987
  functionality: "element streams in object mode"
1954
1988
  });
1955
1989
  }
@@ -1971,11 +2005,11 @@ var arrayOutputStrategy = (schema) => {
1971
2005
  additionalProperties: false
1972
2006
  },
1973
2007
  validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {
1974
- var _a14;
1975
- if (!(0, import_provider10.isJSONObject)(value) || !(0, import_provider10.isJSONArray)(value.elements)) {
2008
+ var _a15;
2009
+ if (!(0, import_provider11.isJSONObject)(value) || !(0, import_provider11.isJSONArray)(value.elements)) {
1976
2010
  return {
1977
2011
  success: false,
1978
- error: new import_provider10.TypeValidationError({
2012
+ error: new import_provider11.TypeValidationError({
1979
2013
  value,
1980
2014
  cause: "value must be an object that contains an array of elements"
1981
2015
  })
@@ -1994,7 +2028,7 @@ var arrayOutputStrategy = (schema) => {
1994
2028
  }
1995
2029
  resultArray.push(result.value);
1996
2030
  }
1997
- const publishedElementCount = (_a14 = latestObject == null ? void 0 : latestObject.length) != null ? _a14 : 0;
2031
+ const publishedElementCount = (_a15 = latestObject == null ? void 0 : latestObject.length) != null ? _a15 : 0;
1998
2032
  let textDelta = "";
1999
2033
  if (isFirstDelta) {
2000
2034
  textDelta += "[";
@@ -2015,10 +2049,10 @@ var arrayOutputStrategy = (schema) => {
2015
2049
  };
2016
2050
  },
2017
2051
  validateFinalResult(value) {
2018
- if (!(0, import_provider10.isJSONObject)(value) || !(0, import_provider10.isJSONArray)(value.elements)) {
2052
+ if (!(0, import_provider11.isJSONObject)(value) || !(0, import_provider11.isJSONArray)(value.elements)) {
2019
2053
  return {
2020
2054
  success: false,
2021
- error: new import_provider10.TypeValidationError({
2055
+ error: new import_provider11.TypeValidationError({
2022
2056
  value,
2023
2057
  cause: "value must be an object that contains an array of elements"
2024
2058
  })
@@ -2083,10 +2117,10 @@ var enumOutputStrategy = (enumValues) => {
2083
2117
  additionalProperties: false
2084
2118
  },
2085
2119
  validateFinalResult(value) {
2086
- if (!(0, import_provider10.isJSONObject)(value) || typeof value.result !== "string") {
2120
+ if (!(0, import_provider11.isJSONObject)(value) || typeof value.result !== "string") {
2087
2121
  return {
2088
2122
  success: false,
2089
- error: new import_provider10.TypeValidationError({
2123
+ error: new import_provider11.TypeValidationError({
2090
2124
  value,
2091
2125
  cause: 'value must be an object that contains a string in the "result" property.'
2092
2126
  })
@@ -2095,19 +2129,19 @@ var enumOutputStrategy = (enumValues) => {
2095
2129
  const result = value.result;
2096
2130
  return enumValues.includes(result) ? { success: true, value: result } : {
2097
2131
  success: false,
2098
- error: new import_provider10.TypeValidationError({
2132
+ error: new import_provider11.TypeValidationError({
2099
2133
  value,
2100
2134
  cause: "value must be a string in the enum"
2101
2135
  })
2102
2136
  };
2103
2137
  },
2104
2138
  validatePartialResult() {
2105
- throw new import_provider10.UnsupportedFunctionalityError({
2139
+ throw new import_provider11.UnsupportedFunctionalityError({
2106
2140
  functionality: "partial results in enum mode"
2107
2141
  });
2108
2142
  },
2109
2143
  createElementStream() {
2110
- throw new import_provider10.UnsupportedFunctionalityError({
2144
+ throw new import_provider11.UnsupportedFunctionalityError({
2111
2145
  functionality: "element streams in enum mode"
2112
2146
  });
2113
2147
  }
@@ -2332,7 +2366,7 @@ async function generateObject({
2332
2366
  }),
2333
2367
  tracer,
2334
2368
  fn: async (span) => {
2335
- var _a14, _b;
2369
+ var _a15, _b;
2336
2370
  if (mode === "auto" || mode == null) {
2337
2371
  mode = model.defaultObjectGenerationMode;
2338
2372
  }
@@ -2394,7 +2428,7 @@ async function generateObject({
2394
2428
  }),
2395
2429
  tracer,
2396
2430
  fn: async (span2) => {
2397
- var _a15, _b2, _c, _d, _e, _f;
2431
+ var _a16, _b2, _c, _d, _e, _f;
2398
2432
  const result2 = await model.doGenerate({
2399
2433
  mode: {
2400
2434
  type: "object-json",
@@ -2410,7 +2444,7 @@ async function generateObject({
2410
2444
  headers
2411
2445
  });
2412
2446
  const responseData = {
2413
- id: (_b2 = (_a15 = result2.response) == null ? void 0 : _a15.id) != null ? _b2 : generateId3(),
2447
+ id: (_b2 = (_a16 = result2.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId3(),
2414
2448
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
2415
2449
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
2416
2450
  };
@@ -2452,7 +2486,7 @@ async function generateObject({
2452
2486
  rawResponse = generateResult.rawResponse;
2453
2487
  logprobs = generateResult.logprobs;
2454
2488
  resultProviderMetadata = generateResult.providerMetadata;
2455
- request = (_a14 = generateResult.request) != null ? _a14 : {};
2489
+ request = (_a15 = generateResult.request) != null ? _a15 : {};
2456
2490
  response = generateResult.responseData;
2457
2491
  break;
2458
2492
  }
@@ -2498,7 +2532,7 @@ async function generateObject({
2498
2532
  }),
2499
2533
  tracer,
2500
2534
  fn: async (span2) => {
2501
- var _a15, _b2, _c, _d, _e, _f, _g, _h;
2535
+ var _a16, _b2, _c, _d, _e, _f, _g, _h;
2502
2536
  const result2 = await model.doGenerate({
2503
2537
  mode: {
2504
2538
  type: "object-tool",
@@ -2516,7 +2550,7 @@ async function generateObject({
2516
2550
  abortSignal,
2517
2551
  headers
2518
2552
  });
2519
- const objectText = (_b2 = (_a15 = result2.toolCalls) == null ? void 0 : _a15[0]) == null ? void 0 : _b2.args;
2553
+ const objectText = (_b2 = (_a16 = result2.toolCalls) == null ? void 0 : _a16[0]) == null ? void 0 : _b2.args;
2520
2554
  const responseData = {
2521
2555
  id: (_d = (_c = result2.response) == null ? void 0 : _c.id) != null ? _d : generateId3(),
2522
2556
  timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
@@ -2642,9 +2676,9 @@ var DefaultGenerateObjectResult = class {
2642
2676
  this.logprobs = options.logprobs;
2643
2677
  }
2644
2678
  toJsonResponse(init) {
2645
- var _a14;
2679
+ var _a15;
2646
2680
  return new Response(JSON.stringify(this.object), {
2647
- status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
2681
+ status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
2648
2682
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
2649
2683
  contentType: "application/json; charset=utf-8"
2650
2684
  })
@@ -2679,17 +2713,17 @@ var DelayedPromise = class {
2679
2713
  return this.promise;
2680
2714
  }
2681
2715
  resolve(value) {
2682
- var _a14;
2716
+ var _a15;
2683
2717
  this.status = { type: "resolved", value };
2684
2718
  if (this.promise) {
2685
- (_a14 = this._resolve) == null ? void 0 : _a14.call(this, value);
2719
+ (_a15 = this._resolve) == null ? void 0 : _a15.call(this, value);
2686
2720
  }
2687
2721
  }
2688
2722
  reject(error) {
2689
- var _a14;
2723
+ var _a15;
2690
2724
  this.status = { type: "rejected", error };
2691
2725
  if (this.promise) {
2692
- (_a14 = this._reject) == null ? void 0 : _a14.call(this, error);
2726
+ (_a15 = this._reject) == null ? void 0 : _a15.call(this, error);
2693
2727
  }
2694
2728
  }
2695
2729
  };
@@ -2793,8 +2827,8 @@ function createStitchableStream() {
2793
2827
 
2794
2828
  // core/util/now.ts
2795
2829
  function now() {
2796
- var _a14, _b;
2797
- return (_b = (_a14 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a14.now()) != null ? _b : Date.now();
2830
+ var _a15, _b;
2831
+ return (_b = (_a15 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a15.now()) != null ? _b : Date.now();
2798
2832
  }
2799
2833
 
2800
2834
  // core/generate-object/stream-object.ts
@@ -3083,7 +3117,7 @@ var DefaultStreamObjectResult = class {
3083
3117
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
3084
3118
  new TransformStream({
3085
3119
  async transform(chunk, controller) {
3086
- var _a14, _b, _c;
3120
+ var _a15, _b, _c;
3087
3121
  if (isFirstChunk) {
3088
3122
  const msToFirstChunk = now2() - startTimestampMs;
3089
3123
  isFirstChunk = false;
@@ -3129,7 +3163,7 @@ var DefaultStreamObjectResult = class {
3129
3163
  switch (chunk.type) {
3130
3164
  case "response-metadata": {
3131
3165
  response = {
3132
- id: (_a14 = chunk.id) != null ? _a14 : response.id,
3166
+ id: (_a15 = chunk.id) != null ? _a15 : response.id,
3133
3167
  timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
3134
3168
  modelId: (_c = chunk.modelId) != null ? _c : response.modelId
3135
3169
  };
@@ -3343,9 +3377,9 @@ var DefaultStreamObjectResult = class {
3343
3377
  });
3344
3378
  }
3345
3379
  toTextStreamResponse(init) {
3346
- var _a14;
3380
+ var _a15;
3347
3381
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
3348
- status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
3382
+ status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
3349
3383
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
3350
3384
  contentType: "text/plain; charset=utf-8"
3351
3385
  })
@@ -3357,48 +3391,48 @@ var DefaultStreamObjectResult = class {
3357
3391
  var import_provider_utils9 = require("@ai-sdk/provider-utils");
3358
3392
 
3359
3393
  // errors/no-output-specified-error.ts
3360
- var import_provider11 = require("@ai-sdk/provider");
3361
- var name8 = "AI_NoOutputSpecifiedError";
3362
- var marker8 = `vercel.ai.error.${name8}`;
3363
- var symbol8 = Symbol.for(marker8);
3364
- var _a8;
3365
- var NoOutputSpecifiedError = class extends import_provider11.AISDKError {
3394
+ var import_provider12 = require("@ai-sdk/provider");
3395
+ var name9 = "AI_NoOutputSpecifiedError";
3396
+ var marker9 = `vercel.ai.error.${name9}`;
3397
+ var symbol9 = Symbol.for(marker9);
3398
+ var _a9;
3399
+ var NoOutputSpecifiedError = class extends import_provider12.AISDKError {
3366
3400
  // used in isInstance
3367
3401
  constructor({ message = "No output specified." } = {}) {
3368
- super({ name: name8, message });
3369
- this[_a8] = true;
3402
+ super({ name: name9, message });
3403
+ this[_a9] = true;
3370
3404
  }
3371
3405
  static isInstance(error) {
3372
- return import_provider11.AISDKError.hasMarker(error, marker8);
3406
+ return import_provider12.AISDKError.hasMarker(error, marker9);
3373
3407
  }
3374
3408
  };
3375
- _a8 = symbol8;
3409
+ _a9 = symbol9;
3376
3410
 
3377
3411
  // errors/tool-execution-error.ts
3378
- var import_provider12 = require("@ai-sdk/provider");
3379
- var name9 = "AI_ToolExecutionError";
3380
- var marker9 = `vercel.ai.error.${name9}`;
3381
- var symbol9 = Symbol.for(marker9);
3382
- var _a9;
3383
- var ToolExecutionError = class extends import_provider12.AISDKError {
3412
+ var import_provider13 = require("@ai-sdk/provider");
3413
+ var name10 = "AI_ToolExecutionError";
3414
+ var marker10 = `vercel.ai.error.${name10}`;
3415
+ var symbol10 = Symbol.for(marker10);
3416
+ var _a10;
3417
+ var ToolExecutionError = class extends import_provider13.AISDKError {
3384
3418
  constructor({
3385
3419
  toolArgs,
3386
3420
  toolName,
3387
3421
  toolCallId,
3388
3422
  cause,
3389
- message = `Error executing tool ${toolName}: ${(0, import_provider12.getErrorMessage)(cause)}`
3423
+ message = `Error executing tool ${toolName}: ${(0, import_provider13.getErrorMessage)(cause)}`
3390
3424
  }) {
3391
- super({ name: name9, message, cause });
3392
- this[_a9] = true;
3425
+ super({ name: name10, message, cause });
3426
+ this[_a10] = true;
3393
3427
  this.toolArgs = toolArgs;
3394
3428
  this.toolName = toolName;
3395
3429
  this.toolCallId = toolCallId;
3396
3430
  }
3397
3431
  static isInstance(error) {
3398
- return import_provider12.AISDKError.hasMarker(error, marker9);
3432
+ return import_provider13.AISDKError.hasMarker(error, marker10);
3399
3433
  }
3400
3434
  };
3401
- _a9 = symbol9;
3435
+ _a10 = symbol10;
3402
3436
 
3403
3437
  // core/prompt/prepare-tools-and-tool-choice.ts
3404
3438
  var import_ui_utils4 = require("@ai-sdk/ui-utils");
@@ -3421,24 +3455,24 @@ function prepareToolsAndToolChoice({
3421
3455
  };
3422
3456
  }
3423
3457
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
3424
- ([name14]) => activeTools.includes(name14)
3458
+ ([name15]) => activeTools.includes(name15)
3425
3459
  ) : Object.entries(tools);
3426
3460
  return {
3427
- tools: filteredTools.map(([name14, tool2]) => {
3461
+ tools: filteredTools.map(([name15, tool2]) => {
3428
3462
  const toolType = tool2.type;
3429
3463
  switch (toolType) {
3430
3464
  case void 0:
3431
3465
  case "function":
3432
3466
  return {
3433
3467
  type: "function",
3434
- name: name14,
3468
+ name: name15,
3435
3469
  description: tool2.description,
3436
3470
  parameters: (0, import_ui_utils4.asSchema)(tool2.parameters).jsonSchema
3437
3471
  };
3438
3472
  case "provider-defined":
3439
3473
  return {
3440
3474
  type: "provider-defined",
3441
- name: name14,
3475
+ name: name15,
3442
3476
  id: tool2.id,
3443
3477
  args: tool2.args
3444
3478
  };
@@ -3470,75 +3504,75 @@ var import_provider_utils8 = require("@ai-sdk/provider-utils");
3470
3504
  var import_ui_utils5 = require("@ai-sdk/ui-utils");
3471
3505
 
3472
3506
  // errors/invalid-tool-arguments-error.ts
3473
- var import_provider13 = require("@ai-sdk/provider");
3474
- var name10 = "AI_InvalidToolArgumentsError";
3475
- var marker10 = `vercel.ai.error.${name10}`;
3476
- var symbol10 = Symbol.for(marker10);
3477
- var _a10;
3478
- var InvalidToolArgumentsError = class extends import_provider13.AISDKError {
3507
+ var import_provider14 = require("@ai-sdk/provider");
3508
+ var name11 = "AI_InvalidToolArgumentsError";
3509
+ var marker11 = `vercel.ai.error.${name11}`;
3510
+ var symbol11 = Symbol.for(marker11);
3511
+ var _a11;
3512
+ var InvalidToolArgumentsError = class extends import_provider14.AISDKError {
3479
3513
  constructor({
3480
3514
  toolArgs,
3481
3515
  toolName,
3482
3516
  cause,
3483
- message = `Invalid arguments for tool ${toolName}: ${(0, import_provider13.getErrorMessage)(
3517
+ message = `Invalid arguments for tool ${toolName}: ${(0, import_provider14.getErrorMessage)(
3484
3518
  cause
3485
3519
  )}`
3486
3520
  }) {
3487
- super({ name: name10, message, cause });
3488
- this[_a10] = true;
3521
+ super({ name: name11, message, cause });
3522
+ this[_a11] = true;
3489
3523
  this.toolArgs = toolArgs;
3490
3524
  this.toolName = toolName;
3491
3525
  }
3492
3526
  static isInstance(error) {
3493
- return import_provider13.AISDKError.hasMarker(error, marker10);
3527
+ return import_provider14.AISDKError.hasMarker(error, marker11);
3494
3528
  }
3495
3529
  };
3496
- _a10 = symbol10;
3530
+ _a11 = symbol11;
3497
3531
 
3498
3532
  // errors/no-such-tool-error.ts
3499
- var import_provider14 = require("@ai-sdk/provider");
3500
- var name11 = "AI_NoSuchToolError";
3501
- var marker11 = `vercel.ai.error.${name11}`;
3502
- var symbol11 = Symbol.for(marker11);
3503
- var _a11;
3504
- var NoSuchToolError = class extends import_provider14.AISDKError {
3533
+ var import_provider15 = require("@ai-sdk/provider");
3534
+ var name12 = "AI_NoSuchToolError";
3535
+ var marker12 = `vercel.ai.error.${name12}`;
3536
+ var symbol12 = Symbol.for(marker12);
3537
+ var _a12;
3538
+ var NoSuchToolError = class extends import_provider15.AISDKError {
3505
3539
  constructor({
3506
3540
  toolName,
3507
3541
  availableTools = void 0,
3508
3542
  message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
3509
3543
  }) {
3510
- super({ name: name11, message });
3511
- this[_a11] = true;
3544
+ super({ name: name12, message });
3545
+ this[_a12] = true;
3512
3546
  this.toolName = toolName;
3513
3547
  this.availableTools = availableTools;
3514
3548
  }
3515
3549
  static isInstance(error) {
3516
- return import_provider14.AISDKError.hasMarker(error, marker11);
3550
+ return import_provider15.AISDKError.hasMarker(error, marker12);
3517
3551
  }
3518
3552
  };
3519
- _a11 = symbol11;
3553
+ _a12 = symbol12;
3520
3554
 
3521
3555
  // errors/tool-call-repair-error.ts
3522
- var import_provider15 = require("@ai-sdk/provider");
3523
- var name12 = "AI_ToolCallRepairError";
3524
- var marker12 = `vercel.ai.error.${name12}`;
3525
- var symbol12 = Symbol.for(marker12);
3526
- var _a12;
3527
- var ToolCallRepairError = class extends import_provider15.AISDKError {
3556
+ var import_provider16 = require("@ai-sdk/provider");
3557
+ var name13 = "AI_ToolCallRepairError";
3558
+ var marker13 = `vercel.ai.error.${name13}`;
3559
+ var symbol13 = Symbol.for(marker13);
3560
+ var _a13;
3561
+ var ToolCallRepairError = class extends import_provider16.AISDKError {
3528
3562
  constructor({
3529
3563
  cause,
3530
3564
  originalError,
3531
- message = `Error repairing tool call: ${(0, import_provider15.getErrorMessage)(cause)}`
3565
+ message = `Error repairing tool call: ${(0, import_provider16.getErrorMessage)(cause)}`
3532
3566
  }) {
3533
- super({ name: name12, message, cause });
3534
- this[_a12] = true;
3567
+ super({ name: name13, message, cause });
3568
+ this[_a13] = true;
3535
3569
  this.originalError = originalError;
3536
3570
  }
3537
3571
  static isInstance(error) {
3538
- return import_provider15.AISDKError.hasMarker(error, marker12);
3572
+ return import_provider16.AISDKError.hasMarker(error, marker13);
3539
3573
  }
3540
3574
  };
3541
- _a12 = symbol12;
3575
+ _a13 = symbol13;
3542
3576
 
3543
3577
  // core/generate-text/parse-tool-call.ts
3544
3578
  async function parseToolCall({
@@ -3683,7 +3717,7 @@ async function generateText({
3683
3717
  onStepFinish,
3684
3718
  ...settings
3685
3719
  }) {
3686
- var _a14;
3720
+ var _a15;
3687
3721
  if (maxSteps < 1) {
3688
3722
  throw new InvalidArgumentError({
3689
3723
  parameter: "maxSteps",
@@ -3700,7 +3734,7 @@ async function generateText({
3700
3734
  });
3701
3735
  const initialPrompt = standardizePrompt({
3702
3736
  prompt: {
3703
- system: (_a14 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a14 : system,
3737
+ system: (_a15 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a15 : system,
3704
3738
  prompt,
3705
3739
  messages
3706
3740
  },
@@ -3726,7 +3760,7 @@ async function generateText({
3726
3760
  }),
3727
3761
  tracer,
3728
3762
  fn: async (span) => {
3729
- var _a15, _b, _c, _d, _e, _f;
3763
+ var _a16, _b, _c, _d, _e, _f;
3730
3764
  const mode = {
3731
3765
  type: "regular",
3732
3766
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
@@ -3778,8 +3812,8 @@ async function generateText({
3778
3812
  "ai.prompt.tools": {
3779
3813
  // convert the language model level tools:
3780
3814
  input: () => {
3781
- var _a16;
3782
- return (_a16 = mode.tools) == null ? void 0 : _a16.map((tool2) => JSON.stringify(tool2));
3815
+ var _a17;
3816
+ return (_a17 = mode.tools) == null ? void 0 : _a17.map((tool2) => JSON.stringify(tool2));
3783
3817
  }
3784
3818
  },
3785
3819
  "ai.prompt.toolChoice": {
@@ -3799,7 +3833,7 @@ async function generateText({
3799
3833
  }),
3800
3834
  tracer,
3801
3835
  fn: async (span2) => {
3802
- var _a16, _b2, _c2, _d2, _e2, _f2;
3836
+ var _a17, _b2, _c2, _d2, _e2, _f2;
3803
3837
  const result = await model.doGenerate({
3804
3838
  mode,
3805
3839
  ...callSettings,
@@ -3811,7 +3845,7 @@ async function generateText({
3811
3845
  headers
3812
3846
  });
3813
3847
  const responseData = {
3814
- id: (_b2 = (_a16 = result.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId3(),
3848
+ id: (_b2 = (_a17 = result.response) == null ? void 0 : _a17.id) != null ? _b2 : generateId3(),
3815
3849
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
3816
3850
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
3817
3851
  };
@@ -3845,7 +3879,7 @@ async function generateText({
3845
3879
  })
3846
3880
  );
3847
3881
  currentToolCalls = await Promise.all(
3848
- ((_a15 = currentModelResponse.toolCalls) != null ? _a15 : []).map(
3882
+ ((_a16 = currentModelResponse.toolCalls) != null ? _a16 : []).map(
3849
3883
  (toolCall) => parseToolCall({
3850
3884
  toolCall,
3851
3885
  tools,
@@ -4083,7 +4117,7 @@ var import_provider_utils10 = require("@ai-sdk/provider-utils");
4083
4117
  var import_ui_utils6 = require("@ai-sdk/ui-utils");
4084
4118
 
4085
4119
  // errors/index.ts
4086
- var import_provider16 = require("@ai-sdk/provider");
4120
+ var import_provider17 = require("@ai-sdk/provider");
4087
4121
 
4088
4122
  // core/generate-text/output.ts
4089
4123
  var text = () => ({
@@ -4163,7 +4197,7 @@ var object = ({
4163
4197
  };
4164
4198
 
4165
4199
  // core/generate-text/smooth-stream.ts
4166
- var import_provider17 = require("@ai-sdk/provider");
4200
+ var import_provider18 = require("@ai-sdk/provider");
4167
4201
  var CHUNKING_REGEXPS = {
4168
4202
  word: /\s*\S+\s+/m,
4169
4203
  line: /[^\n]*\n/m
@@ -4175,7 +4209,7 @@ function smoothStream({
4175
4209
  } = {}) {
4176
4210
  const chunkingRegexp = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking;
4177
4211
  if (chunkingRegexp == null) {
4178
- throw new import_provider17.InvalidArgumentError({
4212
+ throw new import_provider18.InvalidArgumentError({
4179
4213
  argument: "chunking",
4180
4214
  message: `Chunking must be "word" or "line" or a RegExp. Received: ${chunking}`
4181
4215
  });
@@ -4522,7 +4556,8 @@ function streamText({
  experimental_continueSteps: continueSteps = false,
  experimental_telemetry: telemetry,
  experimental_providerMetadata: providerMetadata,
- experimental_toolCallStreaming: toolCallStreaming = false,
+ experimental_toolCallStreaming = false,
+ toolCallStreaming = experimental_toolCallStreaming,
  experimental_activeTools: activeTools,
  experimental_repairToolCall: repairToolCall,
  experimental_transform: transform,
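This hunk promotes tool-call streaming to a stable option name: `streamText` now reads `toolCallStreaming` directly and uses `experimental_toolCallStreaming` only as its default, so existing callers keep working. A rough sketch of the option in use, assuming the 4.x `fullStream` part shapes and a provider model chosen purely for illustration:

```ts
import { streamText, tool } from "ai";
import { openai } from "@ai-sdk/openai"; // assumption: any @ai-sdk provider works here
import { z } from "zod";

const result = streamText({
  model: openai("gpt-4o-mini"), // hypothetical model choice for illustration
  tools: {
    weather: tool({
      description: "Get the weather for a city",
      parameters: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, temperature: 21 }), // stubbed result
    }),
  },
  toolCallStreaming: true, // stable name as of 4.1.2; experimental_toolCallStreaming still accepted
  prompt: "What is the weather in Berlin?",
});

for await (const part of result.fullStream) {
  if (part.type === "tool-call-delta") {
    // with tool-call streaming enabled, partial argument text is emitted as it is generated
    process.stdout.write(part.argsTextDelta);
  }
}
```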
@@ -4656,7 +4691,7 @@ var DefaultStreamTextResult = class {
4656
4691
  this.requestPromise = new DelayedPromise();
4657
4692
  this.responsePromise = new DelayedPromise();
4658
4693
  this.stepsPromise = new DelayedPromise();
4659
- var _a14;
4694
+ var _a15;
4660
4695
  if (maxSteps < 1) {
4661
4696
  throw new InvalidArgumentError({
4662
4697
  parameter: "maxSteps",
@@ -4767,7 +4802,7 @@ var DefaultStreamTextResult = class {
4767
4802
  }
4768
4803
  },
4769
4804
  async flush(controller) {
4770
- var _a15;
4805
+ var _a16;
4771
4806
  try {
4772
4807
  if (recordedSteps.length === 0) {
4773
4808
  return;
@@ -4800,7 +4835,7 @@ var DefaultStreamTextResult = class {
4800
4835
  reasoning: recordedReasoningText,
4801
4836
  toolCalls: lastStep.toolCalls,
4802
4837
  toolResults: lastStep.toolResults,
4803
- request: (_a15 = lastStep.request) != null ? _a15 : {},
4838
+ request: (_a16 = lastStep.request) != null ? _a16 : {},
4804
4839
  response: lastStep.response,
4805
4840
  warnings: lastStep.warnings,
4806
4841
  experimental_providerMetadata: lastStep.experimental_providerMetadata,
@@ -4814,8 +4849,8 @@ var DefaultStreamTextResult = class {
4814
4849
  "ai.response.text": { output: () => recordedFullText },
4815
4850
  "ai.response.toolCalls": {
4816
4851
  output: () => {
4817
- var _a16;
4818
- return ((_a16 = lastStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(lastStep.toolCalls) : void 0;
4852
+ var _a17;
4853
+ return ((_a17 = lastStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(lastStep.toolCalls) : void 0;
4819
4854
  }
4820
4855
  },
4821
4856
  "ai.usage.promptTokens": usage.promptTokens,
@@ -4857,7 +4892,7 @@ var DefaultStreamTextResult = class {
4857
4892
  });
4858
4893
  const initialPrompt = standardizePrompt({
4859
4894
  prompt: {
4860
- system: (_a14 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a14 : system,
4895
+ system: (_a15 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a15 : system,
4861
4896
  prompt,
4862
4897
  messages
4863
4898
  },
@@ -4933,8 +4968,8 @@ var DefaultStreamTextResult = class {
4933
4968
  "ai.prompt.tools": {
4934
4969
  // convert the language model level tools:
4935
4970
  input: () => {
4936
- var _a15;
4937
- return (_a15 = mode.tools) == null ? void 0 : _a15.map((tool2) => JSON.stringify(tool2));
4971
+ var _a16;
4972
+ return (_a16 = mode.tools) == null ? void 0 : _a16.map((tool2) => JSON.stringify(tool2));
4938
4973
  }
4939
4974
  },
4940
4975
  "ai.prompt.toolChoice": {
@@ -5020,7 +5055,7 @@ var DefaultStreamTextResult = class {
5020
5055
  transformedStream.pipeThrough(
5021
5056
  new TransformStream({
5022
5057
  async transform(chunk, controller) {
5023
- var _a15, _b, _c;
5058
+ var _a16, _b, _c;
5024
5059
  if (stepFirstChunk) {
5025
5060
  const msToFirstChunk = now2() - startTimestampMs;
5026
5061
  stepFirstChunk = false;
@@ -5083,7 +5118,7 @@ var DefaultStreamTextResult = class {
5083
5118
  }
5084
5119
  case "response-metadata": {
5085
5120
  stepResponse = {
5086
- id: (_a15 = chunk.id) != null ? _a15 : stepResponse.id,
5121
+ id: (_a16 = chunk.id) != null ? _a16 : stepResponse.id,
5087
5122
  timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
5088
5123
  modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
5089
5124
  };
@@ -5536,9 +5571,9 @@ var DefaultStreamTextResult = class {
5536
5571
  );
5537
5572
  }
5538
5573
  toTextStreamResponse(init) {
5539
- var _a14;
5574
+ var _a15;
5540
5575
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
5541
- status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
5576
+ status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
5542
5577
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
5543
5578
  contentType: "text/plain; charset=utf-8"
5544
5579
  })
@@ -5585,7 +5620,7 @@ function appendResponseMessages({
5585
5620
  messages,
5586
5621
  responseMessages
5587
5622
  }) {
5588
- var _a14;
5623
+ var _a15;
5589
5624
  const clonedMessages = structuredClone(messages);
5590
5625
  for (const message of responseMessages) {
5591
5626
  const role = message.role;
@@ -5608,7 +5643,7 @@ function appendResponseMessages({
5608
5643
  }
5609
5644
  case "tool": {
5610
5645
  const previousMessage = clonedMessages[clonedMessages.length - 1];
5611
- (_a14 = previousMessage.toolInvocations) != null ? _a14 : previousMessage.toolInvocations = [];
5646
+ (_a15 = previousMessage.toolInvocations) != null ? _a15 : previousMessage.toolInvocations = [];
5612
5647
  if (previousMessage.role !== "assistant") {
5613
5648
  throw new Error(
5614
5649
  `Tool result must follow an assistant message: ${previousMessage.role}`
@@ -5637,7 +5672,7 @@ function appendResponseMessages({
5637
5672
  }
5638
5673
 
5639
5674
  // core/registry/custom-provider.ts
5640
- var import_provider18 = require("@ai-sdk/provider");
5675
+ var import_provider19 = require("@ai-sdk/provider");
5641
5676
  function experimental_customProvider({
5642
5677
  languageModels,
5643
5678
  textEmbeddingModels,
@@ -5651,7 +5686,7 @@ function experimental_customProvider({
5651
5686
  if (fallbackProvider) {
5652
5687
  return fallbackProvider.languageModel(modelId);
5653
5688
  }
5654
- throw new import_provider18.NoSuchModelError({ modelId, modelType: "languageModel" });
5689
+ throw new import_provider19.NoSuchModelError({ modelId, modelType: "languageModel" });
5655
5690
  },
5656
5691
  textEmbeddingModel(modelId) {
5657
5692
  if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
@@ -5660,18 +5695,18 @@ function experimental_customProvider({
5660
5695
  if (fallbackProvider) {
5661
5696
  return fallbackProvider.textEmbeddingModel(modelId);
5662
5697
  }
5663
- throw new import_provider18.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
5698
+ throw new import_provider19.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
5664
5699
  }
5665
5700
  };
5666
5701
  }
5667
5702
 
5668
5703
  // core/registry/no-such-provider-error.ts
5669
- var import_provider19 = require("@ai-sdk/provider");
5670
- var name13 = "AI_NoSuchProviderError";
5671
- var marker13 = `vercel.ai.error.${name13}`;
5672
- var symbol13 = Symbol.for(marker13);
5673
- var _a13;
5674
- var NoSuchProviderError = class extends import_provider19.NoSuchModelError {
5704
+ var import_provider20 = require("@ai-sdk/provider");
5705
+ var name14 = "AI_NoSuchProviderError";
5706
+ var marker14 = `vercel.ai.error.${name14}`;
5707
+ var symbol14 = Symbol.for(marker14);
5708
+ var _a14;
5709
+ var NoSuchProviderError = class extends import_provider20.NoSuchModelError {
5675
5710
  constructor({
5676
5711
  modelId,
5677
5712
  modelType,
@@ -5679,19 +5714,19 @@ var NoSuchProviderError = class extends import_provider19.NoSuchModelError {
5679
5714
  availableProviders,
5680
5715
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
5681
5716
  }) {
5682
- super({ errorName: name13, modelId, modelType, message });
5683
- this[_a13] = true;
5717
+ super({ errorName: name14, modelId, modelType, message });
5718
+ this[_a14] = true;
5684
5719
  this.providerId = providerId;
5685
5720
  this.availableProviders = availableProviders;
5686
5721
  }
5687
5722
  static isInstance(error) {
5688
- return import_provider19.AISDKError.hasMarker(error, marker13);
5723
+ return import_provider20.AISDKError.hasMarker(error, marker14);
5689
5724
  }
5690
5725
  };
5691
- _a13 = symbol13;
5726
+ _a14 = symbol14;
5692
5727
 
5693
5728
  // core/registry/provider-registry.ts
5694
- var import_provider20 = require("@ai-sdk/provider");
5729
+ var import_provider21 = require("@ai-sdk/provider");
5695
5730
  function experimental_createProviderRegistry(providers) {
5696
5731
  const registry = new DefaultProviderRegistry();
5697
5732
  for (const [id, provider] of Object.entries(providers)) {
@@ -5721,7 +5756,7 @@ var DefaultProviderRegistry = class {
5721
5756
  splitId(id, modelType) {
5722
5757
  const index = id.indexOf(":");
5723
5758
  if (index === -1) {
5724
- throw new import_provider20.NoSuchModelError({
5759
+ throw new import_provider21.NoSuchModelError({
5725
5760
  modelId: id,
5726
5761
  modelType,
5727
5762
  message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId:modelId")`
@@ -5730,21 +5765,21 @@ var DefaultProviderRegistry = class {
5730
5765
  return [id.slice(0, index), id.slice(index + 1)];
5731
5766
  }
5732
5767
  languageModel(id) {
5733
- var _a14, _b;
5768
+ var _a15, _b;
5734
5769
  const [providerId, modelId] = this.splitId(id, "languageModel");
5735
- const model = (_b = (_a14 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a14, modelId);
5770
+ const model = (_b = (_a15 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a15, modelId);
5736
5771
  if (model == null) {
5737
- throw new import_provider20.NoSuchModelError({ modelId: id, modelType: "languageModel" });
5772
+ throw new import_provider21.NoSuchModelError({ modelId: id, modelType: "languageModel" });
5738
5773
  }
5739
5774
  return model;
5740
5775
  }
5741
5776
  textEmbeddingModel(id) {
5742
- var _a14;
5777
+ var _a15;
5743
5778
  const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
5744
5779
  const provider = this.getProvider(providerId);
5745
- const model = (_a14 = provider.textEmbeddingModel) == null ? void 0 : _a14.call(provider, modelId);
5780
+ const model = (_a15 = provider.textEmbeddingModel) == null ? void 0 : _a15.call(provider, modelId);
5746
5781
  if (model == null) {
5747
- throw new import_provider20.NoSuchModelError({
5782
+ throw new import_provider21.NoSuchModelError({
5748
5783
  modelId: id,
5749
5784
  modelType: "textEmbeddingModel"
5750
5785
  });
@@ -5805,8 +5840,8 @@ function simulateReadableStream({
5805
5840
  chunkDelayInMs = 0,
5806
5841
  _internal
5807
5842
  }) {
5808
- var _a14;
5809
- const delay2 = (_a14 = _internal == null ? void 0 : _internal.delay) != null ? _a14 : delay;
5843
+ var _a15;
5844
+ const delay2 = (_a15 = _internal == null ? void 0 : _internal.delay) != null ? _a15 : delay;
5810
5845
  let index = 0;
5811
5846
  return new ReadableStream({
5812
5847
  async pull(controller) {
@@ -5825,7 +5860,7 @@ var import_ui_utils10 = require("@ai-sdk/ui-utils");
5825
5860
  function AssistantResponse({ threadId, messageId }, process2) {
5826
5861
  const stream = new ReadableStream({
5827
5862
  async start(controller) {
5828
- var _a14;
5863
+ var _a15;
5829
5864
  const textEncoder = new TextEncoder();
5830
5865
  const sendMessage = (message) => {
5831
5866
  controller.enqueue(
@@ -5847,7 +5882,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
5847
5882
  );
5848
5883
  };
5849
5884
  const forwardStream = async (stream2) => {
5850
- var _a15, _b;
5885
+ var _a16, _b;
5851
5886
  let result = void 0;
5852
5887
  for await (const value of stream2) {
5853
5888
  switch (value.event) {
@@ -5864,7 +5899,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
5864
5899
  break;
5865
5900
  }
5866
5901
  case "thread.message.delta": {
5867
- const content = (_a15 = value.data.delta.content) == null ? void 0 : _a15[0];
5902
+ const content = (_a16 = value.data.delta.content) == null ? void 0 : _a16[0];
5868
5903
  if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
5869
5904
  controller.enqueue(
5870
5905
  textEncoder.encode(
@@ -5898,7 +5933,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
5898
5933
  forwardStream
5899
5934
  });
5900
5935
  } catch (error) {
5901
- sendError((_a14 = error.message) != null ? _a14 : `${error}`);
5936
+ sendError((_a15 = error.message) != null ? _a15 : `${error}`);
5902
5937
  } finally {
5903
5938
  controller.close();
5904
5939
  }
@@ -5959,7 +5994,7 @@ function toDataStreamInternal(stream, callbacks) {
5959
5994
  return stream.pipeThrough(
5960
5995
  new TransformStream({
5961
5996
  transform: async (value, controller) => {
5962
- var _a14;
5997
+ var _a15;
5963
5998
  if (typeof value === "string") {
5964
5999
  controller.enqueue(value);
5965
6000
  return;
@@ -5967,7 +6002,7 @@ function toDataStreamInternal(stream, callbacks) {
5967
6002
  if ("event" in value) {
5968
6003
  if (value.event === "on_chat_model_stream") {
5969
6004
  forwardAIMessageChunk(
5970
- (_a14 = value.data) == null ? void 0 : _a14.chunk,
6005
+ (_a15 = value.data) == null ? void 0 : _a15.chunk,
5971
6006
  controller
5972
6007
  );
5973
6008
  }
@@ -5990,7 +6025,7 @@ function toDataStream(stream, callbacks) {
5990
6025
  );
5991
6026
  }
5992
6027
  function toDataStreamResponse(stream, options) {
5993
- var _a14;
6028
+ var _a15;
5994
6029
  const dataStream = toDataStreamInternal(
5995
6030
  stream,
5996
6031
  options == null ? void 0 : options.callbacks
@@ -5999,7 +6034,7 @@ function toDataStreamResponse(stream, options) {
5999
6034
  const init = options == null ? void 0 : options.init;
6000
6035
  const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
6001
6036
  return new Response(responseStream, {
6002
- status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
6037
+ status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
6003
6038
  statusText: init == null ? void 0 : init.statusText,
6004
6039
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
6005
6040
  contentType: "text/plain; charset=utf-8",
@@ -6054,14 +6089,14 @@ function toDataStream2(stream, callbacks) {
6054
6089
  );
6055
6090
  }
6056
6091
  function toDataStreamResponse2(stream, options = {}) {
6057
- var _a14;
6092
+ var _a15;
6058
6093
  const { init, data, callbacks } = options;
6059
6094
  const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
6060
6095
  new TextEncoderStream()
6061
6096
  );
6062
6097
  const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
6063
6098
  return new Response(responseStream, {
6064
- status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
6099
+ status: (_a15 = init == null ? void 0 : init.status) != null ? _a15 : 200,
6065
6100
  statusText: init == null ? void 0 : init.statusText,
6066
6101
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
6067
6102
  contentType: "text/plain; charset=utf-8",
@@ -6171,6 +6206,7 @@ var StreamData = class {
  LoadAPIKeyError,
  MessageConversionError,
  NoContentGeneratedError,
+ NoImageGeneratedError,
  NoObjectGeneratedError,
  NoOutputSpecifiedError,
  NoSuchModelError,