ai 6.0.42 → 6.0.43

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,7 +1,7 @@
1
1
  var __defProp = Object.defineProperty;
2
2
  var __export = (target, all) => {
3
- for (var name17 in all)
4
- __defProp(target, name17, { get: all[name17], enumerable: true });
3
+ for (var name18 in all)
4
+ __defProp(target, name18, { get: all[name18], enumerable: true });
5
5
  };
6
6
 
7
7
  // src/index.ts
@@ -26,7 +26,7 @@ import {
26
26
 
27
27
  // src/error/index.ts
28
28
  import {
29
- AISDKError as AISDKError18,
29
+ AISDKError as AISDKError19,
30
30
  APICallError,
31
31
  EmptyResponseBodyError,
32
32
  InvalidPromptError,
@@ -158,21 +158,22 @@ var ToolCallNotFoundForApprovalError = class extends AISDKError5 {
158
158
  };
159
159
  _a5 = symbol5;
160
160
 
161
- // src/error/no-image-generated-error.ts
161
+ // src/error/missing-tool-result-error.ts
162
162
  import { AISDKError as AISDKError6 } from "@ai-sdk/provider";
163
- var name6 = "AI_NoImageGeneratedError";
163
+ var name6 = "AI_MissingToolResultsError";
164
164
  var marker6 = `vercel.ai.error.${name6}`;
165
165
  var symbol6 = Symbol.for(marker6);
166
166
  var _a6;
167
- var NoImageGeneratedError = class extends AISDKError6 {
168
- constructor({
169
- message = "No image generated.",
170
- cause,
171
- responses
172
- }) {
173
- super({ name: name6, message, cause });
167
+ var MissingToolResultsError = class extends AISDKError6 {
168
+ constructor({ toolCallIds }) {
169
+ super({
170
+ name: name6,
171
+ message: `Tool result${toolCallIds.length > 1 ? "s are" : " is"} missing for tool call${toolCallIds.length > 1 ? "s" : ""} ${toolCallIds.join(
172
+ ", "
173
+ )}.`
174
+ });
174
175
  this[_a6] = true;
175
- this.responses = responses;
176
+ this.toolCallIds = toolCallIds;
176
177
  }
177
178
  static isInstance(error) {
178
179
  return AISDKError6.hasMarker(error, marker6);
@@ -180,13 +181,35 @@ var NoImageGeneratedError = class extends AISDKError6 {
180
181
  };
181
182
  _a6 = symbol6;
182
183
 
183
- // src/error/no-object-generated-error.ts
184
+ // src/error/no-image-generated-error.ts
184
185
  import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
185
- var name7 = "AI_NoObjectGeneratedError";
186
+ var name7 = "AI_NoImageGeneratedError";
186
187
  var marker7 = `vercel.ai.error.${name7}`;
187
188
  var symbol7 = Symbol.for(marker7);
188
189
  var _a7;
189
- var NoObjectGeneratedError = class extends AISDKError7 {
190
+ var NoImageGeneratedError = class extends AISDKError7 {
191
+ constructor({
192
+ message = "No image generated.",
193
+ cause,
194
+ responses
195
+ }) {
196
+ super({ name: name7, message, cause });
197
+ this[_a7] = true;
198
+ this.responses = responses;
199
+ }
200
+ static isInstance(error) {
201
+ return AISDKError7.hasMarker(error, marker7);
202
+ }
203
+ };
204
+ _a7 = symbol7;
205
+
206
+ // src/error/no-object-generated-error.ts
207
+ import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
208
+ var name8 = "AI_NoObjectGeneratedError";
209
+ var marker8 = `vercel.ai.error.${name8}`;
210
+ var symbol8 = Symbol.for(marker8);
211
+ var _a8;
212
+ var NoObjectGeneratedError = class extends AISDKError8 {
190
213
  constructor({
191
214
  message = "No object generated.",
192
215
  cause,
@@ -195,43 +218,43 @@ var NoObjectGeneratedError = class extends AISDKError7 {
195
218
  usage,
196
219
  finishReason
197
220
  }) {
198
- super({ name: name7, message, cause });
199
- this[_a7] = true;
221
+ super({ name: name8, message, cause });
222
+ this[_a8] = true;
200
223
  this.text = text2;
201
224
  this.response = response;
202
225
  this.usage = usage;
203
226
  this.finishReason = finishReason;
204
227
  }
205
228
  static isInstance(error) {
206
- return AISDKError7.hasMarker(error, marker7);
229
+ return AISDKError8.hasMarker(error, marker8);
207
230
  }
208
231
  };
209
- _a7 = symbol7;
232
+ _a8 = symbol8;
210
233
 
211
234
  // src/error/no-output-generated-error.ts
212
- import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
213
- var name8 = "AI_NoOutputGeneratedError";
214
- var marker8 = `vercel.ai.error.${name8}`;
215
- var symbol8 = Symbol.for(marker8);
216
- var _a8;
217
- var NoOutputGeneratedError = class extends AISDKError8 {
235
+ import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
236
+ var name9 = "AI_NoOutputGeneratedError";
237
+ var marker9 = `vercel.ai.error.${name9}`;
238
+ var symbol9 = Symbol.for(marker9);
239
+ var _a9;
240
+ var NoOutputGeneratedError = class extends AISDKError9 {
218
241
  // used in isInstance
219
242
  constructor({
220
243
  message = "No output generated.",
221
244
  cause
222
245
  } = {}) {
223
- super({ name: name8, message, cause });
224
- this[_a8] = true;
246
+ super({ name: name9, message, cause });
247
+ this[_a9] = true;
225
248
  }
226
249
  static isInstance(error) {
227
- return AISDKError8.hasMarker(error, marker8);
250
+ return AISDKError9.hasMarker(error, marker9);
228
251
  }
229
252
  };
230
- _a8 = symbol8;
253
+ _a9 = symbol9;
231
254
 
232
255
  // src/error/no-speech-generated-error.ts
233
- import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
234
- var NoSpeechGeneratedError = class extends AISDKError9 {
256
+ import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
257
+ var NoSpeechGeneratedError = class extends AISDKError10 {
235
258
  constructor(options) {
236
259
  super({
237
260
  name: "AI_NoSpeechGeneratedError",
@@ -242,53 +265,53 @@ var NoSpeechGeneratedError = class extends AISDKError9 {
242
265
  };
243
266
 
244
267
  // src/error/no-such-tool-error.ts
245
- import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
246
- var name9 = "AI_NoSuchToolError";
247
- var marker9 = `vercel.ai.error.${name9}`;
248
- var symbol9 = Symbol.for(marker9);
249
- var _a9;
250
- var NoSuchToolError = class extends AISDKError10 {
268
+ import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
269
+ var name10 = "AI_NoSuchToolError";
270
+ var marker10 = `vercel.ai.error.${name10}`;
271
+ var symbol10 = Symbol.for(marker10);
272
+ var _a10;
273
+ var NoSuchToolError = class extends AISDKError11 {
251
274
  constructor({
252
275
  toolName,
253
276
  availableTools = void 0,
254
277
  message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
255
278
  }) {
256
- super({ name: name9, message });
257
- this[_a9] = true;
279
+ super({ name: name10, message });
280
+ this[_a10] = true;
258
281
  this.toolName = toolName;
259
282
  this.availableTools = availableTools;
260
283
  }
261
284
  static isInstance(error) {
262
- return AISDKError10.hasMarker(error, marker9);
285
+ return AISDKError11.hasMarker(error, marker10);
263
286
  }
264
287
  };
265
- _a9 = symbol9;
288
+ _a10 = symbol10;
266
289
 
267
290
  // src/error/tool-call-repair-error.ts
268
- import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
269
- var name10 = "AI_ToolCallRepairError";
270
- var marker10 = `vercel.ai.error.${name10}`;
271
- var symbol10 = Symbol.for(marker10);
272
- var _a10;
273
- var ToolCallRepairError = class extends AISDKError11 {
291
+ import { AISDKError as AISDKError12, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
292
+ var name11 = "AI_ToolCallRepairError";
293
+ var marker11 = `vercel.ai.error.${name11}`;
294
+ var symbol11 = Symbol.for(marker11);
295
+ var _a11;
296
+ var ToolCallRepairError = class extends AISDKError12 {
274
297
  constructor({
275
298
  cause,
276
299
  originalError,
277
300
  message = `Error repairing tool call: ${getErrorMessage2(cause)}`
278
301
  }) {
279
- super({ name: name10, message, cause });
280
- this[_a10] = true;
302
+ super({ name: name11, message, cause });
303
+ this[_a11] = true;
281
304
  this.originalError = originalError;
282
305
  }
283
306
  static isInstance(error) {
284
- return AISDKError11.hasMarker(error, marker10);
307
+ return AISDKError12.hasMarker(error, marker11);
285
308
  }
286
309
  };
287
- _a10 = symbol10;
310
+ _a11 = symbol11;
288
311
 
289
312
  // src/error/unsupported-model-version-error.ts
290
- import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
291
- var UnsupportedModelVersionError = class extends AISDKError12 {
313
+ import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
314
+ var UnsupportedModelVersionError = class extends AISDKError13 {
292
315
  constructor(options) {
293
316
  super({
294
317
  name: "AI_UnsupportedModelVersionError",
@@ -301,118 +324,118 @@ var UnsupportedModelVersionError = class extends AISDKError12 {
301
324
  };
302
325
 
303
326
  // src/error/ui-message-stream-error.ts
304
- import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
305
- var name11 = "AI_UIMessageStreamError";
306
- var marker11 = `vercel.ai.error.${name11}`;
307
- var symbol11 = Symbol.for(marker11);
308
- var _a11;
309
- var UIMessageStreamError = class extends AISDKError13 {
327
+ import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
328
+ var name12 = "AI_UIMessageStreamError";
329
+ var marker12 = `vercel.ai.error.${name12}`;
330
+ var symbol12 = Symbol.for(marker12);
331
+ var _a12;
332
+ var UIMessageStreamError = class extends AISDKError14 {
310
333
  constructor({
311
334
  chunkType,
312
335
  chunkId,
313
336
  message
314
337
  }) {
315
- super({ name: name11, message });
316
- this[_a11] = true;
338
+ super({ name: name12, message });
339
+ this[_a12] = true;
317
340
  this.chunkType = chunkType;
318
341
  this.chunkId = chunkId;
319
342
  }
320
343
  static isInstance(error) {
321
- return AISDKError13.hasMarker(error, marker11);
344
+ return AISDKError14.hasMarker(error, marker12);
322
345
  }
323
346
  };
324
- _a11 = symbol11;
347
+ _a12 = symbol12;
325
348
 
326
349
  // src/prompt/invalid-data-content-error.ts
327
- import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
328
- var name12 = "AI_InvalidDataContentError";
329
- var marker12 = `vercel.ai.error.${name12}`;
330
- var symbol12 = Symbol.for(marker12);
331
- var _a12;
332
- var InvalidDataContentError = class extends AISDKError14 {
350
+ import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
351
+ var name13 = "AI_InvalidDataContentError";
352
+ var marker13 = `vercel.ai.error.${name13}`;
353
+ var symbol13 = Symbol.for(marker13);
354
+ var _a13;
355
+ var InvalidDataContentError = class extends AISDKError15 {
333
356
  constructor({
334
357
  content,
335
358
  cause,
336
359
  message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
337
360
  }) {
338
- super({ name: name12, message, cause });
339
- this[_a12] = true;
361
+ super({ name: name13, message, cause });
362
+ this[_a13] = true;
340
363
  this.content = content;
341
364
  }
342
365
  static isInstance(error) {
343
- return AISDKError14.hasMarker(error, marker12);
366
+ return AISDKError15.hasMarker(error, marker13);
344
367
  }
345
368
  };
346
- _a12 = symbol12;
369
+ _a13 = symbol13;
347
370
 
348
371
  // src/prompt/invalid-message-role-error.ts
349
- import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
350
- var name13 = "AI_InvalidMessageRoleError";
351
- var marker13 = `vercel.ai.error.${name13}`;
352
- var symbol13 = Symbol.for(marker13);
353
- var _a13;
354
- var InvalidMessageRoleError = class extends AISDKError15 {
372
+ import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
373
+ var name14 = "AI_InvalidMessageRoleError";
374
+ var marker14 = `vercel.ai.error.${name14}`;
375
+ var symbol14 = Symbol.for(marker14);
376
+ var _a14;
377
+ var InvalidMessageRoleError = class extends AISDKError16 {
355
378
  constructor({
356
379
  role,
357
380
  message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
358
381
  }) {
359
- super({ name: name13, message });
360
- this[_a13] = true;
382
+ super({ name: name14, message });
383
+ this[_a14] = true;
361
384
  this.role = role;
362
385
  }
363
386
  static isInstance(error) {
364
- return AISDKError15.hasMarker(error, marker13);
387
+ return AISDKError16.hasMarker(error, marker14);
365
388
  }
366
389
  };
367
- _a13 = symbol13;
390
+ _a14 = symbol14;
368
391
 
369
392
  // src/prompt/message-conversion-error.ts
370
- import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
371
- var name14 = "AI_MessageConversionError";
372
- var marker14 = `vercel.ai.error.${name14}`;
373
- var symbol14 = Symbol.for(marker14);
374
- var _a14;
375
- var MessageConversionError = class extends AISDKError16 {
393
+ import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
394
+ var name15 = "AI_MessageConversionError";
395
+ var marker15 = `vercel.ai.error.${name15}`;
396
+ var symbol15 = Symbol.for(marker15);
397
+ var _a15;
398
+ var MessageConversionError = class extends AISDKError17 {
376
399
  constructor({
377
400
  originalMessage,
378
401
  message
379
402
  }) {
380
- super({ name: name14, message });
381
- this[_a14] = true;
403
+ super({ name: name15, message });
404
+ this[_a15] = true;
382
405
  this.originalMessage = originalMessage;
383
406
  }
384
407
  static isInstance(error) {
385
- return AISDKError16.hasMarker(error, marker14);
408
+ return AISDKError17.hasMarker(error, marker15);
386
409
  }
387
410
  };
388
- _a14 = symbol14;
411
+ _a15 = symbol15;
389
412
 
390
413
  // src/error/index.ts
391
414
  import { DownloadError } from "@ai-sdk/provider-utils";
392
415
 
393
416
  // src/util/retry-error.ts
394
- import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
395
- var name15 = "AI_RetryError";
396
- var marker15 = `vercel.ai.error.${name15}`;
397
- var symbol15 = Symbol.for(marker15);
398
- var _a15;
399
- var RetryError = class extends AISDKError17 {
417
+ import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
418
+ var name16 = "AI_RetryError";
419
+ var marker16 = `vercel.ai.error.${name16}`;
420
+ var symbol16 = Symbol.for(marker16);
421
+ var _a16;
422
+ var RetryError = class extends AISDKError18 {
400
423
  constructor({
401
424
  message,
402
425
  reason,
403
426
  errors
404
427
  }) {
405
- super({ name: name15, message });
406
- this[_a15] = true;
428
+ super({ name: name16, message });
429
+ this[_a16] = true;
407
430
  this.reason = reason;
408
431
  this.errors = errors;
409
432
  this.lastError = errors[errors.length - 1];
410
433
  }
411
434
  static isInstance(error) {
412
- return AISDKError17.hasMarker(error, marker15);
435
+ return AISDKError18.hasMarker(error, marker16);
413
436
  }
414
437
  };
415
- _a15 = symbol15;
438
+ _a16 = symbol16;
416
439
 
417
440
  // src/logger/log-warnings.ts
418
441
  function formatWarning({
@@ -675,7 +698,7 @@ function resolveEmbeddingModel(model) {
675
698
  return getGlobalProvider().embeddingModel(model);
676
699
  }
677
700
  function resolveTranscriptionModel(model) {
678
- var _a17, _b;
701
+ var _a18, _b;
679
702
  if (typeof model !== "string") {
680
703
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
681
704
  const unsupportedModel = model;
@@ -687,10 +710,10 @@ function resolveTranscriptionModel(model) {
687
710
  }
688
711
  return asTranscriptionModelV3(model);
689
712
  }
690
- return (_b = (_a17 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a17, model);
713
+ return (_b = (_a18 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a18, model);
691
714
  }
692
715
  function resolveSpeechModel(model) {
693
- var _a17, _b;
716
+ var _a18, _b;
694
717
  if (typeof model !== "string") {
695
718
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
696
719
  const unsupportedModel = model;
@@ -702,7 +725,7 @@ function resolveSpeechModel(model) {
702
725
  }
703
726
  return asSpeechModelV3(model);
704
727
  }
705
- return (_b = (_a17 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a17, model);
728
+ return (_b = (_a18 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a18, model);
706
729
  }
707
730
  function resolveImageModel(model) {
708
731
  if (typeof model !== "string") {
@@ -719,8 +742,8 @@ function resolveImageModel(model) {
719
742
  return getGlobalProvider().imageModel(model);
720
743
  }
721
744
  function getGlobalProvider() {
722
- var _a17;
723
- return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : gateway;
745
+ var _a18;
746
+ return (_a18 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a18 : gateway;
724
747
  }
725
748
 
726
749
  // src/prompt/call-settings.ts
@@ -944,11 +967,11 @@ import {
944
967
  } from "@ai-sdk/provider-utils";
945
968
 
946
969
  // src/version.ts
947
- var VERSION = true ? "6.0.42" : "0.0.0-test";
970
+ var VERSION = true ? "6.0.43" : "0.0.0-test";
948
971
 
949
972
  // src/util/download/download.ts
950
973
  var download = async ({ url }) => {
951
- var _a17;
974
+ var _a18;
952
975
  const urlText = url.toString();
953
976
  try {
954
977
  const response = await fetch(urlText, {
@@ -967,7 +990,7 @@ var download = async ({ url }) => {
967
990
  }
968
991
  return {
969
992
  data: new Uint8Array(await response.arrayBuffer()),
970
- mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
993
+ mediaType: (_a18 = response.headers.get("content-type")) != null ? _a18 : void 0
971
994
  };
972
995
  } catch (error) {
973
996
  if (DownloadError2.isInstance(error)) {
@@ -985,7 +1008,7 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
985
1008
  );
986
1009
 
987
1010
  // src/prompt/data-content.ts
988
- import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
1011
+ import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
989
1012
  import {
990
1013
  convertBase64ToUint8Array as convertBase64ToUint8Array2,
991
1014
  convertUint8ArrayToBase64
@@ -1016,8 +1039,8 @@ var dataContentSchema = z.union([
1016
1039
  z.custom(
1017
1040
  // Buffer might not be available in some environments such as CloudFlare:
1018
1041
  (value) => {
1019
- var _a17, _b;
1020
- return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
1042
+ var _a18, _b;
1043
+ return (_b = (_a18 = globalThis.Buffer) == null ? void 0 : _a18.isBuffer(value)) != null ? _b : false;
1021
1044
  },
1022
1045
  { message: "Must be a Buffer" }
1023
1046
  )
@@ -1040,7 +1063,7 @@ function convertToLanguageModelV3DataContent(content) {
1040
1063
  content.toString()
1041
1064
  );
1042
1065
  if (dataUrlMediaType == null || base64Content == null) {
1043
- throw new AISDKError19({
1066
+ throw new AISDKError20({
1044
1067
  name: "InvalidDataContentError",
1045
1068
  message: `Invalid data URL format in content ${content.toString()}`
1046
1069
  });
@@ -1095,6 +1118,32 @@ async function convertToLanguageModelPrompt({
1095
1118
  download2,
1096
1119
  supportedUrls
1097
1120
  );
1121
+ const approvalIdToToolCallId = /* @__PURE__ */ new Map();
1122
+ for (const message of prompt.messages) {
1123
+ if (message.role === "assistant" && Array.isArray(message.content)) {
1124
+ for (const part of message.content) {
1125
+ if (part.type === "tool-approval-request" && "approvalId" in part && "toolCallId" in part) {
1126
+ approvalIdToToolCallId.set(
1127
+ part.approvalId,
1128
+ part.toolCallId
1129
+ );
1130
+ }
1131
+ }
1132
+ }
1133
+ }
1134
+ const approvedToolCallIds = /* @__PURE__ */ new Set();
1135
+ for (const message of prompt.messages) {
1136
+ if (message.role === "tool") {
1137
+ for (const part of message.content) {
1138
+ if (part.type === "tool-approval-response") {
1139
+ const toolCallId = approvalIdToToolCallId.get(part.approvalId);
1140
+ if (toolCallId) {
1141
+ approvedToolCallIds.add(toolCallId);
1142
+ }
1143
+ }
1144
+ }
1145
+ }
1146
+ }
1098
1147
  const messages = [
1099
1148
  ...prompt.system != null ? typeof prompt.system === "string" ? [{ role: "system", content: prompt.system }] : asArray(prompt.system).map((message) => ({
1100
1149
  role: "system",
@@ -1118,7 +1167,51 @@ async function convertToLanguageModelPrompt({
1118
1167
  combinedMessages.push(message);
1119
1168
  }
1120
1169
  }
1121
- return combinedMessages;
1170
+ const toolCallIds = /* @__PURE__ */ new Set();
1171
+ for (const message of combinedMessages) {
1172
+ switch (message.role) {
1173
+ case "assistant": {
1174
+ for (const content of message.content) {
1175
+ if (content.type === "tool-call" && !content.providerExecuted) {
1176
+ toolCallIds.add(content.toolCallId);
1177
+ }
1178
+ }
1179
+ break;
1180
+ }
1181
+ case "tool": {
1182
+ for (const content of message.content) {
1183
+ if (content.type === "tool-result") {
1184
+ toolCallIds.delete(content.toolCallId);
1185
+ }
1186
+ }
1187
+ break;
1188
+ }
1189
+ case "user":
1190
+ case "system":
1191
+ for (const id of approvedToolCallIds) {
1192
+ toolCallIds.delete(id);
1193
+ }
1194
+ if (toolCallIds.size > 0) {
1195
+ throw new MissingToolResultsError({
1196
+ toolCallIds: Array.from(toolCallIds)
1197
+ });
1198
+ }
1199
+ break;
1200
+ }
1201
+ }
1202
+ for (const id of approvedToolCallIds) {
1203
+ toolCallIds.delete(id);
1204
+ }
1205
+ if (toolCallIds.size > 0) {
1206
+ throw new MissingToolResultsError({ toolCallIds: Array.from(toolCallIds) });
1207
+ }
1208
+ return combinedMessages.filter(
1209
+ // Filter out empty tool messages (e.g. if they only contained
1210
+ // tool-approval-response parts that were removed).
1211
+ // This prevents sending invalid empty messages to the provider.
1212
+ // Note: provider-executed tool-approval-response parts are preserved.
1213
+ (message) => message.role !== "tool" || message.content.length > 0
1214
+ );
1122
1215
  }
1123
1216
  function convertToLanguageModelMessage({
1124
1217
  message,
@@ -1257,8 +1350,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
1257
1350
  ).flat().filter(
1258
1351
  (part) => part.type === "image" || part.type === "file"
1259
1352
  ).map((part) => {
1260
- var _a17;
1261
- const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
1353
+ var _a18;
1354
+ const mediaType = (_a18 = part.mediaType) != null ? _a18 : part.type === "image" ? "image/*" : void 0;
1262
1355
  let data = part.type === "image" ? part.image : part.data;
1263
1356
  if (typeof data === "string") {
1264
1357
  try {
@@ -1288,7 +1381,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
1288
1381
  );
1289
1382
  }
1290
1383
  function convertPartToLanguageModelPart(part, downloadedAssets) {
1291
- var _a17;
1384
+ var _a18;
1292
1385
  if (part.type === "text") {
1293
1386
  return {
1294
1387
  type: "text",
@@ -1321,7 +1414,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1321
1414
  switch (type) {
1322
1415
  case "image": {
1323
1416
  if (data instanceof Uint8Array || typeof data === "string") {
1324
- mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
1417
+ mediaType = (_a18 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a18 : mediaType;
1325
1418
  }
1326
1419
  return {
1327
1420
  type: "file",
@@ -1509,10 +1602,10 @@ async function prepareToolsAndToolChoice({
1509
1602
  };
1510
1603
  }
1511
1604
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
1512
- ([name17]) => activeTools.includes(name17)
1605
+ ([name18]) => activeTools.includes(name18)
1513
1606
  ) : Object.entries(tools);
1514
1607
  const languageModelTools = [];
1515
- for (const [name17, tool2] of filteredTools) {
1608
+ for (const [name18, tool2] of filteredTools) {
1516
1609
  const toolType = tool2.type;
1517
1610
  switch (toolType) {
1518
1611
  case void 0:
@@ -1520,7 +1613,7 @@ async function prepareToolsAndToolChoice({
1520
1613
  case "function":
1521
1614
  languageModelTools.push({
1522
1615
  type: "function",
1523
- name: name17,
1616
+ name: name18,
1524
1617
  description: tool2.description,
1525
1618
  inputSchema: await asSchema(tool2.inputSchema).jsonSchema,
1526
1619
  ...tool2.inputExamples != null ? { inputExamples: tool2.inputExamples } : {},
@@ -1531,7 +1624,7 @@ async function prepareToolsAndToolChoice({
1531
1624
  case "provider":
1532
1625
  languageModelTools.push({
1533
1626
  type: "provider",
1534
- name: name17,
1627
+ name: name18,
1535
1628
  id: tool2.id,
1536
1629
  args: tool2.args
1537
1630
  });
@@ -1821,14 +1914,14 @@ async function standardizePrompt(prompt) {
1821
1914
 
1822
1915
  // src/prompt/wrap-gateway-error.ts
1823
1916
  import { GatewayAuthenticationError } from "@ai-sdk/gateway";
1824
- import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
1917
+ import { AISDKError as AISDKError21 } from "@ai-sdk/provider";
1825
1918
  function wrapGatewayError(error) {
1826
1919
  if (!GatewayAuthenticationError.isInstance(error))
1827
1920
  return error;
1828
1921
  const isProductionEnv = (process == null ? void 0 : process.env.NODE_ENV) === "production";
1829
1922
  const moreInfoURL = "https://ai-sdk.dev/unauthenticated-ai-gateway";
1830
1923
  if (isProductionEnv) {
1831
- return new AISDKError20({
1924
+ return new AISDKError21({
1832
1925
  name: "GatewayError",
1833
1926
  message: `Unauthenticated. Configure AI_GATEWAY_API_KEY or use a provider module. Learn more: ${moreInfoURL}`
1834
1927
  });
@@ -1869,7 +1962,7 @@ function getBaseTelemetryAttributes({
1869
1962
  telemetry,
1870
1963
  headers
1871
1964
  }) {
1872
- var _a17;
1965
+ var _a18;
1873
1966
  return {
1874
1967
  "ai.model.provider": model.provider,
1875
1968
  "ai.model.id": model.modelId,
@@ -1888,7 +1981,7 @@ function getBaseTelemetryAttributes({
1888
1981
  return attributes;
1889
1982
  }, {}),
1890
1983
  // add metadata as attributes:
1891
- ...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
1984
+ ...Object.entries((_a18 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a18 : {}).reduce(
1892
1985
  (attributes, [key, value]) => {
1893
1986
  attributes[`ai.telemetry.metadata.${key}`] = value;
1894
1987
  return attributes;
@@ -1913,7 +2006,7 @@ var noopTracer = {
1913
2006
  startSpan() {
1914
2007
  return noopSpan;
1915
2008
  },
1916
- startActiveSpan(name17, arg1, arg2, arg3) {
2009
+ startActiveSpan(name18, arg1, arg2, arg3) {
1917
2010
  if (typeof arg1 === "function") {
1918
2011
  return arg1(noopSpan);
1919
2012
  }
@@ -1986,14 +2079,14 @@ import {
1986
2079
  context
1987
2080
  } from "@opentelemetry/api";
1988
2081
  async function recordSpan({
1989
- name: name17,
2082
+ name: name18,
1990
2083
  tracer,
1991
2084
  attributes,
1992
2085
  fn,
1993
2086
  endWhenDone = true
1994
2087
  }) {
1995
2088
  return tracer.startActiveSpan(
1996
- name17,
2089
+ name18,
1997
2090
  { attributes: await attributes },
1998
2091
  async (span) => {
1999
2092
  const ctx = context.active();
@@ -2124,12 +2217,12 @@ function createNullLanguageModelUsage() {
2124
2217
  };
2125
2218
  }
2126
2219
  function addLanguageModelUsage(usage1, usage2) {
2127
- var _a17, _b, _c, _d, _e, _f, _g, _h, _i, _j;
2220
+ var _a18, _b, _c, _d, _e, _f, _g, _h, _i, _j;
2128
2221
  return {
2129
2222
  inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
2130
2223
  inputTokenDetails: {
2131
2224
  noCacheTokens: addTokenCounts(
2132
- (_a17 = usage1.inputTokenDetails) == null ? void 0 : _a17.noCacheTokens,
2225
+ (_a18 = usage1.inputTokenDetails) == null ? void 0 : _a18.noCacheTokens,
2133
2226
  (_b = usage2.inputTokenDetails) == null ? void 0 : _b.noCacheTokens
2134
2227
  ),
2135
2228
  cacheReadTokens: addTokenCounts(
@@ -2948,7 +3041,7 @@ var text = () => ({
2948
3041
  });
2949
3042
  var object = ({
2950
3043
  schema: inputSchema,
2951
- name: name17,
3044
+ name: name18,
2952
3045
  description
2953
3046
  }) => {
2954
3047
  const schema = asSchema2(inputSchema);
@@ -2957,7 +3050,7 @@ var object = ({
2957
3050
  responseFormat: resolve(schema.jsonSchema).then((jsonSchema2) => ({
2958
3051
  type: "json",
2959
3052
  schema: jsonSchema2,
2960
- ...name17 != null && { name: name17 },
3053
+ ...name18 != null && { name: name18 },
2961
3054
  ...description != null && { description }
2962
3055
  })),
2963
3056
  async parseCompleteOutput({ text: text2 }, context2) {
@@ -3011,7 +3104,7 @@ var object = ({
3011
3104
  };
3012
3105
  var array = ({
3013
3106
  element: inputElementSchema,
3014
- name: name17,
3107
+ name: name18,
3015
3108
  description
3016
3109
  }) => {
3017
3110
  const elementSchema = asSchema2(inputElementSchema);
@@ -3031,7 +3124,7 @@ var array = ({
3031
3124
  required: ["elements"],
3032
3125
  additionalProperties: false
3033
3126
  },
3034
- ...name17 != null && { name: name17 },
3127
+ ...name18 != null && { name: name18 },
3035
3128
  ...description != null && { description }
3036
3129
  };
3037
3130
  }),
@@ -3123,7 +3216,7 @@ var array = ({
3123
3216
  };
3124
3217
  var choice = ({
3125
3218
  options: choiceOptions,
3126
- name: name17,
3219
+ name: name18,
3127
3220
  description
3128
3221
  }) => {
3129
3222
  return {
@@ -3140,7 +3233,7 @@ var choice = ({
3140
3233
  required: ["result"],
3141
3234
  additionalProperties: false
3142
3235
  },
3143
- ...name17 != null && { name: name17 },
3236
+ ...name18 != null && { name: name18 },
3144
3237
  ...description != null && { description }
3145
3238
  }),
3146
3239
  async parseCompleteOutput({ text: text2 }, context2) {
@@ -3201,14 +3294,14 @@ var choice = ({
3201
3294
  };
3202
3295
  };
3203
3296
  var json = ({
3204
- name: name17,
3297
+ name: name18,
3205
3298
  description
3206
3299
  } = {}) => {
3207
3300
  return {
3208
3301
  name: "json",
3209
3302
  responseFormat: Promise.resolve({
3210
3303
  type: "json",
3211
- ...name17 != null && { name: name17 },
3304
+ ...name18 != null && { name: name18 },
3212
3305
  ...description != null && { description }
3213
3306
  }),
3214
3307
  async parseCompleteOutput({ text: text2 }, context2) {
@@ -3257,7 +3350,7 @@ async function parseToolCall({
3257
3350
  system,
3258
3351
  messages
3259
3352
  }) {
3260
- var _a17;
3353
+ var _a18;
3261
3354
  try {
3262
3355
  if (tools == null) {
3263
3356
  if (toolCall.providerExecuted && toolCall.dynamic) {
@@ -3306,7 +3399,7 @@ async function parseToolCall({
3306
3399
  dynamic: true,
3307
3400
  invalid: true,
3308
3401
  error,
3309
- title: (_a17 = tools == null ? void 0 : tools[toolCall.toolName]) == null ? void 0 : _a17.title,
3402
+ title: (_a18 = tools == null ? void 0 : tools[toolCall.toolName]) == null ? void 0 : _a18.title,
3310
3403
  providerExecuted: toolCall.providerExecuted,
3311
3404
  providerMetadata: toolCall.providerMetadata
3312
3405
  };
@@ -3445,8 +3538,8 @@ function stepCountIs(stepCount) {
3445
3538
  }
3446
3539
  function hasToolCall(toolName) {
3447
3540
  return ({ steps }) => {
3448
- var _a17, _b, _c;
3449
- return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
3541
+ var _a18, _b, _c;
3542
+ return (_c = (_b = (_a18 = steps[steps.length - 1]) == null ? void 0 : _a18.toolCalls) == null ? void 0 : _b.some(
3450
3543
  (toolCall) => toolCall.toolName === toolName
3451
3544
  )) != null ? _c : false;
3452
3545
  };
@@ -3700,7 +3793,7 @@ async function generateText({
3700
3793
  }),
3701
3794
  tracer,
3702
3795
  fn: async (span) => {
3703
- var _a17, _b, _c, _d, _e, _f, _g, _h;
3796
+ var _a18, _b, _c, _d, _e, _f, _g, _h;
3704
3797
  const initialMessages = initialPrompt.messages;
3705
3798
  const responseMessages = [];
3706
3799
  const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -3795,7 +3888,7 @@ async function generateText({
3795
3888
  experimental_context
3796
3889
  }));
3797
3890
  const stepModel = resolveLanguageModel(
3798
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
3891
+ (_a18 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a18 : model
3799
3892
  );
3800
3893
  const promptMessages = await convertToLanguageModelPrompt({
3801
3894
  prompt: {
@@ -3813,7 +3906,7 @@ async function generateText({
3813
3906
  });
3814
3907
  currentModelResponse = await retry(
3815
3908
  () => {
3816
- var _a18;
3909
+ var _a19;
3817
3910
  return recordSpan({
3818
3911
  name: "ai.generateText.doGenerate",
3819
3912
  attributes: selectTelemetryAttributes({
@@ -3845,14 +3938,14 @@ async function generateText({
3845
3938
  "gen_ai.request.max_tokens": settings.maxOutputTokens,
3846
3939
  "gen_ai.request.presence_penalty": settings.presencePenalty,
3847
3940
  "gen_ai.request.stop_sequences": settings.stopSequences,
3848
- "gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
3941
+ "gen_ai.request.temperature": (_a19 = settings.temperature) != null ? _a19 : void 0,
3849
3942
  "gen_ai.request.top_k": settings.topK,
3850
3943
  "gen_ai.request.top_p": settings.topP
3851
3944
  }
3852
3945
  }),
3853
3946
  tracer,
3854
3947
  fn: async (span2) => {
3855
- var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
3948
+ var _a20, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
3856
3949
  const stepProviderOptions = mergeObjects(
3857
3950
  providerOptions,
3858
3951
  prepareStepResult == null ? void 0 : prepareStepResult.providerOptions
@@ -3868,7 +3961,7 @@ async function generateText({
3868
3961
  headers: headersWithUserAgent
3869
3962
  });
3870
3963
  const responseData = {
3871
- id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId2(),
3964
+ id: (_b2 = (_a20 = result.response) == null ? void 0 : _a20.id) != null ? _b2 : generateId2(),
3872
3965
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : /* @__PURE__ */ new Date(),
3873
3966
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
3874
3967
  headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
@@ -4748,7 +4841,7 @@ function processUIMessageStream({
4748
4841
  new TransformStream({
4749
4842
  async transform(chunk, controller) {
4750
4843
  await runUpdateMessageJob(async ({ state, write }) => {
4751
- var _a17, _b, _c, _d;
4844
+ var _a18, _b, _c, _d;
4752
4845
  function getToolInvocation(toolCallId) {
4753
4846
  const toolInvocations = state.message.parts.filter(isToolUIPart);
4754
4847
  const toolInvocation = toolInvocations.find(
@@ -4764,7 +4857,7 @@ function processUIMessageStream({
4764
4857
  return toolInvocation;
4765
4858
  }
4766
4859
  function updateToolPart(options) {
4767
- var _a18;
4860
+ var _a19;
4768
4861
  const part = state.message.parts.find(
4769
4862
  (part2) => isStaticToolUIPart(part2) && part2.toolCallId === options.toolCallId
4770
4863
  );
@@ -4780,7 +4873,7 @@ function processUIMessageStream({
4780
4873
  if (options.title !== void 0) {
4781
4874
  anyPart.title = options.title;
4782
4875
  }
4783
- anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
4876
+ anyPart.providerExecuted = (_a19 = anyOptions.providerExecuted) != null ? _a19 : part.providerExecuted;
4784
4877
  if (anyOptions.providerMetadata != null) {
4785
4878
  part.callProviderMetadata = anyOptions.providerMetadata;
4786
4879
  }
@@ -4801,7 +4894,7 @@ function processUIMessageStream({
4801
4894
  }
4802
4895
  }
4803
4896
  function updateDynamicToolPart(options) {
4804
- var _a18, _b2;
4897
+ var _a19, _b2;
4805
4898
  const part = state.message.parts.find(
4806
4899
  (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
4807
4900
  );
@@ -4813,7 +4906,7 @@ function processUIMessageStream({
4813
4906
  anyPart.input = anyOptions.input;
4814
4907
  anyPart.output = anyOptions.output;
4815
4908
  anyPart.errorText = anyOptions.errorText;
4816
- anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
4909
+ anyPart.rawInput = (_a19 = anyOptions.rawInput) != null ? _a19 : anyPart.rawInput;
4817
4910
  anyPart.preliminary = anyOptions.preliminary;
4818
4911
  if (options.title !== void 0) {
4819
4912
  anyPart.title = options.title;
@@ -4873,7 +4966,7 @@ function processUIMessageStream({
4873
4966
  });
4874
4967
  }
4875
4968
  textPart.text += chunk.delta;
4876
- textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
4969
+ textPart.providerMetadata = (_a18 = chunk.providerMetadata) != null ? _a18 : textPart.providerMetadata;
4877
4970
  write();
4878
4971
  break;
4879
4972
  }
@@ -5342,13 +5435,13 @@ function createAsyncIterableStream(source) {
5342
5435
  const reader = this.getReader();
5343
5436
  let finished = false;
5344
5437
  async function cleanup(cancelStream) {
5345
- var _a17;
5438
+ var _a18;
5346
5439
  if (finished)
5347
5440
  return;
5348
5441
  finished = true;
5349
5442
  try {
5350
5443
  if (cancelStream) {
5351
- await ((_a17 = reader.cancel) == null ? void 0 : _a17.call(reader));
5444
+ await ((_a18 = reader.cancel) == null ? void 0 : _a18.call(reader));
5352
5445
  }
5353
5446
  } finally {
5354
5447
  try {
@@ -5514,8 +5607,8 @@ function createStitchableStream() {
5514
5607
 
5515
5608
  // src/util/now.ts
5516
5609
  function now() {
5517
- var _a17, _b;
5518
- return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
5610
+ var _a18, _b;
5611
+ return (_b = (_a18 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a18.now()) != null ? _b : Date.now();
5519
5612
  }
5520
5613
 
5521
5614
  // src/generate-text/run-tools-transformation.ts
@@ -5864,7 +5957,7 @@ function createOutputTransformStream(output) {
5864
5957
  }
5865
5958
  return new TransformStream({
5866
5959
  async transform(chunk, controller) {
5867
- var _a17;
5960
+ var _a18;
5868
5961
  if (chunk.type === "finish-step" && textChunk.length > 0) {
5869
5962
  publishTextChunk({ controller });
5870
5963
  }
@@ -5891,7 +5984,7 @@ function createOutputTransformStream(output) {
5891
5984
  }
5892
5985
  text2 += chunk.text;
5893
5986
  textChunk += chunk.text;
5894
- textProviderMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textProviderMetadata;
5987
+ textProviderMetadata = (_a18 = chunk.providerMetadata) != null ? _a18 : textProviderMetadata;
5895
5988
  const result = await output.parsePartialOutput({ text: text2 });
5896
5989
  if (result !== void 0) {
5897
5990
  const currentJson = JSON.stringify(result.partial);
@@ -5960,7 +6053,7 @@ var DefaultStreamTextResult = class {
5960
6053
  let activeReasoningContent = {};
5961
6054
  const eventProcessor = new TransformStream({
5962
6055
  async transform(chunk, controller) {
5963
- var _a17, _b, _c, _d;
6056
+ var _a18, _b, _c, _d;
5964
6057
  controller.enqueue(chunk);
5965
6058
  const { part } = chunk;
5966
6059
  if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -5990,7 +6083,7 @@ var DefaultStreamTextResult = class {
5990
6083
  return;
5991
6084
  }
5992
6085
  activeText.text += part.text;
5993
- activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
6086
+ activeText.providerMetadata = (_a18 = part.providerMetadata) != null ? _a18 : activeText.providerMetadata;
5994
6087
  }
5995
6088
  if (part.type === "text-end") {
5996
6089
  const activeText = activeTextContent[part.id];
@@ -6155,8 +6248,8 @@ var DefaultStreamTextResult = class {
6155
6248
  "ai.response.text": { output: () => finalStep.text },
6156
6249
  "ai.response.toolCalls": {
6157
6250
  output: () => {
6158
- var _a17;
6159
- return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
6251
+ var _a18;
6252
+ return ((_a18 = finalStep.toolCalls) == null ? void 0 : _a18.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
6160
6253
  }
6161
6254
  },
6162
6255
  "ai.response.providerMetadata": JSON.stringify(
@@ -6378,7 +6471,7 @@ var DefaultStreamTextResult = class {
6378
6471
  responseMessages,
6379
6472
  usage
6380
6473
  }) {
6381
- var _a17, _b, _c, _d, _e, _f;
6474
+ var _a18, _b, _c, _d, _e, _f;
6382
6475
  const includeRawChunks2 = self.includeRawChunks;
6383
6476
  const stepTimeoutId = stepTimeoutMs != null ? setTimeout(() => stepAbortController.abort(), stepTimeoutMs) : void 0;
6384
6477
  let chunkTimeoutId = void 0;
@@ -6414,7 +6507,7 @@ var DefaultStreamTextResult = class {
6414
6507
  experimental_context
6415
6508
  }));
6416
6509
  const stepModel = resolveLanguageModel(
6417
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
6510
+ (_a18 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a18 : model
6418
6511
  );
6419
6512
  const promptMessages = await convertToLanguageModelPrompt({
6420
6513
  prompt: {
@@ -6527,7 +6620,7 @@ var DefaultStreamTextResult = class {
6527
6620
  streamWithToolResults.pipeThrough(
6528
6621
  new TransformStream({
6529
6622
  async transform(chunk, controller) {
6530
- var _a18, _b2, _c2, _d2, _e2;
6623
+ var _a19, _b2, _c2, _d2, _e2;
6531
6624
  resetChunkTimeout();
6532
6625
  if (chunk.type === "stream-start") {
6533
6626
  warnings = chunk.warnings;
@@ -6601,7 +6694,7 @@ var DefaultStreamTextResult = class {
6601
6694
  }
6602
6695
  case "response-metadata": {
6603
6696
  stepResponse = {
6604
- id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
6697
+ id: (_a19 = chunk.id) != null ? _a19 : stepResponse.id,
6605
6698
  timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
6606
6699
  modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
6607
6700
  };
@@ -6933,14 +7026,14 @@ var DefaultStreamTextResult = class {
6933
7026
  );
6934
7027
  }
6935
7028
  async consumeStream(options) {
6936
- var _a17;
7029
+ var _a18;
6937
7030
  try {
6938
7031
  await consumeStream({
6939
7032
  stream: this.fullStream,
6940
7033
  onError: options == null ? void 0 : options.onError
6941
7034
  });
6942
7035
  } catch (error) {
6943
- (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
7036
+ (_a18 = options == null ? void 0 : options.onError) == null ? void 0 : _a18.call(options, error);
6944
7037
  }
6945
7038
  }
6946
7039
  get experimental_partialOutputStream() {
@@ -6960,8 +7053,8 @@ var DefaultStreamTextResult = class {
6960
7053
  );
6961
7054
  }
6962
7055
  get elementStream() {
6963
- var _a17, _b, _c;
6964
- const transform = (_a17 = this.outputSpecification) == null ? void 0 : _a17.createElementStreamTransform();
7056
+ var _a18, _b, _c;
7057
+ const transform = (_a18 = this.outputSpecification) == null ? void 0 : _a18.createElementStreamTransform();
6965
7058
  if (transform == null) {
6966
7059
  throw new UnsupportedFunctionalityError2({
6967
7060
  functionality: `element streams in ${(_c = (_b = this.outputSpecification) == null ? void 0 : _b.name) != null ? _c : "text"} mode`
@@ -6971,8 +7064,8 @@ var DefaultStreamTextResult = class {
6971
7064
  }
6972
7065
  get output() {
6973
7066
  return this.finalStep.then((step) => {
6974
- var _a17;
6975
- const output = (_a17 = this.outputSpecification) != null ? _a17 : text();
7067
+ var _a18;
7068
+ const output = (_a18 = this.outputSpecification) != null ? _a18 : text();
6976
7069
  return output.parseCompleteOutput(
6977
7070
  { text: step.text },
6978
7071
  {
@@ -6999,8 +7092,8 @@ var DefaultStreamTextResult = class {
6999
7092
  responseMessageId: generateMessageId
7000
7093
  }) : void 0;
7001
7094
  const isDynamic = (part) => {
7002
- var _a17;
7003
- const tool2 = (_a17 = this.tools) == null ? void 0 : _a17[part.toolName];
7095
+ var _a18;
7096
+ const tool2 = (_a18 = this.tools) == null ? void 0 : _a18[part.toolName];
7004
7097
  if (tool2 == null) {
7005
7098
  return part.dynamic;
7006
7099
  }
@@ -7339,10 +7432,10 @@ var ToolLoopAgent = class {
7339
7432
  return this.settings.tools;
7340
7433
  }
7341
7434
  async prepareCall(options) {
7342
- var _a17, _b, _c, _d;
7435
+ var _a18, _b, _c, _d;
7343
7436
  const baseCallArgs = {
7344
7437
  ...this.settings,
7345
- stopWhen: (_a17 = this.settings.stopWhen) != null ? _a17 : stepCountIs(20),
7438
+ stopWhen: (_a18 = this.settings.stopWhen) != null ? _a18 : stepCountIs(20),
7346
7439
  ...options
7347
7440
  };
7348
7441
  const preparedCallArgs = (_d = await ((_c = (_b = this.settings).prepareCall) == null ? void 0 : _c.call(_b, baseCallArgs))) != null ? _d : baseCallArgs;
@@ -7481,7 +7574,7 @@ function readUIMessageStream({
7481
7574
  onError,
7482
7575
  terminateOnError = false
7483
7576
  }) {
7484
- var _a17;
7577
+ var _a18;
7485
7578
  let controller;
7486
7579
  let hasErrored = false;
7487
7580
  const outputStream = new ReadableStream({
@@ -7490,7 +7583,7 @@ function readUIMessageStream({
7490
7583
  }
7491
7584
  });
7492
7585
  const state = createStreamingUIMessageState({
7493
- messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
7586
+ messageId: (_a18 = message == null ? void 0 : message.id) != null ? _a18 : "",
7494
7587
  lastMessage: message
7495
7588
  });
7496
7589
  const handleError = (error) => {
@@ -7559,7 +7652,7 @@ async function convertToModelMessages(messages, options) {
7559
7652
  modelMessages.push({
7560
7653
  role: "user",
7561
7654
  content: message.parts.map((part) => {
7562
- var _a17;
7655
+ var _a18;
7563
7656
  if (isTextUIPart(part)) {
7564
7657
  return {
7565
7658
  type: "text",
@@ -7577,7 +7670,7 @@ async function convertToModelMessages(messages, options) {
7577
7670
  };
7578
7671
  }
7579
7672
  if (isDataUIPart(part)) {
7580
- return (_a17 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a17.call(
7673
+ return (_a18 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a18.call(
7581
7674
  options,
7582
7675
  part
7583
7676
  );
@@ -7590,7 +7683,7 @@ async function convertToModelMessages(messages, options) {
7590
7683
  if (message.parts != null) {
7591
7684
  let block = [];
7592
7685
  async function processBlock() {
7593
- var _a17, _b, _c, _d, _e, _f;
7686
+ var _a18, _b, _c, _d, _e, _f;
7594
7687
  if (block.length === 0) {
7595
7688
  return;
7596
7689
  }
@@ -7622,7 +7715,7 @@ async function convertToModelMessages(messages, options) {
7622
7715
  type: "tool-call",
7623
7716
  toolCallId: part.toolCallId,
7624
7717
  toolName,
7625
- input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : "rawInput" in part ? part.rawInput : void 0 : part.input,
7718
+ input: part.state === "output-error" ? (_a18 = part.input) != null ? _a18 : "rawInput" in part ? part.rawInput : void 0 : part.input,
7626
7719
  providerExecuted: part.providerExecuted,
7627
7720
  ...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
7628
7721
  });
@@ -7668,8 +7761,8 @@ async function convertToModelMessages(messages, options) {
7668
7761
  });
7669
7762
  const toolParts = block.filter(
7670
7763
  (part) => {
7671
- var _a18;
7672
- return isToolUIPart(part) && (part.providerExecuted !== true || ((_a18 = part.approval) == null ? void 0 : _a18.approved) != null);
7764
+ var _a19;
7765
+ return isToolUIPart(part) && (part.providerExecuted !== true || ((_a19 = part.approval) == null ? void 0 : _a19.approved) != null);
7673
7766
  }
7674
7767
  );
7675
7768
  if (toolParts.length > 0) {
@@ -8261,7 +8354,7 @@ async function embed({
8261
8354
  }),
8262
8355
  tracer,
8263
8356
  fn: async (doEmbedSpan) => {
8264
- var _a17;
8357
+ var _a18;
8265
8358
  const modelResponse = await model.doEmbed({
8266
8359
  values: [value],
8267
8360
  abortSignal,
@@ -8269,7 +8362,7 @@ async function embed({
8269
8362
  providerOptions
8270
8363
  });
8271
8364
  const embedding2 = modelResponse.embeddings[0];
8272
- const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
8365
+ const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
8273
8366
  doEmbedSpan.setAttributes(
8274
8367
  await selectTelemetryAttributes({
8275
8368
  telemetry,
@@ -8383,7 +8476,7 @@ async function embedMany({
8383
8476
  }),
8384
8477
  tracer,
8385
8478
  fn: async (span) => {
8386
- var _a17;
8479
+ var _a18;
8387
8480
  const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
8388
8481
  model.maxEmbeddingsPerCall,
8389
8482
  model.supportsParallelCalls
@@ -8408,7 +8501,7 @@ async function embedMany({
8408
8501
  }),
8409
8502
  tracer,
8410
8503
  fn: async (doEmbedSpan) => {
8411
- var _a18;
8504
+ var _a19;
8412
8505
  const modelResponse = await model.doEmbed({
8413
8506
  values,
8414
8507
  abortSignal,
@@ -8416,7 +8509,7 @@ async function embedMany({
8416
8509
  providerOptions
8417
8510
  });
8418
8511
  const embeddings3 = modelResponse.embeddings;
8419
- const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
8512
+ const usage2 = (_a19 = modelResponse.usage) != null ? _a19 : { tokens: NaN };
8420
8513
  doEmbedSpan.setAttributes(
8421
8514
  await selectTelemetryAttributes({
8422
8515
  telemetry,
@@ -8497,7 +8590,7 @@ async function embedMany({
8497
8590
  }),
8498
8591
  tracer,
8499
8592
  fn: async (doEmbedSpan) => {
8500
- var _a18;
8593
+ var _a19;
8501
8594
  const modelResponse = await model.doEmbed({
8502
8595
  values: chunk,
8503
8596
  abortSignal,
@@ -8505,7 +8598,7 @@ async function embedMany({
8505
8598
  providerOptions
8506
8599
  });
8507
8600
  const embeddings2 = modelResponse.embeddings;
8508
- const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
8601
+ const usage = (_a19 = modelResponse.usage) != null ? _a19 : { tokens: NaN };
8509
8602
  doEmbedSpan.setAttributes(
8510
8603
  await selectTelemetryAttributes({
8511
8604
  telemetry,
@@ -8544,7 +8637,7 @@ async function embedMany({
8544
8637
  result.providerMetadata
8545
8638
  )) {
8546
8639
  providerMetadata[providerName] = {
8547
- ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
8640
+ ...(_a18 = providerMetadata[providerName]) != null ? _a18 : {},
8548
8641
  ...metadata
8549
8642
  };
8550
8643
  }
@@ -8608,7 +8701,7 @@ async function generateImage({
8608
8701
  abortSignal,
8609
8702
  headers
8610
8703
  }) {
8611
- var _a17, _b;
8704
+ var _a18, _b;
8612
8705
  const model = resolveImageModel(modelArg);
8613
8706
  const headersWithUserAgent = withUserAgentSuffix5(
8614
8707
  headers != null ? headers : {},
@@ -8618,7 +8711,7 @@ async function generateImage({
8618
8711
  maxRetries: maxRetriesArg,
8619
8712
  abortSignal
8620
8713
  });
8621
- const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
8714
+ const maxImagesPerCallWithDefault = (_a18 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a18 : 1;
8622
8715
  const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
8623
8716
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
8624
8717
  if (i < callCount - 1) {
@@ -8659,13 +8752,13 @@ async function generateImage({
8659
8752
  images.push(
8660
8753
  ...result.images.map(
8661
8754
  (image) => {
8662
- var _a18;
8755
+ var _a19;
8663
8756
  return new DefaultGeneratedFile({
8664
8757
  data: image,
8665
- mediaType: (_a18 = detectMediaType({
8758
+ mediaType: (_a19 = detectMediaType({
8666
8759
  data: image,
8667
8760
  signatures: imageMediaTypeSignatures
8668
- })) != null ? _a18 : "image/png"
8761
+ })) != null ? _a19 : "image/png"
8669
8762
  });
8670
8763
  }
8671
8764
  )
@@ -8873,7 +8966,7 @@ var arrayOutputStrategy = (schema) => {
8873
8966
  isFirstDelta,
8874
8967
  isFinalDelta
8875
8968
  }) {
8876
- var _a17;
8969
+ var _a18;
8877
8970
  if (!isJSONObject(value) || !isJSONArray(value.elements)) {
8878
8971
  return {
8879
8972
  success: false,
@@ -8896,7 +8989,7 @@ var arrayOutputStrategy = (schema) => {
8896
8989
  }
8897
8990
  resultArray.push(result.value);
8898
8991
  }
8899
- const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
8992
+ const publishedElementCount = (_a18 = latestObject == null ? void 0 : latestObject.length) != null ? _a18 : 0;
8900
8993
  let textDelta = "";
8901
8994
  if (isFirstDelta) {
8902
8995
  textDelta += "[";
@@ -9316,7 +9409,7 @@ async function generateObject(options) {
9316
9409
  }),
9317
9410
  tracer,
9318
9411
  fn: async (span) => {
9319
- var _a17;
9412
+ var _a18;
9320
9413
  let result;
9321
9414
  let finishReason;
9322
9415
  let usage;
@@ -9362,7 +9455,7 @@ async function generateObject(options) {
9362
9455
  }),
9363
9456
  tracer,
9364
9457
  fn: async (span2) => {
9365
- var _a18, _b, _c, _d, _e, _f, _g, _h;
9458
+ var _a19, _b, _c, _d, _e, _f, _g, _h;
9366
9459
  const result2 = await model.doGenerate({
9367
9460
  responseFormat: {
9368
9461
  type: "json",
@@ -9377,7 +9470,7 @@ async function generateObject(options) {
9377
9470
  headers: headersWithUserAgent
9378
9471
  });
9379
9472
  const responseData = {
9380
- id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId2(),
9473
+ id: (_b = (_a19 = result2.response) == null ? void 0 : _a19.id) != null ? _b : generateId2(),
9381
9474
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
9382
9475
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
9383
9476
  headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -9433,7 +9526,7 @@ async function generateObject(options) {
9433
9526
  usage = asLanguageModelUsage(generateResult.usage);
9434
9527
  warnings = generateResult.warnings;
9435
9528
  resultProviderMetadata = generateResult.providerMetadata;
9436
- request = (_a17 = generateResult.request) != null ? _a17 : {};
9529
+ request = (_a18 = generateResult.request) != null ? _a18 : {};
9437
9530
  response = generateResult.responseData;
9438
9531
  reasoning = generateResult.reasoning;
9439
9532
  logWarnings({
@@ -9496,9 +9589,9 @@ var DefaultGenerateObjectResult = class {
9496
9589
  this.reasoning = options.reasoning;
9497
9590
  }
9498
9591
  toJsonResponse(init) {
9499
- var _a17;
9592
+ var _a18;
9500
9593
  return new Response(JSON.stringify(this.object), {
9501
- status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
9594
+ status: (_a18 = init == null ? void 0 : init.status) != null ? _a18 : 200,
9502
9595
  headers: prepareHeaders(init == null ? void 0 : init.headers, {
9503
9596
  "content-type": "application/json; charset=utf-8"
9504
9597
  })
@@ -9627,8 +9720,8 @@ function simulateReadableStream({
9627
9720
  chunkDelayInMs = 0,
9628
9721
  _internal
9629
9722
  }) {
9630
- var _a17;
9631
- const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : delayFunction;
9723
+ var _a18;
9724
+ const delay2 = (_a18 = _internal == null ? void 0 : _internal.delay) != null ? _a18 : delayFunction;
9632
9725
  let index = 0;
9633
9726
  return new ReadableStream({
9634
9727
  async pull(controller) {
@@ -9886,7 +9979,7 @@ var DefaultStreamObjectResult = class {
9886
9979
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
9887
9980
  new TransformStream({
9888
9981
  async transform(chunk, controller) {
9889
- var _a17, _b, _c;
9982
+ var _a18, _b, _c;
9890
9983
  if (typeof chunk === "object" && chunk.type === "stream-start") {
9891
9984
  warnings = chunk.warnings;
9892
9985
  return;
@@ -9936,7 +10029,7 @@ var DefaultStreamObjectResult = class {
9936
10029
  switch (chunk.type) {
9937
10030
  case "response-metadata": {
9938
10031
  fullResponse = {
9939
- id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
10032
+ id: (_a18 = chunk.id) != null ? _a18 : fullResponse.id,
9940
10033
  timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
9941
10034
  modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
9942
10035
  };
@@ -10208,7 +10301,7 @@ async function generateSpeech({
10208
10301
  abortSignal,
10209
10302
  headers
10210
10303
  }) {
10211
- var _a17;
10304
+ var _a18;
10212
10305
  const resolvedModel = resolveSpeechModel(model);
10213
10306
  if (!resolvedModel) {
10214
10307
  throw new Error("Model could not be resolved");
@@ -10245,10 +10338,10 @@ async function generateSpeech({
10245
10338
  return new DefaultSpeechResult({
10246
10339
  audio: new DefaultGeneratedAudioFile({
10247
10340
  data: result.audio,
10248
- mediaType: (_a17 = detectMediaType({
10341
+ mediaType: (_a18 = detectMediaType({
10249
10342
  data: result.audio,
10250
10343
  signatures: audioMediaTypeSignatures
10251
- })) != null ? _a17 : "audio/mp3"
10344
+ })) != null ? _a18 : "audio/mp3"
10252
10345
  }),
10253
10346
  warnings: result.warnings,
10254
10347
  responses: [result.response],
@@ -10257,11 +10350,11 @@ async function generateSpeech({
10257
10350
  }
10258
10351
  var DefaultSpeechResult = class {
10259
10352
  constructor(options) {
10260
- var _a17;
10353
+ var _a18;
10261
10354
  this.audio = options.audio;
10262
10355
  this.warnings = options.warnings;
10263
10356
  this.responses = options.responses;
10264
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
10357
+ this.providerMetadata = (_a18 = options.providerMetadata) != null ? _a18 : {};
10265
10358
  }
10266
10359
  };
10267
10360
 
@@ -10471,8 +10564,8 @@ function defaultTransform(text2) {
10471
10564
  return text2.replace(/^```(?:json)?\s*\n?/, "").replace(/\n?```\s*$/, "").trim();
10472
10565
  }
10473
10566
  function extractJsonMiddleware(options) {
10474
- var _a17;
10475
- const transform = (_a17 = options == null ? void 0 : options.transform) != null ? _a17 : defaultTransform;
10567
+ var _a18;
10568
+ const transform = (_a18 = options == null ? void 0 : options.transform) != null ? _a18 : defaultTransform;
10476
10569
  const hasCustomTransform = (options == null ? void 0 : options.transform) !== void 0;
10477
10570
  return {
10478
10571
  specificationVersion: "v3",
@@ -10840,13 +10933,13 @@ function addToolInputExamplesMiddleware({
10840
10933
  return {
10841
10934
  specificationVersion: "v3",
10842
10935
  transformParams: async ({ params }) => {
10843
- var _a17;
10844
- if (!((_a17 = params.tools) == null ? void 0 : _a17.length)) {
10936
+ var _a18;
10937
+ if (!((_a18 = params.tools) == null ? void 0 : _a18.length)) {
10845
10938
  return params;
10846
10939
  }
10847
10940
  const transformedTools = params.tools.map((tool2) => {
10848
- var _a18;
10849
- if (tool2.type !== "function" || !((_a18 = tool2.inputExamples) == null ? void 0 : _a18.length)) {
10941
+ var _a19;
10942
+ if (tool2.type !== "function" || !((_a19 = tool2.inputExamples) == null ? void 0 : _a19.length)) {
10850
10943
  return tool2;
10851
10944
  }
10852
10945
  const formattedExamples = tool2.inputExamples.map((example, index) => format(example, index)).join("\n");
@@ -10893,7 +10986,7 @@ var doWrap = ({
10893
10986
  modelId,
10894
10987
  providerId
10895
10988
  }) => {
10896
- var _a17, _b, _c;
10989
+ var _a18, _b, _c;
10897
10990
  async function doTransform({
10898
10991
  params,
10899
10992
  type
@@ -10902,7 +10995,7 @@ var doWrap = ({
10902
10995
  }
10903
10996
  return {
10904
10997
  specificationVersion: "v3",
10905
- provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
10998
+ provider: (_a18 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a18 : model.provider,
10906
10999
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
10907
11000
  supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
10908
11001
  async doGenerate(params) {
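`doWrap` backs `wrapLanguageModel`, and the renamed locals feed the provider/modelId overrides resolved above. A hedged sketch of the wrapper API; the middleware and override values are placeholders:

```ts
import { wrapLanguageModel, defaultSettingsMiddleware } from "ai";
import { openai } from "@ai-sdk/openai";

// providerId / modelId override the fields resolved by doWrap above.
const wrapped = wrapLanguageModel({
  model: openai("gpt-4o"), // illustrative base model
  middleware: defaultSettingsMiddleware({ settings: { temperature: 0 } }),
  modelId: "gpt-4o-deterministic", // illustrative override
});
```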
@@ -10949,7 +11042,7 @@ var doWrap2 = ({
10949
11042
  modelId,
10950
11043
  providerId
10951
11044
  }) => {
10952
- var _a17, _b, _c, _d;
11045
+ var _a18, _b, _c, _d;
10953
11046
  async function doTransform({
10954
11047
  params
10955
11048
  }) {
@@ -10957,7 +11050,7 @@ var doWrap2 = ({
10957
11050
  }
10958
11051
  return {
10959
11052
  specificationVersion: "v3",
10960
- provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
11053
+ provider: (_a18 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a18 : model.provider,
10961
11054
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
10962
11055
  maxEmbeddingsPerCall: (_c = overrideMaxEmbeddingsPerCall == null ? void 0 : overrideMaxEmbeddingsPerCall({ model })) != null ? _c : model.maxEmbeddingsPerCall,
10963
11056
  supportsParallelCalls: (_d = overrideSupportsParallelCalls == null ? void 0 : overrideSupportsParallelCalls({ model })) != null ? _d : model.supportsParallelCalls,
@@ -10996,11 +11089,11 @@ var doWrap3 = ({
10996
11089
  modelId,
10997
11090
  providerId
10998
11091
  }) => {
10999
- var _a17, _b, _c;
11092
+ var _a18, _b, _c;
11000
11093
  async function doTransform({ params }) {
11001
11094
  return transformParams ? await transformParams({ params, model }) : params;
11002
11095
  }
11003
- const maxImagesPerCallRaw = (_a17 = overrideMaxImagesPerCall == null ? void 0 : overrideMaxImagesPerCall({ model })) != null ? _a17 : model.maxImagesPerCall;
11096
+ const maxImagesPerCallRaw = (_a18 = overrideMaxImagesPerCall == null ? void 0 : overrideMaxImagesPerCall({ model })) != null ? _a18 : model.maxImagesPerCall;
11004
11097
  const maxImagesPerCall = maxImagesPerCallRaw instanceof Function ? maxImagesPerCallRaw.bind(model) : maxImagesPerCallRaw;
11005
11098
  return {
11006
11099
  specificationVersion: "v3",
@@ -11139,11 +11232,11 @@ function customProvider({
11139
11232
  var experimental_customProvider = customProvider;
11140
11233
 
11141
11234
  // src/registry/no-such-provider-error.ts
11142
- import { AISDKError as AISDKError21, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
11143
- var name16 = "AI_NoSuchProviderError";
11144
- var marker16 = `vercel.ai.error.${name16}`;
11145
- var symbol16 = Symbol.for(marker16);
11146
- var _a16;
11235
+ import { AISDKError as AISDKError22, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
11236
+ var name17 = "AI_NoSuchProviderError";
11237
+ var marker17 = `vercel.ai.error.${name17}`;
11238
+ var symbol17 = Symbol.for(marker17);
11239
+ var _a17;
11147
11240
  var NoSuchProviderError = class extends NoSuchModelError3 {
11148
11241
  constructor({
11149
11242
  modelId,
@@ -11152,16 +11245,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
11152
11245
  availableProviders,
11153
11246
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
11154
11247
  }) {
11155
- super({ errorName: name16, modelId, modelType, message });
11156
- this[_a16] = true;
11248
+ super({ errorName: name17, modelId, modelType, message });
11249
+ this[_a17] = true;
11157
11250
  this.providerId = providerId;
11158
11251
  this.availableProviders = availableProviders;
11159
11252
  }
11160
11253
  static isInstance(error) {
11161
- return AISDKError21.hasMarker(error, marker16);
11254
+ return AISDKError22.hasMarker(error, marker17);
11162
11255
  }
11163
11256
  };
11164
- _a16 = symbol16;
11257
+ _a17 = symbol17;
11165
11258
 
11166
11259
  // src/registry/provider-registry.ts
11167
11260
  import {
@@ -11224,10 +11317,10 @@ var DefaultProviderRegistry = class {
11224
11317
  return [id.slice(0, index), id.slice(index + this.separator.length)];
11225
11318
  }
11226
11319
  languageModel(id) {
11227
- var _a17, _b;
11320
+ var _a18, _b;
11228
11321
  const [providerId, modelId] = this.splitId(id, "languageModel");
11229
- let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
11230
- _a17,
11322
+ let model = (_b = (_a18 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
11323
+ _a18,
11231
11324
  modelId
11232
11325
  );
11233
11326
  if (model == null) {
@@ -11242,10 +11335,10 @@ var DefaultProviderRegistry = class {
11242
11335
  return model;
11243
11336
  }
11244
11337
  embeddingModel(id) {
11245
- var _a17;
11338
+ var _a18;
11246
11339
  const [providerId, modelId] = this.splitId(id, "embeddingModel");
11247
11340
  const provider = this.getProvider(providerId, "embeddingModel");
11248
- const model = (_a17 = provider.embeddingModel) == null ? void 0 : _a17.call(provider, modelId);
11341
+ const model = (_a18 = provider.embeddingModel) == null ? void 0 : _a18.call(provider, modelId);
11249
11342
  if (model == null) {
11250
11343
  throw new NoSuchModelError4({
11251
11344
  modelId: id,
@@ -11255,10 +11348,10 @@ var DefaultProviderRegistry = class {
11255
11348
  return model;
11256
11349
  }
11257
11350
  imageModel(id) {
11258
- var _a17;
11351
+ var _a18;
11259
11352
  const [providerId, modelId] = this.splitId(id, "imageModel");
11260
11353
  const provider = this.getProvider(providerId, "imageModel");
11261
- let model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
11354
+ let model = (_a18 = provider.imageModel) == null ? void 0 : _a18.call(provider, modelId);
11262
11355
  if (model == null) {
11263
11356
  throw new NoSuchModelError4({ modelId: id, modelType: "imageModel" });
11264
11357
  }
@@ -11271,10 +11364,10 @@ var DefaultProviderRegistry = class {
11271
11364
  return model;
11272
11365
  }
11273
11366
  transcriptionModel(id) {
11274
- var _a17;
11367
+ var _a18;
11275
11368
  const [providerId, modelId] = this.splitId(id, "transcriptionModel");
11276
11369
  const provider = this.getProvider(providerId, "transcriptionModel");
11277
- const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
11370
+ const model = (_a18 = provider.transcriptionModel) == null ? void 0 : _a18.call(provider, modelId);
11278
11371
  if (model == null) {
11279
11372
  throw new NoSuchModelError4({
11280
11373
  modelId: id,
@@ -11284,20 +11377,20 @@ var DefaultProviderRegistry = class {
11284
11377
  return model;
11285
11378
  }
11286
11379
  speechModel(id) {
11287
- var _a17;
11380
+ var _a18;
11288
11381
  const [providerId, modelId] = this.splitId(id, "speechModel");
11289
11382
  const provider = this.getProvider(providerId, "speechModel");
11290
- const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
11383
+ const model = (_a18 = provider.speechModel) == null ? void 0 : _a18.call(provider, modelId);
11291
11384
  if (model == null) {
11292
11385
  throw new NoSuchModelError4({ modelId: id, modelType: "speechModel" });
11293
11386
  }
11294
11387
  return model;
11295
11388
  }
11296
11389
  rerankingModel(id) {
11297
- var _a17;
11390
+ var _a18;
11298
11391
  const [providerId, modelId] = this.splitId(id, "rerankingModel");
11299
11392
  const provider = this.getProvider(providerId, "rerankingModel");
11300
- const model = (_a17 = provider.rerankingModel) == null ? void 0 : _a17.call(provider, modelId);
11393
+ const model = (_a18 = provider.rerankingModel) == null ? void 0 : _a18.call(provider, modelId);
11301
11394
  if (model == null) {
11302
11395
  throw new NoSuchModelError4({ modelId: id, modelType: "rerankingModel" });
11303
11396
  }
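The registry methods above (`languageModel`, `embeddingModel`, `imageModel`, `transcriptionModel`, `speechModel`, `rerankingModel`) all resolve ids the same way via `splitId`. A minimal registry sketch; the provider packages and model ids are placeholders:

```ts
import { createProviderRegistry } from "ai";
import { openai } from "@ai-sdk/openai";
import { anthropic } from "@ai-sdk/anthropic";

// Ids use the default ":" separator parsed by splitId above.
const registry = createProviderRegistry({ openai, anthropic });

const chatModel = registry.languageModel("anthropic:claude-sonnet-4-5");
const embedder = registry.embeddingModel("openai:text-embedding-3-small");
```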
@@ -11354,7 +11447,7 @@ async function rerank({
11354
11447
  }),
11355
11448
  tracer,
11356
11449
  fn: async () => {
11357
- var _a17, _b;
11450
+ var _a18, _b;
11358
11451
  const { ranking, response, providerMetadata, warnings } = await retry(
11359
11452
  () => recordSpan({
11360
11453
  name: "ai.rerank.doRerank",
@@ -11418,7 +11511,7 @@ async function rerank({
11418
11511
  providerMetadata,
11419
11512
  response: {
11420
11513
  id: response == null ? void 0 : response.id,
11421
- timestamp: (_a17 = response == null ? void 0 : response.timestamp) != null ? _a17 : /* @__PURE__ */ new Date(),
11514
+ timestamp: (_a18 = response == null ? void 0 : response.timestamp) != null ? _a18 : /* @__PURE__ */ new Date(),
11422
11515
  modelId: (_b = response == null ? void 0 : response.modelId) != null ? _b : model.modelId,
11423
11516
  headers: response == null ? void 0 : response.headers,
11424
11517
  body: response == null ? void 0 : response.body
@@ -11443,8 +11536,8 @@ var DefaultRerankResult = class {
11443
11536
  import { withUserAgentSuffix as withUserAgentSuffix8 } from "@ai-sdk/provider-utils";
11444
11537
 
11445
11538
  // src/error/no-transcript-generated-error.ts
11446
- import { AISDKError as AISDKError22 } from "@ai-sdk/provider";
11447
- var NoTranscriptGeneratedError = class extends AISDKError22 {
11539
+ import { AISDKError as AISDKError23 } from "@ai-sdk/provider";
11540
+ var NoTranscriptGeneratedError = class extends AISDKError23 {
11448
11541
  constructor(options) {
11449
11542
  super({
11450
11543
  name: "AI_NoTranscriptGeneratedError",
@@ -11478,16 +11571,16 @@ async function transcribe({
11478
11571
  const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
11479
11572
  const result = await retry(
11480
11573
  () => {
11481
- var _a17;
11574
+ var _a18;
11482
11575
  return resolvedModel.doGenerate({
11483
11576
  audio: audioData,
11484
11577
  abortSignal,
11485
11578
  headers: headersWithUserAgent,
11486
11579
  providerOptions,
11487
- mediaType: (_a17 = detectMediaType({
11580
+ mediaType: (_a18 = detectMediaType({
11488
11581
  data: audioData,
11489
11582
  signatures: audioMediaTypeSignatures
11490
- })) != null ? _a17 : "audio/wav"
11583
+ })) != null ? _a18 : "audio/wav"
11491
11584
  });
11492
11585
  }
11493
11586
  );
@@ -11511,14 +11604,14 @@ async function transcribe({
11511
11604
  }
11512
11605
  var DefaultTranscriptionResult = class {
11513
11606
  constructor(options) {
11514
- var _a17;
11607
+ var _a18;
11515
11608
  this.text = options.text;
11516
11609
  this.segments = options.segments;
11517
11610
  this.language = options.language;
11518
11611
  this.durationInSeconds = options.durationInSeconds;
11519
11612
  this.warnings = options.warnings;
11520
11613
  this.responses = options.responses;
11521
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
11614
+ this.providerMetadata = (_a18 = options.providerMetadata) != null ? _a18 : {};
11522
11615
  }
11523
11616
  };
11524
11617
 
@@ -11561,7 +11654,7 @@ async function callCompletionApi({
11561
11654
  onError,
11562
11655
  fetch: fetch2 = getOriginalFetch()
11563
11656
  }) {
11564
- var _a17;
11657
+ var _a18;
11565
11658
  try {
11566
11659
  setLoading(true);
11567
11660
  setError(void 0);
@@ -11589,7 +11682,7 @@ async function callCompletionApi({
11589
11682
  });
11590
11683
  if (!response.ok) {
11591
11684
  throw new Error(
11592
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
11685
+ (_a18 = await response.text()) != null ? _a18 : "Failed to fetch the chat response."
11593
11686
  );
11594
11687
  }
11595
11688
  if (!response.body) {
@@ -11675,12 +11768,12 @@ async function convertFileListToFileUIParts(files) {
11675
11768
  }
11676
11769
  return Promise.all(
11677
11770
  Array.from(files).map(async (file) => {
11678
- const { name: name17, type } = file;
11771
+ const { name: name18, type } = file;
11679
11772
  const dataUrl = await new Promise((resolve3, reject) => {
11680
11773
  const reader = new FileReader();
11681
11774
  reader.onload = (readerEvent) => {
11682
- var _a17;
11683
- resolve3((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
11775
+ var _a18;
11776
+ resolve3((_a18 = readerEvent.target) == null ? void 0 : _a18.result);
11684
11777
  };
11685
11778
  reader.onerror = (error) => reject(error);
11686
11779
  reader.readAsDataURL(file);
@@ -11688,7 +11781,7 @@ async function convertFileListToFileUIParts(files) {
11688
11781
  return {
11689
11782
  type: "file",
11690
11783
  mediaType: type,
11691
- filename: name17,
11784
+ filename: name18,
11692
11785
  url: dataUrl
11693
11786
  };
11694
11787
  })
@@ -11727,7 +11820,7 @@ var HttpChatTransport = class {
11727
11820
  abortSignal,
11728
11821
  ...options
11729
11822
  }) {
11730
- var _a17, _b, _c, _d, _e;
11823
+ var _a18, _b, _c, _d, _e;
11731
11824
  const resolvedBody = await resolve2(this.body);
11732
11825
  const resolvedHeaders = await resolve2(this.headers);
11733
11826
  const resolvedCredentials = await resolve2(this.credentials);
@@ -11735,7 +11828,7 @@ var HttpChatTransport = class {
11735
11828
  ...normalizeHeaders(resolvedHeaders),
11736
11829
  ...normalizeHeaders(options.headers)
11737
11830
  };
11738
- const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
11831
+ const preparedRequest = await ((_a18 = this.prepareSendMessagesRequest) == null ? void 0 : _a18.call(this, {
11739
11832
  api: this.api,
11740
11833
  id: options.chatId,
11741
11834
  messages: options.messages,
@@ -11783,7 +11876,7 @@ var HttpChatTransport = class {
11783
11876
  return this.processResponseStream(response.body);
11784
11877
  }
11785
11878
  async reconnectToStream(options) {
11786
- var _a17, _b, _c, _d, _e;
11879
+ var _a18, _b, _c, _d, _e;
11787
11880
  const resolvedBody = await resolve2(this.body);
11788
11881
  const resolvedHeaders = await resolve2(this.headers);
11789
11882
  const resolvedCredentials = await resolve2(this.credentials);
@@ -11791,7 +11884,7 @@ var HttpChatTransport = class {
11791
11884
  ...normalizeHeaders(resolvedHeaders),
11792
11885
  ...normalizeHeaders(options.headers)
11793
11886
  };
11794
- const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
11887
+ const preparedRequest = await ((_a18 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a18.call(this, {
11795
11888
  api: this.api,
11796
11889
  id: options.chatId,
11797
11890
  body: { ...resolvedBody, ...options.body },
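`HttpChatTransport.sendMessages` invokes the optional `prepareSendMessagesRequest` hook seen above before issuing the fetch. A hedged sketch of supplying that hook through `DefaultChatTransport` (the endpoint and body shape are placeholders):

```ts
import { DefaultChatTransport } from "ai";

const transport = new DefaultChatTransport({
  api: "/api/chat", // illustrative endpoint
  // Receives the resolved id, messages, body, headers, etc. (see the call above)
  // and returns the request that is actually sent.
  prepareSendMessagesRequest: ({ id, messages, body }) => ({
    body: { id, message: messages[messages.length - 1], ...body },
  }),
});
```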
@@ -11873,11 +11966,11 @@ var AbstractChat = class {
11873
11966
  * If a messageId is provided, the message will be replaced.
11874
11967
  */
11875
11968
  this.sendMessage = async (message, options) => {
11876
- var _a17, _b, _c, _d;
11969
+ var _a18, _b, _c, _d;
11877
11970
  if (message == null) {
11878
11971
  await this.makeRequest({
11879
11972
  trigger: "submit-message",
11880
- messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
11973
+ messageId: (_a18 = this.lastMessage) == null ? void 0 : _a18.id,
11881
11974
  ...options
11882
11975
  });
11883
11976
  return;
@@ -11970,7 +12063,7 @@ var AbstractChat = class {
11970
12063
  approved,
11971
12064
  reason
11972
12065
  }) => this.jobExecutor.run(async () => {
11973
- var _a17, _b;
12066
+ var _a18, _b;
11974
12067
  const messages = this.state.messages;
11975
12068
  const lastMessage = messages[messages.length - 1];
11976
12069
  const updatePart = (part) => isToolUIPart(part) && part.state === "approval-requested" && part.approval.id === id ? {
@@ -11985,7 +12078,7 @@ var AbstractChat = class {
11985
12078
  if (this.activeResponse) {
11986
12079
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
11987
12080
  }
11988
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
12081
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a18 = this.sendAutomaticallyWhen) == null ? void 0 : _a18.call(this, { messages: this.state.messages }))) {
11989
12082
  this.makeRequest({
11990
12083
  trigger: "submit-message",
11991
12084
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11999,7 +12092,7 @@ var AbstractChat = class {
11999
12092
  output,
12000
12093
  errorText
12001
12094
  }) => this.jobExecutor.run(async () => {
12002
- var _a17, _b;
12095
+ var _a18, _b;
12003
12096
  const messages = this.state.messages;
12004
12097
  const lastMessage = messages[messages.length - 1];
12005
12098
  const updatePart = (part) => isToolUIPart(part) && part.toolCallId === toolCallId ? { ...part, state, output, errorText } : part;
@@ -12010,7 +12103,7 @@ var AbstractChat = class {
12010
12103
  if (this.activeResponse) {
12011
12104
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
12012
12105
  }
12013
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
12106
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a18 = this.sendAutomaticallyWhen) == null ? void 0 : _a18.call(this, { messages: this.state.messages }))) {
12014
12107
  this.makeRequest({
12015
12108
  trigger: "submit-message",
12016
12109
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -12023,10 +12116,10 @@ var AbstractChat = class {
12023
12116
  * Abort the current request immediately, keep the generated tokens if any.
12024
12117
  */
12025
12118
  this.stop = async () => {
12026
- var _a17;
12119
+ var _a18;
12027
12120
  if (this.status !== "streaming" && this.status !== "submitted")
12028
12121
  return;
12029
- if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
12122
+ if ((_a18 = this.activeResponse) == null ? void 0 : _a18.abortController) {
12030
12123
  this.activeResponse.abortController.abort();
12031
12124
  }
12032
12125
  };
@@ -12081,7 +12174,7 @@ var AbstractChat = class {
12081
12174
  body,
12082
12175
  messageId
12083
12176
  }) {
12084
- var _a17, _b, _c, _d;
12177
+ var _a18, _b, _c, _d;
12085
12178
  this.setStatus({ status: "submitted", error: void 0 });
12086
12179
  const lastMessage = this.lastMessage;
12087
12180
  let isAbort = false;
@@ -12130,9 +12223,9 @@ var AbstractChat = class {
12130
12223
  () => job({
12131
12224
  state: activeResponse.state,
12132
12225
  write: () => {
12133
- var _a18;
12226
+ var _a19;
12134
12227
  this.setStatus({ status: "streaming" });
12135
- const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
12228
+ const replaceLastMessage = activeResponse.state.message.id === ((_a19 = this.lastMessage) == null ? void 0 : _a19.id);
12136
12229
  if (replaceLastMessage) {
12137
12230
  this.state.replaceMessage(
12138
12231
  this.state.messages.length - 1,
@@ -12184,7 +12277,7 @@ var AbstractChat = class {
12184
12277
  isAbort,
12185
12278
  isDisconnect,
12186
12279
  isError,
12187
- finishReason: (_a17 = this.activeResponse) == null ? void 0 : _a17.state.finishReason
12280
+ finishReason: (_a18 = this.activeResponse) == null ? void 0 : _a18.state.finishReason
12188
12281
  });
12189
12282
  } catch (err) {
12190
12283
  console.error(err);
@@ -12322,7 +12415,7 @@ var TextStreamChatTransport = class extends HttpChatTransport {
12322
12415
  }
12323
12416
  };
12324
12417
  export {
12325
- AISDKError18 as AISDKError,
12418
+ AISDKError19 as AISDKError,
12326
12419
  APICallError,
12327
12420
  AbstractChat,
12328
12421
  DefaultChatTransport,
@@ -12344,6 +12437,7 @@ export {
12344
12437
  LoadAPIKeyError,
12345
12438
  LoadSettingError,
12346
12439
  MessageConversionError,
12440
+ MissingToolResultsError,
12347
12441
  NoContentGeneratedError,
12348
12442
  NoImageGeneratedError,
12349
12443
  NoObjectGeneratedError,