ai 5.0.9 → 5.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,7 +1,7 @@
1
1
  var __defProp = Object.defineProperty;
2
2
  var __export = (target, all) => {
3
- for (var name16 in all)
4
- __defProp(target, name16, { get: all[name16], enumerable: true });
3
+ for (var name17 in all)
4
+ __defProp(target, name17, { get: all[name17], enumerable: true });
5
5
  };
6
6
 
7
7
  // src/index.ts
@@ -18,6 +18,7 @@ import {
18
18
  // src/generate-text/generate-text.ts
19
19
  import {
20
20
  createIdGenerator,
21
+ executeTool,
21
22
  getErrorMessage as getErrorMessage5
22
23
  } from "@ai-sdk/provider-utils";
23
24
 
@@ -44,7 +45,7 @@ import { gateway } from "@ai-sdk/gateway";
44
45
 
45
46
  // src/error/index.ts
46
47
  import {
47
- AISDKError as AISDKError16,
48
+ AISDKError as AISDKError17,
48
49
  APICallError,
49
50
  EmptyResponseBodyError,
50
51
  InvalidPromptError,
@@ -136,11 +137,11 @@ var symbol5 = Symbol.for(marker5);
136
137
  var _a5;
137
138
  var MCPClientError = class extends AISDKError5 {
138
139
  constructor({
139
- name: name16 = "MCPClientError",
140
+ name: name17 = "MCPClientError",
140
141
  message,
141
142
  cause
142
143
  }) {
143
- super({ name: name16, message, cause });
144
+ super({ name: name17, message, cause });
144
145
  this[_a5] = true;
145
146
  }
146
147
  static isInstance(error) {
@@ -199,54 +200,75 @@ var NoObjectGeneratedError = class extends AISDKError7 {
199
200
  };
200
201
  _a7 = symbol7;
201
202
 
202
- // src/error/no-such-tool-error.ts
203
+ // src/error/no-output-generated-error.ts
203
204
  import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
204
- var name8 = "AI_NoSuchToolError";
205
+ var name8 = "AI_NoOutputGeneratedError";
205
206
  var marker8 = `vercel.ai.error.${name8}`;
206
207
  var symbol8 = Symbol.for(marker8);
207
208
  var _a8;
208
- var NoSuchToolError = class extends AISDKError8 {
209
+ var NoOutputGeneratedError = class extends AISDKError8 {
210
+ // used in isInstance
211
+ constructor({
212
+ message = "No output generated.",
213
+ cause
214
+ } = {}) {
215
+ super({ name: name8, message, cause });
216
+ this[_a8] = true;
217
+ }
218
+ static isInstance(error) {
219
+ return AISDKError8.hasMarker(error, marker8);
220
+ }
221
+ };
222
+ _a8 = symbol8;
223
+
224
+ // src/error/no-such-tool-error.ts
225
+ import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
226
+ var name9 = "AI_NoSuchToolError";
227
+ var marker9 = `vercel.ai.error.${name9}`;
228
+ var symbol9 = Symbol.for(marker9);
229
+ var _a9;
230
+ var NoSuchToolError = class extends AISDKError9 {
209
231
  constructor({
210
232
  toolName,
211
233
  availableTools = void 0,
212
234
  message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
213
235
  }) {
214
- super({ name: name8, message });
215
- this[_a8] = true;
236
+ super({ name: name9, message });
237
+ this[_a9] = true;
216
238
  this.toolName = toolName;
217
239
  this.availableTools = availableTools;
218
240
  }
219
241
  static isInstance(error) {
220
- return AISDKError8.hasMarker(error, marker8);
242
+ return AISDKError9.hasMarker(error, marker9);
221
243
  }
222
244
  };
223
- _a8 = symbol8;
245
+ _a9 = symbol9;
224
246
 
225
247
  // src/error/tool-call-repair-error.ts
226
- import { AISDKError as AISDKError9, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
227
- var name9 = "AI_ToolCallRepairError";
228
- var marker9 = `vercel.ai.error.${name9}`;
229
- var symbol9 = Symbol.for(marker9);
230
- var _a9;
231
- var ToolCallRepairError = class extends AISDKError9 {
248
+ import { AISDKError as AISDKError10, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
249
+ var name10 = "AI_ToolCallRepairError";
250
+ var marker10 = `vercel.ai.error.${name10}`;
251
+ var symbol10 = Symbol.for(marker10);
252
+ var _a10;
253
+ var ToolCallRepairError = class extends AISDKError10 {
232
254
  constructor({
233
255
  cause,
234
256
  originalError,
235
257
  message = `Error repairing tool call: ${getErrorMessage2(cause)}`
236
258
  }) {
237
- super({ name: name9, message, cause });
238
- this[_a9] = true;
259
+ super({ name: name10, message, cause });
260
+ this[_a10] = true;
239
261
  this.originalError = originalError;
240
262
  }
241
263
  static isInstance(error) {
242
- return AISDKError9.hasMarker(error, marker9);
264
+ return AISDKError10.hasMarker(error, marker10);
243
265
  }
244
266
  };
245
- _a9 = symbol9;
267
+ _a10 = symbol10;
246
268
 
247
269
  // src/error/unsupported-model-version-error.ts
248
- import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
249
- var UnsupportedModelVersionError = class extends AISDKError10 {
270
+ import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
271
+ var UnsupportedModelVersionError = class extends AISDKError11 {
250
272
  constructor(options) {
251
273
  super({
252
274
  name: "AI_UnsupportedModelVersionError",
@@ -259,76 +281,76 @@ var UnsupportedModelVersionError = class extends AISDKError10 {
259
281
  };
260
282
 
261
283
  // src/prompt/invalid-data-content-error.ts
262
- import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
263
- var name10 = "AI_InvalidDataContentError";
264
- var marker10 = `vercel.ai.error.${name10}`;
265
- var symbol10 = Symbol.for(marker10);
266
- var _a10;
267
- var InvalidDataContentError = class extends AISDKError11 {
284
+ import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
285
+ var name11 = "AI_InvalidDataContentError";
286
+ var marker11 = `vercel.ai.error.${name11}`;
287
+ var symbol11 = Symbol.for(marker11);
288
+ var _a11;
289
+ var InvalidDataContentError = class extends AISDKError12 {
268
290
  constructor({
269
291
  content,
270
292
  cause,
271
293
  message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
272
294
  }) {
273
- super({ name: name10, message, cause });
274
- this[_a10] = true;
295
+ super({ name: name11, message, cause });
296
+ this[_a11] = true;
275
297
  this.content = content;
276
298
  }
277
299
  static isInstance(error) {
278
- return AISDKError11.hasMarker(error, marker10);
300
+ return AISDKError12.hasMarker(error, marker11);
279
301
  }
280
302
  };
281
- _a10 = symbol10;
303
+ _a11 = symbol11;
282
304
 
283
305
  // src/prompt/invalid-message-role-error.ts
284
- import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
285
- var name11 = "AI_InvalidMessageRoleError";
286
- var marker11 = `vercel.ai.error.${name11}`;
287
- var symbol11 = Symbol.for(marker11);
288
- var _a11;
289
- var InvalidMessageRoleError = class extends AISDKError12 {
306
+ import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
307
+ var name12 = "AI_InvalidMessageRoleError";
308
+ var marker12 = `vercel.ai.error.${name12}`;
309
+ var symbol12 = Symbol.for(marker12);
310
+ var _a12;
311
+ var InvalidMessageRoleError = class extends AISDKError13 {
290
312
  constructor({
291
313
  role,
292
314
  message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
293
315
  }) {
294
- super({ name: name11, message });
295
- this[_a11] = true;
316
+ super({ name: name12, message });
317
+ this[_a12] = true;
296
318
  this.role = role;
297
319
  }
298
320
  static isInstance(error) {
299
- return AISDKError12.hasMarker(error, marker11);
321
+ return AISDKError13.hasMarker(error, marker12);
300
322
  }
301
323
  };
302
- _a11 = symbol11;
324
+ _a12 = symbol12;
303
325
 
304
326
  // src/prompt/message-conversion-error.ts
305
- import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
306
- var name12 = "AI_MessageConversionError";
307
- var marker12 = `vercel.ai.error.${name12}`;
308
- var symbol12 = Symbol.for(marker12);
309
- var _a12;
310
- var MessageConversionError = class extends AISDKError13 {
327
+ import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
328
+ var name13 = "AI_MessageConversionError";
329
+ var marker13 = `vercel.ai.error.${name13}`;
330
+ var symbol13 = Symbol.for(marker13);
331
+ var _a13;
332
+ var MessageConversionError = class extends AISDKError14 {
311
333
  constructor({
312
334
  originalMessage,
313
335
  message
314
336
  }) {
315
- super({ name: name12, message });
316
- this[_a12] = true;
337
+ super({ name: name13, message });
338
+ this[_a13] = true;
317
339
  this.originalMessage = originalMessage;
318
340
  }
319
341
  static isInstance(error) {
320
- return AISDKError13.hasMarker(error, marker12);
342
+ return AISDKError14.hasMarker(error, marker13);
321
343
  }
322
344
  };
323
- _a12 = symbol12;
345
+ _a13 = symbol13;
324
346
 
325
347
  // src/util/download-error.ts
326
- import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
327
- var name13 = "AI_DownloadError";
328
- var marker13 = `vercel.ai.error.${name13}`;
329
- var symbol13 = Symbol.for(marker13);
330
- var _a13;
331
- var DownloadError = class extends AISDKError14 {
348
+ import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
349
+ var name14 = "AI_DownloadError";
350
+ var marker14 = `vercel.ai.error.${name14}`;
351
+ var symbol14 = Symbol.for(marker14);
352
+ var _a14;
353
+ var DownloadError = class extends AISDKError15 {
332
354
  constructor({
333
355
  url,
334
356
  statusCode,
@@ -336,41 +358,41 @@ var DownloadError = class extends AISDKError14 {
336
358
  cause,
337
359
  message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
338
360
  }) {
339
- super({ name: name13, message, cause });
340
- this[_a13] = true;
361
+ super({ name: name14, message, cause });
362
+ this[_a14] = true;
341
363
  this.url = url;
342
364
  this.statusCode = statusCode;
343
365
  this.statusText = statusText;
344
366
  }
345
367
  static isInstance(error) {
346
- return AISDKError14.hasMarker(error, marker13);
368
+ return AISDKError15.hasMarker(error, marker14);
347
369
  }
348
370
  };
349
- _a13 = symbol13;
371
+ _a14 = symbol14;
350
372
 
351
373
  // src/util/retry-error.ts
352
- import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
353
- var name14 = "AI_RetryError";
354
- var marker14 = `vercel.ai.error.${name14}`;
355
- var symbol14 = Symbol.for(marker14);
356
- var _a14;
357
- var RetryError = class extends AISDKError15 {
374
+ import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
375
+ var name15 = "AI_RetryError";
376
+ var marker15 = `vercel.ai.error.${name15}`;
377
+ var symbol15 = Symbol.for(marker15);
378
+ var _a15;
379
+ var RetryError = class extends AISDKError16 {
358
380
  constructor({
359
381
  message,
360
382
  reason,
361
383
  errors
362
384
  }) {
363
- super({ name: name14, message });
364
- this[_a14] = true;
385
+ super({ name: name15, message });
386
+ this[_a15] = true;
365
387
  this.reason = reason;
366
388
  this.errors = errors;
367
389
  this.lastError = errors[errors.length - 1];
368
390
  }
369
391
  static isInstance(error) {
370
- return AISDKError15.hasMarker(error, marker14);
392
+ return AISDKError16.hasMarker(error, marker15);
371
393
  }
372
394
  };
373
- _a14 = symbol14;
395
+ _a15 = symbol15;
374
396
 
375
397
  // src/model/resolve-model.ts
376
398
  function resolveLanguageModel(model) {
@@ -402,8 +424,8 @@ function resolveEmbeddingModel(model) {
402
424
  );
403
425
  }
404
426
  function getGlobalProvider() {
405
- var _a16;
406
- return (_a16 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a16 : gateway;
427
+ var _a17;
428
+ return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : gateway;
407
429
  }
408
430
 
409
431
  // src/prompt/convert-to-language-model-prompt.ts
@@ -551,7 +573,7 @@ function detectMediaType({
551
573
 
552
574
  // src/util/download.ts
553
575
  async function download({ url }) {
554
- var _a16;
576
+ var _a17;
555
577
  const urlText = url.toString();
556
578
  try {
557
579
  const response = await fetch(urlText);
@@ -564,7 +586,7 @@ async function download({ url }) {
564
586
  }
565
587
  return {
566
588
  data: new Uint8Array(await response.arrayBuffer()),
567
- mediaType: (_a16 = response.headers.get("content-type")) != null ? _a16 : void 0
589
+ mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
568
590
  };
569
591
  } catch (error) {
570
592
  if (DownloadError.isInstance(error)) {
@@ -575,7 +597,7 @@ async function download({ url }) {
575
597
  }
576
598
 
577
599
  // src/prompt/data-content.ts
578
- import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
600
+ import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
579
601
  import {
580
602
  convertBase64ToUint8Array as convertBase64ToUint8Array2,
581
603
  convertUint8ArrayToBase64
@@ -606,8 +628,8 @@ var dataContentSchema = z.union([
606
628
  z.custom(
607
629
  // Buffer might not be available in some environments such as CloudFlare:
608
630
  (value) => {
609
- var _a16, _b;
610
- return (_b = (_a16 = globalThis.Buffer) == null ? void 0 : _a16.isBuffer(value)) != null ? _b : false;
631
+ var _a17, _b;
632
+ return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
611
633
  },
612
634
  { message: "Must be a Buffer" }
613
635
  )
@@ -630,7 +652,7 @@ function convertToLanguageModelV2DataContent(content) {
630
652
  content.toString()
631
653
  );
632
654
  if (dataUrlMediaType == null || base64Content == null) {
633
- throw new AISDKError17({
655
+ throw new AISDKError18({
634
656
  name: "InvalidDataContentError",
635
657
  message: `Invalid data URL format in content ${content.toString()}`
636
658
  });
@@ -805,8 +827,8 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
805
827
  ).flat().filter(
806
828
  (part) => part.type === "image" || part.type === "file"
807
829
  ).map((part) => {
808
- var _a16;
809
- const mediaType = (_a16 = part.mediaType) != null ? _a16 : part.type === "image" ? "image/*" : void 0;
830
+ var _a17;
831
+ const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
810
832
  let data = part.type === "image" ? part.image : part.data;
811
833
  if (typeof data === "string") {
812
834
  try {
@@ -833,7 +855,7 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
833
855
  );
834
856
  }
835
857
  function convertPartToLanguageModelPart(part, downloadedAssets) {
836
- var _a16;
858
+ var _a17;
837
859
  if (part.type === "text") {
838
860
  return {
839
861
  type: "text",
@@ -866,7 +888,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
866
888
  switch (type) {
867
889
  case "image": {
868
890
  if (data instanceof Uint8Array || typeof data === "string") {
869
- mediaType = (_a16 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a16 : mediaType;
891
+ mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
870
892
  }
871
893
  return {
872
894
  type: "file",
@@ -1006,10 +1028,10 @@ function prepareToolsAndToolChoice({
1006
1028
  };
1007
1029
  }
1008
1030
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
1009
- ([name16]) => activeTools.includes(name16)
1031
+ ([name17]) => activeTools.includes(name17)
1010
1032
  ) : Object.entries(tools);
1011
1033
  return {
1012
- tools: filteredTools.map(([name16, tool3]) => {
1034
+ tools: filteredTools.map(([name17, tool3]) => {
1013
1035
  const toolType = tool3.type;
1014
1036
  switch (toolType) {
1015
1037
  case void 0:
@@ -1017,7 +1039,7 @@ function prepareToolsAndToolChoice({
1017
1039
  case "function":
1018
1040
  return {
1019
1041
  type: "function",
1020
- name: name16,
1042
+ name: name17,
1021
1043
  description: tool3.description,
1022
1044
  inputSchema: asSchema(tool3.inputSchema).jsonSchema,
1023
1045
  providerOptions: tool3.providerOptions
@@ -1025,7 +1047,7 @@ function prepareToolsAndToolChoice({
1025
1047
  case "provider-defined":
1026
1048
  return {
1027
1049
  type: "provider-defined",
1028
- name: name16,
1050
+ name: name17,
1029
1051
  id: tool3.id,
1030
1052
  args: tool3.args
1031
1053
  };
@@ -1254,10 +1276,10 @@ import {
1254
1276
  GatewayAuthenticationError,
1255
1277
  GatewayModelNotFoundError
1256
1278
  } from "@ai-sdk/gateway";
1257
- import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
1279
+ import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
1258
1280
  function wrapGatewayError(error) {
1259
1281
  if (GatewayAuthenticationError.isInstance(error) || GatewayModelNotFoundError.isInstance(error)) {
1260
- return new AISDKError18({
1282
+ return new AISDKError19({
1261
1283
  name: "GatewayError",
1262
1284
  message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
1263
1285
  cause: error
@@ -1288,7 +1310,7 @@ function getBaseTelemetryAttributes({
1288
1310
  telemetry,
1289
1311
  headers
1290
1312
  }) {
1291
- var _a16;
1313
+ var _a17;
1292
1314
  return {
1293
1315
  "ai.model.provider": model.provider,
1294
1316
  "ai.model.id": model.modelId,
@@ -1298,7 +1320,7 @@ function getBaseTelemetryAttributes({
1298
1320
  return attributes;
1299
1321
  }, {}),
1300
1322
  // add metadata as attributes:
1301
- ...Object.entries((_a16 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a16 : {}).reduce(
1323
+ ...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
1302
1324
  (attributes, [key, value]) => {
1303
1325
  attributes[`ai.telemetry.metadata.${key}`] = value;
1304
1326
  return attributes;
@@ -1323,7 +1345,7 @@ var noopTracer = {
1323
1345
  startSpan() {
1324
1346
  return noopSpan;
1325
1347
  },
1326
- startActiveSpan(name16, arg1, arg2, arg3) {
1348
+ startActiveSpan(name17, arg1, arg2, arg3) {
1327
1349
  if (typeof arg1 === "function") {
1328
1350
  return arg1(noopSpan);
1329
1351
  }
@@ -1393,13 +1415,13 @@ function getTracer({
1393
1415
  // src/telemetry/record-span.ts
1394
1416
  import { SpanStatusCode } from "@opentelemetry/api";
1395
1417
  function recordSpan({
1396
- name: name16,
1418
+ name: name17,
1397
1419
  tracer,
1398
1420
  attributes,
1399
1421
  fn,
1400
1422
  endWhenDone = true
1401
1423
  }) {
1402
- return tracer.startActiveSpan(name16, { attributes }, async (span) => {
1424
+ return tracer.startActiveSpan(name17, { attributes }, async (span) => {
1403
1425
  try {
1404
1426
  const result = await fn(span);
1405
1427
  if (endWhenDone) {
@@ -1845,8 +1867,8 @@ function stepCountIs(stepCount) {
1845
1867
  }
1846
1868
  function hasToolCall(toolName) {
1847
1869
  return ({ steps }) => {
1848
- var _a16, _b, _c;
1849
- return (_c = (_b = (_a16 = steps[steps.length - 1]) == null ? void 0 : _a16.toolCalls) == null ? void 0 : _b.some(
1870
+ var _a17, _b, _c;
1871
+ return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
1850
1872
  (toolCall) => toolCall.toolName === toolName
1851
1873
  )) != null ? _c : false;
1852
1874
  };
@@ -2044,7 +2066,7 @@ async function generateText({
2044
2066
  }),
2045
2067
  tracer,
2046
2068
  fn: async (span) => {
2047
- var _a16, _b, _c, _d, _e, _f;
2069
+ var _a17, _b, _c, _d, _e, _f;
2048
2070
  const callSettings2 = prepareCallSettings(settings);
2049
2071
  let currentModelResponse;
2050
2072
  let clientToolCalls = [];
@@ -2064,7 +2086,7 @@ async function generateText({
2064
2086
  }));
2065
2087
  const promptMessages = await convertToLanguageModelPrompt({
2066
2088
  prompt: {
2067
- system: (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a16 : initialPrompt.system,
2089
+ system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
2068
2090
  messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
2069
2091
  },
2070
2092
  supportedUrls: await model.supportedUrls
@@ -2079,7 +2101,7 @@ async function generateText({
2079
2101
  });
2080
2102
  currentModelResponse = await retry(
2081
2103
  () => {
2082
- var _a17;
2104
+ var _a18;
2083
2105
  return recordSpan({
2084
2106
  name: "ai.generateText.doGenerate",
2085
2107
  attributes: selectTelemetryAttributes({
@@ -2111,14 +2133,14 @@ async function generateText({
2111
2133
  "gen_ai.request.max_tokens": settings.maxOutputTokens,
2112
2134
  "gen_ai.request.presence_penalty": settings.presencePenalty,
2113
2135
  "gen_ai.request.stop_sequences": settings.stopSequences,
2114
- "gen_ai.request.temperature": (_a17 = settings.temperature) != null ? _a17 : void 0,
2136
+ "gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
2115
2137
  "gen_ai.request.top_k": settings.topK,
2116
2138
  "gen_ai.request.top_p": settings.topP
2117
2139
  }
2118
2140
  }),
2119
2141
  tracer,
2120
2142
  fn: async (span2) => {
2121
- var _a18, _b2, _c2, _d2, _e2, _f2, _g, _h;
2143
+ var _a19, _b2, _c2, _d2, _e2, _f2, _g, _h;
2122
2144
  const result = await stepModel.doGenerate({
2123
2145
  ...callSettings2,
2124
2146
  tools: stepTools,
@@ -2130,7 +2152,7 @@ async function generateText({
2130
2152
  headers
2131
2153
  });
2132
2154
  const responseData = {
2133
- id: (_b2 = (_a18 = result.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),
2155
+ id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
2134
2156
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
2135
2157
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
2136
2158
  headers: (_g = result.response) == null ? void 0 : _g.headers,
@@ -2340,12 +2362,22 @@ async function executeTools({
2340
2362
  tracer,
2341
2363
  fn: async (span) => {
2342
2364
  try {
2343
- const output = await tool3.execute(input, {
2344
- toolCallId,
2345
- messages,
2346
- abortSignal,
2347
- experimental_context
2365
+ const stream = executeTool({
2366
+ execute: tool3.execute.bind(tool3),
2367
+ input,
2368
+ options: {
2369
+ toolCallId,
2370
+ messages,
2371
+ abortSignal,
2372
+ experimental_context
2373
+ }
2348
2374
  });
2375
+ let output;
2376
+ for await (const part of stream) {
2377
+ if (part.type === "final") {
2378
+ output = part.output;
2379
+ }
2380
+ }
2349
2381
  try {
2350
2382
  span.setAttributes(
2351
2383
  selectTelemetryAttributes({
@@ -2746,7 +2778,8 @@ var uiMessageChunkSchema = z7.union([
2746
2778
  toolCallId: z7.string(),
2747
2779
  output: z7.unknown(),
2748
2780
  providerExecuted: z7.boolean().optional(),
2749
- dynamic: z7.boolean().optional()
2781
+ dynamic: z7.boolean().optional(),
2782
+ preliminary: z7.boolean().optional()
2750
2783
  }),
2751
2784
  z7.strictObject({
2752
2785
  type: z7.literal("tool-output-error"),
@@ -3241,7 +3274,7 @@ function processUIMessageStream({
3241
3274
  new TransformStream({
3242
3275
  async transform(chunk, controller) {
3243
3276
  await runUpdateMessageJob(async ({ state, write }) => {
3244
- var _a16, _b, _c, _d;
3277
+ var _a17, _b, _c, _d;
3245
3278
  function getToolInvocation(toolCallId) {
3246
3279
  const toolInvocations = state.message.parts.filter(isToolUIPart);
3247
3280
  const toolInvocation = toolInvocations.find(
@@ -3269,7 +3302,7 @@ function processUIMessageStream({
3269
3302
  return toolInvocation;
3270
3303
  }
3271
3304
  function updateToolPart(options) {
3272
- var _a17;
3305
+ var _a18;
3273
3306
  const part = state.message.parts.find(
3274
3307
  (part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
3275
3308
  );
@@ -3281,7 +3314,8 @@ function processUIMessageStream({
3281
3314
  anyPart.output = anyOptions.output;
3282
3315
  anyPart.errorText = anyOptions.errorText;
3283
3316
  anyPart.rawInput = anyOptions.rawInput;
3284
- anyPart.providerExecuted = (_a17 = anyOptions.providerExecuted) != null ? _a17 : part.providerExecuted;
3317
+ anyPart.preliminary = anyOptions.preliminary;
3318
+ anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
3285
3319
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
3286
3320
  part.callProviderMetadata = anyOptions.providerMetadata;
3287
3321
  }
@@ -3295,12 +3329,13 @@ function processUIMessageStream({
3295
3329
  rawInput: anyOptions.rawInput,
3296
3330
  errorText: anyOptions.errorText,
3297
3331
  providerExecuted: anyOptions.providerExecuted,
3332
+ preliminary: anyOptions.preliminary,
3298
3333
  ...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
3299
3334
  });
3300
3335
  }
3301
3336
  }
3302
3337
  function updateDynamicToolPart(options) {
3303
- var _a17;
3338
+ var _a18;
3304
3339
  const part = state.message.parts.find(
3305
3340
  (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
3306
3341
  );
@@ -3312,7 +3347,8 @@ function processUIMessageStream({
3312
3347
  anyPart.input = anyOptions.input;
3313
3348
  anyPart.output = anyOptions.output;
3314
3349
  anyPart.errorText = anyOptions.errorText;
3315
- anyPart.rawInput = (_a17 = anyOptions.rawInput) != null ? _a17 : anyPart.rawInput;
3350
+ anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
3351
+ anyPart.preliminary = anyOptions.preliminary;
3316
3352
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
3317
3353
  part.callProviderMetadata = anyOptions.providerMetadata;
3318
3354
  }
@@ -3325,6 +3361,7 @@ function processUIMessageStream({
3325
3361
  input: anyOptions.input,
3326
3362
  output: anyOptions.output,
3327
3363
  errorText: anyOptions.errorText,
3364
+ preliminary: anyOptions.preliminary,
3328
3365
  ...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
3329
3366
  });
3330
3367
  }
@@ -3357,7 +3394,7 @@ function processUIMessageStream({
3357
3394
  case "text-delta": {
3358
3395
  const textPart = state.activeTextParts[chunk.id];
3359
3396
  textPart.text += chunk.delta;
3360
- textPart.providerMetadata = (_a16 = chunk.providerMetadata) != null ? _a16 : textPart.providerMetadata;
3397
+ textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
3361
3398
  write();
3362
3399
  break;
3363
3400
  }
@@ -3541,7 +3578,8 @@ function processUIMessageStream({
3541
3578
  toolName: toolInvocation.toolName,
3542
3579
  state: "output-available",
3543
3580
  input: toolInvocation.input,
3544
- output: chunk.output
3581
+ output: chunk.output,
3582
+ preliminary: chunk.preliminary
3545
3583
  });
3546
3584
  } else {
3547
3585
  const toolInvocation = getToolInvocation(chunk.toolCallId);
@@ -3551,7 +3589,8 @@ function processUIMessageStream({
3551
3589
  state: "output-available",
3552
3590
  input: toolInvocation.input,
3553
3591
  output: chunk.output,
3554
- providerExecuted: chunk.providerExecuted
3592
+ providerExecuted: chunk.providerExecuted,
3593
+ preliminary: chunk.preliminary
3555
3594
  });
3556
3595
  }
3557
3596
  write();
@@ -3900,17 +3939,17 @@ var DelayedPromise = class {
3900
3939
  return this._promise;
3901
3940
  }
3902
3941
  resolve(value) {
3903
- var _a16;
3942
+ var _a17;
3904
3943
  this.status = { type: "resolved", value };
3905
3944
  if (this._promise) {
3906
- (_a16 = this._resolve) == null ? void 0 : _a16.call(this, value);
3945
+ (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
3907
3946
  }
3908
3947
  }
3909
3948
  reject(error) {
3910
- var _a16;
3949
+ var _a17;
3911
3950
  this.status = { type: "rejected", error };
3912
3951
  if (this._promise) {
3913
- (_a16 = this._reject) == null ? void 0 : _a16.call(this, error);
3952
+ (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
3914
3953
  }
3915
3954
  }
3916
3955
  };
@@ -3941,12 +3980,13 @@ function filterStreamErrors(readable, onError) {
3941
3980
 
3942
3981
  // src/util/now.ts
3943
3982
  function now() {
3944
- var _a16, _b;
3945
- return (_b = (_a16 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a16.now()) != null ? _b : Date.now();
3983
+ var _a17, _b;
3984
+ return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
3946
3985
  }
3947
3986
 
3948
3987
  // src/generate-text/run-tools-transformation.ts
3949
3988
  import {
3989
+ executeTool as executeTool2,
3950
3990
  generateId,
3951
3991
  getErrorMessage as getErrorMessage6
3952
3992
  } from "@ai-sdk/provider-utils";
@@ -4074,12 +4114,29 @@ function runToolsTransformation({
4074
4114
  fn: async (span) => {
4075
4115
  let output;
4076
4116
  try {
4077
- output = await tool3.execute(toolCall.input, {
4078
- toolCallId: toolCall.toolCallId,
4079
- messages,
4080
- abortSignal,
4081
- experimental_context
4117
+ const stream = executeTool2({
4118
+ execute: tool3.execute.bind(tool3),
4119
+ input: toolCall.input,
4120
+ options: {
4121
+ toolCallId: toolCall.toolCallId,
4122
+ messages,
4123
+ abortSignal,
4124
+ experimental_context
4125
+ }
4082
4126
  });
4127
+ for await (const part of stream) {
4128
+ toolResultsStreamController.enqueue({
4129
+ ...toolCall,
4130
+ type: "tool-result",
4131
+ output: part.output,
4132
+ ...part.type === "preliminary" && {
4133
+ preliminary: true
4134
+ }
4135
+ });
4136
+ if (part.type === "final") {
4137
+ output = part.output;
4138
+ }
4139
+ }
4083
4140
  } catch (error) {
4084
4141
  recordErrorOnSpan(span, error);
4085
4142
  toolResultsStreamController.enqueue({
@@ -4091,11 +4148,6 @@ function runToolsTransformation({
4091
4148
  attemptClose();
4092
4149
  return;
4093
4150
  }
4094
- toolResultsStreamController.enqueue({
4095
- ...toolCall,
4096
- type: "tool-result",
4097
- output
4098
- });
4099
4151
  outstandingToolResults.delete(toolExecutionId);
4100
4152
  attemptClose();
4101
4153
  try {
@@ -4366,7 +4418,7 @@ var DefaultStreamTextResult = class {
4366
4418
  let activeReasoningContent = {};
4367
4419
  const eventProcessor = new TransformStream({
4368
4420
  async transform(chunk, controller) {
4369
- var _a16, _b, _c;
4421
+ var _a17, _b, _c;
4370
4422
  controller.enqueue(chunk);
4371
4423
  const { part } = chunk;
4372
4424
  if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -4396,7 +4448,7 @@ var DefaultStreamTextResult = class {
4396
4448
  return;
4397
4449
  }
4398
4450
  activeText.text += part.text;
4399
- activeText.providerMetadata = (_a16 = part.providerMetadata) != null ? _a16 : activeText.providerMetadata;
4451
+ activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
4400
4452
  }
4401
4453
  if (part.type === "text-end") {
4402
4454
  delete activeTextContent[part.id];
@@ -4448,7 +4500,7 @@ var DefaultStreamTextResult = class {
4448
4500
  if (part.type === "tool-call") {
4449
4501
  recordedContent.push(part);
4450
4502
  }
4451
- if (part.type === "tool-result") {
4503
+ if (part.type === "tool-result" && !part.preliminary) {
4452
4504
  recordedContent.push(part);
4453
4505
  }
4454
4506
  if (part.type === "tool-error") {
@@ -4491,6 +4543,12 @@ var DefaultStreamTextResult = class {
4491
4543
  async flush(controller) {
4492
4544
  try {
4493
4545
  if (recordedSteps.length === 0) {
4546
+ const error = new NoOutputGeneratedError({
4547
+ message: "No output generated. Check the stream for errors."
4548
+ });
4549
+ self._finishReason.reject(error);
4550
+ self._totalUsage.reject(error);
4551
+ self._steps.reject(error);
4494
4552
  return;
4495
4553
  }
4496
4554
  const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
@@ -4533,8 +4591,8 @@ var DefaultStreamTextResult = class {
4533
4591
  "ai.response.text": { output: () => finalStep.text },
4534
4592
  "ai.response.toolCalls": {
4535
4593
  output: () => {
4536
- var _a16;
4537
- return ((_a16 = finalStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
4594
+ var _a17;
4595
+ return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
4538
4596
  }
4539
4597
  },
4540
4598
  "ai.response.providerMetadata": JSON.stringify(
@@ -4621,7 +4679,7 @@ var DefaultStreamTextResult = class {
4621
4679
  responseMessages,
4622
4680
  usage
4623
4681
  }) {
4624
- var _a16, _b, _c, _d, _e;
4682
+ var _a17, _b, _c, _d, _e;
4625
4683
  const includeRawChunks2 = self.includeRawChunks;
4626
4684
  stepFinish = new DelayedPromise();
4627
4685
  const initialPrompt = await standardizePrompt({
@@ -4641,7 +4699,7 @@ var DefaultStreamTextResult = class {
4641
4699
  }));
4642
4700
  const promptMessages = await convertToLanguageModelPrompt({
4643
4701
  prompt: {
4644
- system: (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a16 : initialPrompt.system,
4702
+ system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
4645
4703
  messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
4646
4704
  },
4647
4705
  supportedUrls: await model.supportedUrls
@@ -4751,7 +4809,7 @@ var DefaultStreamTextResult = class {
4751
4809
  streamWithToolResults.pipeThrough(
4752
4810
  new TransformStream({
4753
4811
  async transform(chunk, controller) {
4754
- var _a17, _b2, _c2, _d2;
4812
+ var _a18, _b2, _c2, _d2;
4755
4813
  if (chunk.type === "stream-start") {
4756
4814
  warnings = chunk.warnings;
4757
4815
  return;
@@ -4811,7 +4869,9 @@ var DefaultStreamTextResult = class {
4811
4869
  }
4812
4870
  case "tool-result": {
4813
4871
  controller.enqueue(chunk);
4814
- stepToolOutputs.push(chunk);
4872
+ if (!chunk.preliminary) {
4873
+ stepToolOutputs.push(chunk);
4874
+ }
4815
4875
  break;
4816
4876
  }
4817
4877
  case "tool-error": {
@@ -4821,7 +4881,7 @@ var DefaultStreamTextResult = class {
4821
4881
  }
4822
4882
  case "response-metadata": {
4823
4883
  stepResponse = {
4824
- id: (_a17 = chunk.id) != null ? _a17 : stepResponse.id,
4884
+ id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
4825
4885
  timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
4826
4886
  modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
4827
4887
  };
@@ -5020,6 +5080,7 @@ var DefaultStreamTextResult = class {
5020
5080
  });
5021
5081
  }
5022
5082
  get steps() {
5083
+ this.consumeStream();
5023
5084
  return this._steps.promise;
5024
5085
  }
5025
5086
  get finalStep() {
@@ -5077,9 +5138,11 @@ var DefaultStreamTextResult = class {
5077
5138
  return this.finalStep.then((step) => step.response);
5078
5139
  }
5079
5140
  get totalUsage() {
5141
+ this.consumeStream();
5080
5142
  return this._totalUsage.promise;
5081
5143
  }
5082
5144
  get finishReason() {
5145
+ this.consumeStream();
5083
5146
  return this._finishReason.promise;
5084
5147
  }
5085
5148
  /**
@@ -5120,14 +5183,14 @@ var DefaultStreamTextResult = class {
5120
5183
  );
5121
5184
  }
5122
5185
  async consumeStream(options) {
5123
- var _a16;
5186
+ var _a17;
5124
5187
  try {
5125
5188
  await consumeStream({
5126
5189
  stream: this.fullStream,
5127
5190
  onError: options == null ? void 0 : options.onError
5128
5191
  });
5129
5192
  } catch (error) {
5130
- (_a16 = options == null ? void 0 : options.onError) == null ? void 0 : _a16.call(options, error);
5193
+ (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
5131
5194
  }
5132
5195
  }
5133
5196
  get experimental_partialOutputStream() {
@@ -5163,9 +5226,9 @@ var DefaultStreamTextResult = class {
5163
5226
  }) : void 0;
5164
5227
  const toolNamesByCallId = {};
5165
5228
  const isDynamic = (toolCallId) => {
5166
- var _a16, _b;
5229
+ var _a17, _b;
5167
5230
  const toolName = toolNamesByCallId[toolCallId];
5168
- const dynamic = ((_b = (_a16 = this.tools) == null ? void 0 : _a16[toolName]) == null ? void 0 : _b.type) === "dynamic";
5231
+ const dynamic = ((_b = (_a17 = this.tools) == null ? void 0 : _a17[toolName]) == null ? void 0 : _b.type) === "dynamic";
5169
5232
  return dynamic ? true : void 0;
5170
5233
  };
5171
5234
  const baseStream = this.fullStream.pipeThrough(
@@ -5310,6 +5373,7 @@ var DefaultStreamTextResult = class {
5310
5373
  toolCallId: part.toolCallId,
5311
5374
  output: part.output,
5312
5375
  ...part.providerExecuted != null ? { providerExecuted: part.providerExecuted } : {},
5376
+ ...part.preliminary != null ? { preliminary: part.preliminary } : {},
5313
5377
  ...dynamic != null ? { dynamic } : {}
5314
5378
  });
5315
5379
  break;
@@ -5529,7 +5593,7 @@ async function embed({
5529
5593
  }),
5530
5594
  tracer,
5531
5595
  fn: async (doEmbedSpan) => {
5532
- var _a16;
5596
+ var _a17;
5533
5597
  const modelResponse = await model.doEmbed({
5534
5598
  values: [value],
5535
5599
  abortSignal,
@@ -5537,7 +5601,7 @@ async function embed({
5537
5601
  providerOptions
5538
5602
  });
5539
5603
  const embedding2 = modelResponse.embeddings[0];
5540
- const usage2 = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
5604
+ const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
5541
5605
  doEmbedSpan.setAttributes(
5542
5606
  selectTelemetryAttributes({
5543
5607
  telemetry,
@@ -5640,7 +5704,7 @@ async function embedMany({
5640
5704
  }),
5641
5705
  tracer,
5642
5706
  fn: async (span) => {
5643
- var _a16;
5707
+ var _a17;
5644
5708
  const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
5645
5709
  model.maxEmbeddingsPerCall,
5646
5710
  model.supportsParallelCalls
@@ -5666,7 +5730,7 @@ async function embedMany({
5666
5730
  }),
5667
5731
  tracer,
5668
5732
  fn: async (doEmbedSpan) => {
5669
- var _a17;
5733
+ var _a18;
5670
5734
  const modelResponse = await model.doEmbed({
5671
5735
  values,
5672
5736
  abortSignal,
@@ -5674,7 +5738,7 @@ async function embedMany({
5674
5738
  providerOptions
5675
5739
  });
5676
5740
  const embeddings3 = modelResponse.embeddings;
5677
- const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
5741
+ const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
5678
5742
  doEmbedSpan.setAttributes(
5679
5743
  selectTelemetryAttributes({
5680
5744
  telemetry,
@@ -5748,7 +5812,7 @@ async function embedMany({
5748
5812
  }),
5749
5813
  tracer,
5750
5814
  fn: async (doEmbedSpan) => {
5751
- var _a17;
5815
+ var _a18;
5752
5816
  const modelResponse = await model.doEmbed({
5753
5817
  values: chunk,
5754
5818
  abortSignal,
@@ -5756,7 +5820,7 @@ async function embedMany({
5756
5820
  providerOptions
5757
5821
  });
5758
5822
  const embeddings2 = modelResponse.embeddings;
5759
- const usage = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
5823
+ const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
5760
5824
  doEmbedSpan.setAttributes(
5761
5825
  selectTelemetryAttributes({
5762
5826
  telemetry,
@@ -5793,7 +5857,7 @@ async function embedMany({
5793
5857
  result.providerMetadata
5794
5858
  )) {
5795
5859
  providerMetadata[providerName] = {
5796
- ...(_a16 = providerMetadata[providerName]) != null ? _a16 : {},
5860
+ ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
5797
5861
  ...metadata
5798
5862
  };
5799
5863
  }
@@ -5846,7 +5910,7 @@ async function generateImage({
5846
5910
  abortSignal,
5847
5911
  headers
5848
5912
  }) {
5849
- var _a16, _b;
5913
+ var _a17, _b;
5850
5914
  if (model.specificationVersion !== "v2") {
5851
5915
  throw new UnsupportedModelVersionError({
5852
5916
  version: model.specificationVersion,
@@ -5858,7 +5922,7 @@ async function generateImage({
5858
5922
  maxRetries: maxRetriesArg,
5859
5923
  abortSignal
5860
5924
  });
5861
- const maxImagesPerCallWithDefault = (_a16 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a16 : 1;
5925
+ const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
5862
5926
  const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
5863
5927
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
5864
5928
  if (i < callCount - 1) {
@@ -5891,13 +5955,13 @@ async function generateImage({
5891
5955
  images.push(
5892
5956
  ...result.images.map(
5893
5957
  (image) => {
5894
- var _a17;
5958
+ var _a18;
5895
5959
  return new DefaultGeneratedFile({
5896
5960
  data: image,
5897
- mediaType: (_a17 = detectMediaType({
5961
+ mediaType: (_a18 = detectMediaType({
5898
5962
  data: image,
5899
5963
  signatures: imageMediaTypeSignatures
5900
- })) != null ? _a17 : "image/png"
5964
+ })) != null ? _a18 : "image/png"
5901
5965
  });
5902
5966
  }
5903
5967
  )
@@ -6028,7 +6092,7 @@ var arrayOutputStrategy = (schema) => {
6028
6092
  isFirstDelta,
6029
6093
  isFinalDelta
6030
6094
  }) {
6031
- var _a16;
6095
+ var _a17;
6032
6096
  if (!isJSONObject(value) || !isJSONArray(value.elements)) {
6033
6097
  return {
6034
6098
  success: false,
@@ -6051,7 +6115,7 @@ var arrayOutputStrategy = (schema) => {
6051
6115
  }
6052
6116
  resultArray.push(result.value);
6053
6117
  }
6054
- const publishedElementCount = (_a16 = latestObject == null ? void 0 : latestObject.length) != null ? _a16 : 0;
6118
+ const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
6055
6119
  let textDelta = "";
6056
6120
  if (isFirstDelta) {
6057
6121
  textDelta += "[";
@@ -6465,7 +6529,7 @@ async function generateObject(options) {
6465
6529
  }),
6466
6530
  tracer,
6467
6531
  fn: async (span) => {
6468
- var _a16;
6532
+ var _a17;
6469
6533
  let result;
6470
6534
  let finishReason;
6471
6535
  let usage;
@@ -6509,7 +6573,7 @@ async function generateObject(options) {
6509
6573
  }),
6510
6574
  tracer,
6511
6575
  fn: async (span2) => {
6512
- var _a17, _b, _c, _d, _e, _f, _g, _h;
6576
+ var _a18, _b, _c, _d, _e, _f, _g, _h;
6513
6577
  const result2 = await model.doGenerate({
6514
6578
  responseFormat: {
6515
6579
  type: "json",
@@ -6524,7 +6588,7 @@ async function generateObject(options) {
6524
6588
  headers
6525
6589
  });
6526
6590
  const responseData = {
6527
- id: (_b = (_a17 = result2.response) == null ? void 0 : _a17.id) != null ? _b : generateId3(),
6591
+ id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId3(),
6528
6592
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
6529
6593
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
6530
6594
  headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -6572,7 +6636,7 @@ async function generateObject(options) {
6572
6636
  usage = generateResult.usage;
6573
6637
  warnings = generateResult.warnings;
6574
6638
  resultProviderMetadata = generateResult.providerMetadata;
6575
- request = (_a16 = generateResult.request) != null ? _a16 : {};
6639
+ request = (_a17 = generateResult.request) != null ? _a17 : {};
6576
6640
  response = generateResult.responseData;
6577
6641
  const object2 = await parseAndValidateObjectResultWithRepair(
6578
6642
  result,
@@ -6627,9 +6691,9 @@ var DefaultGenerateObjectResult = class {
6627
6691
  this.request = options.request;
6628
6692
  }
6629
6693
  toJsonResponse(init) {
6630
- var _a16;
6694
+ var _a17;
6631
6695
  return new Response(JSON.stringify(this.object), {
6632
- status: (_a16 = init == null ? void 0 : init.status) != null ? _a16 : 200,
6696
+ status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
6633
6697
  headers: prepareHeaders(init == null ? void 0 : init.headers, {
6634
6698
  "content-type": "application/json; charset=utf-8"
6635
6699
  })
@@ -6757,8 +6821,8 @@ function simulateReadableStream({
6757
6821
  chunkDelayInMs = 0,
6758
6822
  _internal
6759
6823
  }) {
6760
- var _a16;
6761
- const delay2 = (_a16 = _internal == null ? void 0 : _internal.delay) != null ? _a16 : delayFunction;
6824
+ var _a17;
6825
+ const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : delayFunction;
6762
6826
  let index = 0;
6763
6827
  return new ReadableStream({
6764
6828
  async pull(controller) {
@@ -7013,7 +7077,7 @@ var DefaultStreamObjectResult = class {
7013
7077
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
7014
7078
  new TransformStream({
7015
7079
  async transform(chunk, controller) {
7016
- var _a16, _b, _c;
7080
+ var _a17, _b, _c;
7017
7081
  if (typeof chunk === "object" && chunk.type === "stream-start") {
7018
7082
  warnings = chunk.warnings;
7019
7083
  return;
@@ -7063,7 +7127,7 @@ var DefaultStreamObjectResult = class {
7063
7127
  switch (chunk.type) {
7064
7128
  case "response-metadata": {
7065
7129
  fullResponse = {
7066
- id: (_a16 = chunk.id) != null ? _a16 : fullResponse.id,
7130
+ id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
7067
7131
  timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
7068
7132
  modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
7069
7133
  };
@@ -7287,8 +7351,8 @@ var DefaultStreamObjectResult = class {
7287
7351
  };
7288
7352
 
7289
7353
  // src/error/no-speech-generated-error.ts
7290
- import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
7291
- var NoSpeechGeneratedError = class extends AISDKError19 {
7354
+ import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
7355
+ var NoSpeechGeneratedError = class extends AISDKError20 {
7292
7356
  constructor(options) {
7293
7357
  super({
7294
7358
  name: "AI_NoSpeechGeneratedError",
@@ -7337,7 +7401,7 @@ async function generateSpeech({
7337
7401
  abortSignal,
7338
7402
  headers
7339
7403
  }) {
7340
- var _a16;
7404
+ var _a17;
7341
7405
  if (model.specificationVersion !== "v2") {
7342
7406
  throw new UnsupportedModelVersionError({
7343
7407
  version: model.specificationVersion,
@@ -7368,10 +7432,10 @@ async function generateSpeech({
7368
7432
  return new DefaultSpeechResult({
7369
7433
  audio: new DefaultGeneratedAudioFile({
7370
7434
  data: result.audio,
7371
- mediaType: (_a16 = detectMediaType({
7435
+ mediaType: (_a17 = detectMediaType({
7372
7436
  data: result.audio,
7373
7437
  signatures: audioMediaTypeSignatures
7374
- })) != null ? _a16 : "audio/mp3"
7438
+ })) != null ? _a17 : "audio/mp3"
7375
7439
  }),
7376
7440
  warnings: result.warnings,
7377
7441
  responses: [result.response],
@@ -7380,11 +7444,11 @@ async function generateSpeech({
7380
7444
  }
7381
7445
  var DefaultSpeechResult = class {
7382
7446
  constructor(options) {
7383
- var _a16;
7447
+ var _a17;
7384
7448
  this.audio = options.audio;
7385
7449
  this.warnings = options.warnings;
7386
7450
  this.responses = options.responses;
7387
- this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
7451
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
7388
7452
  }
7389
7453
  };
7390
7454
 
@@ -7802,7 +7866,7 @@ var doWrap = ({
7802
7866
  modelId,
7803
7867
  providerId
7804
7868
  }) => {
7805
- var _a16, _b, _c;
7869
+ var _a17, _b, _c;
7806
7870
  async function doTransform({
7807
7871
  params,
7808
7872
  type
@@ -7811,7 +7875,7 @@ var doWrap = ({
7811
7875
  }
7812
7876
  return {
7813
7877
  specificationVersion: "v2",
7814
- provider: (_a16 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a16 : model.provider,
7878
+ provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
7815
7879
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
7816
7880
  supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
7817
7881
  async doGenerate(params) {
@@ -7919,11 +7983,11 @@ function customProvider({
7919
7983
  var experimental_customProvider = customProvider;
7920
7984
 
7921
7985
  // src/registry/no-such-provider-error.ts
7922
- import { AISDKError as AISDKError20, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
7923
- var name15 = "AI_NoSuchProviderError";
7924
- var marker15 = `vercel.ai.error.${name15}`;
7925
- var symbol15 = Symbol.for(marker15);
7926
- var _a15;
7986
+ import { AISDKError as AISDKError21, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
7987
+ var name16 = "AI_NoSuchProviderError";
7988
+ var marker16 = `vercel.ai.error.${name16}`;
7989
+ var symbol16 = Symbol.for(marker16);
7990
+ var _a16;
7927
7991
  var NoSuchProviderError = class extends NoSuchModelError3 {
7928
7992
  constructor({
7929
7993
  modelId,
@@ -7932,16 +7996,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
7932
7996
  availableProviders,
7933
7997
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
7934
7998
  }) {
7935
- super({ errorName: name15, modelId, modelType, message });
7936
- this[_a15] = true;
7999
+ super({ errorName: name16, modelId, modelType, message });
8000
+ this[_a16] = true;
7937
8001
  this.providerId = providerId;
7938
8002
  this.availableProviders = availableProviders;
7939
8003
  }
7940
8004
  static isInstance(error) {
7941
- return AISDKError20.hasMarker(error, marker15);
8005
+ return AISDKError21.hasMarker(error, marker16);
7942
8006
  }
7943
8007
  };
7944
- _a15 = symbol15;
8008
+ _a16 = symbol16;
7945
8009
 
7946
8010
  // src/registry/provider-registry.ts
7947
8011
  import {
@@ -8000,10 +8064,10 @@ var DefaultProviderRegistry = class {
8000
8064
  return [id.slice(0, index), id.slice(index + this.separator.length)];
8001
8065
  }
8002
8066
  languageModel(id) {
8003
- var _a16, _b;
8067
+ var _a17, _b;
8004
8068
  const [providerId, modelId] = this.splitId(id, "languageModel");
8005
- let model = (_b = (_a16 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
8006
- _a16,
8069
+ let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
8070
+ _a17,
8007
8071
  modelId
8008
8072
  );
8009
8073
  if (model == null) {
@@ -8018,10 +8082,10 @@ var DefaultProviderRegistry = class {
8018
8082
  return model;
8019
8083
  }
8020
8084
  textEmbeddingModel(id) {
8021
- var _a16;
8085
+ var _a17;
8022
8086
  const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
8023
8087
  const provider = this.getProvider(providerId, "textEmbeddingModel");
8024
- const model = (_a16 = provider.textEmbeddingModel) == null ? void 0 : _a16.call(provider, modelId);
8088
+ const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
8025
8089
  if (model == null) {
8026
8090
  throw new NoSuchModelError4({
8027
8091
  modelId: id,
@@ -8031,20 +8095,20 @@ var DefaultProviderRegistry = class {
8031
8095
  return model;
8032
8096
  }
8033
8097
  imageModel(id) {
8034
- var _a16;
8098
+ var _a17;
8035
8099
  const [providerId, modelId] = this.splitId(id, "imageModel");
8036
8100
  const provider = this.getProvider(providerId, "imageModel");
8037
- const model = (_a16 = provider.imageModel) == null ? void 0 : _a16.call(provider, modelId);
8101
+ const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
8038
8102
  if (model == null) {
8039
8103
  throw new NoSuchModelError4({ modelId: id, modelType: "imageModel" });
8040
8104
  }
8041
8105
  return model;
8042
8106
  }
8043
8107
  transcriptionModel(id) {
8044
- var _a16;
8108
+ var _a17;
8045
8109
  const [providerId, modelId] = this.splitId(id, "transcriptionModel");
8046
8110
  const provider = this.getProvider(providerId, "transcriptionModel");
8047
- const model = (_a16 = provider.transcriptionModel) == null ? void 0 : _a16.call(provider, modelId);
8111
+ const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
8048
8112
  if (model == null) {
8049
8113
  throw new NoSuchModelError4({
8050
8114
  modelId: id,
@@ -8054,10 +8118,10 @@ var DefaultProviderRegistry = class {
8054
8118
  return model;
8055
8119
  }
8056
8120
  speechModel(id) {
8057
- var _a16;
8121
+ var _a17;
8058
8122
  const [providerId, modelId] = this.splitId(id, "speechModel");
8059
8123
  const provider = this.getProvider(providerId, "speechModel");
8060
- const model = (_a16 = provider.speechModel) == null ? void 0 : _a16.call(provider, modelId);
8124
+ const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
8061
8125
  if (model == null) {
8062
8126
  throw new NoSuchModelError4({ modelId: id, modelType: "speechModel" });
8063
8127
  }
@@ -8230,13 +8294,13 @@ var SseMCPTransport = class {
8230
8294
  }
8231
8295
  this.abortController = new AbortController();
8232
8296
  const establishConnection = async () => {
8233
- var _a16, _b, _c;
8297
+ var _a17, _b, _c;
8234
8298
  try {
8235
8299
  const headers = new Headers(this.headers);
8236
8300
  headers.set("Accept", "text/event-stream");
8237
8301
  const response = await fetch(this.url.href, {
8238
8302
  headers,
8239
- signal: (_a16 = this.abortController) == null ? void 0 : _a16.signal
8303
+ signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
8240
8304
  });
8241
8305
  if (!response.ok || !response.body) {
8242
8306
  const error = new MCPClientError({
@@ -8248,7 +8312,7 @@ var SseMCPTransport = class {
8248
8312
  const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream());
8249
8313
  const reader = stream.getReader();
8250
8314
  const processEvents = async () => {
8251
- var _a17, _b2, _c2;
8315
+ var _a18, _b2, _c2;
8252
8316
  try {
8253
8317
  while (true) {
8254
8318
  const { done, value } = await reader.read();
@@ -8276,7 +8340,7 @@ var SseMCPTransport = class {
8276
8340
  const message = JSONRPCMessageSchema.parse(
8277
8341
  JSON.parse(data)
8278
8342
  );
8279
- (_a17 = this.onmessage) == null ? void 0 : _a17.call(this, message);
8343
+ (_a18 = this.onmessage) == null ? void 0 : _a18.call(this, message);
8280
8344
  } catch (error) {
8281
8345
  const e = new MCPClientError({
8282
8346
  message: "MCP SSE Transport Error: Failed to parse message",
@@ -8310,14 +8374,14 @@ var SseMCPTransport = class {
8310
8374
  });
8311
8375
  }
8312
8376
  async close() {
8313
- var _a16, _b, _c;
8377
+ var _a17, _b, _c;
8314
8378
  this.connected = false;
8315
- (_a16 = this.sseConnection) == null ? void 0 : _a16.close();
8379
+ (_a17 = this.sseConnection) == null ? void 0 : _a17.close();
8316
8380
  (_b = this.abortController) == null ? void 0 : _b.abort();
8317
8381
  (_c = this.onclose) == null ? void 0 : _c.call(this);
8318
8382
  }
8319
8383
  async send(message) {
8320
- var _a16, _b, _c;
8384
+ var _a17, _b, _c;
8321
8385
  if (!this.endpoint || !this.connected) {
8322
8386
  throw new MCPClientError({
8323
8387
  message: "MCP SSE Transport Error: Not connected"
@@ -8330,7 +8394,7 @@ var SseMCPTransport = class {
8330
8394
  method: "POST",
8331
8395
  headers,
8332
8396
  body: JSON.stringify(message),
8333
- signal: (_a16 = this.abortController) == null ? void 0 : _a16.signal
8397
+ signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
8334
8398
  };
8335
8399
  const response = await fetch(this.endpoint, init);
8336
8400
  if (!response.ok) {
@@ -8371,7 +8435,7 @@ async function createMCPClient(config) {
8371
8435
  var DefaultMCPClient = class {
8372
8436
  constructor({
8373
8437
  transport: transportConfig,
8374
- name: name16 = "ai-sdk-mcp-client",
8438
+ name: name17 = "ai-sdk-mcp-client",
8375
8439
  onUncaughtError
8376
8440
  }) {
8377
8441
  this.requestMessageId = 0;
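
A minimal sketch of how the client constructed here is typically obtained. The SSE URL is a placeholder, and the public import name (experimental_createMCPClient in the 5.x line) is an assumption rather than something this diff establishes.

    // Sketch: create an MCP client over SSE; the URL below is illustrative only.
    import { experimental_createMCPClient as createMCPClient } from 'ai';

    const mcpClient = await createMCPClient({
      transport: { type: 'sse', url: 'https://example.com/mcp/sse' },
      name: 'my-mcp-client', // optional; defaults to "ai-sdk-mcp-client" as shown above
    });
    // ... use the client, then release the transport
    await mcpClient.close();
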
@@ -8398,7 +8462,7 @@ var DefaultMCPClient = class {
8398
8462
  this.onResponse(message);
8399
8463
  };
8400
8464
  this.clientInfo = {
8401
- name: name16,
8465
+ name: name17,
8402
8466
  version: CLIENT_VERSION
8403
8467
  };
8404
8468
  }
@@ -8438,10 +8502,10 @@ var DefaultMCPClient = class {
8438
8502
  }
8439
8503
  }
8440
8504
  async close() {
8441
- var _a16;
8505
+ var _a17;
8442
8506
  if (this.isClosed)
8443
8507
  return;
8444
- await ((_a16 = this.transport) == null ? void 0 : _a16.close());
8508
+ await ((_a17 = this.transport) == null ? void 0 : _a17.close());
8445
8509
  this.onClose();
8446
8510
  }
8447
8511
  assertCapability(method) {
@@ -8531,13 +8595,13 @@ var DefaultMCPClient = class {
8531
8595
  }
8532
8596
  }
8533
8597
  async callTool({
8534
- name: name16,
8598
+ name: name17,
8535
8599
  args,
8536
8600
  options
8537
8601
  }) {
8538
8602
  try {
8539
8603
  return this.request({
8540
- request: { method: "tools/call", params: { name: name16, arguments: args } },
8604
+ request: { method: "tools/call", params: { name: name17, arguments: args } },
8541
8605
  resultSchema: CallToolResultSchema,
8542
8606
  options: {
8543
8607
  signal: options == null ? void 0 : options.abortSignal
@@ -8561,34 +8625,34 @@ var DefaultMCPClient = class {
8561
8625
  async tools({
8562
8626
  schemas = "automatic"
8563
8627
  } = {}) {
8564
- var _a16;
8628
+ var _a17;
8565
8629
  const tools = {};
8566
8630
  try {
8567
8631
  const listToolsResult = await this.listTools();
8568
- for (const { name: name16, description, inputSchema } of listToolsResult.tools) {
8569
- if (schemas !== "automatic" && !(name16 in schemas)) {
8632
+ for (const { name: name17, description, inputSchema } of listToolsResult.tools) {
8633
+ if (schemas !== "automatic" && !(name17 in schemas)) {
8570
8634
  continue;
8571
8635
  }
8572
8636
  const self = this;
8573
8637
  const execute = async (args, options) => {
8574
- var _a17;
8575
- (_a17 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a17.throwIfAborted();
8576
- return self.callTool({ name: name16, args, options });
8638
+ var _a18;
8639
+ (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
8640
+ return self.callTool({ name: name17, args, options });
8577
8641
  };
8578
8642
  const toolWithExecute = schemas === "automatic" ? dynamicTool({
8579
8643
  description,
8580
8644
  inputSchema: jsonSchema({
8581
8645
  ...inputSchema,
8582
- properties: (_a16 = inputSchema.properties) != null ? _a16 : {},
8646
+ properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
8583
8647
  additionalProperties: false
8584
8648
  }),
8585
8649
  execute
8586
8650
  }) : tool({
8587
8651
  description,
8588
- inputSchema: schemas[name16].inputSchema,
8652
+ inputSchema: schemas[name17].inputSchema,
8589
8653
  execute
8590
8654
  });
8591
- tools[name16] = toolWithExecute;
8655
+ tools[name17] = toolWithExecute;
8592
8656
  }
8593
8657
  return tools;
8594
8658
  } catch (error) {
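
Given the mapping above, a hedged sketch of consuming the client's tools. The tool name and zod schema are invented for illustration; with the default schemas: "automatic", input schemas are derived from the server's JSON Schema instead.

    // Sketch: fetch MCP tools; the tool name and schema below are illustrative.
    import { z } from 'zod';

    // default: every server tool, with schemas derived automatically
    const allTools = await mcpClient.tools();

    // explicit: only the listed tools are kept, using caller-supplied input schemas
    const pickedTools = await mcpClient.tools({
      schemas: {
        'search-docs': { inputSchema: z.object({ query: z.string() }) },
      },
    });
    // either record can be passed as `tools` to generateText / streamText
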
@@ -8633,8 +8697,8 @@ var DefaultMCPClient = class {
8633
8697
  };
8634
8698
 
8635
8699
  // src/error/no-transcript-generated-error.ts
8636
- import { AISDKError as AISDKError21 } from "@ai-sdk/provider";
8637
- var NoTranscriptGeneratedError = class extends AISDKError21 {
8700
+ import { AISDKError as AISDKError22 } from "@ai-sdk/provider";
8701
+ var NoTranscriptGeneratedError = class extends AISDKError22 {
8638
8702
  constructor(options) {
8639
8703
  super({
8640
8704
  name: "AI_NoTranscriptGeneratedError",
@@ -8667,16 +8731,16 @@ async function transcribe({
8667
8731
  const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
8668
8732
  const result = await retry(
8669
8733
  () => {
8670
- var _a16;
8734
+ var _a17;
8671
8735
  return model.doGenerate({
8672
8736
  audio: audioData,
8673
8737
  abortSignal,
8674
8738
  headers,
8675
8739
  providerOptions,
8676
- mediaType: (_a16 = detectMediaType({
8740
+ mediaType: (_a17 = detectMediaType({
8677
8741
  data: audioData,
8678
8742
  signatures: audioMediaTypeSignatures
8679
- })) != null ? _a16 : "audio/wav"
8743
+ })) != null ? _a17 : "audio/wav"
8680
8744
  });
8681
8745
  }
8682
8746
  );
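
The retry body above sniffs the audio media type and falls back to "audio/wav" when detection fails. A rough usage sketch follows; the experimental_transcribe import name and the OpenAI transcription model id are assumptions based on the wider 5.x API, not something this hunk pins down.

    // Sketch: transcribe a local file; import name and model id are assumed.
    import { experimental_transcribe as transcribe } from 'ai';
    import { openai } from '@ai-sdk/openai';
    import { readFile } from 'node:fs/promises';

    const result = await transcribe({
      model: openai.transcription('whisper-1'),
      audio: await readFile('./meeting.wav'), // media type is detected, else "audio/wav"
    });
    console.log(result.text, result.durationInSeconds);
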
@@ -8695,14 +8759,14 @@ async function transcribe({
8695
8759
  }
8696
8760
  var DefaultTranscriptionResult = class {
8697
8761
  constructor(options) {
8698
- var _a16;
8762
+ var _a17;
8699
8763
  this.text = options.text;
8700
8764
  this.segments = options.segments;
8701
8765
  this.language = options.language;
8702
8766
  this.durationInSeconds = options.durationInSeconds;
8703
8767
  this.warnings = options.warnings;
8704
8768
  this.responses = options.responses;
8705
- this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
8769
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
8706
8770
  }
8707
8771
  };
8708
8772
 
@@ -8741,7 +8805,7 @@ async function callCompletionApi({
8741
8805
  onError,
8742
8806
  fetch: fetch2 = getOriginalFetch()
8743
8807
  }) {
8744
- var _a16;
8808
+ var _a17;
8745
8809
  try {
8746
8810
  setLoading(true);
8747
8811
  setError(void 0);
@@ -8765,7 +8829,7 @@ async function callCompletionApi({
8765
8829
  });
8766
8830
  if (!response.ok) {
8767
8831
  throw new Error(
8768
- (_a16 = await response.text()) != null ? _a16 : "Failed to fetch the chat response."
8832
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
8769
8833
  );
8770
8834
  }
8771
8835
  if (!response.body) {
@@ -8851,12 +8915,12 @@ async function convertFileListToFileUIParts(files) {
8851
8915
  }
8852
8916
  return Promise.all(
8853
8917
  Array.from(files).map(async (file) => {
8854
- const { name: name16, type } = file;
8918
+ const { name: name17, type } = file;
8855
8919
  const dataUrl = await new Promise((resolve2, reject) => {
8856
8920
  const reader = new FileReader();
8857
8921
  reader.onload = (readerEvent) => {
8858
- var _a16;
8859
- resolve2((_a16 = readerEvent.target) == null ? void 0 : _a16.result);
8922
+ var _a17;
8923
+ resolve2((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
8860
8924
  };
8861
8925
  reader.onerror = (error) => reject(error);
8862
8926
  reader.readAsDataURL(file);
@@ -8864,7 +8928,7 @@ async function convertFileListToFileUIParts(files) {
8864
8928
  return {
8865
8929
  type: "file",
8866
8930
  mediaType: type,
8867
- filename: name16,
8931
+ filename: name17,
8868
8932
  url: dataUrl
8869
8933
  };
8870
8934
  })
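
For the browser path above, a small sketch of feeding an <input type="file"> selection through this helper, assuming it is part of the public export surface as elsewhere in the 5.x line; the element lookup is illustrative.

    // Sketch (browser): turn a FileList into file UI parts; the selector is illustrative.
    import { convertFileListToFileUIParts } from 'ai';

    const input = document.querySelector('input[type="file"]');
    const fileParts = await convertFileListToFileUIParts(input.files);
    // each entry: { type: 'file', mediaType, filename, url } with url as a data URL
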
@@ -8898,11 +8962,11 @@ var HttpChatTransport = class {
8898
8962
  abortSignal,
8899
8963
  ...options
8900
8964
  }) {
8901
- var _a16, _b, _c, _d, _e;
8965
+ var _a17, _b, _c, _d, _e;
8902
8966
  const resolvedBody = await resolve(this.body);
8903
8967
  const resolvedHeaders = await resolve(this.headers);
8904
8968
  const resolvedCredentials = await resolve(this.credentials);
8905
- const preparedRequest = await ((_a16 = this.prepareSendMessagesRequest) == null ? void 0 : _a16.call(this, {
8969
+ const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
8906
8970
  api: this.api,
8907
8971
  id: options.chatId,
8908
8972
  messages: options.messages,
@@ -8946,11 +9010,11 @@ var HttpChatTransport = class {
8946
9010
  return this.processResponseStream(response.body);
8947
9011
  }
8948
9012
  async reconnectToStream(options) {
8949
- var _a16, _b, _c, _d, _e;
9013
+ var _a17, _b, _c, _d, _e;
8950
9014
  const resolvedBody = await resolve(this.body);
8951
9015
  const resolvedHeaders = await resolve(this.headers);
8952
9016
  const resolvedCredentials = await resolve(this.credentials);
8953
- const preparedRequest = await ((_a16 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a16.call(this, {
9017
+ const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
8954
9018
  api: this.api,
8955
9019
  id: options.chatId,
8956
9020
  body: { ...resolvedBody, ...options.body },
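
The prepareSendMessagesRequest hook used above is supplied through the concrete transport. A hedged sketch; the extra sessionId field is purely illustrative.

    // Sketch: customize the outgoing chat request; sessionId is a made-up field.
    import { DefaultChatTransport } from 'ai';

    const transport = new DefaultChatTransport({
      api: '/api/chat',
      prepareSendMessagesRequest: ({ id, messages, body }) => ({
        // whatever is returned here overrides the defaults computed above
        body: { id, messages, ...body, sessionId: 'demo-session' },
      }),
    });
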
@@ -9028,11 +9092,11 @@ var AbstractChat = class {
9028
9092
  * If a messageId is provided, the message will be replaced.
9029
9093
  */
9030
9094
  this.sendMessage = async (message, options) => {
9031
- var _a16, _b, _c, _d;
9095
+ var _a17, _b, _c, _d;
9032
9096
  if (message == null) {
9033
9097
  await this.makeRequest({
9034
9098
  trigger: "submit-message",
9035
- messageId: (_a16 = this.lastMessage) == null ? void 0 : _a16.id,
9099
+ messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
9036
9100
  ...options
9037
9101
  });
9038
9102
  return;
@@ -9125,7 +9189,7 @@ var AbstractChat = class {
9125
9189
  toolCallId,
9126
9190
  output
9127
9191
  }) => this.jobExecutor.run(async () => {
9128
- var _a16, _b;
9192
+ var _a17, _b;
9129
9193
  const messages = this.state.messages;
9130
9194
  const lastMessage = messages[messages.length - 1];
9131
9195
  this.state.replaceMessage(messages.length - 1, {
@@ -9144,7 +9208,7 @@ var AbstractChat = class {
9144
9208
  } : part
9145
9209
  );
9146
9210
  }
9147
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
9211
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
9148
9212
  this.makeRequest({
9149
9213
  trigger: "submit-message",
9150
9214
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -9155,10 +9219,10 @@ var AbstractChat = class {
9155
9219
  * Abort the current request immediately, keep the generated tokens if any.
9156
9220
  */
9157
9221
  this.stop = async () => {
9158
- var _a16;
9222
+ var _a17;
9159
9223
  if (this.status !== "streaming" && this.status !== "submitted")
9160
9224
  return;
9161
- if ((_a16 = this.activeResponse) == null ? void 0 : _a16.abortController) {
9225
+ if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
9162
9226
  this.activeResponse.abortController.abort();
9163
9227
  }
9164
9228
  };
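
A short sketch against a concrete chat built on this class (for example the framework bindings' Chat); the chat variable is illustrative, and nothing below goes beyond the guard shown above.

    // Sketch: `chat` is an instance of a concrete AbstractChat subclass (illustrative).
    await chat.sendMessage({ text: 'Summarize the attached report.' });

    // stop() is a no-op unless status is "submitted" or "streaming";
    // it aborts the in-flight response but keeps tokens already received.
    await chat.stop();
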
@@ -9213,7 +9277,7 @@ var AbstractChat = class {
9213
9277
  body,
9214
9278
  messageId
9215
9279
  }) {
9216
- var _a16, _b, _c;
9280
+ var _a17, _b, _c;
9217
9281
  this.setStatus({ status: "submitted", error: void 0 });
9218
9282
  const lastMessage = this.lastMessage;
9219
9283
  try {
@@ -9256,9 +9320,9 @@ var AbstractChat = class {
9256
9320
  () => job({
9257
9321
  state: activeResponse.state,
9258
9322
  write: () => {
9259
- var _a17;
9323
+ var _a18;
9260
9324
  this.setStatus({ status: "streaming" });
9261
- const replaceLastMessage = activeResponse.state.message.id === ((_a17 = this.lastMessage) == null ? void 0 : _a17.id);
9325
+ const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
9262
9326
  if (replaceLastMessage) {
9263
9327
  this.state.replaceMessage(
9264
9328
  this.state.messages.length - 1,
@@ -9287,7 +9351,7 @@ var AbstractChat = class {
9287
9351
  throw error;
9288
9352
  }
9289
9353
  });
9290
- (_a16 = this.onFinish) == null ? void 0 : _a16.call(this, { message: activeResponse.state.message });
9354
+ (_a17 = this.onFinish) == null ? void 0 : _a17.call(this, { message: activeResponse.state.message });
9291
9355
  this.setStatus({ status: "ready" });
9292
9356
  } catch (err) {
9293
9357
  if (err.name === "AbortError") {
@@ -9351,14 +9415,16 @@ function convertToModelMessages(messages, options) {
9351
9415
  case "text":
9352
9416
  return {
9353
9417
  type: "text",
9354
- text: part.text
9418
+ text: part.text,
9419
+ ...part.providerMetadata != null ? { providerOptions: part.providerMetadata } : {}
9355
9420
  };
9356
9421
  case "file":
9357
9422
  return {
9358
9423
  type: "file",
9359
9424
  mediaType: part.mediaType,
9360
9425
  filename: part.filename,
9361
- data: part.url
9426
+ data: part.url,
9427
+ ...part.providerMetadata != null ? { providerOptions: part.providerMetadata } : {}
9362
9428
  };
9363
9429
  default:
9364
9430
  return part;
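
The functional change in this hunk is that per-part providerMetadata on text and file UI parts now travels through as providerOptions on the resulting model message parts. A minimal sketch; the provider key and option value are placeholders.

    // Sketch: providerMetadata on a UI part now surfaces as providerOptions.
    import { convertToModelMessages } from 'ai';

    const modelMessages = convertToModelMessages([
      {
        id: 'msg-1',
        role: 'user',
        parts: [
          {
            type: 'text',
            text: 'What changed between these versions?',
            providerMetadata: { 'example-provider': { hint: 'illustrative' } },
          },
        ],
      },
    ]);
    // modelMessages[0].content[0].providerOptions now carries the metadata above
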
@@ -9370,7 +9436,7 @@ function convertToModelMessages(messages, options) {
9370
9436
  case "assistant": {
9371
9437
  if (message.parts != null) {
9372
9438
  let processBlock2 = function() {
9373
- var _a16, _b;
9439
+ var _a17, _b;
9374
9440
  if (block.length === 0) {
9375
9441
  return;
9376
9442
  }
@@ -9423,7 +9489,7 @@ function convertToModelMessages(messages, options) {
9423
9489
  type: "tool-call",
9424
9490
  toolCallId: part.toolCallId,
9425
9491
  toolName,
9426
- input: part.state === "output-error" ? (_a16 = part.input) != null ? _a16 : part.rawInput : part.input,
9492
+ input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : part.rawInput : part.input,
9427
9493
  providerExecuted: part.providerExecuted,
9428
9494
  ...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
9429
9495
  });
@@ -9456,7 +9522,7 @@ function convertToModelMessages(messages, options) {
9456
9522
  modelMessages.push({
9457
9523
  role: "tool",
9458
9524
  content: toolParts.map((toolPart) => {
9459
- var _a17;
9525
+ var _a18;
9460
9526
  switch (toolPart.state) {
9461
9527
  case "output-error":
9462
9528
  case "output-available": {
@@ -9467,7 +9533,7 @@ function convertToModelMessages(messages, options) {
9467
9533
  toolName,
9468
9534
  output: createToolModelOutput({
9469
9535
  output: toolPart.state === "output-error" ? toolPart.errorText : toolPart.output,
9470
- tool: (_a17 = options == null ? void 0 : options.tools) == null ? void 0 : _a17[toolName],
9536
+ tool: (_a18 = options == null ? void 0 : options.tools) == null ? void 0 : _a18[toolName],
9471
9537
  errorMode: toolPart.state === "output-error" ? "text" : "none"
9472
9538
  })
9473
9539
  };
@@ -9660,7 +9726,7 @@ function readUIMessageStream({
9660
9726
  onError,
9661
9727
  terminateOnError = false
9662
9728
  }) {
9663
- var _a16;
9729
+ var _a17;
9664
9730
  let controller;
9665
9731
  let hasErrored = false;
9666
9732
  const outputStream = new ReadableStream({
@@ -9669,7 +9735,7 @@ function readUIMessageStream({
9669
9735
  }
9670
9736
  });
9671
9737
  const state = createStreamingUIMessageState({
9672
- messageId: (_a16 = message == null ? void 0 : message.id) != null ? _a16 : "",
9738
+ messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
9673
9739
  lastMessage: message
9674
9740
  });
9675
9741
  const handleError = (error) => {
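
Since readUIMessageStream returns an async-iterable stream, a consumption sketch; how the input chunk stream is produced (for example from streamText's UI message stream) is assumed rather than shown here, and render() is a placeholder.

    // Sketch: iterate progressively updated UI messages from a UI message chunk stream.
    import { readUIMessageStream } from 'ai';

    for await (const uiMessage of readUIMessageStream({ stream: uiMessageChunkStream })) {
      // each iteration yields the message with the parts assembled so far
      render(uiMessage.parts); // render() and uiMessageChunkStream are illustrative
    }
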
@@ -9701,7 +9767,7 @@ function readUIMessageStream({
9701
9767
  return createAsyncIterableStream(outputStream);
9702
9768
  }
9703
9769
  export {
9704
- AISDKError16 as AISDKError,
9770
+ AISDKError17 as AISDKError,
9705
9771
  APICallError,
9706
9772
  AbstractChat,
9707
9773
  DefaultChatTransport,
@@ -9724,6 +9790,7 @@ export {
9724
9790
  NoContentGeneratedError,
9725
9791
  NoImageGeneratedError,
9726
9792
  NoObjectGeneratedError,
9793
+ NoOutputGeneratedError,
9727
9794
  NoOutputSpecifiedError,
9728
9795
  NoSuchModelError,
9729
9796
  NoSuchProviderError,