ai 6.0.0-beta.71 → 6.0.0-beta.73

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,7 +1,7 @@
1
1
  var __defProp = Object.defineProperty;
2
2
  var __export = (target, all) => {
3
- for (var name17 in all)
4
- __defProp(target, name17, { get: all[name17], enumerable: true });
3
+ for (var name16 in all)
4
+ __defProp(target, name16, { get: all[name16], enumerable: true });
5
5
  };
6
6
 
7
7
  // src/index.ts
@@ -9,11 +9,11 @@ import { createGateway, gateway as gateway2 } from "@ai-sdk/gateway";
9
9
  import {
10
10
  asSchema as asSchema5,
11
11
  createIdGenerator as createIdGenerator5,
12
- dynamicTool as dynamicTool2,
12
+ dynamicTool,
13
13
  generateId,
14
- jsonSchema as jsonSchema2,
14
+ jsonSchema,
15
15
  parseJsonEventStream as parseJsonEventStream3,
16
- tool as tool2,
16
+ tool,
17
17
  zodSchema as zodSchema3
18
18
  } from "@ai-sdk/provider-utils";
19
19
 
@@ -97,7 +97,7 @@ import { gateway } from "@ai-sdk/gateway";
97
97
 
98
98
  // src/error/index.ts
99
99
  import {
100
- AISDKError as AISDKError18,
100
+ AISDKError as AISDKError17,
101
101
  APICallError,
102
102
  EmptyResponseBodyError,
103
103
  InvalidPromptError,
@@ -183,24 +183,21 @@ var InvalidToolInputError = class extends AISDKError4 {
183
183
  };
184
184
  _a4 = symbol4;
185
185
 
186
- // src/error/mcp-client-error.ts
186
+ // src/error/no-image-generated-error.ts
187
187
  import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
188
- var name5 = "AI_MCPClientError";
188
+ var name5 = "AI_NoImageGeneratedError";
189
189
  var marker5 = `vercel.ai.error.${name5}`;
190
190
  var symbol5 = Symbol.for(marker5);
191
191
  var _a5;
192
- var MCPClientError = class extends AISDKError5 {
192
+ var NoImageGeneratedError = class extends AISDKError5 {
193
193
  constructor({
194
- name: name17 = "MCPClientError",
195
- message,
194
+ message = "No image generated.",
196
195
  cause,
197
- data,
198
- code
196
+ responses
199
197
  }) {
200
- super({ name: name17, message, cause });
198
+ super({ name: name5, message, cause });
201
199
  this[_a5] = true;
202
- this.data = data;
203
- this.code = code;
200
+ this.responses = responses;
204
201
  }
205
202
  static isInstance(error) {
206
203
  return AISDKError5.hasMarker(error, marker5);
@@ -208,35 +205,13 @@ var MCPClientError = class extends AISDKError5 {
208
205
  };
209
206
  _a5 = symbol5;
210
207
 
211
- // src/error/no-image-generated-error.ts
208
+ // src/error/no-object-generated-error.ts
212
209
  import { AISDKError as AISDKError6 } from "@ai-sdk/provider";
213
- var name6 = "AI_NoImageGeneratedError";
210
+ var name6 = "AI_NoObjectGeneratedError";
214
211
  var marker6 = `vercel.ai.error.${name6}`;
215
212
  var symbol6 = Symbol.for(marker6);
216
213
  var _a6;
217
- var NoImageGeneratedError = class extends AISDKError6 {
218
- constructor({
219
- message = "No image generated.",
220
- cause,
221
- responses
222
- }) {
223
- super({ name: name6, message, cause });
224
- this[_a6] = true;
225
- this.responses = responses;
226
- }
227
- static isInstance(error) {
228
- return AISDKError6.hasMarker(error, marker6);
229
- }
230
- };
231
- _a6 = symbol6;
232
-
233
- // src/error/no-object-generated-error.ts
234
- import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
235
- var name7 = "AI_NoObjectGeneratedError";
236
- var marker7 = `vercel.ai.error.${name7}`;
237
- var symbol7 = Symbol.for(marker7);
238
- var _a7;
239
- var NoObjectGeneratedError = class extends AISDKError7 {
214
+ var NoObjectGeneratedError = class extends AISDKError6 {
240
215
  constructor({
241
216
  message = "No object generated.",
242
217
  cause,
@@ -245,43 +220,43 @@ var NoObjectGeneratedError = class extends AISDKError7 {
245
220
  usage,
246
221
  finishReason
247
222
  }) {
248
- super({ name: name7, message, cause });
249
- this[_a7] = true;
223
+ super({ name: name6, message, cause });
224
+ this[_a6] = true;
250
225
  this.text = text2;
251
226
  this.response = response;
252
227
  this.usage = usage;
253
228
  this.finishReason = finishReason;
254
229
  }
255
230
  static isInstance(error) {
256
- return AISDKError7.hasMarker(error, marker7);
231
+ return AISDKError6.hasMarker(error, marker6);
257
232
  }
258
233
  };
259
- _a7 = symbol7;
234
+ _a6 = symbol6;
260
235
 
261
236
  // src/error/no-output-generated-error.ts
262
- import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
263
- var name8 = "AI_NoOutputGeneratedError";
264
- var marker8 = `vercel.ai.error.${name8}`;
265
- var symbol8 = Symbol.for(marker8);
266
- var _a8;
267
- var NoOutputGeneratedError = class extends AISDKError8 {
237
+ import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
238
+ var name7 = "AI_NoOutputGeneratedError";
239
+ var marker7 = `vercel.ai.error.${name7}`;
240
+ var symbol7 = Symbol.for(marker7);
241
+ var _a7;
242
+ var NoOutputGeneratedError = class extends AISDKError7 {
268
243
  // used in isInstance
269
244
  constructor({
270
245
  message = "No output generated.",
271
246
  cause
272
247
  } = {}) {
273
- super({ name: name8, message, cause });
274
- this[_a8] = true;
248
+ super({ name: name7, message, cause });
249
+ this[_a7] = true;
275
250
  }
276
251
  static isInstance(error) {
277
- return AISDKError8.hasMarker(error, marker8);
252
+ return AISDKError7.hasMarker(error, marker7);
278
253
  }
279
254
  };
280
- _a8 = symbol8;
255
+ _a7 = symbol7;
281
256
 
282
257
  // src/error/no-speech-generated-error.ts
283
- import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
284
- var NoSpeechGeneratedError = class extends AISDKError9 {
258
+ import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
259
+ var NoSpeechGeneratedError = class extends AISDKError8 {
285
260
  constructor(options) {
286
261
  super({
287
262
  name: "AI_NoSpeechGeneratedError",
@@ -292,53 +267,53 @@ var NoSpeechGeneratedError = class extends AISDKError9 {
292
267
  };
293
268
 
294
269
  // src/error/no-such-tool-error.ts
295
- import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
296
- var name9 = "AI_NoSuchToolError";
297
- var marker9 = `vercel.ai.error.${name9}`;
298
- var symbol9 = Symbol.for(marker9);
299
- var _a9;
300
- var NoSuchToolError = class extends AISDKError10 {
270
+ import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
271
+ var name8 = "AI_NoSuchToolError";
272
+ var marker8 = `vercel.ai.error.${name8}`;
273
+ var symbol8 = Symbol.for(marker8);
274
+ var _a8;
275
+ var NoSuchToolError = class extends AISDKError9 {
301
276
  constructor({
302
277
  toolName,
303
278
  availableTools = void 0,
304
279
  message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
305
280
  }) {
306
- super({ name: name9, message });
307
- this[_a9] = true;
281
+ super({ name: name8, message });
282
+ this[_a8] = true;
308
283
  this.toolName = toolName;
309
284
  this.availableTools = availableTools;
310
285
  }
311
286
  static isInstance(error) {
312
- return AISDKError10.hasMarker(error, marker9);
287
+ return AISDKError9.hasMarker(error, marker8);
313
288
  }
314
289
  };
315
- _a9 = symbol9;
290
+ _a8 = symbol8;
316
291
 
317
292
  // src/error/tool-call-repair-error.ts
318
- import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
319
- var name10 = "AI_ToolCallRepairError";
320
- var marker10 = `vercel.ai.error.${name10}`;
321
- var symbol10 = Symbol.for(marker10);
322
- var _a10;
323
- var ToolCallRepairError = class extends AISDKError11 {
293
+ import { AISDKError as AISDKError10, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
294
+ var name9 = "AI_ToolCallRepairError";
295
+ var marker9 = `vercel.ai.error.${name9}`;
296
+ var symbol9 = Symbol.for(marker9);
297
+ var _a9;
298
+ var ToolCallRepairError = class extends AISDKError10 {
324
299
  constructor({
325
300
  cause,
326
301
  originalError,
327
302
  message = `Error repairing tool call: ${getErrorMessage2(cause)}`
328
303
  }) {
329
- super({ name: name10, message, cause });
330
- this[_a10] = true;
304
+ super({ name: name9, message, cause });
305
+ this[_a9] = true;
331
306
  this.originalError = originalError;
332
307
  }
333
308
  static isInstance(error) {
334
- return AISDKError11.hasMarker(error, marker10);
309
+ return AISDKError10.hasMarker(error, marker9);
335
310
  }
336
311
  };
337
- _a10 = symbol10;
312
+ _a9 = symbol9;
338
313
 
339
314
  // src/error/unsupported-model-version-error.ts
340
- import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
341
- var UnsupportedModelVersionError = class extends AISDKError12 {
315
+ import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
316
+ var UnsupportedModelVersionError = class extends AISDKError11 {
342
317
  constructor(options) {
343
318
  super({
344
319
  name: "AI_UnsupportedModelVersionError",
@@ -351,76 +326,76 @@ var UnsupportedModelVersionError = class extends AISDKError12 {
351
326
  };
352
327
 
353
328
  // src/prompt/invalid-data-content-error.ts
354
- import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
355
- var name11 = "AI_InvalidDataContentError";
356
- var marker11 = `vercel.ai.error.${name11}`;
357
- var symbol11 = Symbol.for(marker11);
358
- var _a11;
359
- var InvalidDataContentError = class extends AISDKError13 {
329
+ import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
330
+ var name10 = "AI_InvalidDataContentError";
331
+ var marker10 = `vercel.ai.error.${name10}`;
332
+ var symbol10 = Symbol.for(marker10);
333
+ var _a10;
334
+ var InvalidDataContentError = class extends AISDKError12 {
360
335
  constructor({
361
336
  content,
362
337
  cause,
363
338
  message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
364
339
  }) {
365
- super({ name: name11, message, cause });
366
- this[_a11] = true;
340
+ super({ name: name10, message, cause });
341
+ this[_a10] = true;
367
342
  this.content = content;
368
343
  }
369
344
  static isInstance(error) {
370
- return AISDKError13.hasMarker(error, marker11);
345
+ return AISDKError12.hasMarker(error, marker10);
371
346
  }
372
347
  };
373
- _a11 = symbol11;
348
+ _a10 = symbol10;
374
349
 
375
350
  // src/prompt/invalid-message-role-error.ts
376
- import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
377
- var name12 = "AI_InvalidMessageRoleError";
378
- var marker12 = `vercel.ai.error.${name12}`;
379
- var symbol12 = Symbol.for(marker12);
380
- var _a12;
381
- var InvalidMessageRoleError = class extends AISDKError14 {
351
+ import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
352
+ var name11 = "AI_InvalidMessageRoleError";
353
+ var marker11 = `vercel.ai.error.${name11}`;
354
+ var symbol11 = Symbol.for(marker11);
355
+ var _a11;
356
+ var InvalidMessageRoleError = class extends AISDKError13 {
382
357
  constructor({
383
358
  role,
384
359
  message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
385
360
  }) {
386
- super({ name: name12, message });
387
- this[_a12] = true;
361
+ super({ name: name11, message });
362
+ this[_a11] = true;
388
363
  this.role = role;
389
364
  }
390
365
  static isInstance(error) {
391
- return AISDKError14.hasMarker(error, marker12);
366
+ return AISDKError13.hasMarker(error, marker11);
392
367
  }
393
368
  };
394
- _a12 = symbol12;
369
+ _a11 = symbol11;
395
370
 
396
371
  // src/prompt/message-conversion-error.ts
397
- import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
398
- var name13 = "AI_MessageConversionError";
399
- var marker13 = `vercel.ai.error.${name13}`;
400
- var symbol13 = Symbol.for(marker13);
401
- var _a13;
402
- var MessageConversionError = class extends AISDKError15 {
372
+ import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
373
+ var name12 = "AI_MessageConversionError";
374
+ var marker12 = `vercel.ai.error.${name12}`;
375
+ var symbol12 = Symbol.for(marker12);
376
+ var _a12;
377
+ var MessageConversionError = class extends AISDKError14 {
403
378
  constructor({
404
379
  originalMessage,
405
380
  message
406
381
  }) {
407
- super({ name: name13, message });
408
- this[_a13] = true;
382
+ super({ name: name12, message });
383
+ this[_a12] = true;
409
384
  this.originalMessage = originalMessage;
410
385
  }
411
386
  static isInstance(error) {
412
- return AISDKError15.hasMarker(error, marker13);
387
+ return AISDKError14.hasMarker(error, marker12);
413
388
  }
414
389
  };
415
- _a13 = symbol13;
390
+ _a12 = symbol12;
416
391
 
417
392
  // src/util/download/download-error.ts
418
- import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
419
- var name14 = "AI_DownloadError";
420
- var marker14 = `vercel.ai.error.${name14}`;
421
- var symbol14 = Symbol.for(marker14);
422
- var _a14;
423
- var DownloadError = class extends AISDKError16 {
393
+ import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
394
+ var name13 = "AI_DownloadError";
395
+ var marker13 = `vercel.ai.error.${name13}`;
396
+ var symbol13 = Symbol.for(marker13);
397
+ var _a13;
398
+ var DownloadError = class extends AISDKError15 {
424
399
  constructor({
425
400
  url,
426
401
  statusCode,
@@ -428,41 +403,41 @@ var DownloadError = class extends AISDKError16 {
428
403
  cause,
429
404
  message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
430
405
  }) {
431
- super({ name: name14, message, cause });
432
- this[_a14] = true;
406
+ super({ name: name13, message, cause });
407
+ this[_a13] = true;
433
408
  this.url = url;
434
409
  this.statusCode = statusCode;
435
410
  this.statusText = statusText;
436
411
  }
437
412
  static isInstance(error) {
438
- return AISDKError16.hasMarker(error, marker14);
413
+ return AISDKError15.hasMarker(error, marker13);
439
414
  }
440
415
  };
441
- _a14 = symbol14;
416
+ _a13 = symbol13;
442
417
 
443
418
  // src/util/retry-error.ts
444
- import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
445
- var name15 = "AI_RetryError";
446
- var marker15 = `vercel.ai.error.${name15}`;
447
- var symbol15 = Symbol.for(marker15);
448
- var _a15;
449
- var RetryError = class extends AISDKError17 {
419
+ import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
420
+ var name14 = "AI_RetryError";
421
+ var marker14 = `vercel.ai.error.${name14}`;
422
+ var symbol14 = Symbol.for(marker14);
423
+ var _a14;
424
+ var RetryError = class extends AISDKError16 {
450
425
  constructor({
451
426
  message,
452
427
  reason,
453
428
  errors
454
429
  }) {
455
- super({ name: name15, message });
456
- this[_a15] = true;
430
+ super({ name: name14, message });
431
+ this[_a14] = true;
457
432
  this.reason = reason;
458
433
  this.errors = errors;
459
434
  this.lastError = errors[errors.length - 1];
460
435
  }
461
436
  static isInstance(error) {
462
- return AISDKError17.hasMarker(error, marker15);
437
+ return AISDKError16.hasMarker(error, marker14);
463
438
  }
464
439
  };
465
- _a15 = symbol15;
440
+ _a14 = symbol14;
466
441
 
467
442
  // src/model/resolve-model.ts
468
443
  function transformToV3LanguageModel(model) {
@@ -538,7 +513,7 @@ function resolveEmbeddingModel(model) {
538
513
  );
539
514
  }
540
515
  function resolveTranscriptionModel(model) {
541
- var _a17, _b;
516
+ var _a16, _b;
542
517
  if (typeof model !== "string") {
543
518
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
544
519
  const unsupportedModel = model;
@@ -553,10 +528,10 @@ function resolveTranscriptionModel(model) {
553
528
  }
554
529
  return model;
555
530
  }
556
- return (_b = (_a17 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a17, model);
531
+ return (_b = (_a16 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a16, model);
557
532
  }
558
533
  function resolveSpeechModel(model) {
559
- var _a17, _b;
534
+ var _a16, _b;
560
535
  if (typeof model !== "string") {
561
536
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
562
537
  const unsupportedModel = model;
@@ -571,11 +546,11 @@ function resolveSpeechModel(model) {
571
546
  }
572
547
  return model;
573
548
  }
574
- return (_b = (_a17 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a17, model);
549
+ return (_b = (_a16 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a16, model);
575
550
  }
576
551
  function getGlobalProvider() {
577
- var _a17;
578
- return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : gateway;
552
+ var _a16;
553
+ return (_a16 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a16 : gateway;
579
554
  }
580
555
 
581
556
  // src/prompt/convert-to-language-model-prompt.ts
@@ -775,11 +750,11 @@ import {
775
750
  } from "@ai-sdk/provider-utils";
776
751
 
777
752
  // src/version.ts
778
- var VERSION = true ? "6.0.0-beta.71" : "0.0.0-test";
753
+ var VERSION = true ? "6.0.0-beta.73" : "0.0.0-test";
779
754
 
780
755
  // src/util/download/download.ts
781
756
  var download = async ({ url }) => {
782
- var _a17;
757
+ var _a16;
783
758
  const urlText = url.toString();
784
759
  try {
785
760
  const response = await fetch(urlText, {
@@ -798,7 +773,7 @@ var download = async ({ url }) => {
798
773
  }
799
774
  return {
800
775
  data: new Uint8Array(await response.arrayBuffer()),
801
- mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
776
+ mediaType: (_a16 = response.headers.get("content-type")) != null ? _a16 : void 0
802
777
  };
803
778
  } catch (error) {
804
779
  if (DownloadError.isInstance(error)) {
@@ -816,7 +791,7 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
816
791
  );
817
792
 
818
793
  // src/prompt/data-content.ts
819
- import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
794
+ import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
820
795
  import {
821
796
  convertBase64ToUint8Array as convertBase64ToUint8Array2,
822
797
  convertUint8ArrayToBase64
@@ -847,8 +822,8 @@ var dataContentSchema = z.union([
847
822
  z.custom(
848
823
  // Buffer might not be available in some environments such as CloudFlare:
849
824
  (value) => {
850
- var _a17, _b;
851
- return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
825
+ var _a16, _b;
826
+ return (_b = (_a16 = globalThis.Buffer) == null ? void 0 : _a16.isBuffer(value)) != null ? _b : false;
852
827
  },
853
828
  { message: "Must be a Buffer" }
854
829
  )
@@ -871,7 +846,7 @@ function convertToLanguageModelV3DataContent(content) {
871
846
  content.toString()
872
847
  );
873
848
  if (dataUrlMediaType == null || base64Content == null) {
874
- throw new AISDKError19({
849
+ throw new AISDKError18({
875
850
  name: "InvalidDataContentError",
876
851
  message: `Invalid data URL format in content ${content.toString()}`
877
852
  });
@@ -1062,8 +1037,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
1062
1037
  ).flat().filter(
1063
1038
  (part) => part.type === "image" || part.type === "file"
1064
1039
  ).map((part) => {
1065
- var _a17;
1066
- const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
1040
+ var _a16;
1041
+ const mediaType = (_a16 = part.mediaType) != null ? _a16 : part.type === "image" ? "image/*" : void 0;
1067
1042
  let data = part.type === "image" ? part.image : part.data;
1068
1043
  if (typeof data === "string") {
1069
1044
  try {
@@ -1093,7 +1068,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
1093
1068
  );
1094
1069
  }
1095
1070
  function convertPartToLanguageModelPart(part, downloadedAssets) {
1096
- var _a17;
1071
+ var _a16;
1097
1072
  if (part.type === "text") {
1098
1073
  return {
1099
1074
  type: "text",
@@ -1126,7 +1101,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1126
1101
  switch (type) {
1127
1102
  case "image": {
1128
1103
  if (data instanceof Uint8Array || typeof data === "string") {
1129
- mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
1104
+ mediaType = (_a16 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a16 : mediaType;
1130
1105
  }
1131
1106
  return {
1132
1107
  type: "file",
@@ -1181,7 +1156,7 @@ function mapToolResultOutput(output) {
1181
1156
  import { getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
1182
1157
  function createToolModelOutput({
1183
1158
  output,
1184
- tool: tool3,
1159
+ tool: tool2,
1185
1160
  errorMode
1186
1161
  }) {
1187
1162
  if (errorMode === "text") {
@@ -1189,8 +1164,8 @@ function createToolModelOutput({
1189
1164
  } else if (errorMode === "json") {
1190
1165
  return { type: "error-json", value: toJSONValue(output) };
1191
1166
  }
1192
- if (tool3 == null ? void 0 : tool3.toModelOutput) {
1193
- return tool3.toModelOutput(output);
1167
+ if (tool2 == null ? void 0 : tool2.toModelOutput) {
1168
+ return tool2.toModelOutput(output);
1194
1169
  }
1195
1170
  return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: toJSONValue(output) };
1196
1171
  }
@@ -1312,29 +1287,29 @@ async function prepareToolsAndToolChoice({
1312
1287
  };
1313
1288
  }
1314
1289
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
1315
- ([name17]) => activeTools.includes(name17)
1290
+ ([name16]) => activeTools.includes(name16)
1316
1291
  ) : Object.entries(tools);
1317
1292
  const languageModelTools = [];
1318
- for (const [name17, tool3] of filteredTools) {
1319
- const toolType = tool3.type;
1293
+ for (const [name16, tool2] of filteredTools) {
1294
+ const toolType = tool2.type;
1320
1295
  switch (toolType) {
1321
1296
  case void 0:
1322
1297
  case "dynamic":
1323
1298
  case "function":
1324
1299
  languageModelTools.push({
1325
1300
  type: "function",
1326
- name: name17,
1327
- description: tool3.description,
1328
- inputSchema: await asSchema(tool3.inputSchema).jsonSchema,
1329
- providerOptions: tool3.providerOptions
1301
+ name: name16,
1302
+ description: tool2.description,
1303
+ inputSchema: await asSchema(tool2.inputSchema).jsonSchema,
1304
+ providerOptions: tool2.providerOptions
1330
1305
  });
1331
1306
  break;
1332
1307
  case "provider-defined":
1333
1308
  languageModelTools.push({
1334
1309
  type: "provider-defined",
1335
- name: name17,
1336
- id: tool3.id,
1337
- args: tool3.args
1310
+ name: name16,
1311
+ id: tool2.id,
1312
+ args: tool2.args
1338
1313
  });
1339
1314
  break;
1340
1315
  default: {
@@ -1626,10 +1601,10 @@ import {
1626
1601
  GatewayAuthenticationError,
1627
1602
  GatewayModelNotFoundError
1628
1603
  } from "@ai-sdk/gateway";
1629
- import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
1604
+ import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
1630
1605
  function wrapGatewayError(error) {
1631
1606
  if (GatewayAuthenticationError.isInstance(error) || GatewayModelNotFoundError.isInstance(error)) {
1632
- return new AISDKError20({
1607
+ return new AISDKError19({
1633
1608
  name: "GatewayError",
1634
1609
  message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
1635
1610
  cause: error
@@ -1660,7 +1635,7 @@ function getBaseTelemetryAttributes({
1660
1635
  telemetry,
1661
1636
  headers
1662
1637
  }) {
1663
- var _a17;
1638
+ var _a16;
1664
1639
  return {
1665
1640
  "ai.model.provider": model.provider,
1666
1641
  "ai.model.id": model.modelId,
@@ -1670,7 +1645,7 @@ function getBaseTelemetryAttributes({
1670
1645
  return attributes;
1671
1646
  }, {}),
1672
1647
  // add metadata as attributes:
1673
- ...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
1648
+ ...Object.entries((_a16 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a16 : {}).reduce(
1674
1649
  (attributes, [key, value]) => {
1675
1650
  attributes[`ai.telemetry.metadata.${key}`] = value;
1676
1651
  return attributes;
@@ -1695,7 +1670,7 @@ var noopTracer = {
1695
1670
  startSpan() {
1696
1671
  return noopSpan;
1697
1672
  },
1698
- startActiveSpan(name17, arg1, arg2, arg3) {
1673
+ startActiveSpan(name16, arg1, arg2, arg3) {
1699
1674
  if (typeof arg1 === "function") {
1700
1675
  return arg1(noopSpan);
1701
1676
  }
@@ -1765,14 +1740,14 @@ function getTracer({
1765
1740
  // src/telemetry/record-span.ts
1766
1741
  import { SpanStatusCode } from "@opentelemetry/api";
1767
1742
  async function recordSpan({
1768
- name: name17,
1743
+ name: name16,
1769
1744
  tracer,
1770
1745
  attributes,
1771
1746
  fn,
1772
1747
  endWhenDone = true
1773
1748
  }) {
1774
1749
  return tracer.startActiveSpan(
1775
- name17,
1750
+ name16,
1776
1751
  { attributes: await attributes },
1777
1752
  async (span) => {
1778
1753
  try {
@@ -2091,8 +2066,8 @@ async function executeToolCall({
2091
2066
  onPreliminaryToolResult
2092
2067
  }) {
2093
2068
  const { toolName, toolCallId, input } = toolCall;
2094
- const tool3 = tools == null ? void 0 : tools[toolName];
2095
- if ((tool3 == null ? void 0 : tool3.execute) == null) {
2069
+ const tool2 = tools == null ? void 0 : tools[toolName];
2070
+ if ((tool2 == null ? void 0 : tool2.execute) == null) {
2096
2071
  return void 0;
2097
2072
  }
2098
2073
  return recordSpan({
@@ -2116,7 +2091,7 @@ async function executeToolCall({
2116
2091
  let output;
2117
2092
  try {
2118
2093
  const stream = executeTool({
2119
- execute: tool3.execute.bind(tool3),
2094
+ execute: tool2.execute.bind(tool2),
2120
2095
  input,
2121
2096
  options: {
2122
2097
  toolCallId,
@@ -2145,7 +2120,7 @@ async function executeToolCall({
2145
2120
  toolName,
2146
2121
  input,
2147
2122
  error,
2148
- dynamic: tool3.type === "dynamic"
2123
+ dynamic: tool2.type === "dynamic"
2149
2124
  };
2150
2125
  }
2151
2126
  try {
@@ -2167,7 +2142,7 @@ async function executeToolCall({
2167
2142
  toolName,
2168
2143
  input,
2169
2144
  output,
2170
- dynamic: tool3.type === "dynamic"
2145
+ dynamic: tool2.type === "dynamic"
2171
2146
  };
2172
2147
  }
2173
2148
  });
@@ -2223,18 +2198,18 @@ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
2223
2198
 
2224
2199
  // src/generate-text/is-approval-needed.ts
2225
2200
  async function isApprovalNeeded({
2226
- tool: tool3,
2201
+ tool: tool2,
2227
2202
  toolCall,
2228
2203
  messages,
2229
2204
  experimental_context
2230
2205
  }) {
2231
- if (tool3.needsApproval == null) {
2206
+ if (tool2.needsApproval == null) {
2232
2207
  return false;
2233
2208
  }
2234
- if (typeof tool3.needsApproval === "boolean") {
2235
- return tool3.needsApproval;
2209
+ if (typeof tool2.needsApproval === "boolean") {
2210
+ return tool2.needsApproval;
2236
2211
  }
2237
- return await tool3.needsApproval(toolCall.input, {
2212
+ return await tool2.needsApproval(toolCall.input, {
2238
2213
  toolCallId: toolCall.toolCallId,
2239
2214
  messages,
2240
2215
  experimental_context
@@ -2329,8 +2304,8 @@ async function doParseToolCall({
2329
2304
  tools
2330
2305
  }) {
2331
2306
  const toolName = toolCall.toolName;
2332
- const tool3 = tools[toolName];
2333
- if (tool3 == null) {
2307
+ const tool2 = tools[toolName];
2308
+ if (tool2 == null) {
2334
2309
  if (toolCall.providerExecuted && toolCall.dynamic) {
2335
2310
  return await parseProviderExecutedDynamicToolCall(toolCall);
2336
2311
  }
@@ -2339,7 +2314,7 @@ async function doParseToolCall({
2339
2314
  availableTools: Object.keys(tools)
2340
2315
  });
2341
2316
  }
2342
- const schema = asSchema2(tool3.inputSchema);
2317
+ const schema = asSchema2(tool2.inputSchema);
2343
2318
  const parseResult = toolCall.input.trim() === "" ? await safeValidateTypes2({ value: {}, schema }) : await safeParseJSON({ text: toolCall.input, schema });
2344
2319
  if (parseResult.success === false) {
2345
2320
  throw new InvalidToolInputError({
@@ -2348,7 +2323,7 @@ async function doParseToolCall({
2348
2323
  cause: parseResult.error
2349
2324
  });
2350
2325
  }
2351
- return tool3.type === "dynamic" ? {
2326
+ return tool2.type === "dynamic" ? {
2352
2327
  type: "tool-call",
2353
2328
  toolCallId: toolCall.toolCallId,
2354
2329
  toolName: toolCall.toolName,
@@ -2434,8 +2409,8 @@ function stepCountIs(stepCount) {
2434
2409
  }
2435
2410
  function hasToolCall(toolName) {
2436
2411
  return ({ steps }) => {
2437
- var _a17, _b, _c;
2438
- return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
2412
+ var _a16, _b, _c;
2413
+ return (_c = (_b = (_a16 = steps[steps.length - 1]) == null ? void 0 : _a16.toolCalls) == null ? void 0 : _b.some(
2439
2414
  (toolCall) => toolCall.toolName === toolName
2440
2415
  )) != null ? _c : false;
2441
2416
  };
@@ -2623,7 +2598,7 @@ async function generateText({
2623
2598
  }),
2624
2599
  tracer,
2625
2600
  fn: async (span) => {
2626
- var _a17, _b, _c, _d, _e, _f, _g;
2601
+ var _a16, _b, _c, _d, _e, _f, _g;
2627
2602
  const initialMessages = initialPrompt.messages;
2628
2603
  const responseMessages = [];
2629
2604
  const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -2680,7 +2655,7 @@ async function generateText({
2680
2655
  messages: stepInputMessages
2681
2656
  }));
2682
2657
  const stepModel = resolveLanguageModel(
2683
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
2658
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
2684
2659
  );
2685
2660
  const promptMessages = await convertToLanguageModelPrompt({
2686
2661
  prompt: {
@@ -2697,7 +2672,7 @@ async function generateText({
2697
2672
  });
2698
2673
  currentModelResponse = await retry(
2699
2674
  () => {
2700
- var _a18;
2675
+ var _a17;
2701
2676
  return recordSpan({
2702
2677
  name: "ai.generateText.doGenerate",
2703
2678
  attributes: selectTelemetryAttributes({
@@ -2717,7 +2692,7 @@ async function generateText({
2717
2692
  },
2718
2693
  "ai.prompt.tools": {
2719
2694
  // convert the language model level tools:
2720
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
2695
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
2721
2696
  },
2722
2697
  "ai.prompt.toolChoice": {
2723
2698
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -2729,14 +2704,14 @@ async function generateText({
2729
2704
  "gen_ai.request.max_tokens": settings.maxOutputTokens,
2730
2705
  "gen_ai.request.presence_penalty": settings.presencePenalty,
2731
2706
  "gen_ai.request.stop_sequences": settings.stopSequences,
2732
- "gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
2707
+ "gen_ai.request.temperature": (_a17 = settings.temperature) != null ? _a17 : void 0,
2733
2708
  "gen_ai.request.top_k": settings.topK,
2734
2709
  "gen_ai.request.top_p": settings.topP
2735
2710
  }
2736
2711
  }),
2737
2712
  tracer,
2738
2713
  fn: async (span2) => {
2739
- var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
2714
+ var _a18, _b2, _c2, _d2, _e2, _f2, _g2, _h;
2740
2715
  const result = await stepModel.doGenerate({
2741
2716
  ...callSettings2,
2742
2717
  tools: stepTools,
@@ -2748,7 +2723,7 @@ async function generateText({
2748
2723
  headers: headersWithUserAgent
2749
2724
  });
2750
2725
  const responseData = {
2751
- id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId2(),
2726
+ id: (_b2 = (_a18 = result.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId2(),
2752
2727
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
2753
2728
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
2754
2729
  headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
@@ -2809,12 +2784,12 @@ async function generateText({
2809
2784
  if (toolCall.invalid) {
2810
2785
  continue;
2811
2786
  }
2812
- const tool3 = tools == null ? void 0 : tools[toolCall.toolName];
2813
- if (tool3 == null) {
2787
+ const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
2788
+ if (tool2 == null) {
2814
2789
  continue;
2815
2790
  }
2816
- if ((tool3 == null ? void 0 : tool3.onInputAvailable) != null) {
2817
- await tool3.onInputAvailable({
2791
+ if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
2792
+ await tool2.onInputAvailable({
2818
2793
  input: toolCall.input,
2819
2794
  toolCallId: toolCall.toolCallId,
2820
2795
  messages: stepInputMessages,
@@ -2823,7 +2798,7 @@ async function generateText({
2823
2798
  });
2824
2799
  }
2825
2800
  if (await isApprovalNeeded({
2826
- tool: tool3,
2801
+ tool: tool2,
2827
2802
  toolCall,
2828
2803
  messages: stepInputMessages,
2829
2804
  experimental_context
@@ -3888,7 +3863,7 @@ function processUIMessageStream({
3888
3863
  new TransformStream({
3889
3864
  async transform(chunk, controller) {
3890
3865
  await runUpdateMessageJob(async ({ state, write }) => {
3891
- var _a17, _b, _c, _d;
3866
+ var _a16, _b, _c, _d;
3892
3867
  function getToolInvocation(toolCallId) {
3893
3868
  const toolInvocations = state.message.parts.filter(
3894
3869
  isToolOrDynamicToolUIPart
@@ -3904,7 +3879,7 @@ function processUIMessageStream({
3904
3879
  return toolInvocation;
3905
3880
  }
3906
3881
  function updateToolPart(options) {
3907
- var _a18;
3882
+ var _a17;
3908
3883
  const part = state.message.parts.find(
3909
3884
  (part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
3910
3885
  );
@@ -3917,7 +3892,7 @@ function processUIMessageStream({
3917
3892
  anyPart.errorText = anyOptions.errorText;
3918
3893
  anyPart.rawInput = anyOptions.rawInput;
3919
3894
  anyPart.preliminary = anyOptions.preliminary;
3920
- anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
3895
+ anyPart.providerExecuted = (_a17 = anyOptions.providerExecuted) != null ? _a17 : part.providerExecuted;
3921
3896
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
3922
3897
  part.callProviderMetadata = anyOptions.providerMetadata;
3923
3898
  }
@@ -3937,7 +3912,7 @@ function processUIMessageStream({
3937
3912
  }
3938
3913
  }
3939
3914
  function updateDynamicToolPart(options) {
3940
- var _a18, _b2;
3915
+ var _a17, _b2;
3941
3916
  const part = state.message.parts.find(
3942
3917
  (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
3943
3918
  );
@@ -3949,7 +3924,7 @@ function processUIMessageStream({
3949
3924
  anyPart.input = anyOptions.input;
3950
3925
  anyPart.output = anyOptions.output;
3951
3926
  anyPart.errorText = anyOptions.errorText;
3952
- anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
3927
+ anyPart.rawInput = (_a17 = anyOptions.rawInput) != null ? _a17 : anyPart.rawInput;
3953
3928
  anyPart.preliminary = anyOptions.preliminary;
3954
3929
  anyPart.providerExecuted = (_b2 = anyOptions.providerExecuted) != null ? _b2 : part.providerExecuted;
3955
3930
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
@@ -3998,7 +3973,7 @@ function processUIMessageStream({
3998
3973
  case "text-delta": {
3999
3974
  const textPart = state.activeTextParts[chunk.id];
4000
3975
  textPart.text += chunk.delta;
4001
- textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
3976
+ textPart.providerMetadata = (_a16 = chunk.providerMetadata) != null ? _a16 : textPart.providerMetadata;
4002
3977
  write();
4003
3978
  break;
4004
3979
  }
@@ -4422,11 +4397,11 @@ function createAsyncIterableStream(source) {
4422
4397
  const reader = this.getReader();
4423
4398
  let finished = false;
4424
4399
  async function cleanup(cancelStream) {
4425
- var _a17;
4400
+ var _a16;
4426
4401
  finished = true;
4427
4402
  try {
4428
4403
  if (cancelStream) {
4429
- await ((_a17 = reader.cancel) == null ? void 0 : _a17.call(reader));
4404
+ await ((_a16 = reader.cancel) == null ? void 0 : _a16.call(reader));
4430
4405
  }
4431
4406
  } finally {
4432
4407
  try {
@@ -4613,25 +4588,25 @@ var DelayedPromise = class {
4613
4588
  return this._promise;
4614
4589
  }
4615
4590
  resolve(value) {
4616
- var _a17;
4591
+ var _a16;
4617
4592
  this.status = { type: "resolved", value };
4618
4593
  if (this._promise) {
4619
- (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
4594
+ (_a16 = this._resolve) == null ? void 0 : _a16.call(this, value);
4620
4595
  }
4621
4596
  }
4622
4597
  reject(error) {
4623
- var _a17;
4598
+ var _a16;
4624
4599
  this.status = { type: "rejected", error };
4625
4600
  if (this._promise) {
4626
- (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
4601
+ (_a16 = this._reject) == null ? void 0 : _a16.call(this, error);
4627
4602
  }
4628
4603
  }
4629
4604
  };
4630
4605
 
4631
4606
  // src/util/now.ts
4632
4607
  function now() {
4633
- var _a17, _b;
4634
- return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
4608
+ var _a16, _b;
4609
+ return (_b = (_a16 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a16.now()) != null ? _b : Date.now();
4635
4610
  }
4636
4611
 
4637
4612
  // src/generate-text/run-tools-transformation.ts
@@ -4729,12 +4704,12 @@ function runToolsTransformation({
4729
4704
  });
4730
4705
  break;
4731
4706
  }
4732
- const tool3 = tools == null ? void 0 : tools[toolCall.toolName];
4733
- if (tool3 == null) {
4707
+ const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
4708
+ if (tool2 == null) {
4734
4709
  break;
4735
4710
  }
4736
- if (tool3.onInputAvailable != null) {
4737
- await tool3.onInputAvailable({
4711
+ if (tool2.onInputAvailable != null) {
4712
+ await tool2.onInputAvailable({
4738
4713
  input: toolCall.input,
4739
4714
  toolCallId: toolCall.toolCallId,
4740
4715
  messages,
@@ -4743,7 +4718,7 @@ function runToolsTransformation({
4743
4718
  });
4744
4719
  }
4745
4720
  if (await isApprovalNeeded({
4746
- tool: tool3,
4721
+ tool: tool2,
4747
4722
  toolCall,
4748
4723
  messages,
4749
4724
  experimental_context
@@ -4756,7 +4731,7 @@ function runToolsTransformation({
4756
4731
  break;
4757
4732
  }
4758
4733
  toolInputs.set(toolCall.toolCallId, toolCall.input);
4759
- if (tool3.execute != null && toolCall.providerExecuted !== true) {
4734
+ if (tool2.execute != null && toolCall.providerExecuted !== true) {
4760
4735
  const toolExecutionId = generateId2();
4761
4736
  outstandingToolResults.add(toolExecutionId);
4762
4737
  executeToolCall({
@@ -5034,7 +5009,7 @@ var DefaultStreamTextResult = class {
5034
5009
  let activeReasoningContent = {};
5035
5010
  const eventProcessor = new TransformStream({
5036
5011
  async transform(chunk, controller) {
5037
- var _a17, _b, _c, _d;
5012
+ var _a16, _b, _c, _d;
5038
5013
  controller.enqueue(chunk);
5039
5014
  const { part } = chunk;
5040
5015
  if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -5064,7 +5039,7 @@ var DefaultStreamTextResult = class {
5064
5039
  return;
5065
5040
  }
5066
5041
  activeText.text += part.text;
5067
- activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
5042
+ activeText.providerMetadata = (_a16 = part.providerMetadata) != null ? _a16 : activeText.providerMetadata;
5068
5043
  }
5069
5044
  if (part.type === "text-end") {
5070
5045
  const activeText = activeTextContent[part.id];
@@ -5223,8 +5198,8 @@ var DefaultStreamTextResult = class {
5223
5198
  "ai.response.text": { output: () => finalStep.text },
5224
5199
  "ai.response.toolCalls": {
5225
5200
  output: () => {
5226
- var _a17;
5227
- return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
5201
+ var _a16;
5202
+ return ((_a16 = finalStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
5228
5203
  }
5229
5204
  },
5230
5205
  "ai.response.providerMetadata": JSON.stringify(
@@ -5404,7 +5379,7 @@ var DefaultStreamTextResult = class {
5404
5379
  responseMessages,
5405
5380
  usage
5406
5381
  }) {
5407
- var _a17, _b, _c, _d, _e;
5382
+ var _a16, _b, _c, _d, _e;
5408
5383
  const includeRawChunks2 = self.includeRawChunks;
5409
5384
  stepFinish = new DelayedPromise();
5410
5385
  const stepInputMessages = [...initialMessages, ...responseMessages];
@@ -5415,7 +5390,7 @@ var DefaultStreamTextResult = class {
5415
5390
  messages: stepInputMessages
5416
5391
  }));
5417
5392
  const stepModel = resolveLanguageModel(
5418
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
5393
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
5419
5394
  );
5420
5395
  const promptMessages = await convertToLanguageModelPrompt({
5421
5396
  prompt: {
@@ -5454,7 +5429,7 @@ var DefaultStreamTextResult = class {
5454
5429
  },
5455
5430
  "ai.prompt.tools": {
5456
5431
  // convert the language model level tools:
5457
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
5432
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
5458
5433
  },
5459
5434
  "ai.prompt.toolChoice": {
5460
5435
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -5526,7 +5501,7 @@ var DefaultStreamTextResult = class {
5526
5501
  streamWithToolResults.pipeThrough(
5527
5502
  new TransformStream({
5528
5503
  async transform(chunk, controller) {
5529
- var _a18, _b2, _c2, _d2, _e2;
5504
+ var _a17, _b2, _c2, _d2, _e2;
5530
5505
  if (chunk.type === "stream-start") {
5531
5506
  warnings = chunk.warnings;
5532
5507
  return;
@@ -5599,7 +5574,7 @@ var DefaultStreamTextResult = class {
5599
5574
  }
5600
5575
  case "response-metadata": {
5601
5576
  stepResponse = {
5602
- id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
5577
+ id: (_a17 = chunk.id) != null ? _a17 : stepResponse.id,
5603
5578
  timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
5604
5579
  modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
5605
5580
  };
@@ -5627,9 +5602,9 @@ var DefaultStreamTextResult = class {
5627
5602
  }
5628
5603
  case "tool-input-start": {
5629
5604
  activeToolCallToolNames[chunk.id] = chunk.toolName;
5630
- const tool3 = tools == null ? void 0 : tools[chunk.toolName];
5631
- if ((tool3 == null ? void 0 : tool3.onInputStart) != null) {
5632
- await tool3.onInputStart({
5605
+ const tool2 = tools == null ? void 0 : tools[chunk.toolName];
5606
+ if ((tool2 == null ? void 0 : tool2.onInputStart) != null) {
5607
+ await tool2.onInputStart({
5633
5608
  toolCallId: chunk.id,
5634
5609
  messages: stepInputMessages,
5635
5610
  abortSignal,
@@ -5638,7 +5613,7 @@ var DefaultStreamTextResult = class {
5638
5613
  }
5639
5614
  controller.enqueue({
5640
5615
  ...chunk,
5641
- dynamic: (_e2 = chunk.dynamic) != null ? _e2 : (tool3 == null ? void 0 : tool3.type) === "dynamic"
5616
+ dynamic: (_e2 = chunk.dynamic) != null ? _e2 : (tool2 == null ? void 0 : tool2.type) === "dynamic"
5642
5617
  });
5643
5618
  break;
5644
5619
  }
@@ -5649,9 +5624,9 @@ var DefaultStreamTextResult = class {
5649
5624
  }
5650
5625
  case "tool-input-delta": {
5651
5626
  const toolName = activeToolCallToolNames[chunk.id];
5652
- const tool3 = tools == null ? void 0 : tools[toolName];
5653
- if ((tool3 == null ? void 0 : tool3.onInputDelta) != null) {
5654
- await tool3.onInputDelta({
5627
+ const tool2 = tools == null ? void 0 : tools[toolName];
5628
+ if ((tool2 == null ? void 0 : tool2.onInputDelta) != null) {
5629
+ await tool2.onInputDelta({
5655
5630
  inputTextDelta: chunk.delta,
5656
5631
  toolCallId: chunk.id,
5657
5632
  messages: stepInputMessages,
@@ -5901,14 +5876,14 @@ var DefaultStreamTextResult = class {
5901
5876
  );
5902
5877
  }
5903
5878
  async consumeStream(options) {
5904
- var _a17;
5879
+ var _a16;
5905
5880
  try {
5906
5881
  await consumeStream({
5907
5882
  stream: this.fullStream,
5908
5883
  onError: options == null ? void 0 : options.onError
5909
5884
  });
5910
5885
  } catch (error) {
5911
- (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
5886
+ (_a16 = options == null ? void 0 : options.onError) == null ? void 0 : _a16.call(options, error);
5912
5887
  }
5913
5888
  }
5914
5889
  get experimental_partialOutputStream() {
@@ -5946,12 +5921,12 @@ var DefaultStreamTextResult = class {
5946
5921
  responseMessageId: generateMessageId
5947
5922
  }) : void 0;
5948
5923
  const isDynamic = (part) => {
5949
- var _a17;
5950
- const tool3 = (_a17 = this.tools) == null ? void 0 : _a17[part.toolName];
5951
- if (tool3 == null) {
5924
+ var _a16;
5925
+ const tool2 = (_a16 = this.tools) == null ? void 0 : _a16[part.toolName];
5926
+ if (tool2 == null) {
5952
5927
  return part.dynamic;
5953
5928
  }
5954
- return (tool3 == null ? void 0 : tool3.type) === "dynamic" ? true : void 0;
5929
+ return (tool2 == null ? void 0 : tool2.type) === "dynamic" ? true : void 0;
5955
5930
  };
5956
5931
  const baseStream = this.fullStream.pipeThrough(
5957
5932
  new TransformStream({
@@ -6281,10 +6256,10 @@ var ToolLoopAgent = class {
6281
6256
  return this.settings.tools;
6282
6257
  }
6283
6258
  async prepareCall(options) {
6284
- var _a17, _b, _c, _d;
6259
+ var _a16, _b, _c, _d;
6285
6260
  const baseCallArgs = {
6286
6261
  ...this.settings,
6287
- stopWhen: (_a17 = this.settings.stopWhen) != null ? _a17 : stepCountIs(20),
6262
+ stopWhen: (_a16 = this.settings.stopWhen) != null ? _a16 : stepCountIs(20),
6288
6263
  ...options
6289
6264
  };
6290
6265
  const preparedCallArgs = (_d = await ((_c = (_b = this.settings).prepareCall) == null ? void 0 : _c.call(_b, baseCallArgs))) != null ? _d : baseCallArgs;
@@ -6405,7 +6380,7 @@ function readUIMessageStream({
6405
6380
  onError,
6406
6381
  terminateOnError = false
6407
6382
  }) {
6408
- var _a17;
6383
+ var _a16;
6409
6384
  let controller;
6410
6385
  let hasErrored = false;
6411
6386
  const outputStream = new ReadableStream({
@@ -6414,7 +6389,7 @@ function readUIMessageStream({
6414
6389
  }
6415
6390
  });
6416
6391
  const state = createStreamingUIMessageState({
6417
- messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
6392
+ messageId: (_a16 = message == null ? void 0 : message.id) != null ? _a16 : "",
6418
6393
  lastMessage: message
6419
6394
  });
6420
6395
  const handleError = (error) => {
@@ -6483,7 +6458,7 @@ function convertToModelMessages(messages, options) {
6483
6458
  modelMessages.push({
6484
6459
  role: "user",
6485
6460
  content: message.parts.map((part) => {
6486
- var _a17;
6461
+ var _a16;
6487
6462
  if (isTextUIPart(part)) {
6488
6463
  return {
6489
6464
  type: "text",
@@ -6501,7 +6476,7 @@ function convertToModelMessages(messages, options) {
6501
6476
  };
6502
6477
  }
6503
6478
  if (isDataUIPart(part)) {
6504
- return (_a17 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a17.call(
6479
+ return (_a16 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a16.call(
6505
6480
  options,
6506
6481
  part
6507
6482
  );
@@ -6513,7 +6488,7 @@ function convertToModelMessages(messages, options) {
6513
6488
  case "assistant": {
6514
6489
  if (message.parts != null) {
6515
6490
  let processBlock2 = function() {
6516
- var _a17, _b, _c;
6491
+ var _a16, _b, _c;
6517
6492
  if (block.length === 0) {
6518
6493
  return;
6519
6494
  }
@@ -6545,7 +6520,7 @@ function convertToModelMessages(messages, options) {
6545
6520
  type: "tool-call",
6546
6521
  toolCallId: part.toolCallId,
6547
6522
  toolName,
6548
- input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : "rawInput" in part ? part.rawInput : void 0 : part.input,
6523
+ input: part.state === "output-error" ? (_a16 = part.input) != null ? _a16 : "rawInput" in part ? part.rawInput : void 0 : part.input,
6549
6524
  providerExecuted: part.providerExecuted,
6550
6525
  ...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
6551
6526
  });
@@ -6594,9 +6569,9 @@ function convertToModelMessages(messages, options) {
6594
6569
  role: "tool",
6595
6570
  content: toolParts.flatMap(
6596
6571
  (toolPart) => {
6597
- var _a18, _b2, _c2;
6572
+ var _a17, _b2, _c2;
6598
6573
  const outputs = [];
6599
- if (((_a18 = toolPart.approval) == null ? void 0 : _a18.approved) != null) {
6574
+ if (((_a17 = toolPart.approval) == null ? void 0 : _a17.approved) != null) {
6600
6575
  outputs.push({
6601
6576
  type: "tool-approval-response",
6602
6577
  approvalId: toolPart.approval.id,
@@ -6993,8 +6968,8 @@ async function safeValidateUIMessages({
6993
6968
  );
6994
6969
  for (const toolPart of toolParts) {
6995
6970
  const toolName = toolPart.type.slice(5);
6996
- const tool3 = tools[toolName];
6997
- if (!tool3) {
6971
+ const tool2 = tools[toolName];
6972
+ if (!tool2) {
6998
6973
  return {
6999
6974
  success: false,
7000
6975
  error: new TypeValidationError2({
@@ -7006,13 +6981,13 @@ async function safeValidateUIMessages({
7006
6981
  if (toolPart.state === "input-available" || toolPart.state === "output-available" || toolPart.state === "output-error") {
7007
6982
  await validateTypes2({
7008
6983
  value: toolPart.input,
7009
- schema: tool3.inputSchema
6984
+ schema: tool2.inputSchema
7010
6985
  });
7011
6986
  }
7012
- if (toolPart.state === "output-available" && tool3.outputSchema) {
6987
+ if (toolPart.state === "output-available" && tool2.outputSchema) {
7013
6988
  await validateTypes2({
7014
6989
  value: toolPart.output,
7015
- schema: tool3.outputSchema
6990
+ schema: tool2.outputSchema
7016
6991
  });
7017
6992
  }
7018
6993
  }
@@ -7162,7 +7137,7 @@ async function embed({
7162
7137
  }),
7163
7138
  tracer,
7164
7139
  fn: async (doEmbedSpan) => {
7165
- var _a17;
7140
+ var _a16;
7166
7141
  const modelResponse = await model.doEmbed({
7167
7142
  values: [value],
7168
7143
  abortSignal,
@@ -7170,7 +7145,7 @@ async function embed({
7170
7145
  providerOptions
7171
7146
  });
7172
7147
  const embedding2 = modelResponse.embeddings[0];
7173
- const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7148
+ const usage2 = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
7174
7149
  doEmbedSpan.setAttributes(
7175
7150
  await selectTelemetryAttributes({
7176
7151
  telemetry,
@@ -7280,7 +7255,7 @@ async function embedMany({
7280
7255
  }),
7281
7256
  tracer,
7282
7257
  fn: async (span) => {
7283
- var _a17;
7258
+ var _a16;
7284
7259
  const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
7285
7260
  model.maxEmbeddingsPerCall,
7286
7261
  model.supportsParallelCalls
@@ -7306,7 +7281,7 @@ async function embedMany({
7306
7281
  }),
7307
7282
  tracer,
7308
7283
  fn: async (doEmbedSpan) => {
7309
- var _a18;
7284
+ var _a17;
7310
7285
  const modelResponse = await model.doEmbed({
7311
7286
  values,
7312
7287
  abortSignal,
@@ -7314,7 +7289,7 @@ async function embedMany({
7314
7289
  providerOptions
7315
7290
  });
7316
7291
  const embeddings3 = modelResponse.embeddings;
7317
- const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
7292
+ const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7318
7293
  doEmbedSpan.setAttributes(
7319
7294
  await selectTelemetryAttributes({
7320
7295
  telemetry,
@@ -7388,7 +7363,7 @@ async function embedMany({
7388
7363
  }),
7389
7364
  tracer,
7390
7365
  fn: async (doEmbedSpan) => {
7391
- var _a18;
7366
+ var _a17;
7392
7367
  const modelResponse = await model.doEmbed({
7393
7368
  values: chunk,
7394
7369
  abortSignal,
@@ -7396,7 +7371,7 @@ async function embedMany({
7396
7371
  providerOptions
7397
7372
  });
7398
7373
  const embeddings2 = modelResponse.embeddings;
7399
- const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
7374
+ const usage = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7400
7375
  doEmbedSpan.setAttributes(
7401
7376
  await selectTelemetryAttributes({
7402
7377
  telemetry,
@@ -7433,7 +7408,7 @@ async function embedMany({
7433
7408
  result.providerMetadata
7434
7409
  )) {
7435
7410
  providerMetadata[providerName] = {
7436
- ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
7411
+ ...(_a16 = providerMetadata[providerName]) != null ? _a16 : {},
7437
7412
  ...metadata
7438
7413
  };
7439
7414
  }
@@ -7487,7 +7462,7 @@ async function generateImage({
7487
7462
  abortSignal,
7488
7463
  headers
7489
7464
  }) {
7490
- var _a17, _b;
7465
+ var _a16, _b;
7491
7466
  if (model.specificationVersion !== "v3") {
7492
7467
  throw new UnsupportedModelVersionError({
7493
7468
  version: model.specificationVersion,
@@ -7503,7 +7478,7 @@ async function generateImage({
7503
7478
  maxRetries: maxRetriesArg,
7504
7479
  abortSignal
7505
7480
  });
7506
- const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
7481
+ const maxImagesPerCallWithDefault = (_a16 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a16 : 1;
7507
7482
  const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
7508
7483
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
7509
7484
  if (i < callCount - 1) {
@@ -7536,13 +7511,13 @@ async function generateImage({
7536
7511
  images.push(
7537
7512
  ...result.images.map(
7538
7513
  (image) => {
7539
- var _a18;
7514
+ var _a17;
7540
7515
  return new DefaultGeneratedFile({
7541
7516
  data: image,
7542
- mediaType: (_a18 = detectMediaType({
7517
+ mediaType: (_a17 = detectMediaType({
7543
7518
  data: image,
7544
7519
  signatures: imageMediaTypeSignatures
7545
- })) != null ? _a18 : "image/png"
7520
+ })) != null ? _a17 : "image/png"
7546
7521
  });
7547
7522
  }
7548
7523
  )
@@ -7663,7 +7638,7 @@ var objectOutputStrategy = (schema) => ({
7663
7638
  });
7664
7639
  var arrayOutputStrategy = (schema) => {
7665
7640
  return {
7666
- type: "enum",
7641
+ type: "array",
7667
7642
  // wrap in object that contains array of elements, since most LLMs will not
7668
7643
  // be able to generate an array directly:
7669
7644
  // possible future optimization: use arrays directly when model supports grammar-guided generation
@@ -7685,7 +7660,7 @@ var arrayOutputStrategy = (schema) => {
7685
7660
  isFirstDelta,
7686
7661
  isFinalDelta
7687
7662
  }) {
7688
- var _a17;
7663
+ var _a16;
7689
7664
  if (!isJSONObject(value) || !isJSONArray(value.elements)) {
7690
7665
  return {
7691
7666
  success: false,
@@ -7708,7 +7683,7 @@ var arrayOutputStrategy = (schema) => {
7708
7683
  }
7709
7684
  resultArray.push(result.value);
7710
7685
  }
7711
- const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
7686
+ const publishedElementCount = (_a16 = latestObject == null ? void 0 : latestObject.length) != null ? _a16 : 0;
7712
7687
  let textDelta = "";
7713
7688
  if (isFirstDelta) {
7714
7689
  textDelta += "[";
@@ -8104,7 +8079,7 @@ async function generateObject(options) {
8104
8079
  settings: { ...callSettings, maxRetries }
8105
8080
  });
8106
8081
  const tracer = getTracer(telemetry);
8107
- const jsonSchema3 = await outputStrategy.jsonSchema();
8082
+ const jsonSchema2 = await outputStrategy.jsonSchema();
8108
8083
  try {
8109
8084
  return await recordSpan({
8110
8085
  name: "ai.generateObject",
@@ -8120,7 +8095,7 @@ async function generateObject(options) {
8120
8095
  "ai.prompt": {
8121
8096
  input: () => JSON.stringify({ system, prompt, messages })
8122
8097
  },
8123
- "ai.schema": jsonSchema3 != null ? { input: () => JSON.stringify(jsonSchema3) } : void 0,
8098
+ "ai.schema": jsonSchema2 != null ? { input: () => JSON.stringify(jsonSchema2) } : void 0,
8124
8099
  "ai.schema.name": schemaName,
8125
8100
  "ai.schema.description": schemaDescription,
8126
8101
  "ai.settings.output": outputStrategy.type
@@ -8128,7 +8103,7 @@ async function generateObject(options) {
8128
8103
  }),
8129
8104
  tracer,
8130
8105
  fn: async (span) => {
8131
- var _a17;
8106
+ var _a16;
8132
8107
  let result;
8133
8108
  let finishReason;
8134
8109
  let usage;
@@ -8174,11 +8149,11 @@ async function generateObject(options) {
8174
8149
  }),
8175
8150
  tracer,
8176
8151
  fn: async (span2) => {
8177
- var _a18, _b, _c, _d, _e, _f, _g, _h;
8152
+ var _a17, _b, _c, _d, _e, _f, _g, _h;
8178
8153
  const result2 = await model.doGenerate({
8179
8154
  responseFormat: {
8180
8155
  type: "json",
8181
- schema: jsonSchema3,
8156
+ schema: jsonSchema2,
8182
8157
  name: schemaName,
8183
8158
  description: schemaDescription
8184
8159
  },
@@ -8189,7 +8164,7 @@ async function generateObject(options) {
8189
8164
  headers: headersWithUserAgent
8190
8165
  });
8191
8166
  const responseData = {
8192
- id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId2(),
8167
+ id: (_b = (_a17 = result2.response) == null ? void 0 : _a17.id) != null ? _b : generateId2(),
8193
8168
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
8194
8169
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
8195
8170
  headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -8243,7 +8218,7 @@ async function generateObject(options) {
8243
8218
  usage = generateResult.usage;
8244
8219
  warnings = generateResult.warnings;
8245
8220
  resultProviderMetadata = generateResult.providerMetadata;
8246
- request = (_a17 = generateResult.request) != null ? _a17 : {};
8221
+ request = (_a16 = generateResult.request) != null ? _a16 : {};
8247
8222
  response = generateResult.responseData;
8248
8223
  reasoning = generateResult.reasoning;
8249
8224
  logWarnings(warnings);
@@ -8302,9 +8277,9 @@ var DefaultGenerateObjectResult = class {
8302
8277
  this.reasoning = options.reasoning;
8303
8278
  }
8304
8279
  toJsonResponse(init) {
8305
- var _a17;
8280
+ var _a16;
8306
8281
  return new Response(JSON.stringify(this.object), {
8307
- status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
8282
+ status: (_a16 = init == null ? void 0 : init.status) != null ? _a16 : 200,
8308
8283
  headers: prepareHeaders(init == null ? void 0 : init.headers, {
8309
8284
  "content-type": "application/json; charset=utf-8"
8310
8285
  })
@@ -8432,8 +8407,8 @@ function simulateReadableStream({
8432
8407
  chunkDelayInMs = 0,
8433
8408
  _internal
8434
8409
  }) {
8435
- var _a17;
8436
- const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : delayFunction;
8410
+ var _a16;
8411
+ const delay2 = (_a16 = _internal == null ? void 0 : _internal.delay) != null ? _a16 : delayFunction;
8437
8412
  let index = 0;
8438
8413
  return new ReadableStream({
8439
8414
  async pull(controller) {
@@ -8695,7 +8670,7 @@ var DefaultStreamObjectResult = class {
8695
8670
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
8696
8671
  new TransformStream({
8697
8672
  async transform(chunk, controller) {
8698
- var _a17, _b, _c;
8673
+ var _a16, _b, _c;
8699
8674
  if (typeof chunk === "object" && chunk.type === "stream-start") {
8700
8675
  warnings = chunk.warnings;
8701
8676
  return;
@@ -8745,7 +8720,7 @@ var DefaultStreamObjectResult = class {
8745
8720
  switch (chunk.type) {
8746
8721
  case "response-metadata": {
8747
8722
  fullResponse = {
8748
- id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
8723
+ id: (_a16 = chunk.id) != null ? _a16 : fullResponse.id,
8749
8724
  timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
8750
8725
  modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
8751
8726
  };
@@ -9012,7 +8987,7 @@ async function generateSpeech({
9012
8987
  abortSignal,
9013
8988
  headers
9014
8989
  }) {
9015
- var _a17;
8990
+ var _a16;
9016
8991
  const resolvedModel = resolveSpeechModel(model);
9017
8992
  if (!resolvedModel) {
9018
8993
  throw new Error("Model could not be resolved");
@@ -9045,10 +9020,10 @@ async function generateSpeech({
9045
9020
  return new DefaultSpeechResult({
9046
9021
  audio: new DefaultGeneratedAudioFile({
9047
9022
  data: result.audio,
9048
- mediaType: (_a17 = detectMediaType({
9023
+ mediaType: (_a16 = detectMediaType({
9049
9024
  data: result.audio,
9050
9025
  signatures: audioMediaTypeSignatures
9051
- })) != null ? _a17 : "audio/mp3"
9026
+ })) != null ? _a16 : "audio/mp3"
9052
9027
  }),
9053
9028
  warnings: result.warnings,
9054
9029
  responses: [result.response],
@@ -9057,11 +9032,11 @@ async function generateSpeech({
9057
9032
  }
9058
9033
  var DefaultSpeechResult = class {
9059
9034
  constructor(options) {
9060
- var _a17;
9035
+ var _a16;
9061
9036
  this.audio = options.audio;
9062
9037
  this.warnings = options.warnings;
9063
9038
  this.responses = options.responses;
9064
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
9039
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
9065
9040
  }
9066
9041
  };
9067
9042
 
@@ -9098,9 +9073,9 @@ var object = ({
9098
9073
  const schema = asSchema4(inputSchema);
9099
9074
  return {
9100
9075
  type: "object",
9101
- responseFormat: resolve(schema.jsonSchema).then((jsonSchema3) => ({
9076
+ responseFormat: resolve(schema.jsonSchema).then((jsonSchema2) => ({
9102
9077
  type: "json",
9103
- schema: jsonSchema3
9078
+ schema: jsonSchema2
9104
9079
  })),
9105
9080
  async parseOutput({ text: text2 }, context) {
9106
9081
  const parseResult = await safeParseJSON4({ text: text2 });
@@ -9159,8 +9134,8 @@ var array = ({
9159
9134
  return {
9160
9135
  type: "object",
9161
9136
  // JSON schema that describes an array of elements:
9162
- responseFormat: resolve(elementSchema.jsonSchema).then((jsonSchema3) => {
9163
- const { $schema, ...itemSchema } = jsonSchema3;
9137
+ responseFormat: resolve(elementSchema.jsonSchema).then((jsonSchema2) => {
9138
+ const { $schema, ...itemSchema } = jsonSchema2;
9164
9139
  return {
9165
9140
  type: "json",
9166
9141
  schema: {
@@ -9757,7 +9732,7 @@ var doWrap = ({
9757
9732
  modelId,
9758
9733
  providerId
9759
9734
  }) => {
9760
- var _a17, _b, _c;
9735
+ var _a16, _b, _c;
9761
9736
  async function doTransform({
9762
9737
  params,
9763
9738
  type
@@ -9766,7 +9741,7 @@ var doWrap = ({
9766
9741
  }
9767
9742
  return {
9768
9743
  specificationVersion: "v3",
9769
- provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
9744
+ provider: (_a16 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a16 : model.provider,
9770
9745
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
9771
9746
  supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
9772
9747
  async doGenerate(params) {
@@ -9874,11 +9849,11 @@ function customProvider({
9874
9849
  var experimental_customProvider = customProvider;
9875
9850
 
9876
9851
  // src/registry/no-such-provider-error.ts
9877
- import { AISDKError as AISDKError21, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
9878
- var name16 = "AI_NoSuchProviderError";
9879
- var marker16 = `vercel.ai.error.${name16}`;
9880
- var symbol16 = Symbol.for(marker16);
9881
- var _a16;
9852
+ import { AISDKError as AISDKError20, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
9853
+ var name15 = "AI_NoSuchProviderError";
9854
+ var marker15 = `vercel.ai.error.${name15}`;
9855
+ var symbol15 = Symbol.for(marker15);
9856
+ var _a15;
9882
9857
  var NoSuchProviderError = class extends NoSuchModelError3 {
9883
9858
  constructor({
9884
9859
  modelId,
@@ -9887,16 +9862,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
9887
9862
  availableProviders,
9888
9863
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
9889
9864
  }) {
9890
- super({ errorName: name16, modelId, modelType, message });
9891
- this[_a16] = true;
9865
+ super({ errorName: name15, modelId, modelType, message });
9866
+ this[_a15] = true;
9892
9867
  this.providerId = providerId;
9893
9868
  this.availableProviders = availableProviders;
9894
9869
  }
9895
9870
  static isInstance(error) {
9896
- return AISDKError21.hasMarker(error, marker16);
9871
+ return AISDKError20.hasMarker(error, marker15);
9897
9872
  }
9898
9873
  };
9899
- _a16 = symbol16;
9874
+ _a15 = symbol15;
9900
9875
 
9901
9876
  // src/registry/provider-registry.ts
9902
9877
  import {
@@ -9955,10 +9930,10 @@ var DefaultProviderRegistry = class {
9955
9930
  return [id.slice(0, index), id.slice(index + this.separator.length)];
9956
9931
  }
9957
9932
  languageModel(id) {
9958
- var _a17, _b;
9933
+ var _a16, _b;
9959
9934
  const [providerId, modelId] = this.splitId(id, "languageModel");
9960
- let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
9961
- _a17,
9935
+ let model = (_b = (_a16 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
9936
+ _a16,
9962
9937
  modelId
9963
9938
  );
9964
9939
  if (model == null) {
@@ -9973,10 +9948,10 @@ var DefaultProviderRegistry = class {
9973
9948
  return model;
9974
9949
  }
9975
9950
  textEmbeddingModel(id) {
9976
- var _a17;
9951
+ var _a16;
9977
9952
  const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
9978
9953
  const provider = this.getProvider(providerId, "textEmbeddingModel");
9979
- const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
9954
+ const model = (_a16 = provider.textEmbeddingModel) == null ? void 0 : _a16.call(provider, modelId);
9980
9955
  if (model == null) {
9981
9956
  throw new NoSuchModelError4({
9982
9957
  modelId: id,
@@ -9986,20 +9961,20 @@ var DefaultProviderRegistry = class {
9986
9961
  return model;
9987
9962
  }
9988
9963
  imageModel(id) {
9989
- var _a17;
9964
+ var _a16;
9990
9965
  const [providerId, modelId] = this.splitId(id, "imageModel");
9991
9966
  const provider = this.getProvider(providerId, "imageModel");
9992
- const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
9967
+ const model = (_a16 = provider.imageModel) == null ? void 0 : _a16.call(provider, modelId);
9993
9968
  if (model == null) {
9994
9969
  throw new NoSuchModelError4({ modelId: id, modelType: "imageModel" });
9995
9970
  }
9996
9971
  return model;
9997
9972
  }
9998
9973
  transcriptionModel(id) {
9999
- var _a17;
9974
+ var _a16;
10000
9975
  const [providerId, modelId] = this.splitId(id, "transcriptionModel");
10001
9976
  const provider = this.getProvider(providerId, "transcriptionModel");
10002
- const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
9977
+ const model = (_a16 = provider.transcriptionModel) == null ? void 0 : _a16.call(provider, modelId);
10003
9978
  if (model == null) {
10004
9979
  throw new NoSuchModelError4({
10005
9980
  modelId: id,
@@ -10009,10 +9984,10 @@ var DefaultProviderRegistry = class {
10009
9984
  return model;
10010
9985
  }
10011
9986
  speechModel(id) {
10012
- var _a17;
9987
+ var _a16;
10013
9988
  const [providerId, modelId] = this.splitId(id, "speechModel");
10014
9989
  const provider = this.getProvider(providerId, "speechModel");
10015
- const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
9990
+ const model = (_a16 = provider.speechModel) == null ? void 0 : _a16.call(provider, modelId);
10016
9991
  if (model == null) {
10017
9992
  throw new NoSuchModelError4({ modelId: id, modelType: "speechModel" });
10018
9993
  }
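
In the registry hunks above only the generated closure variables are renumbered (_a17 → _a16); lookup behavior is unchanged: DefaultProviderRegistry still splits a "providerId:modelId" id and throws NoSuchModelError (or NoSuchProviderError) when nothing matches. Below is a minimal usage sketch of that lookup path, using the experimental_createProviderRegistry export that appears unchanged later in this file; the provider import and model id are placeholders, not part of this diff.

    // Illustrative sketch of the registry lookup implemented above.
    // Assumption: @ai-sdk/openai is installed; the model id is a placeholder.
    import { experimental_createProviderRegistry as createProviderRegistry } from 'ai';
    import { openai } from '@ai-sdk/openai';

    const registry = createProviderRegistry({ openai });
    // Resolved via DefaultProviderRegistry.languageModel: 'openai' selects the
    // provider, 'gpt-4o-mini' is passed through as the model id.
    const model = registry.languageModel('openai:gpt-4o-mini');
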
@@ -10020,597 +9995,12 @@ var DefaultProviderRegistry = class {
10020
9995
  }
10021
9996
  };
10022
9997
 
10023
- // src/tool/mcp/mcp-client.ts
10024
- import {
10025
- dynamicTool,
10026
- jsonSchema,
10027
- tool
10028
- } from "@ai-sdk/provider-utils";
10029
-
10030
- // src/tool/mcp/mcp-sse-transport.ts
10031
- import {
10032
- EventSourceParserStream,
10033
- withUserAgentSuffix as withUserAgentSuffix8,
10034
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent2
10035
- } from "@ai-sdk/provider-utils";
10036
-
10037
- // src/tool/mcp/json-rpc-message.ts
10038
- import { z as z10 } from "zod/v4";
10039
-
10040
- // src/tool/mcp/types.ts
10041
- import { z as z9 } from "zod/v4";
10042
- var LATEST_PROTOCOL_VERSION = "2025-06-18";
10043
- var SUPPORTED_PROTOCOL_VERSIONS = [
10044
- LATEST_PROTOCOL_VERSION,
10045
- "2025-03-26",
10046
- "2024-11-05"
10047
- ];
10048
- var ClientOrServerImplementationSchema = z9.looseObject({
10049
- name: z9.string(),
10050
- version: z9.string()
10051
- });
10052
- var BaseParamsSchema = z9.looseObject({
10053
- _meta: z9.optional(z9.object({}).loose())
10054
- });
10055
- var ResultSchema = BaseParamsSchema;
10056
- var RequestSchema = z9.object({
10057
- method: z9.string(),
10058
- params: z9.optional(BaseParamsSchema)
10059
- });
10060
- var ServerCapabilitiesSchema = z9.looseObject({
10061
- experimental: z9.optional(z9.object({}).loose()),
10062
- logging: z9.optional(z9.object({}).loose()),
10063
- prompts: z9.optional(
10064
- z9.looseObject({
10065
- listChanged: z9.optional(z9.boolean())
10066
- })
10067
- ),
10068
- resources: z9.optional(
10069
- z9.looseObject({
10070
- subscribe: z9.optional(z9.boolean()),
10071
- listChanged: z9.optional(z9.boolean())
10072
- })
10073
- ),
10074
- tools: z9.optional(
10075
- z9.looseObject({
10076
- listChanged: z9.optional(z9.boolean())
10077
- })
10078
- )
10079
- });
10080
- var InitializeResultSchema = ResultSchema.extend({
10081
- protocolVersion: z9.string(),
10082
- capabilities: ServerCapabilitiesSchema,
10083
- serverInfo: ClientOrServerImplementationSchema,
10084
- instructions: z9.optional(z9.string())
10085
- });
10086
- var PaginatedResultSchema = ResultSchema.extend({
10087
- nextCursor: z9.optional(z9.string())
10088
- });
10089
- var ToolSchema = z9.object({
10090
- name: z9.string(),
10091
- description: z9.optional(z9.string()),
10092
- inputSchema: z9.object({
10093
- type: z9.literal("object"),
10094
- properties: z9.optional(z9.object({}).loose())
10095
- }).loose()
10096
- }).loose();
10097
- var ListToolsResultSchema = PaginatedResultSchema.extend({
10098
- tools: z9.array(ToolSchema)
10099
- });
10100
- var TextContentSchema = z9.object({
10101
- type: z9.literal("text"),
10102
- text: z9.string()
10103
- }).loose();
10104
- var ImageContentSchema = z9.object({
10105
- type: z9.literal("image"),
10106
- data: z9.base64(),
10107
- mimeType: z9.string()
10108
- }).loose();
10109
- var ResourceContentsSchema = z9.object({
10110
- /**
10111
- * The URI of this resource.
10112
- */
10113
- uri: z9.string(),
10114
- /**
10115
- * The MIME type of this resource, if known.
10116
- */
10117
- mimeType: z9.optional(z9.string())
10118
- }).loose();
10119
- var TextResourceContentsSchema = ResourceContentsSchema.extend({
10120
- text: z9.string()
10121
- });
10122
- var BlobResourceContentsSchema = ResourceContentsSchema.extend({
10123
- blob: z9.base64()
10124
- });
10125
- var EmbeddedResourceSchema = z9.object({
10126
- type: z9.literal("resource"),
10127
- resource: z9.union([TextResourceContentsSchema, BlobResourceContentsSchema])
10128
- }).loose();
10129
- var CallToolResultSchema = ResultSchema.extend({
10130
- content: z9.array(
10131
- z9.union([TextContentSchema, ImageContentSchema, EmbeddedResourceSchema])
10132
- ),
10133
- isError: z9.boolean().default(false).optional()
10134
- }).or(
10135
- ResultSchema.extend({
10136
- toolResult: z9.unknown()
10137
- })
10138
- );
10139
-
10140
- // src/tool/mcp/json-rpc-message.ts
10141
- var JSONRPC_VERSION = "2.0";
10142
- var JSONRPCRequestSchema = z10.object({
10143
- jsonrpc: z10.literal(JSONRPC_VERSION),
10144
- id: z10.union([z10.string(), z10.number().int()])
10145
- }).merge(RequestSchema).strict();
10146
- var JSONRPCResponseSchema = z10.object({
10147
- jsonrpc: z10.literal(JSONRPC_VERSION),
10148
- id: z10.union([z10.string(), z10.number().int()]),
10149
- result: ResultSchema
10150
- }).strict();
10151
- var JSONRPCErrorSchema = z10.object({
10152
- jsonrpc: z10.literal(JSONRPC_VERSION),
10153
- id: z10.union([z10.string(), z10.number().int()]),
10154
- error: z10.object({
10155
- code: z10.number().int(),
10156
- message: z10.string(),
10157
- data: z10.optional(z10.unknown())
10158
- })
10159
- }).strict();
10160
- var JSONRPCNotificationSchema = z10.object({
10161
- jsonrpc: z10.literal(JSONRPC_VERSION)
10162
- }).merge(
10163
- z10.object({
10164
- method: z10.string(),
10165
- params: z10.optional(BaseParamsSchema)
10166
- })
10167
- ).strict();
10168
- var JSONRPCMessageSchema = z10.union([
10169
- JSONRPCRequestSchema,
10170
- JSONRPCNotificationSchema,
10171
- JSONRPCResponseSchema,
10172
- JSONRPCErrorSchema
10173
- ]);
10174
-
10175
- // src/tool/mcp/mcp-sse-transport.ts
10176
- var SseMCPTransport = class {
10177
- constructor({
10178
- url,
10179
- headers
10180
- }) {
10181
- this.connected = false;
10182
- this.url = new URL(url);
10183
- this.headers = headers;
10184
- }
10185
- async start() {
10186
- return new Promise((resolve3, reject) => {
10187
- if (this.connected) {
10188
- return resolve3();
10189
- }
10190
- this.abortController = new AbortController();
10191
- const establishConnection = async () => {
10192
- var _a17, _b, _c;
10193
- try {
10194
- const headers = withUserAgentSuffix8(
10195
- {
10196
- ...this.headers,
10197
- Accept: "text/event-stream"
10198
- },
10199
- `ai-sdk/${VERSION}`,
10200
- getRuntimeEnvironmentUserAgent2()
10201
- );
10202
- const response = await fetch(this.url.href, {
10203
- headers,
10204
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
10205
- });
10206
- if (!response.ok || !response.body) {
10207
- const error = new MCPClientError({
10208
- message: `MCP SSE Transport Error: ${response.status} ${response.statusText}`
10209
- });
10210
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
10211
- return reject(error);
10212
- }
10213
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream());
10214
- const reader = stream.getReader();
10215
- const processEvents = async () => {
10216
- var _a18, _b2, _c2;
10217
- try {
10218
- while (true) {
10219
- const { done, value } = await reader.read();
10220
- if (done) {
10221
- if (this.connected) {
10222
- this.connected = false;
10223
- throw new MCPClientError({
10224
- message: "MCP SSE Transport Error: Connection closed unexpectedly"
10225
- });
10226
- }
10227
- return;
10228
- }
10229
- const { event, data } = value;
10230
- if (event === "endpoint") {
10231
- this.endpoint = new URL(data, this.url);
10232
- if (this.endpoint.origin !== this.url.origin) {
10233
- throw new MCPClientError({
10234
- message: `MCP SSE Transport Error: Endpoint origin does not match connection origin: ${this.endpoint.origin}`
10235
- });
10236
- }
10237
- this.connected = true;
10238
- resolve3();
10239
- } else if (event === "message") {
10240
- try {
10241
- const message = JSONRPCMessageSchema.parse(
10242
- JSON.parse(data)
10243
- );
10244
- (_a18 = this.onmessage) == null ? void 0 : _a18.call(this, message);
10245
- } catch (error) {
10246
- const e = new MCPClientError({
10247
- message: "MCP SSE Transport Error: Failed to parse message",
10248
- cause: error
10249
- });
10250
- (_b2 = this.onerror) == null ? void 0 : _b2.call(this, e);
10251
- }
10252
- }
10253
- }
10254
- } catch (error) {
10255
- if (error instanceof Error && error.name === "AbortError") {
10256
- return;
10257
- }
10258
- (_c2 = this.onerror) == null ? void 0 : _c2.call(this, error);
10259
- reject(error);
10260
- }
10261
- };
10262
- this.sseConnection = {
10263
- close: () => reader.cancel()
10264
- };
10265
- processEvents();
10266
- } catch (error) {
10267
- if (error instanceof Error && error.name === "AbortError") {
10268
- return;
10269
- }
10270
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
10271
- reject(error);
10272
- }
10273
- };
10274
- establishConnection();
10275
- });
10276
- }
10277
- async close() {
10278
- var _a17, _b, _c;
10279
- this.connected = false;
10280
- (_a17 = this.sseConnection) == null ? void 0 : _a17.close();
10281
- (_b = this.abortController) == null ? void 0 : _b.abort();
10282
- (_c = this.onclose) == null ? void 0 : _c.call(this);
10283
- }
10284
- async send(message) {
10285
- var _a17, _b, _c;
10286
- if (!this.endpoint || !this.connected) {
10287
- throw new MCPClientError({
10288
- message: "MCP SSE Transport Error: Not connected"
10289
- });
10290
- }
10291
- try {
10292
- const headers = withUserAgentSuffix8(
10293
- {
10294
- ...this.headers,
10295
- "Content-Type": "application/json"
10296
- },
10297
- `ai-sdk/${VERSION}`,
10298
- getRuntimeEnvironmentUserAgent2()
10299
- );
10300
- const init = {
10301
- method: "POST",
10302
- headers,
10303
- body: JSON.stringify(message),
10304
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
10305
- };
10306
- const response = await fetch(this.endpoint, init);
10307
- if (!response.ok) {
10308
- const text2 = await response.text().catch(() => null);
10309
- const error = new MCPClientError({
10310
- message: `MCP SSE Transport Error: POSTing to endpoint (HTTP ${response.status}): ${text2}`
10311
- });
10312
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
10313
- return;
10314
- }
10315
- } catch (error) {
10316
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
10317
- return;
10318
- }
10319
- }
10320
- };
10321
-
10322
- // src/tool/mcp/mcp-transport.ts
10323
- function createMcpTransport(config) {
10324
- if (config.type !== "sse") {
10325
- throw new MCPClientError({
10326
- message: "Unsupported or invalid transport configuration. If you are using a custom transport, make sure it implements the MCPTransport interface."
10327
- });
10328
- }
10329
- return new SseMCPTransport(config);
10330
- }
10331
- function isCustomMcpTransport(transport) {
10332
- return "start" in transport && typeof transport.start === "function" && "send" in transport && typeof transport.send === "function" && "close" in transport && typeof transport.close === "function";
10333
- }
10334
-
10335
- // src/tool/mcp/mcp-client.ts
10336
- var CLIENT_VERSION = "1.0.0";
10337
- async function createMCPClient(config) {
10338
- const client = new DefaultMCPClient(config);
10339
- await client.init();
10340
- return client;
10341
- }
10342
- var DefaultMCPClient = class {
10343
- constructor({
10344
- transport: transportConfig,
10345
- name: name17 = "ai-sdk-mcp-client",
10346
- onUncaughtError
10347
- }) {
10348
- this.requestMessageId = 0;
10349
- this.responseHandlers = /* @__PURE__ */ new Map();
10350
- this.serverCapabilities = {};
10351
- this.isClosed = true;
10352
- this.onUncaughtError = onUncaughtError;
10353
- if (isCustomMcpTransport(transportConfig)) {
10354
- this.transport = transportConfig;
10355
- } else {
10356
- this.transport = createMcpTransport(transportConfig);
10357
- }
10358
- this.transport.onclose = () => this.onClose();
10359
- this.transport.onerror = (error) => this.onError(error);
10360
- this.transport.onmessage = (message) => {
10361
- if ("method" in message) {
10362
- this.onError(
10363
- new MCPClientError({
10364
- message: "Unsupported message type"
10365
- })
10366
- );
10367
- return;
10368
- }
10369
- this.onResponse(message);
10370
- };
10371
- this.clientInfo = {
10372
- name: name17,
10373
- version: CLIENT_VERSION
10374
- };
10375
- }
10376
- async init() {
10377
- try {
10378
- await this.transport.start();
10379
- this.isClosed = false;
10380
- const result = await this.request({
10381
- request: {
10382
- method: "initialize",
10383
- params: {
10384
- protocolVersion: LATEST_PROTOCOL_VERSION,
10385
- capabilities: {},
10386
- clientInfo: this.clientInfo
10387
- }
10388
- },
10389
- resultSchema: InitializeResultSchema
10390
- });
10391
- if (result === void 0) {
10392
- throw new MCPClientError({
10393
- message: "Server sent invalid initialize result"
10394
- });
10395
- }
10396
- if (!SUPPORTED_PROTOCOL_VERSIONS.includes(result.protocolVersion)) {
10397
- throw new MCPClientError({
10398
- message: `Server's protocol version is not supported: ${result.protocolVersion}`
10399
- });
10400
- }
10401
- this.serverCapabilities = result.capabilities;
10402
- await this.notification({
10403
- method: "notifications/initialized"
10404
- });
10405
- return this;
10406
- } catch (error) {
10407
- await this.close();
10408
- throw error;
10409
- }
10410
- }
10411
- async close() {
10412
- var _a17;
10413
- if (this.isClosed)
10414
- return;
10415
- await ((_a17 = this.transport) == null ? void 0 : _a17.close());
10416
- this.onClose();
10417
- }
10418
- assertCapability(method) {
10419
- switch (method) {
10420
- case "initialize":
10421
- break;
10422
- case "tools/list":
10423
- case "tools/call":
10424
- if (!this.serverCapabilities.tools) {
10425
- throw new MCPClientError({
10426
- message: `Server does not support tools`
10427
- });
10428
- }
10429
- break;
10430
- default:
10431
- throw new MCPClientError({
10432
- message: `Unsupported method: ${method}`
10433
- });
10434
- }
10435
- }
10436
- async request({
10437
- request,
10438
- resultSchema,
10439
- options
10440
- }) {
10441
- return new Promise((resolve3, reject) => {
10442
- if (this.isClosed) {
10443
- return reject(
10444
- new MCPClientError({
10445
- message: "Attempted to send a request from a closed client"
10446
- })
10447
- );
10448
- }
10449
- this.assertCapability(request.method);
10450
- const signal = options == null ? void 0 : options.signal;
10451
- signal == null ? void 0 : signal.throwIfAborted();
10452
- const messageId = this.requestMessageId++;
10453
- const jsonrpcRequest = {
10454
- ...request,
10455
- jsonrpc: "2.0",
10456
- id: messageId
10457
- };
10458
- const cleanup = () => {
10459
- this.responseHandlers.delete(messageId);
10460
- };
10461
- this.responseHandlers.set(messageId, (response) => {
10462
- if (signal == null ? void 0 : signal.aborted) {
10463
- return reject(
10464
- new MCPClientError({
10465
- message: "Request was aborted",
10466
- cause: signal.reason
10467
- })
10468
- );
10469
- }
10470
- if (response instanceof Error) {
10471
- return reject(response);
10472
- }
10473
- try {
10474
- const result = resultSchema.parse(response.result);
10475
- resolve3(result);
10476
- } catch (error) {
10477
- const parseError = new MCPClientError({
10478
- message: "Failed to parse server response",
10479
- cause: error
10480
- });
10481
- reject(parseError);
10482
- }
10483
- });
10484
- this.transport.send(jsonrpcRequest).catch((error) => {
10485
- cleanup();
10486
- reject(error);
10487
- });
10488
- });
10489
- }
10490
- async listTools({
10491
- params,
10492
- options
10493
- } = {}) {
10494
- try {
10495
- return this.request({
10496
- request: { method: "tools/list", params },
10497
- resultSchema: ListToolsResultSchema,
10498
- options
10499
- });
10500
- } catch (error) {
10501
- throw error;
10502
- }
10503
- }
10504
- async callTool({
10505
- name: name17,
10506
- args,
10507
- options
10508
- }) {
10509
- try {
10510
- return this.request({
10511
- request: { method: "tools/call", params: { name: name17, arguments: args } },
10512
- resultSchema: CallToolResultSchema,
10513
- options: {
10514
- signal: options == null ? void 0 : options.abortSignal
10515
- }
10516
- });
10517
- } catch (error) {
10518
- throw error;
10519
- }
10520
- }
10521
- async notification(notification) {
10522
- const jsonrpcNotification = {
10523
- ...notification,
10524
- jsonrpc: "2.0"
10525
- };
10526
- await this.transport.send(jsonrpcNotification);
10527
- }
10528
- /**
10529
- * Returns a set of AI SDK tools from the MCP server
10530
- * @returns A record of tool names to their implementations
10531
- */
10532
- async tools({
10533
- schemas = "automatic"
10534
- } = {}) {
10535
- var _a17;
10536
- const tools = {};
10537
- try {
10538
- const listToolsResult = await this.listTools();
10539
- for (const { name: name17, description, inputSchema } of listToolsResult.tools) {
10540
- if (schemas !== "automatic" && !(name17 in schemas)) {
10541
- continue;
10542
- }
10543
- const self = this;
10544
- const execute = async (args, options) => {
10545
- var _a18;
10546
- (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
10547
- return self.callTool({ name: name17, args, options });
10548
- };
10549
- const toolWithExecute = schemas === "automatic" ? dynamicTool({
10550
- description,
10551
- inputSchema: jsonSchema({
10552
- ...inputSchema,
10553
- properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
10554
- additionalProperties: false
10555
- }),
10556
- execute
10557
- }) : tool({
10558
- description,
10559
- inputSchema: schemas[name17].inputSchema,
10560
- execute
10561
- });
10562
- tools[name17] = toolWithExecute;
10563
- }
10564
- return tools;
10565
- } catch (error) {
10566
- throw error;
10567
- }
10568
- }
10569
- onClose() {
10570
- if (this.isClosed)
10571
- return;
10572
- this.isClosed = true;
10573
- const error = new MCPClientError({
10574
- message: "Connection closed"
10575
- });
10576
- for (const handler of this.responseHandlers.values()) {
10577
- handler(error);
10578
- }
10579
- this.responseHandlers.clear();
10580
- }
10581
- onError(error) {
10582
- if (this.onUncaughtError) {
10583
- this.onUncaughtError(error);
10584
- }
10585
- }
10586
- onResponse(response) {
10587
- const messageId = Number(response.id);
10588
- const handler = this.responseHandlers.get(messageId);
10589
- if (handler === void 0) {
10590
- throw new MCPClientError({
10591
- message: `Protocol error: Received a response for an unknown message ID: ${JSON.stringify(
10592
- response
10593
- )}`
10594
- });
10595
- }
10596
- this.responseHandlers.delete(messageId);
10597
- handler(
10598
- "result" in response ? response : new MCPClientError({
10599
- message: response.error.message,
10600
- code: response.error.code,
10601
- data: response.error.data,
10602
- cause: response.error
10603
- })
10604
- );
10605
- }
10606
- };
10607
-
10608
9998
  // src/transcribe/transcribe.ts
10609
- import { withUserAgentSuffix as withUserAgentSuffix9 } from "@ai-sdk/provider-utils";
9999
+ import { withUserAgentSuffix as withUserAgentSuffix8 } from "@ai-sdk/provider-utils";
10610
10000
 
10611
10001
  // src/error/no-transcript-generated-error.ts
10612
- import { AISDKError as AISDKError22 } from "@ai-sdk/provider";
10613
- var NoTranscriptGeneratedError = class extends AISDKError22 {
10002
+ import { AISDKError as AISDKError21 } from "@ai-sdk/provider";
10003
+ var NoTranscriptGeneratedError = class extends AISDKError21 {
10614
10004
  constructor(options) {
10615
10005
  super({
10616
10006
  name: "AI_NoTranscriptGeneratedError",
@@ -10637,23 +10027,23 @@ async function transcribe({
10637
10027
  maxRetries: maxRetriesArg,
10638
10028
  abortSignal
10639
10029
  });
10640
- const headersWithUserAgent = withUserAgentSuffix9(
10030
+ const headersWithUserAgent = withUserAgentSuffix8(
10641
10031
  headers != null ? headers : {},
10642
10032
  `ai/${VERSION}`
10643
10033
  );
10644
10034
  const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
10645
10035
  const result = await retry(
10646
10036
  () => {
10647
- var _a17;
10037
+ var _a16;
10648
10038
  return resolvedModel.doGenerate({
10649
10039
  audio: audioData,
10650
10040
  abortSignal,
10651
10041
  headers: headersWithUserAgent,
10652
10042
  providerOptions,
10653
- mediaType: (_a17 = detectMediaType({
10043
+ mediaType: (_a16 = detectMediaType({
10654
10044
  data: audioData,
10655
10045
  signatures: audioMediaTypeSignatures
10656
- })) != null ? _a17 : "audio/wav"
10046
+ })) != null ? _a16 : "audio/wav"
10657
10047
  });
10658
10048
  }
10659
10049
  );
@@ -10673,22 +10063,22 @@ async function transcribe({
10673
10063
  }
10674
10064
  var DefaultTranscriptionResult = class {
10675
10065
  constructor(options) {
10676
- var _a17;
10066
+ var _a16;
10677
10067
  this.text = options.text;
10678
10068
  this.segments = options.segments;
10679
10069
  this.language = options.language;
10680
10070
  this.durationInSeconds = options.durationInSeconds;
10681
10071
  this.warnings = options.warnings;
10682
10072
  this.responses = options.responses;
10683
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
10073
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
10684
10074
  }
10685
10075
  };
10686
10076
 
10687
10077
  // src/ui/call-completion-api.ts
10688
10078
  import {
10689
10079
  parseJsonEventStream,
10690
- withUserAgentSuffix as withUserAgentSuffix10,
10691
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent3
10080
+ withUserAgentSuffix as withUserAgentSuffix9,
10081
+ getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent2
10692
10082
  } from "@ai-sdk/provider-utils";
10693
10083
 
10694
10084
  // src/ui/process-text-stream.ts
@@ -10723,7 +10113,7 @@ async function callCompletionApi({
10723
10113
  onError,
10724
10114
  fetch: fetch2 = getOriginalFetch()
10725
10115
  }) {
10726
- var _a17;
10116
+ var _a16;
10727
10117
  try {
10728
10118
  setLoading(true);
10729
10119
  setError(void 0);
@@ -10737,13 +10127,13 @@ async function callCompletionApi({
10737
10127
  ...body
10738
10128
  }),
10739
10129
  credentials,
10740
- headers: withUserAgentSuffix10(
10130
+ headers: withUserAgentSuffix9(
10741
10131
  {
10742
10132
  "Content-Type": "application/json",
10743
10133
  ...headers
10744
10134
  },
10745
10135
  `ai-sdk/${VERSION}`,
10746
- getRuntimeEnvironmentUserAgent3()
10136
+ getRuntimeEnvironmentUserAgent2()
10747
10137
  ),
10748
10138
  signal: abortController.signal
10749
10139
  }).catch((err) => {
@@ -10751,7 +10141,7 @@ async function callCompletionApi({
10751
10141
  });
10752
10142
  if (!response.ok) {
10753
10143
  throw new Error(
10754
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
10144
+ (_a16 = await response.text()) != null ? _a16 : "Failed to fetch the chat response."
10755
10145
  );
10756
10146
  }
10757
10147
  if (!response.body) {
@@ -10837,12 +10227,12 @@ async function convertFileListToFileUIParts(files) {
10837
10227
  }
10838
10228
  return Promise.all(
10839
10229
  Array.from(files).map(async (file) => {
10840
- const { name: name17, type } = file;
10230
+ const { name: name16, type } = file;
10841
10231
  const dataUrl = await new Promise((resolve3, reject) => {
10842
10232
  const reader = new FileReader();
10843
10233
  reader.onload = (readerEvent) => {
10844
- var _a17;
10845
- resolve3((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
10234
+ var _a16;
10235
+ resolve3((_a16 = readerEvent.target) == null ? void 0 : _a16.result);
10846
10236
  };
10847
10237
  reader.onerror = (error) => reject(error);
10848
10238
  reader.readAsDataURL(file);
@@ -10850,7 +10240,7 @@ async function convertFileListToFileUIParts(files) {
10850
10240
  return {
10851
10241
  type: "file",
10852
10242
  mediaType: type,
10853
- filename: name17,
10243
+ filename: name16,
10854
10244
  url: dataUrl
10855
10245
  };
10856
10246
  })
@@ -10863,8 +10253,8 @@ import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-
10863
10253
  // src/ui/http-chat-transport.ts
10864
10254
  import {
10865
10255
  resolve as resolve2,
10866
- withUserAgentSuffix as withUserAgentSuffix11,
10867
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent4
10256
+ withUserAgentSuffix as withUserAgentSuffix10,
10257
+ getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent3
10868
10258
  } from "@ai-sdk/provider-utils";
10869
10259
  var HttpChatTransport = class {
10870
10260
  constructor({
@@ -10888,11 +10278,11 @@ var HttpChatTransport = class {
10888
10278
  abortSignal,
10889
10279
  ...options
10890
10280
  }) {
10891
- var _a17, _b, _c, _d, _e;
10281
+ var _a16, _b, _c, _d, _e;
10892
10282
  const resolvedBody = await resolve2(this.body);
10893
10283
  const resolvedHeaders = await resolve2(this.headers);
10894
10284
  const resolvedCredentials = await resolve2(this.credentials);
10895
- const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
10285
+ const preparedRequest = await ((_a16 = this.prepareSendMessagesRequest) == null ? void 0 : _a16.call(this, {
10896
10286
  api: this.api,
10897
10287
  id: options.chatId,
10898
10288
  messages: options.messages,
@@ -10917,13 +10307,13 @@ var HttpChatTransport = class {
10917
10307
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
10918
10308
  const response = await fetch2(api, {
10919
10309
  method: "POST",
10920
- headers: withUserAgentSuffix11(
10310
+ headers: withUserAgentSuffix10(
10921
10311
  {
10922
10312
  "Content-Type": "application/json",
10923
10313
  ...headers
10924
10314
  },
10925
10315
  `ai-sdk/${VERSION}`,
10926
- getRuntimeEnvironmentUserAgent4()
10316
+ getRuntimeEnvironmentUserAgent3()
10927
10317
  ),
10928
10318
  body: JSON.stringify(body),
10929
10319
  credentials,
@@ -10940,11 +10330,11 @@ var HttpChatTransport = class {
10940
10330
  return this.processResponseStream(response.body);
10941
10331
  }
10942
10332
  async reconnectToStream(options) {
10943
- var _a17, _b, _c, _d, _e;
10333
+ var _a16, _b, _c, _d, _e;
10944
10334
  const resolvedBody = await resolve2(this.body);
10945
10335
  const resolvedHeaders = await resolve2(this.headers);
10946
10336
  const resolvedCredentials = await resolve2(this.credentials);
10947
- const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
10337
+ const preparedRequest = await ((_a16 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a16.call(this, {
10948
10338
  api: this.api,
10949
10339
  id: options.chatId,
10950
10340
  body: { ...resolvedBody, ...options.body },
@@ -10958,10 +10348,10 @@ var HttpChatTransport = class {
10958
10348
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
10959
10349
  const response = await fetch2(api, {
10960
10350
  method: "GET",
10961
- headers: withUserAgentSuffix11(
10351
+ headers: withUserAgentSuffix10(
10962
10352
  headers,
10963
10353
  `ai-sdk/${VERSION}`,
10964
- getRuntimeEnvironmentUserAgent4()
10354
+ getRuntimeEnvironmentUserAgent3()
10965
10355
  ),
10966
10356
  credentials
10967
10357
  });
@@ -11026,11 +10416,11 @@ var AbstractChat = class {
11026
10416
  * If a messageId is provided, the message will be replaced.
11027
10417
  */
11028
10418
  this.sendMessage = async (message, options) => {
11029
- var _a17, _b, _c, _d;
10419
+ var _a16, _b, _c, _d;
11030
10420
  if (message == null) {
11031
10421
  await this.makeRequest({
11032
10422
  trigger: "submit-message",
11033
- messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
10423
+ messageId: (_a16 = this.lastMessage) == null ? void 0 : _a16.id,
11034
10424
  ...options
11035
10425
  });
11036
10426
  return;
@@ -11123,7 +10513,7 @@ var AbstractChat = class {
11123
10513
  approved,
11124
10514
  reason
11125
10515
  }) => this.jobExecutor.run(async () => {
11126
- var _a17, _b;
10516
+ var _a16, _b;
11127
10517
  const messages = this.state.messages;
11128
10518
  const lastMessage = messages[messages.length - 1];
11129
10519
  const updatePart = (part) => isToolOrDynamicToolUIPart(part) && part.state === "approval-requested" && part.approval.id === id ? {
@@ -11138,7 +10528,7 @@ var AbstractChat = class {
11138
10528
  if (this.activeResponse) {
11139
10529
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
11140
10530
  }
11141
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
10531
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
11142
10532
  this.makeRequest({
11143
10533
  trigger: "submit-message",
11144
10534
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11147,12 +10537,12 @@ var AbstractChat = class {
11147
10537
  });
11148
10538
  this.addToolResult = async ({
11149
10539
  state = "output-available",
11150
- tool: tool3,
10540
+ tool: tool2,
11151
10541
  toolCallId,
11152
10542
  output,
11153
10543
  errorText
11154
10544
  }) => this.jobExecutor.run(async () => {
11155
- var _a17, _b;
10545
+ var _a16, _b;
11156
10546
  const messages = this.state.messages;
11157
10547
  const lastMessage = messages[messages.length - 1];
11158
10548
  const updatePart = (part) => isToolOrDynamicToolUIPart(part) && part.toolCallId === toolCallId ? { ...part, state, output, errorText } : part;
@@ -11163,7 +10553,7 @@ var AbstractChat = class {
11163
10553
  if (this.activeResponse) {
11164
10554
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
11165
10555
  }
11166
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
10556
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
11167
10557
  this.makeRequest({
11168
10558
  trigger: "submit-message",
11169
10559
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11174,10 +10564,10 @@ var AbstractChat = class {
11174
10564
  * Abort the current request immediately, keep the generated tokens if any.
11175
10565
  */
11176
10566
  this.stop = async () => {
11177
- var _a17;
10567
+ var _a16;
11178
10568
  if (this.status !== "streaming" && this.status !== "submitted")
11179
10569
  return;
11180
- if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
10570
+ if ((_a16 = this.activeResponse) == null ? void 0 : _a16.abortController) {
11181
10571
  this.activeResponse.abortController.abort();
11182
10572
  }
11183
10573
  };
@@ -11232,7 +10622,7 @@ var AbstractChat = class {
11232
10622
  body,
11233
10623
  messageId
11234
10624
  }) {
11235
- var _a17, _b, _c;
10625
+ var _a16, _b, _c;
11236
10626
  this.setStatus({ status: "submitted", error: void 0 });
11237
10627
  const lastMessage = this.lastMessage;
11238
10628
  let isAbort = false;
@@ -11281,9 +10671,9 @@ var AbstractChat = class {
11281
10671
  () => job({
11282
10672
  state: activeResponse.state,
11283
10673
  write: () => {
11284
- var _a18;
10674
+ var _a17;
11285
10675
  this.setStatus({ status: "streaming" });
11286
- const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
10676
+ const replaceLastMessage = activeResponse.state.message.id === ((_a17 = this.lastMessage) == null ? void 0 : _a17.id);
11287
10677
  if (replaceLastMessage) {
11288
10678
  this.state.replaceMessage(
11289
10679
  this.state.messages.length - 1,
@@ -11329,7 +10719,7 @@ var AbstractChat = class {
11329
10719
  this.setStatus({ status: "error", error: err });
11330
10720
  } finally {
11331
10721
  try {
11332
- (_a17 = this.onFinish) == null ? void 0 : _a17.call(this, {
10722
+ (_a16 = this.onFinish) == null ? void 0 : _a16.call(this, {
11333
10723
  message: this.activeResponse.state.message,
11334
10724
  messages: this.state.messages,
11335
10725
  isAbort,
@@ -11432,7 +10822,7 @@ var TextStreamChatTransport = class extends HttpChatTransport {
11432
10822
  }
11433
10823
  };
11434
10824
  export {
11435
- AISDKError18 as AISDKError,
10825
+ AISDKError17 as AISDKError,
11436
10826
  APICallError,
11437
10827
  AbstractChat,
11438
10828
  DefaultChatTransport,
@@ -11451,7 +10841,6 @@ export {
11451
10841
  JsonToSseTransformStream,
11452
10842
  LoadAPIKeyError,
11453
10843
  LoadSettingError,
11454
- MCPClientError,
11455
10844
  MessageConversionError,
11456
10845
  NoContentGeneratedError,
11457
10846
  NoImageGeneratedError,
@@ -11496,10 +10885,9 @@ export {
11496
10885
  createUIMessageStreamResponse,
11497
10886
  customProvider,
11498
10887
  defaultSettingsMiddleware,
11499
- dynamicTool2 as dynamicTool,
10888
+ dynamicTool,
11500
10889
  embed,
11501
10890
  embedMany,
11502
- createMCPClient as experimental_createMCPClient,
11503
10891
  experimental_createProviderRegistry,
11504
10892
  experimental_customProvider,
11505
10893
  generateImage as experimental_generateImage,
@@ -11521,7 +10909,7 @@ export {
11521
10909
  isTextUIPart,
11522
10910
  isToolOrDynamicToolUIPart,
11523
10911
  isToolUIPart,
11524
- jsonSchema2 as jsonSchema,
10912
+ jsonSchema,
11525
10913
  lastAssistantMessageIsCompleteWithApprovalResponses,
11526
10914
  lastAssistantMessageIsCompleteWithToolCalls,
11527
10915
  modelMessageSchema,
@@ -11540,7 +10928,7 @@ export {
11540
10928
  streamObject,
11541
10929
  streamText,
11542
10930
  systemModelMessageSchema,
11543
- tool2 as tool,
10931
+ tool,
11544
10932
  toolModelMessageSchema,
11545
10933
  uiMessageChunkSchema,
11546
10934
  userModelMessageSchema,