ai 6.0.0-beta.70 → 6.0.0-beta.72

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
package/dist/index.mjs CHANGED
@@ -1,7 +1,7 @@
1
1
  var __defProp = Object.defineProperty;
2
2
  var __export = (target, all) => {
3
- for (var name17 in all)
4
- __defProp(target, name17, { get: all[name17], enumerable: true });
3
+ for (var name16 in all)
4
+ __defProp(target, name16, { get: all[name16], enumerable: true });
5
5
  };
6
6
 
7
7
  // src/index.ts
@@ -9,11 +9,11 @@ import { createGateway, gateway as gateway2 } from "@ai-sdk/gateway";
9
9
  import {
10
10
  asSchema as asSchema5,
11
11
  createIdGenerator as createIdGenerator5,
12
- dynamicTool as dynamicTool2,
12
+ dynamicTool,
13
13
  generateId,
14
- jsonSchema as jsonSchema2,
14
+ jsonSchema,
15
15
  parseJsonEventStream as parseJsonEventStream3,
16
- tool as tool2,
16
+ tool,
17
17
  zodSchema as zodSchema3
18
18
  } from "@ai-sdk/provider-utils";
19
19
 
@@ -97,7 +97,7 @@ import { gateway } from "@ai-sdk/gateway";
97
97
 
98
98
  // src/error/index.ts
99
99
  import {
100
- AISDKError as AISDKError18,
100
+ AISDKError as AISDKError17,
101
101
  APICallError,
102
102
  EmptyResponseBodyError,
103
103
  InvalidPromptError,
@@ -183,24 +183,21 @@ var InvalidToolInputError = class extends AISDKError4 {
183
183
  };
184
184
  _a4 = symbol4;
185
185
 
186
- // src/error/mcp-client-error.ts
186
+ // src/error/no-image-generated-error.ts
187
187
  import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
188
- var name5 = "AI_MCPClientError";
188
+ var name5 = "AI_NoImageGeneratedError";
189
189
  var marker5 = `vercel.ai.error.${name5}`;
190
190
  var symbol5 = Symbol.for(marker5);
191
191
  var _a5;
192
- var MCPClientError = class extends AISDKError5 {
192
+ var NoImageGeneratedError = class extends AISDKError5 {
193
193
  constructor({
194
- name: name17 = "MCPClientError",
195
- message,
194
+ message = "No image generated.",
196
195
  cause,
197
- data,
198
- code
196
+ responses
199
197
  }) {
200
- super({ name: name17, message, cause });
198
+ super({ name: name5, message, cause });
201
199
  this[_a5] = true;
202
- this.data = data;
203
- this.code = code;
200
+ this.responses = responses;
204
201
  }
205
202
  static isInstance(error) {
206
203
  return AISDKError5.hasMarker(error, marker5);
@@ -208,35 +205,13 @@ var MCPClientError = class extends AISDKError5 {
208
205
  };
209
206
  _a5 = symbol5;
210
207
 
211
- // src/error/no-image-generated-error.ts
208
+ // src/error/no-object-generated-error.ts
212
209
  import { AISDKError as AISDKError6 } from "@ai-sdk/provider";
213
- var name6 = "AI_NoImageGeneratedError";
210
+ var name6 = "AI_NoObjectGeneratedError";
214
211
  var marker6 = `vercel.ai.error.${name6}`;
215
212
  var symbol6 = Symbol.for(marker6);
216
213
  var _a6;
217
- var NoImageGeneratedError = class extends AISDKError6 {
218
- constructor({
219
- message = "No image generated.",
220
- cause,
221
- responses
222
- }) {
223
- super({ name: name6, message, cause });
224
- this[_a6] = true;
225
- this.responses = responses;
226
- }
227
- static isInstance(error) {
228
- return AISDKError6.hasMarker(error, marker6);
229
- }
230
- };
231
- _a6 = symbol6;
232
-
233
- // src/error/no-object-generated-error.ts
234
- import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
235
- var name7 = "AI_NoObjectGeneratedError";
236
- var marker7 = `vercel.ai.error.${name7}`;
237
- var symbol7 = Symbol.for(marker7);
238
- var _a7;
239
- var NoObjectGeneratedError = class extends AISDKError7 {
214
+ var NoObjectGeneratedError = class extends AISDKError6 {
240
215
  constructor({
241
216
  message = "No object generated.",
242
217
  cause,
@@ -245,43 +220,43 @@ var NoObjectGeneratedError = class extends AISDKError7 {
245
220
  usage,
246
221
  finishReason
247
222
  }) {
248
- super({ name: name7, message, cause });
249
- this[_a7] = true;
223
+ super({ name: name6, message, cause });
224
+ this[_a6] = true;
250
225
  this.text = text2;
251
226
  this.response = response;
252
227
  this.usage = usage;
253
228
  this.finishReason = finishReason;
254
229
  }
255
230
  static isInstance(error) {
256
- return AISDKError7.hasMarker(error, marker7);
231
+ return AISDKError6.hasMarker(error, marker6);
257
232
  }
258
233
  };
259
- _a7 = symbol7;
234
+ _a6 = symbol6;
260
235
 
261
236
  // src/error/no-output-generated-error.ts
262
- import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
263
- var name8 = "AI_NoOutputGeneratedError";
264
- var marker8 = `vercel.ai.error.${name8}`;
265
- var symbol8 = Symbol.for(marker8);
266
- var _a8;
267
- var NoOutputGeneratedError = class extends AISDKError8 {
237
+ import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
238
+ var name7 = "AI_NoOutputGeneratedError";
239
+ var marker7 = `vercel.ai.error.${name7}`;
240
+ var symbol7 = Symbol.for(marker7);
241
+ var _a7;
242
+ var NoOutputGeneratedError = class extends AISDKError7 {
268
243
  // used in isInstance
269
244
  constructor({
270
245
  message = "No output generated.",
271
246
  cause
272
247
  } = {}) {
273
- super({ name: name8, message, cause });
274
- this[_a8] = true;
248
+ super({ name: name7, message, cause });
249
+ this[_a7] = true;
275
250
  }
276
251
  static isInstance(error) {
277
- return AISDKError8.hasMarker(error, marker8);
252
+ return AISDKError7.hasMarker(error, marker7);
278
253
  }
279
254
  };
280
- _a8 = symbol8;
255
+ _a7 = symbol7;
281
256
 
282
257
  // src/error/no-speech-generated-error.ts
283
- import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
284
- var NoSpeechGeneratedError = class extends AISDKError9 {
258
+ import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
259
+ var NoSpeechGeneratedError = class extends AISDKError8 {
285
260
  constructor(options) {
286
261
  super({
287
262
  name: "AI_NoSpeechGeneratedError",
@@ -292,53 +267,53 @@ var NoSpeechGeneratedError = class extends AISDKError9 {
292
267
  };
293
268
 
294
269
  // src/error/no-such-tool-error.ts
295
- import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
296
- var name9 = "AI_NoSuchToolError";
297
- var marker9 = `vercel.ai.error.${name9}`;
298
- var symbol9 = Symbol.for(marker9);
299
- var _a9;
300
- var NoSuchToolError = class extends AISDKError10 {
270
+ import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
271
+ var name8 = "AI_NoSuchToolError";
272
+ var marker8 = `vercel.ai.error.${name8}`;
273
+ var symbol8 = Symbol.for(marker8);
274
+ var _a8;
275
+ var NoSuchToolError = class extends AISDKError9 {
301
276
  constructor({
302
277
  toolName,
303
278
  availableTools = void 0,
304
279
  message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
305
280
  }) {
306
- super({ name: name9, message });
307
- this[_a9] = true;
281
+ super({ name: name8, message });
282
+ this[_a8] = true;
308
283
  this.toolName = toolName;
309
284
  this.availableTools = availableTools;
310
285
  }
311
286
  static isInstance(error) {
312
- return AISDKError10.hasMarker(error, marker9);
287
+ return AISDKError9.hasMarker(error, marker8);
313
288
  }
314
289
  };
315
- _a9 = symbol9;
290
+ _a8 = symbol8;
316
291
 
317
292
  // src/error/tool-call-repair-error.ts
318
- import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
319
- var name10 = "AI_ToolCallRepairError";
320
- var marker10 = `vercel.ai.error.${name10}`;
321
- var symbol10 = Symbol.for(marker10);
322
- var _a10;
323
- var ToolCallRepairError = class extends AISDKError11 {
293
+ import { AISDKError as AISDKError10, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
294
+ var name9 = "AI_ToolCallRepairError";
295
+ var marker9 = `vercel.ai.error.${name9}`;
296
+ var symbol9 = Symbol.for(marker9);
297
+ var _a9;
298
+ var ToolCallRepairError = class extends AISDKError10 {
324
299
  constructor({
325
300
  cause,
326
301
  originalError,
327
302
  message = `Error repairing tool call: ${getErrorMessage2(cause)}`
328
303
  }) {
329
- super({ name: name10, message, cause });
330
- this[_a10] = true;
304
+ super({ name: name9, message, cause });
305
+ this[_a9] = true;
331
306
  this.originalError = originalError;
332
307
  }
333
308
  static isInstance(error) {
334
- return AISDKError11.hasMarker(error, marker10);
309
+ return AISDKError10.hasMarker(error, marker9);
335
310
  }
336
311
  };
337
- _a10 = symbol10;
312
+ _a9 = symbol9;
338
313
 
339
314
  // src/error/unsupported-model-version-error.ts
340
- import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
341
- var UnsupportedModelVersionError = class extends AISDKError12 {
315
+ import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
316
+ var UnsupportedModelVersionError = class extends AISDKError11 {
342
317
  constructor(options) {
343
318
  super({
344
319
  name: "AI_UnsupportedModelVersionError",
@@ -351,76 +326,76 @@ var UnsupportedModelVersionError = class extends AISDKError12 {
351
326
  };
352
327
 
353
328
  // src/prompt/invalid-data-content-error.ts
354
- import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
355
- var name11 = "AI_InvalidDataContentError";
356
- var marker11 = `vercel.ai.error.${name11}`;
357
- var symbol11 = Symbol.for(marker11);
358
- var _a11;
359
- var InvalidDataContentError = class extends AISDKError13 {
329
+ import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
330
+ var name10 = "AI_InvalidDataContentError";
331
+ var marker10 = `vercel.ai.error.${name10}`;
332
+ var symbol10 = Symbol.for(marker10);
333
+ var _a10;
334
+ var InvalidDataContentError = class extends AISDKError12 {
360
335
  constructor({
361
336
  content,
362
337
  cause,
363
338
  message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
364
339
  }) {
365
- super({ name: name11, message, cause });
366
- this[_a11] = true;
340
+ super({ name: name10, message, cause });
341
+ this[_a10] = true;
367
342
  this.content = content;
368
343
  }
369
344
  static isInstance(error) {
370
- return AISDKError13.hasMarker(error, marker11);
345
+ return AISDKError12.hasMarker(error, marker10);
371
346
  }
372
347
  };
373
- _a11 = symbol11;
348
+ _a10 = symbol10;
374
349
 
375
350
  // src/prompt/invalid-message-role-error.ts
376
- import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
377
- var name12 = "AI_InvalidMessageRoleError";
378
- var marker12 = `vercel.ai.error.${name12}`;
379
- var symbol12 = Symbol.for(marker12);
380
- var _a12;
381
- var InvalidMessageRoleError = class extends AISDKError14 {
351
+ import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
352
+ var name11 = "AI_InvalidMessageRoleError";
353
+ var marker11 = `vercel.ai.error.${name11}`;
354
+ var symbol11 = Symbol.for(marker11);
355
+ var _a11;
356
+ var InvalidMessageRoleError = class extends AISDKError13 {
382
357
  constructor({
383
358
  role,
384
359
  message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
385
360
  }) {
386
- super({ name: name12, message });
387
- this[_a12] = true;
361
+ super({ name: name11, message });
362
+ this[_a11] = true;
388
363
  this.role = role;
389
364
  }
390
365
  static isInstance(error) {
391
- return AISDKError14.hasMarker(error, marker12);
366
+ return AISDKError13.hasMarker(error, marker11);
392
367
  }
393
368
  };
394
- _a12 = symbol12;
369
+ _a11 = symbol11;
395
370
 
396
371
  // src/prompt/message-conversion-error.ts
397
- import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
398
- var name13 = "AI_MessageConversionError";
399
- var marker13 = `vercel.ai.error.${name13}`;
400
- var symbol13 = Symbol.for(marker13);
401
- var _a13;
402
- var MessageConversionError = class extends AISDKError15 {
372
+ import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
373
+ var name12 = "AI_MessageConversionError";
374
+ var marker12 = `vercel.ai.error.${name12}`;
375
+ var symbol12 = Symbol.for(marker12);
376
+ var _a12;
377
+ var MessageConversionError = class extends AISDKError14 {
403
378
  constructor({
404
379
  originalMessage,
405
380
  message
406
381
  }) {
407
- super({ name: name13, message });
408
- this[_a13] = true;
382
+ super({ name: name12, message });
383
+ this[_a12] = true;
409
384
  this.originalMessage = originalMessage;
410
385
  }
411
386
  static isInstance(error) {
412
- return AISDKError15.hasMarker(error, marker13);
387
+ return AISDKError14.hasMarker(error, marker12);
413
388
  }
414
389
  };
415
- _a13 = symbol13;
390
+ _a12 = symbol12;
416
391
 
417
392
  // src/util/download/download-error.ts
418
- import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
419
- var name14 = "AI_DownloadError";
420
- var marker14 = `vercel.ai.error.${name14}`;
421
- var symbol14 = Symbol.for(marker14);
422
- var _a14;
423
- var DownloadError = class extends AISDKError16 {
393
+ import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
394
+ var name13 = "AI_DownloadError";
395
+ var marker13 = `vercel.ai.error.${name13}`;
396
+ var symbol13 = Symbol.for(marker13);
397
+ var _a13;
398
+ var DownloadError = class extends AISDKError15 {
424
399
  constructor({
425
400
  url,
426
401
  statusCode,
@@ -428,41 +403,41 @@ var DownloadError = class extends AISDKError16 {
428
403
  cause,
429
404
  message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
430
405
  }) {
431
- super({ name: name14, message, cause });
432
- this[_a14] = true;
406
+ super({ name: name13, message, cause });
407
+ this[_a13] = true;
433
408
  this.url = url;
434
409
  this.statusCode = statusCode;
435
410
  this.statusText = statusText;
436
411
  }
437
412
  static isInstance(error) {
438
- return AISDKError16.hasMarker(error, marker14);
413
+ return AISDKError15.hasMarker(error, marker13);
439
414
  }
440
415
  };
441
- _a14 = symbol14;
416
+ _a13 = symbol13;
442
417
 
443
418
  // src/util/retry-error.ts
444
- import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
445
- var name15 = "AI_RetryError";
446
- var marker15 = `vercel.ai.error.${name15}`;
447
- var symbol15 = Symbol.for(marker15);
448
- var _a15;
449
- var RetryError = class extends AISDKError17 {
419
+ import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
420
+ var name14 = "AI_RetryError";
421
+ var marker14 = `vercel.ai.error.${name14}`;
422
+ var symbol14 = Symbol.for(marker14);
423
+ var _a14;
424
+ var RetryError = class extends AISDKError16 {
450
425
  constructor({
451
426
  message,
452
427
  reason,
453
428
  errors
454
429
  }) {
455
- super({ name: name15, message });
456
- this[_a15] = true;
430
+ super({ name: name14, message });
431
+ this[_a14] = true;
457
432
  this.reason = reason;
458
433
  this.errors = errors;
459
434
  this.lastError = errors[errors.length - 1];
460
435
  }
461
436
  static isInstance(error) {
462
- return AISDKError17.hasMarker(error, marker15);
437
+ return AISDKError16.hasMarker(error, marker14);
463
438
  }
464
439
  };
465
- _a15 = symbol15;
440
+ _a14 = symbol14;
466
441
 
467
442
  // src/model/resolve-model.ts
468
443
  function transformToV3LanguageModel(model) {
@@ -538,7 +513,7 @@ function resolveEmbeddingModel(model) {
538
513
  );
539
514
  }
540
515
  function resolveTranscriptionModel(model) {
541
- var _a17, _b;
516
+ var _a16, _b;
542
517
  if (typeof model !== "string") {
543
518
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
544
519
  const unsupportedModel = model;
@@ -553,10 +528,10 @@ function resolveTranscriptionModel(model) {
553
528
  }
554
529
  return model;
555
530
  }
556
- return (_b = (_a17 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a17, model);
531
+ return (_b = (_a16 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a16, model);
557
532
  }
558
533
  function resolveSpeechModel(model) {
559
- var _a17, _b;
534
+ var _a16, _b;
560
535
  if (typeof model !== "string") {
561
536
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
562
537
  const unsupportedModel = model;
@@ -571,11 +546,11 @@ function resolveSpeechModel(model) {
571
546
  }
572
547
  return model;
573
548
  }
574
- return (_b = (_a17 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a17, model);
549
+ return (_b = (_a16 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a16, model);
575
550
  }
576
551
  function getGlobalProvider() {
577
- var _a17;
578
- return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : gateway;
552
+ var _a16;
553
+ return (_a16 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a16 : gateway;
579
554
  }
580
555
 
581
556
  // src/prompt/convert-to-language-model-prompt.ts
@@ -775,11 +750,11 @@ import {
775
750
  } from "@ai-sdk/provider-utils";
776
751
 
777
752
  // src/version.ts
778
- var VERSION = true ? "6.0.0-beta.70" : "0.0.0-test";
753
+ var VERSION = true ? "6.0.0-beta.72" : "0.0.0-test";
779
754
 
780
755
  // src/util/download/download.ts
781
756
  var download = async ({ url }) => {
782
- var _a17;
757
+ var _a16;
783
758
  const urlText = url.toString();
784
759
  try {
785
760
  const response = await fetch(urlText, {
@@ -798,7 +773,7 @@ var download = async ({ url }) => {
798
773
  }
799
774
  return {
800
775
  data: new Uint8Array(await response.arrayBuffer()),
801
- mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
776
+ mediaType: (_a16 = response.headers.get("content-type")) != null ? _a16 : void 0
802
777
  };
803
778
  } catch (error) {
804
779
  if (DownloadError.isInstance(error)) {
@@ -816,7 +791,7 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
816
791
  );
817
792
 
818
793
  // src/prompt/data-content.ts
819
- import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
794
+ import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
820
795
  import {
821
796
  convertBase64ToUint8Array as convertBase64ToUint8Array2,
822
797
  convertUint8ArrayToBase64
@@ -847,8 +822,8 @@ var dataContentSchema = z.union([
847
822
  z.custom(
848
823
  // Buffer might not be available in some environments such as CloudFlare:
849
824
  (value) => {
850
- var _a17, _b;
851
- return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
825
+ var _a16, _b;
826
+ return (_b = (_a16 = globalThis.Buffer) == null ? void 0 : _a16.isBuffer(value)) != null ? _b : false;
852
827
  },
853
828
  { message: "Must be a Buffer" }
854
829
  )
@@ -871,7 +846,7 @@ function convertToLanguageModelV3DataContent(content) {
871
846
  content.toString()
872
847
  );
873
848
  if (dataUrlMediaType == null || base64Content == null) {
874
- throw new AISDKError19({
849
+ throw new AISDKError18({
875
850
  name: "InvalidDataContentError",
876
851
  message: `Invalid data URL format in content ${content.toString()}`
877
852
  });
@@ -1062,8 +1037,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
1062
1037
  ).flat().filter(
1063
1038
  (part) => part.type === "image" || part.type === "file"
1064
1039
  ).map((part) => {
1065
- var _a17;
1066
- const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
1040
+ var _a16;
1041
+ const mediaType = (_a16 = part.mediaType) != null ? _a16 : part.type === "image" ? "image/*" : void 0;
1067
1042
  let data = part.type === "image" ? part.image : part.data;
1068
1043
  if (typeof data === "string") {
1069
1044
  try {
@@ -1093,7 +1068,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
1093
1068
  );
1094
1069
  }
1095
1070
  function convertPartToLanguageModelPart(part, downloadedAssets) {
1096
- var _a17;
1071
+ var _a16;
1097
1072
  if (part.type === "text") {
1098
1073
  return {
1099
1074
  type: "text",
@@ -1126,7 +1101,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1126
1101
  switch (type) {
1127
1102
  case "image": {
1128
1103
  if (data instanceof Uint8Array || typeof data === "string") {
1129
- mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
1104
+ mediaType = (_a16 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a16 : mediaType;
1130
1105
  }
1131
1106
  return {
1132
1107
  type: "file",
@@ -1181,7 +1156,7 @@ function mapToolResultOutput(output) {
1181
1156
  import { getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
1182
1157
  function createToolModelOutput({
1183
1158
  output,
1184
- tool: tool3,
1159
+ tool: tool2,
1185
1160
  errorMode
1186
1161
  }) {
1187
1162
  if (errorMode === "text") {
@@ -1189,8 +1164,8 @@ function createToolModelOutput({
1189
1164
  } else if (errorMode === "json") {
1190
1165
  return { type: "error-json", value: toJSONValue(output) };
1191
1166
  }
1192
- if (tool3 == null ? void 0 : tool3.toModelOutput) {
1193
- return tool3.toModelOutput(output);
1167
+ if (tool2 == null ? void 0 : tool2.toModelOutput) {
1168
+ return tool2.toModelOutput(output);
1194
1169
  }
1195
1170
  return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: toJSONValue(output) };
1196
1171
  }
@@ -1312,29 +1287,29 @@ async function prepareToolsAndToolChoice({
1312
1287
  };
1313
1288
  }
1314
1289
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
1315
- ([name17]) => activeTools.includes(name17)
1290
+ ([name16]) => activeTools.includes(name16)
1316
1291
  ) : Object.entries(tools);
1317
1292
  const languageModelTools = [];
1318
- for (const [name17, tool3] of filteredTools) {
1319
- const toolType = tool3.type;
1293
+ for (const [name16, tool2] of filteredTools) {
1294
+ const toolType = tool2.type;
1320
1295
  switch (toolType) {
1321
1296
  case void 0:
1322
1297
  case "dynamic":
1323
1298
  case "function":
1324
1299
  languageModelTools.push({
1325
1300
  type: "function",
1326
- name: name17,
1327
- description: tool3.description,
1328
- inputSchema: await asSchema(tool3.inputSchema).jsonSchema,
1329
- providerOptions: tool3.providerOptions
1301
+ name: name16,
1302
+ description: tool2.description,
1303
+ inputSchema: await asSchema(tool2.inputSchema).jsonSchema,
1304
+ providerOptions: tool2.providerOptions
1330
1305
  });
1331
1306
  break;
1332
1307
  case "provider-defined":
1333
1308
  languageModelTools.push({
1334
1309
  type: "provider-defined",
1335
- name: name17,
1336
- id: tool3.id,
1337
- args: tool3.args
1310
+ name: name16,
1311
+ id: tool2.id,
1312
+ args: tool2.args
1338
1313
  });
1339
1314
  break;
1340
1315
  default: {
@@ -1626,10 +1601,10 @@ import {
1626
1601
  GatewayAuthenticationError,
1627
1602
  GatewayModelNotFoundError
1628
1603
  } from "@ai-sdk/gateway";
1629
- import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
1604
+ import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
1630
1605
  function wrapGatewayError(error) {
1631
1606
  if (GatewayAuthenticationError.isInstance(error) || GatewayModelNotFoundError.isInstance(error)) {
1632
- return new AISDKError20({
1607
+ return new AISDKError19({
1633
1608
  name: "GatewayError",
1634
1609
  message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
1635
1610
  cause: error
@@ -1660,7 +1635,7 @@ function getBaseTelemetryAttributes({
1660
1635
  telemetry,
1661
1636
  headers
1662
1637
  }) {
1663
- var _a17;
1638
+ var _a16;
1664
1639
  return {
1665
1640
  "ai.model.provider": model.provider,
1666
1641
  "ai.model.id": model.modelId,
@@ -1670,7 +1645,7 @@ function getBaseTelemetryAttributes({
1670
1645
  return attributes;
1671
1646
  }, {}),
1672
1647
  // add metadata as attributes:
1673
- ...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
1648
+ ...Object.entries((_a16 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a16 : {}).reduce(
1674
1649
  (attributes, [key, value]) => {
1675
1650
  attributes[`ai.telemetry.metadata.${key}`] = value;
1676
1651
  return attributes;
@@ -1695,7 +1670,7 @@ var noopTracer = {
1695
1670
  startSpan() {
1696
1671
  return noopSpan;
1697
1672
  },
1698
- startActiveSpan(name17, arg1, arg2, arg3) {
1673
+ startActiveSpan(name16, arg1, arg2, arg3) {
1699
1674
  if (typeof arg1 === "function") {
1700
1675
  return arg1(noopSpan);
1701
1676
  }
@@ -1765,14 +1740,14 @@ function getTracer({
1765
1740
  // src/telemetry/record-span.ts
1766
1741
  import { SpanStatusCode } from "@opentelemetry/api";
1767
1742
  async function recordSpan({
1768
- name: name17,
1743
+ name: name16,
1769
1744
  tracer,
1770
1745
  attributes,
1771
1746
  fn,
1772
1747
  endWhenDone = true
1773
1748
  }) {
1774
1749
  return tracer.startActiveSpan(
1775
- name17,
1750
+ name16,
1776
1751
  { attributes: await attributes },
1777
1752
  async (span) => {
1778
1753
  try {
@@ -2091,8 +2066,8 @@ async function executeToolCall({
2091
2066
  onPreliminaryToolResult
2092
2067
  }) {
2093
2068
  const { toolName, toolCallId, input } = toolCall;
2094
- const tool3 = tools == null ? void 0 : tools[toolName];
2095
- if ((tool3 == null ? void 0 : tool3.execute) == null) {
2069
+ const tool2 = tools == null ? void 0 : tools[toolName];
2070
+ if ((tool2 == null ? void 0 : tool2.execute) == null) {
2096
2071
  return void 0;
2097
2072
  }
2098
2073
  return recordSpan({
@@ -2116,7 +2091,7 @@ async function executeToolCall({
2116
2091
  let output;
2117
2092
  try {
2118
2093
  const stream = executeTool({
2119
- execute: tool3.execute.bind(tool3),
2094
+ execute: tool2.execute.bind(tool2),
2120
2095
  input,
2121
2096
  options: {
2122
2097
  toolCallId,
@@ -2145,7 +2120,7 @@ async function executeToolCall({
2145
2120
  toolName,
2146
2121
  input,
2147
2122
  error,
2148
- dynamic: tool3.type === "dynamic"
2123
+ dynamic: tool2.type === "dynamic"
2149
2124
  };
2150
2125
  }
2151
2126
  try {
@@ -2167,7 +2142,7 @@ async function executeToolCall({
2167
2142
  toolName,
2168
2143
  input,
2169
2144
  output,
2170
- dynamic: tool3.type === "dynamic"
2145
+ dynamic: tool2.type === "dynamic"
2171
2146
  };
2172
2147
  }
2173
2148
  });
@@ -2223,18 +2198,18 @@ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
2223
2198
 
2224
2199
  // src/generate-text/is-approval-needed.ts
2225
2200
  async function isApprovalNeeded({
2226
- tool: tool3,
2201
+ tool: tool2,
2227
2202
  toolCall,
2228
2203
  messages,
2229
2204
  experimental_context
2230
2205
  }) {
2231
- if (tool3.needsApproval == null) {
2206
+ if (tool2.needsApproval == null) {
2232
2207
  return false;
2233
2208
  }
2234
- if (typeof tool3.needsApproval === "boolean") {
2235
- return tool3.needsApproval;
2209
+ if (typeof tool2.needsApproval === "boolean") {
2210
+ return tool2.needsApproval;
2236
2211
  }
2237
- return await tool3.needsApproval(toolCall.input, {
2212
+ return await tool2.needsApproval(toolCall.input, {
2238
2213
  toolCallId: toolCall.toolCallId,
2239
2214
  messages,
2240
2215
  experimental_context
@@ -2329,8 +2304,8 @@ async function doParseToolCall({
2329
2304
  tools
2330
2305
  }) {
2331
2306
  const toolName = toolCall.toolName;
2332
- const tool3 = tools[toolName];
2333
- if (tool3 == null) {
2307
+ const tool2 = tools[toolName];
2308
+ if (tool2 == null) {
2334
2309
  if (toolCall.providerExecuted && toolCall.dynamic) {
2335
2310
  return await parseProviderExecutedDynamicToolCall(toolCall);
2336
2311
  }
@@ -2339,7 +2314,7 @@ async function doParseToolCall({
2339
2314
  availableTools: Object.keys(tools)
2340
2315
  });
2341
2316
  }
2342
- const schema = asSchema2(tool3.inputSchema);
2317
+ const schema = asSchema2(tool2.inputSchema);
2343
2318
  const parseResult = toolCall.input.trim() === "" ? await safeValidateTypes2({ value: {}, schema }) : await safeParseJSON({ text: toolCall.input, schema });
2344
2319
  if (parseResult.success === false) {
2345
2320
  throw new InvalidToolInputError({
@@ -2348,7 +2323,7 @@ async function doParseToolCall({
2348
2323
  cause: parseResult.error
2349
2324
  });
2350
2325
  }
2351
- return tool3.type === "dynamic" ? {
2326
+ return tool2.type === "dynamic" ? {
2352
2327
  type: "tool-call",
2353
2328
  toolCallId: toolCall.toolCallId,
2354
2329
  toolName: toolCall.toolName,
@@ -2434,8 +2409,8 @@ function stepCountIs(stepCount) {
2434
2409
  }
2435
2410
  function hasToolCall(toolName) {
2436
2411
  return ({ steps }) => {
2437
- var _a17, _b, _c;
2438
- return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
2412
+ var _a16, _b, _c;
2413
+ return (_c = (_b = (_a16 = steps[steps.length - 1]) == null ? void 0 : _a16.toolCalls) == null ? void 0 : _b.some(
2439
2414
  (toolCall) => toolCall.toolName === toolName
2440
2415
  )) != null ? _c : false;
2441
2416
  };
@@ -2559,7 +2534,8 @@ async function generateText({
2559
2534
  abortSignal,
2560
2535
  headers,
2561
2536
  stopWhen = stepCountIs(1),
2562
- experimental_output: output,
2537
+ experimental_output,
2538
+ output = experimental_output,
2563
2539
  experimental_telemetry: telemetry,
2564
2540
  providerOptions,
2565
2541
  experimental_activeTools,
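The hunk above shows `generateText` now accepting an `output` option that defaults to the existing `experimental_output` value, and a later hunk adds matching `output`/`experimental_output` getters on the result. A minimal usage sketch under those assumptions, using the `Output.object` helper and a gateway-style string model id (both illustrative):

```ts
import { generateText, Output } from 'ai';
import { z } from 'zod';

const result = await generateText({
  model: 'openai/gpt-4o-mini', // illustrative model id, resolved via the gateway
  prompt: 'Name three HTTP methods.',
  // `output` now aliases the previous `experimental_output` option:
  output: Output.object({
    schema: z.object({ methods: z.array(z.string()) }),
  }),
});

// The result getters added further down expose the parsed value as
// `result.output`, with `result.experimental_output` kept as an alias.
console.log(result.output.methods);
```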
@@ -2622,7 +2598,7 @@ async function generateText({
2622
2598
  }),
2623
2599
  tracer,
2624
2600
  fn: async (span) => {
2625
- var _a17, _b, _c, _d, _e, _f, _g;
2601
+ var _a16, _b, _c, _d, _e, _f, _g;
2626
2602
  const initialMessages = initialPrompt.messages;
2627
2603
  const responseMessages = [];
2628
2604
  const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -2679,7 +2655,7 @@ async function generateText({
2679
2655
  messages: stepInputMessages
2680
2656
  }));
2681
2657
  const stepModel = resolveLanguageModel(
2682
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
2658
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
2683
2659
  );
2684
2660
  const promptMessages = await convertToLanguageModelPrompt({
2685
2661
  prompt: {
@@ -2696,7 +2672,7 @@ async function generateText({
2696
2672
  });
2697
2673
  currentModelResponse = await retry(
2698
2674
  () => {
2699
- var _a18;
2675
+ var _a17;
2700
2676
  return recordSpan({
2701
2677
  name: "ai.generateText.doGenerate",
2702
2678
  attributes: selectTelemetryAttributes({
@@ -2716,7 +2692,7 @@ async function generateText({
2716
2692
  },
2717
2693
  "ai.prompt.tools": {
2718
2694
  // convert the language model level tools:
2719
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
2695
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
2720
2696
  },
2721
2697
  "ai.prompt.toolChoice": {
2722
2698
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -2728,14 +2704,14 @@ async function generateText({
2728
2704
  "gen_ai.request.max_tokens": settings.maxOutputTokens,
2729
2705
  "gen_ai.request.presence_penalty": settings.presencePenalty,
2730
2706
  "gen_ai.request.stop_sequences": settings.stopSequences,
2731
- "gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
2707
+ "gen_ai.request.temperature": (_a17 = settings.temperature) != null ? _a17 : void 0,
2732
2708
  "gen_ai.request.top_k": settings.topK,
2733
2709
  "gen_ai.request.top_p": settings.topP
2734
2710
  }
2735
2711
  }),
2736
2712
  tracer,
2737
2713
  fn: async (span2) => {
2738
- var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
2714
+ var _a18, _b2, _c2, _d2, _e2, _f2, _g2, _h;
2739
2715
  const result = await stepModel.doGenerate({
2740
2716
  ...callSettings2,
2741
2717
  tools: stepTools,
@@ -2747,7 +2723,7 @@ async function generateText({
2747
2723
  headers: headersWithUserAgent
2748
2724
  });
2749
2725
  const responseData = {
2750
- id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId2(),
2726
+ id: (_b2 = (_a18 = result.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId2(),
2751
2727
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
2752
2728
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
2753
2729
  headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
@@ -2808,12 +2784,12 @@ async function generateText({
2808
2784
  if (toolCall.invalid) {
2809
2785
  continue;
2810
2786
  }
2811
- const tool3 = tools == null ? void 0 : tools[toolCall.toolName];
2812
- if (tool3 == null) {
2787
+ const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
2788
+ if (tool2 == null) {
2813
2789
  continue;
2814
2790
  }
2815
- if ((tool3 == null ? void 0 : tool3.onInputAvailable) != null) {
2816
- await tool3.onInputAvailable({
2791
+ if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
2792
+ await tool2.onInputAvailable({
2817
2793
  input: toolCall.input,
2818
2794
  toolCallId: toolCall.toolCallId,
2819
2795
  messages: stepInputMessages,
@@ -2822,7 +2798,7 @@ async function generateText({
2822
2798
  });
2823
2799
  }
2824
2800
  if (await isApprovalNeeded({
2825
- tool: tool3,
2801
+ tool: tool2,
2826
2802
  toolCall,
2827
2803
  messages: stepInputMessages,
2828
2804
  experimental_context
@@ -3070,6 +3046,9 @@ var DefaultGenerateTextResult = class {
3070
3046
  return this.finalStep.usage;
3071
3047
  }
3072
3048
  get experimental_output() {
3049
+ return this.output;
3050
+ }
3051
+ get output() {
3073
3052
  if (this.resolvedOutput == null) {
3074
3053
  throw new NoOutputSpecifiedError();
3075
3054
  }
@@ -3884,7 +3863,7 @@ function processUIMessageStream({
3884
3863
  new TransformStream({
3885
3864
  async transform(chunk, controller) {
3886
3865
  await runUpdateMessageJob(async ({ state, write }) => {
3887
- var _a17, _b, _c, _d;
3866
+ var _a16, _b, _c, _d;
3888
3867
  function getToolInvocation(toolCallId) {
3889
3868
  const toolInvocations = state.message.parts.filter(
3890
3869
  isToolOrDynamicToolUIPart
@@ -3900,7 +3879,7 @@ function processUIMessageStream({
3900
3879
  return toolInvocation;
3901
3880
  }
3902
3881
  function updateToolPart(options) {
3903
- var _a18;
3882
+ var _a17;
3904
3883
  const part = state.message.parts.find(
3905
3884
  (part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
3906
3885
  );
@@ -3913,7 +3892,7 @@ function processUIMessageStream({
3913
3892
  anyPart.errorText = anyOptions.errorText;
3914
3893
  anyPart.rawInput = anyOptions.rawInput;
3915
3894
  anyPart.preliminary = anyOptions.preliminary;
3916
- anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
3895
+ anyPart.providerExecuted = (_a17 = anyOptions.providerExecuted) != null ? _a17 : part.providerExecuted;
3917
3896
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
3918
3897
  part.callProviderMetadata = anyOptions.providerMetadata;
3919
3898
  }
@@ -3933,7 +3912,7 @@ function processUIMessageStream({
3933
3912
  }
3934
3913
  }
3935
3914
  function updateDynamicToolPart(options) {
3936
- var _a18, _b2;
3915
+ var _a17, _b2;
3937
3916
  const part = state.message.parts.find(
3938
3917
  (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
3939
3918
  );
@@ -3945,7 +3924,7 @@ function processUIMessageStream({
3945
3924
  anyPart.input = anyOptions.input;
3946
3925
  anyPart.output = anyOptions.output;
3947
3926
  anyPart.errorText = anyOptions.errorText;
3948
- anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
3927
+ anyPart.rawInput = (_a17 = anyOptions.rawInput) != null ? _a17 : anyPart.rawInput;
3949
3928
  anyPart.preliminary = anyOptions.preliminary;
3950
3929
  anyPart.providerExecuted = (_b2 = anyOptions.providerExecuted) != null ? _b2 : part.providerExecuted;
3951
3930
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
@@ -3994,7 +3973,7 @@ function processUIMessageStream({
3994
3973
  case "text-delta": {
3995
3974
  const textPart = state.activeTextParts[chunk.id];
3996
3975
  textPart.text += chunk.delta;
3997
- textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
3976
+ textPart.providerMetadata = (_a16 = chunk.providerMetadata) != null ? _a16 : textPart.providerMetadata;
3998
3977
  write();
3999
3978
  break;
4000
3979
  }
@@ -4418,11 +4397,11 @@ function createAsyncIterableStream(source) {
4418
4397
  const reader = this.getReader();
4419
4398
  let finished = false;
4420
4399
  async function cleanup(cancelStream) {
4421
- var _a17;
4400
+ var _a16;
4422
4401
  finished = true;
4423
4402
  try {
4424
4403
  if (cancelStream) {
4425
- await ((_a17 = reader.cancel) == null ? void 0 : _a17.call(reader));
4404
+ await ((_a16 = reader.cancel) == null ? void 0 : _a16.call(reader));
4426
4405
  }
4427
4406
  } finally {
4428
4407
  try {
@@ -4609,25 +4588,25 @@ var DelayedPromise = class {
4609
4588
  return this._promise;
4610
4589
  }
4611
4590
  resolve(value) {
4612
- var _a17;
4591
+ var _a16;
4613
4592
  this.status = { type: "resolved", value };
4614
4593
  if (this._promise) {
4615
- (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
4594
+ (_a16 = this._resolve) == null ? void 0 : _a16.call(this, value);
4616
4595
  }
4617
4596
  }
4618
4597
  reject(error) {
4619
- var _a17;
4598
+ var _a16;
4620
4599
  this.status = { type: "rejected", error };
4621
4600
  if (this._promise) {
4622
- (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
4601
+ (_a16 = this._reject) == null ? void 0 : _a16.call(this, error);
4623
4602
  }
4624
4603
  }
4625
4604
  };
4626
4605
 
4627
4606
  // src/util/now.ts
4628
4607
  function now() {
4629
- var _a17, _b;
4630
- return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
4608
+ var _a16, _b;
4609
+ return (_b = (_a16 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a16.now()) != null ? _b : Date.now();
4631
4610
  }
4632
4611
 
4633
4612
  // src/generate-text/run-tools-transformation.ts
@@ -4725,12 +4704,12 @@ function runToolsTransformation({
4725
4704
  });
4726
4705
  break;
4727
4706
  }
4728
- const tool3 = tools == null ? void 0 : tools[toolCall.toolName];
4729
- if (tool3 == null) {
4707
+ const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
4708
+ if (tool2 == null) {
4730
4709
  break;
4731
4710
  }
4732
- if (tool3.onInputAvailable != null) {
4733
- await tool3.onInputAvailable({
4711
+ if (tool2.onInputAvailable != null) {
4712
+ await tool2.onInputAvailable({
4734
4713
  input: toolCall.input,
4735
4714
  toolCallId: toolCall.toolCallId,
4736
4715
  messages,
@@ -4739,7 +4718,7 @@ function runToolsTransformation({
4739
4718
  });
4740
4719
  }
4741
4720
  if (await isApprovalNeeded({
4742
- tool: tool3,
4721
+ tool: tool2,
4743
4722
  toolCall,
4744
4723
  messages,
4745
4724
  experimental_context
@@ -4752,7 +4731,7 @@ function runToolsTransformation({
4752
4731
  break;
4753
4732
  }
4754
4733
  toolInputs.set(toolCall.toolCallId, toolCall.input);
4755
- if (tool3.execute != null && toolCall.providerExecuted !== true) {
4734
+ if (tool2.execute != null && toolCall.providerExecuted !== true) {
4756
4735
  const toolExecutionId = generateId2();
4757
4736
  outstandingToolResults.add(toolExecutionId);
4758
4737
  executeToolCall({
@@ -4856,7 +4835,8 @@ function streamText({
4856
4835
  abortSignal,
4857
4836
  headers,
4858
4837
  stopWhen = stepCountIs(1),
4859
- experimental_output: output,
4838
+ experimental_output,
4839
+ output = experimental_output,
4860
4840
  experimental_telemetry: telemetry,
4861
4841
  prepareStep,
4862
4842
  providerOptions,
@@ -5029,7 +5009,7 @@ var DefaultStreamTextResult = class {
5029
5009
  let activeReasoningContent = {};
5030
5010
  const eventProcessor = new TransformStream({
5031
5011
  async transform(chunk, controller) {
5032
- var _a17, _b, _c, _d;
5012
+ var _a16, _b, _c, _d;
5033
5013
  controller.enqueue(chunk);
5034
5014
  const { part } = chunk;
5035
5015
  if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -5059,7 +5039,7 @@ var DefaultStreamTextResult = class {
5059
5039
  return;
5060
5040
  }
5061
5041
  activeText.text += part.text;
5062
- activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
5042
+ activeText.providerMetadata = (_a16 = part.providerMetadata) != null ? _a16 : activeText.providerMetadata;
5063
5043
  }
5064
5044
  if (part.type === "text-end") {
5065
5045
  const activeText = activeTextContent[part.id];
@@ -5218,8 +5198,8 @@ var DefaultStreamTextResult = class {
5218
5198
  "ai.response.text": { output: () => finalStep.text },
5219
5199
  "ai.response.toolCalls": {
5220
5200
  output: () => {
5221
- var _a17;
5222
- return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
5201
+ var _a16;
5202
+ return ((_a16 = finalStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
5223
5203
  }
5224
5204
  },
5225
5205
  "ai.response.providerMetadata": JSON.stringify(
@@ -5399,7 +5379,7 @@ var DefaultStreamTextResult = class {
5399
5379
  responseMessages,
5400
5380
  usage
5401
5381
  }) {
5402
- var _a17, _b, _c, _d, _e;
5382
+ var _a16, _b, _c, _d, _e;
5403
5383
  const includeRawChunks2 = self.includeRawChunks;
5404
5384
  stepFinish = new DelayedPromise();
5405
5385
  const stepInputMessages = [...initialMessages, ...responseMessages];
@@ -5410,7 +5390,7 @@ var DefaultStreamTextResult = class {
5410
5390
  messages: stepInputMessages
5411
5391
  }));
5412
5392
  const stepModel = resolveLanguageModel(
5413
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
5393
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
5414
5394
  );
5415
5395
  const promptMessages = await convertToLanguageModelPrompt({
5416
5396
  prompt: {
@@ -5449,7 +5429,7 @@ var DefaultStreamTextResult = class {
5449
5429
  },
5450
5430
  "ai.prompt.tools": {
5451
5431
  // convert the language model level tools:
5452
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
5432
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
5453
5433
  },
5454
5434
  "ai.prompt.toolChoice": {
5455
5435
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -5521,7 +5501,7 @@ var DefaultStreamTextResult = class {
5521
5501
  streamWithToolResults.pipeThrough(
5522
5502
  new TransformStream({
5523
5503
  async transform(chunk, controller) {
5524
- var _a18, _b2, _c2, _d2, _e2;
5504
+ var _a17, _b2, _c2, _d2, _e2;
5525
5505
  if (chunk.type === "stream-start") {
5526
5506
  warnings = chunk.warnings;
5527
5507
  return;
@@ -5594,7 +5574,7 @@ var DefaultStreamTextResult = class {
5594
5574
  }
5595
5575
  case "response-metadata": {
5596
5576
  stepResponse = {
5597
- id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
5577
+ id: (_a17 = chunk.id) != null ? _a17 : stepResponse.id,
5598
5578
  timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
5599
5579
  modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
5600
5580
  };
@@ -5622,9 +5602,9 @@ var DefaultStreamTextResult = class {
5622
5602
  }
5623
5603
  case "tool-input-start": {
5624
5604
  activeToolCallToolNames[chunk.id] = chunk.toolName;
5625
- const tool3 = tools == null ? void 0 : tools[chunk.toolName];
5626
- if ((tool3 == null ? void 0 : tool3.onInputStart) != null) {
5627
- await tool3.onInputStart({
5605
+ const tool2 = tools == null ? void 0 : tools[chunk.toolName];
5606
+ if ((tool2 == null ? void 0 : tool2.onInputStart) != null) {
5607
+ await tool2.onInputStart({
5628
5608
  toolCallId: chunk.id,
5629
5609
  messages: stepInputMessages,
5630
5610
  abortSignal,
@@ -5633,7 +5613,7 @@ var DefaultStreamTextResult = class {
5633
5613
  }
5634
5614
  controller.enqueue({
5635
5615
  ...chunk,
5636
- dynamic: (_e2 = chunk.dynamic) != null ? _e2 : (tool3 == null ? void 0 : tool3.type) === "dynamic"
5616
+ dynamic: (_e2 = chunk.dynamic) != null ? _e2 : (tool2 == null ? void 0 : tool2.type) === "dynamic"
5637
5617
  });
5638
5618
  break;
5639
5619
  }
@@ -5644,9 +5624,9 @@ var DefaultStreamTextResult = class {
5644
5624
  }
5645
5625
  case "tool-input-delta": {
5646
5626
  const toolName = activeToolCallToolNames[chunk.id];
5647
- const tool3 = tools == null ? void 0 : tools[toolName];
5648
- if ((tool3 == null ? void 0 : tool3.onInputDelta) != null) {
5649
- await tool3.onInputDelta({
5627
+ const tool2 = tools == null ? void 0 : tools[toolName];
5628
+ if ((tool2 == null ? void 0 : tool2.onInputDelta) != null) {
5629
+ await tool2.onInputDelta({
5650
5630
  inputTextDelta: chunk.delta,
5651
5631
  toolCallId: chunk.id,
5652
5632
  messages: stepInputMessages,
@@ -5896,17 +5876,20 @@ var DefaultStreamTextResult = class {
5896
5876
  );
5897
5877
  }
5898
5878
  async consumeStream(options) {
5899
- var _a17;
5879
+ var _a16;
5900
5880
  try {
5901
5881
  await consumeStream({
5902
5882
  stream: this.fullStream,
5903
5883
  onError: options == null ? void 0 : options.onError
5904
5884
  });
5905
5885
  } catch (error) {
5906
- (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
5886
+ (_a16 = options == null ? void 0 : options.onError) == null ? void 0 : _a16.call(options, error);
5907
5887
  }
5908
5888
  }
5909
5889
  get experimental_partialOutputStream() {
5890
+ return this.partialOutputStream;
5891
+ }
5892
+ get partialOutputStream() {
5910
5893
  if (this.output == null) {
5911
5894
  throw new NoOutputSpecifiedError();
5912
5895
  }
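Mirroring the `generateText` change, an earlier hunk gives `streamText` the same `output = experimental_output` alias, and the getter added above exposes `partialOutputStream`, with `experimental_partialOutputStream` delegating to it. A hedged sketch under the same assumptions (`Output.object` helper, illustrative model id):

```ts
import { streamText, Output } from 'ai';
import { z } from 'zod';

const result = streamText({
  model: 'openai/gpt-4o-mini', // illustrative model id
  prompt: 'Generate a short movie pitch.',
  // `output` aliases the previous `experimental_output` option here as well:
  output: Output.object({
    schema: z.object({ title: z.string(), logline: z.string() }),
  }),
});

// New non-experimental getter; per the guard above it throws
// NoOutputSpecifiedError when no `output` option was provided.
for await (const partial of result.partialOutputStream) {
  console.log(partial); // incrementally filled { title?, logline? }
}
```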
@@ -5938,12 +5921,12 @@ var DefaultStreamTextResult = class {
5938
5921
  responseMessageId: generateMessageId
5939
5922
  }) : void 0;
5940
5923
  const isDynamic = (part) => {
5941
- var _a17;
5942
- const tool3 = (_a17 = this.tools) == null ? void 0 : _a17[part.toolName];
5943
- if (tool3 == null) {
5924
+ var _a16;
5925
+ const tool2 = (_a16 = this.tools) == null ? void 0 : _a16[part.toolName];
5926
+ if (tool2 == null) {
5944
5927
  return part.dynamic;
5945
5928
  }
5946
- return (tool3 == null ? void 0 : tool3.type) === "dynamic" ? true : void 0;
5929
+ return (tool2 == null ? void 0 : tool2.type) === "dynamic" ? true : void 0;
5947
5930
  };
5948
5931
  const baseStream = this.fullStream.pipeThrough(
5949
5932
  new TransformStream({
@@ -6273,10 +6256,10 @@ var ToolLoopAgent = class {
6273
6256
  return this.settings.tools;
6274
6257
  }
6275
6258
  async prepareCall(options) {
6276
- var _a17, _b, _c, _d;
6259
+ var _a16, _b, _c, _d;
6277
6260
  const baseCallArgs = {
6278
6261
  ...this.settings,
6279
- stopWhen: (_a17 = this.settings.stopWhen) != null ? _a17 : stepCountIs(20),
6262
+ stopWhen: (_a16 = this.settings.stopWhen) != null ? _a16 : stepCountIs(20),
6280
6263
  ...options
6281
6264
  };
6282
6265
  const preparedCallArgs = (_d = await ((_c = (_b = this.settings).prepareCall) == null ? void 0 : _c.call(_b, baseCallArgs))) != null ? _d : baseCallArgs;
@@ -6397,7 +6380,7 @@ function readUIMessageStream({
6397
6380
  onError,
6398
6381
  terminateOnError = false
6399
6382
  }) {
6400
- var _a17;
6383
+ var _a16;
6401
6384
  let controller;
6402
6385
  let hasErrored = false;
6403
6386
  const outputStream = new ReadableStream({
@@ -6406,7 +6389,7 @@ function readUIMessageStream({
6406
6389
  }
6407
6390
  });
6408
6391
  const state = createStreamingUIMessageState({
6409
- messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
6392
+ messageId: (_a16 = message == null ? void 0 : message.id) != null ? _a16 : "",
6410
6393
  lastMessage: message
6411
6394
  });
6412
6395
  const handleError = (error) => {
@@ -6475,7 +6458,7 @@ function convertToModelMessages(messages, options) {
6475
6458
  modelMessages.push({
6476
6459
  role: "user",
6477
6460
  content: message.parts.map((part) => {
6478
- var _a17;
6461
+ var _a16;
6479
6462
  if (isTextUIPart(part)) {
6480
6463
  return {
6481
6464
  type: "text",
@@ -6493,7 +6476,7 @@ function convertToModelMessages(messages, options) {
6493
6476
  };
6494
6477
  }
6495
6478
  if (isDataUIPart(part)) {
6496
- return (_a17 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a17.call(
6479
+ return (_a16 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a16.call(
6497
6480
  options,
6498
6481
  part
6499
6482
  );
@@ -6505,7 +6488,7 @@ function convertToModelMessages(messages, options) {
6505
6488
  case "assistant": {
6506
6489
  if (message.parts != null) {
6507
6490
  let processBlock2 = function() {
6508
- var _a17, _b, _c;
6491
+ var _a16, _b, _c;
6509
6492
  if (block.length === 0) {
6510
6493
  return;
6511
6494
  }
@@ -6537,7 +6520,7 @@ function convertToModelMessages(messages, options) {
6537
6520
  type: "tool-call",
6538
6521
  toolCallId: part.toolCallId,
6539
6522
  toolName,
6540
- input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : "rawInput" in part ? part.rawInput : void 0 : part.input,
6523
+ input: part.state === "output-error" ? (_a16 = part.input) != null ? _a16 : "rawInput" in part ? part.rawInput : void 0 : part.input,
6541
6524
  providerExecuted: part.providerExecuted,
6542
6525
  ...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
6543
6526
  });
@@ -6586,9 +6569,9 @@ function convertToModelMessages(messages, options) {
6586
6569
  role: "tool",
6587
6570
  content: toolParts.flatMap(
6588
6571
  (toolPart) => {
6589
- var _a18, _b2, _c2;
6572
+ var _a17, _b2, _c2;
6590
6573
  const outputs = [];
6591
- if (((_a18 = toolPart.approval) == null ? void 0 : _a18.approved) != null) {
6574
+ if (((_a17 = toolPart.approval) == null ? void 0 : _a17.approved) != null) {
6592
6575
  outputs.push({
6593
6576
  type: "tool-approval-response",
6594
6577
  approvalId: toolPart.approval.id,
@@ -6985,8 +6968,8 @@ async function safeValidateUIMessages({
6985
6968
  );
6986
6969
  for (const toolPart of toolParts) {
6987
6970
  const toolName = toolPart.type.slice(5);
6988
- const tool3 = tools[toolName];
6989
- if (!tool3) {
6971
+ const tool2 = tools[toolName];
6972
+ if (!tool2) {
6990
6973
  return {
6991
6974
  success: false,
6992
6975
  error: new TypeValidationError2({
@@ -6998,13 +6981,13 @@ async function safeValidateUIMessages({
6998
6981
  if (toolPart.state === "input-available" || toolPart.state === "output-available" || toolPart.state === "output-error") {
6999
6982
  await validateTypes2({
7000
6983
  value: toolPart.input,
7001
- schema: tool3.inputSchema
6984
+ schema: tool2.inputSchema
7002
6985
  });
7003
6986
  }
7004
- if (toolPart.state === "output-available" && tool3.outputSchema) {
6987
+ if (toolPart.state === "output-available" && tool2.outputSchema) {
7005
6988
  await validateTypes2({
7006
6989
  value: toolPart.output,
7007
- schema: tool3.outputSchema
6990
+ schema: tool2.outputSchema
7008
6991
  });
7009
6992
  }
7010
6993
  }
@@ -7154,7 +7137,7 @@ async function embed({
7154
7137
  }),
7155
7138
  tracer,
7156
7139
  fn: async (doEmbedSpan) => {
7157
- var _a17;
7140
+ var _a16;
7158
7141
  const modelResponse = await model.doEmbed({
7159
7142
  values: [value],
7160
7143
  abortSignal,
@@ -7162,7 +7145,7 @@ async function embed({
7162
7145
  providerOptions
7163
7146
  });
7164
7147
  const embedding2 = modelResponse.embeddings[0];
7165
- const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7148
+ const usage2 = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
7166
7149
  doEmbedSpan.setAttributes(
7167
7150
  await selectTelemetryAttributes({
7168
7151
  telemetry,
@@ -7272,7 +7255,7 @@ async function embedMany({
7272
7255
  }),
7273
7256
  tracer,
7274
7257
  fn: async (span) => {
7275
- var _a17;
7258
+ var _a16;
7276
7259
  const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
7277
7260
  model.maxEmbeddingsPerCall,
7278
7261
  model.supportsParallelCalls
@@ -7298,7 +7281,7 @@ async function embedMany({
7298
7281
  }),
7299
7282
  tracer,
7300
7283
  fn: async (doEmbedSpan) => {
7301
- var _a18;
7284
+ var _a17;
7302
7285
  const modelResponse = await model.doEmbed({
7303
7286
  values,
7304
7287
  abortSignal,
@@ -7306,7 +7289,7 @@ async function embedMany({
7306
7289
  providerOptions
7307
7290
  });
7308
7291
  const embeddings3 = modelResponse.embeddings;
7309
- const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
7292
+ const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7310
7293
  doEmbedSpan.setAttributes(
7311
7294
  await selectTelemetryAttributes({
7312
7295
  telemetry,
@@ -7380,7 +7363,7 @@ async function embedMany({
7380
7363
  }),
7381
7364
  tracer,
7382
7365
  fn: async (doEmbedSpan) => {
7383
- var _a18;
7366
+ var _a17;
7384
7367
  const modelResponse = await model.doEmbed({
7385
7368
  values: chunk,
7386
7369
  abortSignal,
@@ -7388,7 +7371,7 @@ async function embedMany({
7388
7371
  providerOptions
7389
7372
  });
7390
7373
  const embeddings2 = modelResponse.embeddings;
7391
- const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
7374
+ const usage = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7392
7375
  doEmbedSpan.setAttributes(
7393
7376
  await selectTelemetryAttributes({
7394
7377
  telemetry,
@@ -7425,7 +7408,7 @@ async function embedMany({
7425
7408
  result.providerMetadata
7426
7409
  )) {
7427
7410
  providerMetadata[providerName] = {
7428
- ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
7411
+ ...(_a16 = providerMetadata[providerName]) != null ? _a16 : {},
7429
7412
  ...metadata
7430
7413
  };
7431
7414
  }
@@ -7479,7 +7462,7 @@ async function generateImage({
7479
7462
  abortSignal,
7480
7463
  headers
7481
7464
  }) {
7482
- var _a17, _b;
7465
+ var _a16, _b;
7483
7466
  if (model.specificationVersion !== "v3") {
7484
7467
  throw new UnsupportedModelVersionError({
7485
7468
  version: model.specificationVersion,
@@ -7495,7 +7478,7 @@ async function generateImage({
7495
7478
  maxRetries: maxRetriesArg,
7496
7479
  abortSignal
7497
7480
  });
7498
- const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
7481
+ const maxImagesPerCallWithDefault = (_a16 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a16 : 1;
7499
7482
  const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
7500
7483
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
7501
7484
  if (i < callCount - 1) {
@@ -7528,13 +7511,13 @@ async function generateImage({
7528
7511
  images.push(
7529
7512
  ...result.images.map(
7530
7513
  (image) => {
7531
- var _a18;
7514
+ var _a17;
7532
7515
  return new DefaultGeneratedFile({
7533
7516
  data: image,
7534
- mediaType: (_a18 = detectMediaType({
7517
+ mediaType: (_a17 = detectMediaType({
7535
7518
  data: image,
7536
7519
  signatures: imageMediaTypeSignatures
7537
- })) != null ? _a18 : "image/png"
7520
+ })) != null ? _a17 : "image/png"
7538
7521
  });
7539
7522
  }
7540
7523
  )
@@ -7677,7 +7660,7 @@ var arrayOutputStrategy = (schema) => {
7677
7660
  isFirstDelta,
7678
7661
  isFinalDelta
7679
7662
  }) {
7680
- var _a17;
7663
+ var _a16;
7681
7664
  if (!isJSONObject(value) || !isJSONArray(value.elements)) {
7682
7665
  return {
7683
7666
  success: false,
@@ -7700,7 +7683,7 @@ var arrayOutputStrategy = (schema) => {
7700
7683
  }
7701
7684
  resultArray.push(result.value);
7702
7685
  }
7703
- const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
7686
+ const publishedElementCount = (_a16 = latestObject == null ? void 0 : latestObject.length) != null ? _a16 : 0;
7704
7687
  let textDelta = "";
7705
7688
  if (isFirstDelta) {
7706
7689
  textDelta += "[";
@@ -8096,7 +8079,7 @@ async function generateObject(options) {
8096
8079
  settings: { ...callSettings, maxRetries }
8097
8080
  });
8098
8081
  const tracer = getTracer(telemetry);
8099
- const jsonSchema3 = await outputStrategy.jsonSchema();
8082
+ const jsonSchema2 = await outputStrategy.jsonSchema();
8100
8083
  try {
8101
8084
  return await recordSpan({
8102
8085
  name: "ai.generateObject",
@@ -8112,7 +8095,7 @@ async function generateObject(options) {
8112
8095
  "ai.prompt": {
8113
8096
  input: () => JSON.stringify({ system, prompt, messages })
8114
8097
  },
8115
- "ai.schema": jsonSchema3 != null ? { input: () => JSON.stringify(jsonSchema3) } : void 0,
8098
+ "ai.schema": jsonSchema2 != null ? { input: () => JSON.stringify(jsonSchema2) } : void 0,
8116
8099
  "ai.schema.name": schemaName,
8117
8100
  "ai.schema.description": schemaDescription,
8118
8101
  "ai.settings.output": outputStrategy.type
@@ -8120,7 +8103,7 @@ async function generateObject(options) {
8120
8103
  }),
8121
8104
  tracer,
8122
8105
  fn: async (span) => {
8123
- var _a17;
8106
+ var _a16;
8124
8107
  let result;
8125
8108
  let finishReason;
8126
8109
  let usage;
@@ -8166,11 +8149,11 @@ async function generateObject(options) {
8166
8149
  }),
8167
8150
  tracer,
8168
8151
  fn: async (span2) => {
8169
- var _a18, _b, _c, _d, _e, _f, _g, _h;
8152
+ var _a17, _b, _c, _d, _e, _f, _g, _h;
8170
8153
  const result2 = await model.doGenerate({
8171
8154
  responseFormat: {
8172
8155
  type: "json",
8173
- schema: jsonSchema3,
8156
+ schema: jsonSchema2,
8174
8157
  name: schemaName,
8175
8158
  description: schemaDescription
8176
8159
  },
@@ -8181,7 +8164,7 @@ async function generateObject(options) {
8181
8164
  headers: headersWithUserAgent
8182
8165
  });
8183
8166
  const responseData = {
8184
- id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId2(),
8167
+ id: (_b = (_a17 = result2.response) == null ? void 0 : _a17.id) != null ? _b : generateId2(),
8185
8168
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
8186
8169
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
8187
8170
  headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -8235,7 +8218,7 @@ async function generateObject(options) {
8235
8218
  usage = generateResult.usage;
8236
8219
  warnings = generateResult.warnings;
8237
8220
  resultProviderMetadata = generateResult.providerMetadata;
8238
- request = (_a17 = generateResult.request) != null ? _a17 : {};
8221
+ request = (_a16 = generateResult.request) != null ? _a16 : {};
8239
8222
  response = generateResult.responseData;
8240
8223
  reasoning = generateResult.reasoning;
8241
8224
  logWarnings(warnings);
@@ -8294,9 +8277,9 @@ var DefaultGenerateObjectResult = class {
8294
8277
  this.reasoning = options.reasoning;
8295
8278
  }
8296
8279
  toJsonResponse(init) {
8297
- var _a17;
8280
+ var _a16;
8298
8281
  return new Response(JSON.stringify(this.object), {
8299
- status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
8282
+ status: (_a16 = init == null ? void 0 : init.status) != null ? _a16 : 200,
8300
8283
  headers: prepareHeaders(init == null ? void 0 : init.headers, {
8301
8284
  "content-type": "application/json; charset=utf-8"
8302
8285
  })
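In the generateObject hunks the bundler only renames the local jsonSchema3 alias to jsonSchema2 (the un-suffixed jsonSchema now being the re-exported helper in the export list below); behaviour is identical. A usage sketch with an assumed model and schema:

import { generateObject } from 'ai';
import { z } from 'zod';

// `model` is a placeholder for a provider language model instance.
async function lasagnaRecipe(model) {
  const { object } = await generateObject({
    model,
    schema: z.object({
      name: z.string(),
      ingredients: z.array(z.string()),
    }),
    schemaName: 'Recipe', // forwarded as responseFormat.name, as in the doGenerate call above
    prompt: 'Generate a lasagna recipe.',
  });
  return object; // parsed and validated; response id/timestamp/modelId handling is shown above
}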
@@ -8424,8 +8407,8 @@ function simulateReadableStream({
8424
8407
  chunkDelayInMs = 0,
8425
8408
  _internal
8426
8409
  }) {
8427
- var _a17;
8428
- const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : delayFunction;
8410
+ var _a16;
8411
+ const delay2 = (_a16 = _internal == null ? void 0 : _internal.delay) != null ? _a16 : delayFunction;
8429
8412
  let index = 0;
8430
8413
  return new ReadableStream({
8431
8414
  async pull(controller) {
@@ -8687,7 +8670,7 @@ var DefaultStreamObjectResult = class {
8687
8670
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
8688
8671
  new TransformStream({
8689
8672
  async transform(chunk, controller) {
8690
- var _a17, _b, _c;
8673
+ var _a16, _b, _c;
8691
8674
  if (typeof chunk === "object" && chunk.type === "stream-start") {
8692
8675
  warnings = chunk.warnings;
8693
8676
  return;
@@ -8737,7 +8720,7 @@ var DefaultStreamObjectResult = class {
8737
8720
  switch (chunk.type) {
8738
8721
  case "response-metadata": {
8739
8722
  fullResponse = {
8740
- id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
8723
+ id: (_a16 = chunk.id) != null ? _a16 : fullResponse.id,
8741
8724
  timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
8742
8725
  modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
8743
8726
  };
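The DefaultStreamObjectResult hunks rename the transform-callback locals and show how response metadata (id, timestamp, modelId) is filled from "response-metadata" chunks. A streaming counterpart sketch, again with a placeholder model and prompt:

import { streamObject } from 'ai';
import { z } from 'zod';

// `model` is a placeholder provider language model instance.
function streamSummary(model) {
  const { partialObjectStream } = streamObject({
    model,
    schema: z.object({ title: z.string(), summary: z.string() }),
    prompt: 'Summarize the latest release.', // illustrative assumption
  });
  return partialObjectStream; // async iterable of partial objects as the JSON stream is parsed
}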
@@ -9004,7 +8987,7 @@ async function generateSpeech({
9004
8987
  abortSignal,
9005
8988
  headers
9006
8989
  }) {
9007
- var _a17;
8990
+ var _a16;
9008
8991
  const resolvedModel = resolveSpeechModel(model);
9009
8992
  if (!resolvedModel) {
9010
8993
  throw new Error("Model could not be resolved");
@@ -9037,10 +9020,10 @@ async function generateSpeech({
9037
9020
  return new DefaultSpeechResult({
9038
9021
  audio: new DefaultGeneratedAudioFile({
9039
9022
  data: result.audio,
9040
- mediaType: (_a17 = detectMediaType({
9023
+ mediaType: (_a16 = detectMediaType({
9041
9024
  data: result.audio,
9042
9025
  signatures: audioMediaTypeSignatures
9043
- })) != null ? _a17 : "audio/mp3"
9026
+ })) != null ? _a16 : "audio/mp3"
9044
9027
  }),
9045
9028
  warnings: result.warnings,
9046
9029
  responses: [result.response],
@@ -9049,11 +9032,11 @@ async function generateSpeech({
9049
9032
  }
9050
9033
  var DefaultSpeechResult = class {
9051
9034
  constructor(options) {
9052
- var _a17;
9035
+ var _a16;
9053
9036
  this.audio = options.audio;
9054
9037
  this.warnings = options.warnings;
9055
9038
  this.responses = options.responses;
9056
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
9039
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
9057
9040
  }
9058
9041
  };
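generateSpeech and DefaultSpeechResult receive the same renumbering; the audio media type still falls back to "audio/mp3" when detection fails. A sketch of the call — the experimental_generateSpeech export alias comes from the package documentation rather than this diff, so treat it (and the inputs) as assumptions:

import { experimental_generateSpeech as generateSpeech } from 'ai';

// `model` is a placeholder for a provider speech model instance.
async function speak(model) {
  const { audio, warnings } = await generateSpeech({
    model,
    text: 'Hello from the AI SDK.', // illustrative assumption
  });
  return { mediaType: audio.mediaType, warnings }; // "audio/mp3" fallback, per the code above
}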
9059
9042
 
@@ -9090,9 +9073,9 @@ var object = ({
9090
9073
  const schema = asSchema4(inputSchema);
9091
9074
  return {
9092
9075
  type: "object",
9093
- responseFormat: resolve(schema.jsonSchema).then((jsonSchema3) => ({
9076
+ responseFormat: resolve(schema.jsonSchema).then((jsonSchema2) => ({
9094
9077
  type: "json",
9095
- schema: jsonSchema3
9078
+ schema: jsonSchema2
9096
9079
  })),
9097
9080
  async parseOutput({ text: text2 }, context) {
9098
9081
  const parseResult = await safeParseJSON4({ text: text2 });
@@ -9151,8 +9134,8 @@ var array = ({
9151
9134
  return {
9152
9135
  type: "object",
9153
9136
  // JSON schema that describes an array of elements:
9154
- responseFormat: resolve(elementSchema.jsonSchema).then((jsonSchema3) => {
9155
- const { $schema, ...itemSchema } = jsonSchema3;
9137
+ responseFormat: resolve(elementSchema.jsonSchema).then((jsonSchema2) => {
9138
+ const { $schema, ...itemSchema } = jsonSchema2;
9156
9139
  return {
9157
9140
  type: "json",
9158
9141
  schema: {
@@ -9749,7 +9732,7 @@ var doWrap = ({
9749
9732
  modelId,
9750
9733
  providerId
9751
9734
  }) => {
9752
- var _a17, _b, _c;
9735
+ var _a16, _b, _c;
9753
9736
  async function doTransform({
9754
9737
  params,
9755
9738
  type
@@ -9758,7 +9741,7 @@ var doWrap = ({
9758
9741
  }
9759
9742
  return {
9760
9743
  specificationVersion: "v3",
9761
- provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
9744
+ provider: (_a16 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a16 : model.provider,
9762
9745
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
9763
9746
  supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
9764
9747
  async doGenerate(params) {
@@ -9866,11 +9849,11 @@ function customProvider({
9866
9849
  var experimental_customProvider = customProvider;
9867
9850
 
9868
9851
  // src/registry/no-such-provider-error.ts
9869
- import { AISDKError as AISDKError21, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
9870
- var name16 = "AI_NoSuchProviderError";
9871
- var marker16 = `vercel.ai.error.${name16}`;
9872
- var symbol16 = Symbol.for(marker16);
9873
- var _a16;
9852
+ import { AISDKError as AISDKError20, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
9853
+ var name15 = "AI_NoSuchProviderError";
9854
+ var marker15 = `vercel.ai.error.${name15}`;
9855
+ var symbol15 = Symbol.for(marker15);
9856
+ var _a15;
9874
9857
  var NoSuchProviderError = class extends NoSuchModelError3 {
9875
9858
  constructor({
9876
9859
  modelId,
@@ -9879,16 +9862,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
9879
9862
  availableProviders,
9880
9863
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
9881
9864
  }) {
9882
- super({ errorName: name16, modelId, modelType, message });
9883
- this[_a16] = true;
9865
+ super({ errorName: name15, modelId, modelType, message });
9866
+ this[_a15] = true;
9884
9867
  this.providerId = providerId;
9885
9868
  this.availableProviders = availableProviders;
9886
9869
  }
9887
9870
  static isInstance(error) {
9888
- return AISDKError21.hasMarker(error, marker16);
9871
+ return AISDKError20.hasMarker(error, marker15);
9889
9872
  }
9890
9873
  };
9891
- _a16 = symbol16;
9874
+ _a15 = symbol15;
9892
9875
 
9893
9876
  // src/registry/provider-registry.ts
9894
9877
  import {
@@ -9947,10 +9930,10 @@ var DefaultProviderRegistry = class {
9947
9930
  return [id.slice(0, index), id.slice(index + this.separator.length)];
9948
9931
  }
9949
9932
  languageModel(id) {
9950
- var _a17, _b;
9933
+ var _a16, _b;
9951
9934
  const [providerId, modelId] = this.splitId(id, "languageModel");
9952
- let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
9953
- _a17,
9935
+ let model = (_b = (_a16 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
9936
+ _a16,
9954
9937
  modelId
9955
9938
  );
9956
9939
  if (model == null) {
@@ -9965,10 +9948,10 @@ var DefaultProviderRegistry = class {
9965
9948
  return model;
9966
9949
  }
9967
9950
  textEmbeddingModel(id) {
9968
- var _a17;
9951
+ var _a16;
9969
9952
  const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
9970
9953
  const provider = this.getProvider(providerId, "textEmbeddingModel");
9971
- const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
9954
+ const model = (_a16 = provider.textEmbeddingModel) == null ? void 0 : _a16.call(provider, modelId);
9972
9955
  if (model == null) {
9973
9956
  throw new NoSuchModelError4({
9974
9957
  modelId: id,
@@ -9978,20 +9961,20 @@ var DefaultProviderRegistry = class {
9978
9961
  return model;
9979
9962
  }
9980
9963
  imageModel(id) {
9981
- var _a17;
9964
+ var _a16;
9982
9965
  const [providerId, modelId] = this.splitId(id, "imageModel");
9983
9966
  const provider = this.getProvider(providerId, "imageModel");
9984
- const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
9967
+ const model = (_a16 = provider.imageModel) == null ? void 0 : _a16.call(provider, modelId);
9985
9968
  if (model == null) {
9986
9969
  throw new NoSuchModelError4({ modelId: id, modelType: "imageModel" });
9987
9970
  }
9988
9971
  return model;
9989
9972
  }
9990
9973
  transcriptionModel(id) {
9991
- var _a17;
9974
+ var _a16;
9992
9975
  const [providerId, modelId] = this.splitId(id, "transcriptionModel");
9993
9976
  const provider = this.getProvider(providerId, "transcriptionModel");
9994
- const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
9977
+ const model = (_a16 = provider.transcriptionModel) == null ? void 0 : _a16.call(provider, modelId);
9995
9978
  if (model == null) {
9996
9979
  throw new NoSuchModelError4({
9997
9980
  modelId: id,
@@ -10001,10 +9984,10 @@ var DefaultProviderRegistry = class {
10001
9984
  return model;
10002
9985
  }
10003
9986
  speechModel(id) {
10004
- var _a17;
9987
+ var _a16;
10005
9988
  const [providerId, modelId] = this.splitId(id, "speechModel");
10006
9989
  const provider = this.getProvider(providerId, "speechModel");
10007
- const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
9990
+ const model = (_a16 = provider.speechModel) == null ? void 0 : _a16.call(provider, modelId);
10008
9991
  if (model == null) {
10009
9992
  throw new NoSuchModelError4({ modelId: id, modelType: "speechModel" });
10010
9993
  }
@@ -10012,597 +9995,12 @@ var DefaultProviderRegistry = class {
10012
9995
  }
10013
9996
  };
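DefaultProviderRegistry's lookup methods (languageModel, textEmbeddingModel, imageModel, transcriptionModel, speechModel) only have their minified locals renamed. For reference, a sketch of driving the class above through the experimental_createProviderRegistry export listed at the end of this file; the provider objects and model ids are placeholders:

import { experimental_createProviderRegistry as createProviderRegistry } from 'ai';

// `openai` and `anthropic` stand in for provider instances from their respective packages.
function buildRegistry(openai, anthropic) {
  const registry = createProviderRegistry({ openai, anthropic });
  // Ids are split on the separator into providerId and modelId, exactly as splitId does above;
  // unknown ids raise NoSuchModelError / NoSuchProviderError.
  const chat = registry.languageModel('openai:gpt-4o');                          // assumed model id
  const embedder = registry.textEmbeddingModel('openai:text-embedding-3-small'); // assumed model id
  return { chat, embedder };
}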
10014
9997
 
10015
- // src/tool/mcp/mcp-client.ts
10016
- import {
10017
- dynamicTool,
10018
- jsonSchema,
10019
- tool
10020
- } from "@ai-sdk/provider-utils";
10021
-
10022
- // src/tool/mcp/mcp-sse-transport.ts
10023
- import {
10024
- EventSourceParserStream,
10025
- withUserAgentSuffix as withUserAgentSuffix8,
10026
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent2
10027
- } from "@ai-sdk/provider-utils";
10028
-
10029
- // src/tool/mcp/json-rpc-message.ts
10030
- import { z as z10 } from "zod/v4";
10031
-
10032
- // src/tool/mcp/types.ts
10033
- import { z as z9 } from "zod/v4";
10034
- var LATEST_PROTOCOL_VERSION = "2025-06-18";
10035
- var SUPPORTED_PROTOCOL_VERSIONS = [
10036
- LATEST_PROTOCOL_VERSION,
10037
- "2025-03-26",
10038
- "2024-11-05"
10039
- ];
10040
- var ClientOrServerImplementationSchema = z9.looseObject({
10041
- name: z9.string(),
10042
- version: z9.string()
10043
- });
10044
- var BaseParamsSchema = z9.looseObject({
10045
- _meta: z9.optional(z9.object({}).loose())
10046
- });
10047
- var ResultSchema = BaseParamsSchema;
10048
- var RequestSchema = z9.object({
10049
- method: z9.string(),
10050
- params: z9.optional(BaseParamsSchema)
10051
- });
10052
- var ServerCapabilitiesSchema = z9.looseObject({
10053
- experimental: z9.optional(z9.object({}).loose()),
10054
- logging: z9.optional(z9.object({}).loose()),
10055
- prompts: z9.optional(
10056
- z9.looseObject({
10057
- listChanged: z9.optional(z9.boolean())
10058
- })
10059
- ),
10060
- resources: z9.optional(
10061
- z9.looseObject({
10062
- subscribe: z9.optional(z9.boolean()),
10063
- listChanged: z9.optional(z9.boolean())
10064
- })
10065
- ),
10066
- tools: z9.optional(
10067
- z9.looseObject({
10068
- listChanged: z9.optional(z9.boolean())
10069
- })
10070
- )
10071
- });
10072
- var InitializeResultSchema = ResultSchema.extend({
10073
- protocolVersion: z9.string(),
10074
- capabilities: ServerCapabilitiesSchema,
10075
- serverInfo: ClientOrServerImplementationSchema,
10076
- instructions: z9.optional(z9.string())
10077
- });
10078
- var PaginatedResultSchema = ResultSchema.extend({
10079
- nextCursor: z9.optional(z9.string())
10080
- });
10081
- var ToolSchema = z9.object({
10082
- name: z9.string(),
10083
- description: z9.optional(z9.string()),
10084
- inputSchema: z9.object({
10085
- type: z9.literal("object"),
10086
- properties: z9.optional(z9.object({}).loose())
10087
- }).loose()
10088
- }).loose();
10089
- var ListToolsResultSchema = PaginatedResultSchema.extend({
10090
- tools: z9.array(ToolSchema)
10091
- });
10092
- var TextContentSchema = z9.object({
10093
- type: z9.literal("text"),
10094
- text: z9.string()
10095
- }).loose();
10096
- var ImageContentSchema = z9.object({
10097
- type: z9.literal("image"),
10098
- data: z9.base64(),
10099
- mimeType: z9.string()
10100
- }).loose();
10101
- var ResourceContentsSchema = z9.object({
10102
- /**
10103
- * The URI of this resource.
10104
- */
10105
- uri: z9.string(),
10106
- /**
10107
- * The MIME type of this resource, if known.
10108
- */
10109
- mimeType: z9.optional(z9.string())
10110
- }).loose();
10111
- var TextResourceContentsSchema = ResourceContentsSchema.extend({
10112
- text: z9.string()
10113
- });
10114
- var BlobResourceContentsSchema = ResourceContentsSchema.extend({
10115
- blob: z9.base64()
10116
- });
10117
- var EmbeddedResourceSchema = z9.object({
10118
- type: z9.literal("resource"),
10119
- resource: z9.union([TextResourceContentsSchema, BlobResourceContentsSchema])
10120
- }).loose();
10121
- var CallToolResultSchema = ResultSchema.extend({
10122
- content: z9.array(
10123
- z9.union([TextContentSchema, ImageContentSchema, EmbeddedResourceSchema])
10124
- ),
10125
- isError: z9.boolean().default(false).optional()
10126
- }).or(
10127
- ResultSchema.extend({
10128
- toolResult: z9.unknown()
10129
- })
10130
- );
10131
-
10132
- // src/tool/mcp/json-rpc-message.ts
10133
- var JSONRPC_VERSION = "2.0";
10134
- var JSONRPCRequestSchema = z10.object({
10135
- jsonrpc: z10.literal(JSONRPC_VERSION),
10136
- id: z10.union([z10.string(), z10.number().int()])
10137
- }).merge(RequestSchema).strict();
10138
- var JSONRPCResponseSchema = z10.object({
10139
- jsonrpc: z10.literal(JSONRPC_VERSION),
10140
- id: z10.union([z10.string(), z10.number().int()]),
10141
- result: ResultSchema
10142
- }).strict();
10143
- var JSONRPCErrorSchema = z10.object({
10144
- jsonrpc: z10.literal(JSONRPC_VERSION),
10145
- id: z10.union([z10.string(), z10.number().int()]),
10146
- error: z10.object({
10147
- code: z10.number().int(),
10148
- message: z10.string(),
10149
- data: z10.optional(z10.unknown())
10150
- })
10151
- }).strict();
10152
- var JSONRPCNotificationSchema = z10.object({
10153
- jsonrpc: z10.literal(JSONRPC_VERSION)
10154
- }).merge(
10155
- z10.object({
10156
- method: z10.string(),
10157
- params: z10.optional(BaseParamsSchema)
10158
- })
10159
- ).strict();
10160
- var JSONRPCMessageSchema = z10.union([
10161
- JSONRPCRequestSchema,
10162
- JSONRPCNotificationSchema,
10163
- JSONRPCResponseSchema,
10164
- JSONRPCErrorSchema
10165
- ]);
10166
-
10167
- // src/tool/mcp/mcp-sse-transport.ts
10168
- var SseMCPTransport = class {
10169
- constructor({
10170
- url,
10171
- headers
10172
- }) {
10173
- this.connected = false;
10174
- this.url = new URL(url);
10175
- this.headers = headers;
10176
- }
10177
- async start() {
10178
- return new Promise((resolve3, reject) => {
10179
- if (this.connected) {
10180
- return resolve3();
10181
- }
10182
- this.abortController = new AbortController();
10183
- const establishConnection = async () => {
10184
- var _a17, _b, _c;
10185
- try {
10186
- const headers = withUserAgentSuffix8(
10187
- {
10188
- ...this.headers,
10189
- Accept: "text/event-stream"
10190
- },
10191
- `ai-sdk/${VERSION}`,
10192
- getRuntimeEnvironmentUserAgent2()
10193
- );
10194
- const response = await fetch(this.url.href, {
10195
- headers,
10196
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
10197
- });
10198
- if (!response.ok || !response.body) {
10199
- const error = new MCPClientError({
10200
- message: `MCP SSE Transport Error: ${response.status} ${response.statusText}`
10201
- });
10202
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
10203
- return reject(error);
10204
- }
10205
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream());
10206
- const reader = stream.getReader();
10207
- const processEvents = async () => {
10208
- var _a18, _b2, _c2;
10209
- try {
10210
- while (true) {
10211
- const { done, value } = await reader.read();
10212
- if (done) {
10213
- if (this.connected) {
10214
- this.connected = false;
10215
- throw new MCPClientError({
10216
- message: "MCP SSE Transport Error: Connection closed unexpectedly"
10217
- });
10218
- }
10219
- return;
10220
- }
10221
- const { event, data } = value;
10222
- if (event === "endpoint") {
10223
- this.endpoint = new URL(data, this.url);
10224
- if (this.endpoint.origin !== this.url.origin) {
10225
- throw new MCPClientError({
10226
- message: `MCP SSE Transport Error: Endpoint origin does not match connection origin: ${this.endpoint.origin}`
10227
- });
10228
- }
10229
- this.connected = true;
10230
- resolve3();
10231
- } else if (event === "message") {
10232
- try {
10233
- const message = JSONRPCMessageSchema.parse(
10234
- JSON.parse(data)
10235
- );
10236
- (_a18 = this.onmessage) == null ? void 0 : _a18.call(this, message);
10237
- } catch (error) {
10238
- const e = new MCPClientError({
10239
- message: "MCP SSE Transport Error: Failed to parse message",
10240
- cause: error
10241
- });
10242
- (_b2 = this.onerror) == null ? void 0 : _b2.call(this, e);
10243
- }
10244
- }
10245
- }
10246
- } catch (error) {
10247
- if (error instanceof Error && error.name === "AbortError") {
10248
- return;
10249
- }
10250
- (_c2 = this.onerror) == null ? void 0 : _c2.call(this, error);
10251
- reject(error);
10252
- }
10253
- };
10254
- this.sseConnection = {
10255
- close: () => reader.cancel()
10256
- };
10257
- processEvents();
10258
- } catch (error) {
10259
- if (error instanceof Error && error.name === "AbortError") {
10260
- return;
10261
- }
10262
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
10263
- reject(error);
10264
- }
10265
- };
10266
- establishConnection();
10267
- });
10268
- }
10269
- async close() {
10270
- var _a17, _b, _c;
10271
- this.connected = false;
10272
- (_a17 = this.sseConnection) == null ? void 0 : _a17.close();
10273
- (_b = this.abortController) == null ? void 0 : _b.abort();
10274
- (_c = this.onclose) == null ? void 0 : _c.call(this);
10275
- }
10276
- async send(message) {
10277
- var _a17, _b, _c;
10278
- if (!this.endpoint || !this.connected) {
10279
- throw new MCPClientError({
10280
- message: "MCP SSE Transport Error: Not connected"
10281
- });
10282
- }
10283
- try {
10284
- const headers = withUserAgentSuffix8(
10285
- {
10286
- ...this.headers,
10287
- "Content-Type": "application/json"
10288
- },
10289
- `ai-sdk/${VERSION}`,
10290
- getRuntimeEnvironmentUserAgent2()
10291
- );
10292
- const init = {
10293
- method: "POST",
10294
- headers,
10295
- body: JSON.stringify(message),
10296
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
10297
- };
10298
- const response = await fetch(this.endpoint, init);
10299
- if (!response.ok) {
10300
- const text2 = await response.text().catch(() => null);
10301
- const error = new MCPClientError({
10302
- message: `MCP SSE Transport Error: POSTing to endpoint (HTTP ${response.status}): ${text2}`
10303
- });
10304
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
10305
- return;
10306
- }
10307
- } catch (error) {
10308
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
10309
- return;
10310
- }
10311
- }
10312
- };
10313
-
10314
- // src/tool/mcp/mcp-transport.ts
10315
- function createMcpTransport(config) {
10316
- if (config.type !== "sse") {
10317
- throw new MCPClientError({
10318
- message: "Unsupported or invalid transport configuration. If you are using a custom transport, make sure it implements the MCPTransport interface."
10319
- });
10320
- }
10321
- return new SseMCPTransport(config);
10322
- }
10323
- function isCustomMcpTransport(transport) {
10324
- return "start" in transport && typeof transport.start === "function" && "send" in transport && typeof transport.send === "function" && "close" in transport && typeof transport.close === "function";
10325
- }
10326
-
10327
- // src/tool/mcp/mcp-client.ts
10328
- var CLIENT_VERSION = "1.0.0";
10329
- async function createMCPClient(config) {
10330
- const client = new DefaultMCPClient(config);
10331
- await client.init();
10332
- return client;
10333
- }
10334
- var DefaultMCPClient = class {
10335
- constructor({
10336
- transport: transportConfig,
10337
- name: name17 = "ai-sdk-mcp-client",
10338
- onUncaughtError
10339
- }) {
10340
- this.requestMessageId = 0;
10341
- this.responseHandlers = /* @__PURE__ */ new Map();
10342
- this.serverCapabilities = {};
10343
- this.isClosed = true;
10344
- this.onUncaughtError = onUncaughtError;
10345
- if (isCustomMcpTransport(transportConfig)) {
10346
- this.transport = transportConfig;
10347
- } else {
10348
- this.transport = createMcpTransport(transportConfig);
10349
- }
10350
- this.transport.onclose = () => this.onClose();
10351
- this.transport.onerror = (error) => this.onError(error);
10352
- this.transport.onmessage = (message) => {
10353
- if ("method" in message) {
10354
- this.onError(
10355
- new MCPClientError({
10356
- message: "Unsupported message type"
10357
- })
10358
- );
10359
- return;
10360
- }
10361
- this.onResponse(message);
10362
- };
10363
- this.clientInfo = {
10364
- name: name17,
10365
- version: CLIENT_VERSION
10366
- };
10367
- }
10368
- async init() {
10369
- try {
10370
- await this.transport.start();
10371
- this.isClosed = false;
10372
- const result = await this.request({
10373
- request: {
10374
- method: "initialize",
10375
- params: {
10376
- protocolVersion: LATEST_PROTOCOL_VERSION,
10377
- capabilities: {},
10378
- clientInfo: this.clientInfo
10379
- }
10380
- },
10381
- resultSchema: InitializeResultSchema
10382
- });
10383
- if (result === void 0) {
10384
- throw new MCPClientError({
10385
- message: "Server sent invalid initialize result"
10386
- });
10387
- }
10388
- if (!SUPPORTED_PROTOCOL_VERSIONS.includes(result.protocolVersion)) {
10389
- throw new MCPClientError({
10390
- message: `Server's protocol version is not supported: ${result.protocolVersion}`
10391
- });
10392
- }
10393
- this.serverCapabilities = result.capabilities;
10394
- await this.notification({
10395
- method: "notifications/initialized"
10396
- });
10397
- return this;
10398
- } catch (error) {
10399
- await this.close();
10400
- throw error;
10401
- }
10402
- }
10403
- async close() {
10404
- var _a17;
10405
- if (this.isClosed)
10406
- return;
10407
- await ((_a17 = this.transport) == null ? void 0 : _a17.close());
10408
- this.onClose();
10409
- }
10410
- assertCapability(method) {
10411
- switch (method) {
10412
- case "initialize":
10413
- break;
10414
- case "tools/list":
10415
- case "tools/call":
10416
- if (!this.serverCapabilities.tools) {
10417
- throw new MCPClientError({
10418
- message: `Server does not support tools`
10419
- });
10420
- }
10421
- break;
10422
- default:
10423
- throw new MCPClientError({
10424
- message: `Unsupported method: ${method}`
10425
- });
10426
- }
10427
- }
10428
- async request({
10429
- request,
10430
- resultSchema,
10431
- options
10432
- }) {
10433
- return new Promise((resolve3, reject) => {
10434
- if (this.isClosed) {
10435
- return reject(
10436
- new MCPClientError({
10437
- message: "Attempted to send a request from a closed client"
10438
- })
10439
- );
10440
- }
10441
- this.assertCapability(request.method);
10442
- const signal = options == null ? void 0 : options.signal;
10443
- signal == null ? void 0 : signal.throwIfAborted();
10444
- const messageId = this.requestMessageId++;
10445
- const jsonrpcRequest = {
10446
- ...request,
10447
- jsonrpc: "2.0",
10448
- id: messageId
10449
- };
10450
- const cleanup = () => {
10451
- this.responseHandlers.delete(messageId);
10452
- };
10453
- this.responseHandlers.set(messageId, (response) => {
10454
- if (signal == null ? void 0 : signal.aborted) {
10455
- return reject(
10456
- new MCPClientError({
10457
- message: "Request was aborted",
10458
- cause: signal.reason
10459
- })
10460
- );
10461
- }
10462
- if (response instanceof Error) {
10463
- return reject(response);
10464
- }
10465
- try {
10466
- const result = resultSchema.parse(response.result);
10467
- resolve3(result);
10468
- } catch (error) {
10469
- const parseError = new MCPClientError({
10470
- message: "Failed to parse server response",
10471
- cause: error
10472
- });
10473
- reject(parseError);
10474
- }
10475
- });
10476
- this.transport.send(jsonrpcRequest).catch((error) => {
10477
- cleanup();
10478
- reject(error);
10479
- });
10480
- });
10481
- }
10482
- async listTools({
10483
- params,
10484
- options
10485
- } = {}) {
10486
- try {
10487
- return this.request({
10488
- request: { method: "tools/list", params },
10489
- resultSchema: ListToolsResultSchema,
10490
- options
10491
- });
10492
- } catch (error) {
10493
- throw error;
10494
- }
10495
- }
10496
- async callTool({
10497
- name: name17,
10498
- args,
10499
- options
10500
- }) {
10501
- try {
10502
- return this.request({
10503
- request: { method: "tools/call", params: { name: name17, arguments: args } },
10504
- resultSchema: CallToolResultSchema,
10505
- options: {
10506
- signal: options == null ? void 0 : options.abortSignal
10507
- }
10508
- });
10509
- } catch (error) {
10510
- throw error;
10511
- }
10512
- }
10513
- async notification(notification) {
10514
- const jsonrpcNotification = {
10515
- ...notification,
10516
- jsonrpc: "2.0"
10517
- };
10518
- await this.transport.send(jsonrpcNotification);
10519
- }
10520
- /**
10521
- * Returns a set of AI SDK tools from the MCP server
10522
- * @returns A record of tool names to their implementations
10523
- */
10524
- async tools({
10525
- schemas = "automatic"
10526
- } = {}) {
10527
- var _a17;
10528
- const tools = {};
10529
- try {
10530
- const listToolsResult = await this.listTools();
10531
- for (const { name: name17, description, inputSchema } of listToolsResult.tools) {
10532
- if (schemas !== "automatic" && !(name17 in schemas)) {
10533
- continue;
10534
- }
10535
- const self = this;
10536
- const execute = async (args, options) => {
10537
- var _a18;
10538
- (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
10539
- return self.callTool({ name: name17, args, options });
10540
- };
10541
- const toolWithExecute = schemas === "automatic" ? dynamicTool({
10542
- description,
10543
- inputSchema: jsonSchema({
10544
- ...inputSchema,
10545
- properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
10546
- additionalProperties: false
10547
- }),
10548
- execute
10549
- }) : tool({
10550
- description,
10551
- inputSchema: schemas[name17].inputSchema,
10552
- execute
10553
- });
10554
- tools[name17] = toolWithExecute;
10555
- }
10556
- return tools;
10557
- } catch (error) {
10558
- throw error;
10559
- }
10560
- }
10561
- onClose() {
10562
- if (this.isClosed)
10563
- return;
10564
- this.isClosed = true;
10565
- const error = new MCPClientError({
10566
- message: "Connection closed"
10567
- });
10568
- for (const handler of this.responseHandlers.values()) {
10569
- handler(error);
10570
- }
10571
- this.responseHandlers.clear();
10572
- }
10573
- onError(error) {
10574
- if (this.onUncaughtError) {
10575
- this.onUncaughtError(error);
10576
- }
10577
- }
10578
- onResponse(response) {
10579
- const messageId = Number(response.id);
10580
- const handler = this.responseHandlers.get(messageId);
10581
- if (handler === void 0) {
10582
- throw new MCPClientError({
10583
- message: `Protocol error: Received a response for an unknown message ID: ${JSON.stringify(
10584
- response
10585
- )}`
10586
- });
10587
- }
10588
- this.responseHandlers.delete(messageId);
10589
- handler(
10590
- "result" in response ? response : new MCPClientError({
10591
- message: response.error.message,
10592
- code: response.error.code,
10593
- data: response.error.data,
10594
- cause: response.error
10595
- })
10596
- );
10597
- }
10598
- };
10599
-
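The whole block above — the MCP protocol schemas, the SSE transport, and DefaultMCPClient — is removed in beta.72, along with the MCPClientError and experimental_createMCPClient exports dropped further down. For readers tracking the removal, a sketch of how the deleted API was called, reconstructed from the removed source itself (the server URL is a placeholder):

import { experimental_createMCPClient as createMCPClient } from 'ai'; // export removed in this version

async function withMcpTools() {
  // SSE transport config, the only built-in type accepted by createMcpTransport above.
  const mcpClient = await createMCPClient({
    transport: { type: 'sse', url: 'https://example.com/mcp' }, // placeholder URL
  });
  try {
    // With the default "automatic" schemas, each server tool is wrapped via dynamicTool + jsonSchema.
    const tools = await mcpClient.tools();
    return tools; // typically passed to generateText / streamText as the `tools` option
  } finally {
    await mcpClient.close();
  }
}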
10600
9998
  // src/transcribe/transcribe.ts
10601
- import { withUserAgentSuffix as withUserAgentSuffix9 } from "@ai-sdk/provider-utils";
9999
+ import { withUserAgentSuffix as withUserAgentSuffix8 } from "@ai-sdk/provider-utils";
10602
10000
 
10603
10001
  // src/error/no-transcript-generated-error.ts
10604
- import { AISDKError as AISDKError22 } from "@ai-sdk/provider";
10605
- var NoTranscriptGeneratedError = class extends AISDKError22 {
10002
+ import { AISDKError as AISDKError21 } from "@ai-sdk/provider";
10003
+ var NoTranscriptGeneratedError = class extends AISDKError21 {
10606
10004
  constructor(options) {
10607
10005
  super({
10608
10006
  name: "AI_NoTranscriptGeneratedError",
@@ -10629,23 +10027,23 @@ async function transcribe({
10629
10027
  maxRetries: maxRetriesArg,
10630
10028
  abortSignal
10631
10029
  });
10632
- const headersWithUserAgent = withUserAgentSuffix9(
10030
+ const headersWithUserAgent = withUserAgentSuffix8(
10633
10031
  headers != null ? headers : {},
10634
10032
  `ai/${VERSION}`
10635
10033
  );
10636
10034
  const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
10637
10035
  const result = await retry(
10638
10036
  () => {
10639
- var _a17;
10037
+ var _a16;
10640
10038
  return resolvedModel.doGenerate({
10641
10039
  audio: audioData,
10642
10040
  abortSignal,
10643
10041
  headers: headersWithUserAgent,
10644
10042
  providerOptions,
10645
- mediaType: (_a17 = detectMediaType({
10043
+ mediaType: (_a16 = detectMediaType({
10646
10044
  data: audioData,
10647
10045
  signatures: audioMediaTypeSignatures
10648
- })) != null ? _a17 : "audio/wav"
10046
+ })) != null ? _a16 : "audio/wav"
10649
10047
  });
10650
10048
  }
10651
10049
  );
@@ -10665,22 +10063,22 @@ async function transcribe({
10665
10063
  }
10666
10064
  var DefaultTranscriptionResult = class {
10667
10065
  constructor(options) {
10668
- var _a17;
10066
+ var _a16;
10669
10067
  this.text = options.text;
10670
10068
  this.segments = options.segments;
10671
10069
  this.language = options.language;
10672
10070
  this.durationInSeconds = options.durationInSeconds;
10673
10071
  this.warnings = options.warnings;
10674
10072
  this.responses = options.responses;
10675
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
10073
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
10676
10074
  }
10677
10075
  };
10678
10076
 
10679
10077
  // src/ui/call-completion-api.ts
10680
10078
  import {
10681
10079
  parseJsonEventStream,
10682
- withUserAgentSuffix as withUserAgentSuffix10,
10683
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent3
10080
+ withUserAgentSuffix as withUserAgentSuffix9,
10081
+ getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent2
10684
10082
  } from "@ai-sdk/provider-utils";
10685
10083
 
10686
10084
  // src/ui/process-text-stream.ts
@@ -10715,7 +10113,7 @@ async function callCompletionApi({
10715
10113
  onError,
10716
10114
  fetch: fetch2 = getOriginalFetch()
10717
10115
  }) {
10718
- var _a17;
10116
+ var _a16;
10719
10117
  try {
10720
10118
  setLoading(true);
10721
10119
  setError(void 0);
@@ -10729,13 +10127,13 @@ async function callCompletionApi({
10729
10127
  ...body
10730
10128
  }),
10731
10129
  credentials,
10732
- headers: withUserAgentSuffix10(
10130
+ headers: withUserAgentSuffix9(
10733
10131
  {
10734
10132
  "Content-Type": "application/json",
10735
10133
  ...headers
10736
10134
  },
10737
10135
  `ai-sdk/${VERSION}`,
10738
- getRuntimeEnvironmentUserAgent3()
10136
+ getRuntimeEnvironmentUserAgent2()
10739
10137
  ),
10740
10138
  signal: abortController.signal
10741
10139
  }).catch((err) => {
@@ -10743,7 +10141,7 @@ async function callCompletionApi({
10743
10141
  });
10744
10142
  if (!response.ok) {
10745
10143
  throw new Error(
10746
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
10144
+ (_a16 = await response.text()) != null ? _a16 : "Failed to fetch the chat response."
10747
10145
  );
10748
10146
  }
10749
10147
  if (!response.body) {
@@ -10829,12 +10227,12 @@ async function convertFileListToFileUIParts(files) {
10829
10227
  }
10830
10228
  return Promise.all(
10831
10229
  Array.from(files).map(async (file) => {
10832
- const { name: name17, type } = file;
10230
+ const { name: name16, type } = file;
10833
10231
  const dataUrl = await new Promise((resolve3, reject) => {
10834
10232
  const reader = new FileReader();
10835
10233
  reader.onload = (readerEvent) => {
10836
- var _a17;
10837
- resolve3((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
10234
+ var _a16;
10235
+ resolve3((_a16 = readerEvent.target) == null ? void 0 : _a16.result);
10838
10236
  };
10839
10237
  reader.onerror = (error) => reject(error);
10840
10238
  reader.readAsDataURL(file);
@@ -10842,7 +10240,7 @@ async function convertFileListToFileUIParts(files) {
10842
10240
  return {
10843
10241
  type: "file",
10844
10242
  mediaType: type,
10845
- filename: name17,
10243
+ filename: name16,
10846
10244
  url: dataUrl
10847
10245
  };
10848
10246
  })
@@ -10855,8 +10253,8 @@ import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-
10855
10253
  // src/ui/http-chat-transport.ts
10856
10254
  import {
10857
10255
  resolve as resolve2,
10858
- withUserAgentSuffix as withUserAgentSuffix11,
10859
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent4
10256
+ withUserAgentSuffix as withUserAgentSuffix10,
10257
+ getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent3
10860
10258
  } from "@ai-sdk/provider-utils";
10861
10259
  var HttpChatTransport = class {
10862
10260
  constructor({
@@ -10880,11 +10278,11 @@ var HttpChatTransport = class {
10880
10278
  abortSignal,
10881
10279
  ...options
10882
10280
  }) {
10883
- var _a17, _b, _c, _d, _e;
10281
+ var _a16, _b, _c, _d, _e;
10884
10282
  const resolvedBody = await resolve2(this.body);
10885
10283
  const resolvedHeaders = await resolve2(this.headers);
10886
10284
  const resolvedCredentials = await resolve2(this.credentials);
10887
- const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
10285
+ const preparedRequest = await ((_a16 = this.prepareSendMessagesRequest) == null ? void 0 : _a16.call(this, {
10888
10286
  api: this.api,
10889
10287
  id: options.chatId,
10890
10288
  messages: options.messages,
@@ -10909,13 +10307,13 @@ var HttpChatTransport = class {
10909
10307
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
10910
10308
  const response = await fetch2(api, {
10911
10309
  method: "POST",
10912
- headers: withUserAgentSuffix11(
10310
+ headers: withUserAgentSuffix10(
10913
10311
  {
10914
10312
  "Content-Type": "application/json",
10915
10313
  ...headers
10916
10314
  },
10917
10315
  `ai-sdk/${VERSION}`,
10918
- getRuntimeEnvironmentUserAgent4()
10316
+ getRuntimeEnvironmentUserAgent3()
10919
10317
  ),
10920
10318
  body: JSON.stringify(body),
10921
10319
  credentials,
@@ -10932,11 +10330,11 @@ var HttpChatTransport = class {
10932
10330
  return this.processResponseStream(response.body);
10933
10331
  }
10934
10332
  async reconnectToStream(options) {
10935
- var _a17, _b, _c, _d, _e;
10333
+ var _a16, _b, _c, _d, _e;
10936
10334
  const resolvedBody = await resolve2(this.body);
10937
10335
  const resolvedHeaders = await resolve2(this.headers);
10938
10336
  const resolvedCredentials = await resolve2(this.credentials);
10939
- const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
10337
+ const preparedRequest = await ((_a16 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a16.call(this, {
10940
10338
  api: this.api,
10941
10339
  id: options.chatId,
10942
10340
  body: { ...resolvedBody, ...options.body },
@@ -10950,10 +10348,10 @@ var HttpChatTransport = class {
10950
10348
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
10951
10349
  const response = await fetch2(api, {
10952
10350
  method: "GET",
10953
- headers: withUserAgentSuffix11(
10351
+ headers: withUserAgentSuffix10(
10954
10352
  headers,
10955
10353
  `ai-sdk/${VERSION}`,
10956
- getRuntimeEnvironmentUserAgent4()
10354
+ getRuntimeEnvironmentUserAgent3()
10957
10355
  ),
10958
10356
  credentials
10959
10357
  });
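HttpChatTransport.sendMessages and reconnectToStream only swap the numbered withUserAgentSuffix / getRuntimeEnvironmentUserAgent aliases. A sketch of configuring the concrete DefaultChatTransport export built on this class; the endpoint, header value, and body shape are placeholders:

import { DefaultChatTransport } from 'ai';

const transport = new DefaultChatTransport({
  api: '/api/chat',                              // placeholder endpoint
  headers: { Authorization: 'Bearer <token>' },  // merged with the ai-sdk user-agent suffix above
  // Optional hook from the code above: shape the outgoing request per call.
  prepareSendMessagesRequest: ({ id, messages, body }) => ({
    body: { id, messages, ...body },
  }),
});
// The transport is then handed to a Chat instance built on AbstractChat (e.g. useChat in the UI packages).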
@@ -11018,11 +10416,11 @@ var AbstractChat = class {
11018
10416
  * If a messageId is provided, the message will be replaced.
11019
10417
  */
11020
10418
  this.sendMessage = async (message, options) => {
11021
- var _a17, _b, _c, _d;
10419
+ var _a16, _b, _c, _d;
11022
10420
  if (message == null) {
11023
10421
  await this.makeRequest({
11024
10422
  trigger: "submit-message",
11025
- messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
10423
+ messageId: (_a16 = this.lastMessage) == null ? void 0 : _a16.id,
11026
10424
  ...options
11027
10425
  });
11028
10426
  return;
@@ -11115,7 +10513,7 @@ var AbstractChat = class {
11115
10513
  approved,
11116
10514
  reason
11117
10515
  }) => this.jobExecutor.run(async () => {
11118
- var _a17, _b;
10516
+ var _a16, _b;
11119
10517
  const messages = this.state.messages;
11120
10518
  const lastMessage = messages[messages.length - 1];
11121
10519
  const updatePart = (part) => isToolOrDynamicToolUIPart(part) && part.state === "approval-requested" && part.approval.id === id ? {
@@ -11130,7 +10528,7 @@ var AbstractChat = class {
11130
10528
  if (this.activeResponse) {
11131
10529
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
11132
10530
  }
11133
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
10531
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
11134
10532
  this.makeRequest({
11135
10533
  trigger: "submit-message",
11136
10534
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11139,12 +10537,12 @@ var AbstractChat = class {
11139
10537
  });
11140
10538
  this.addToolResult = async ({
11141
10539
  state = "output-available",
11142
- tool: tool3,
10540
+ tool: tool2,
11143
10541
  toolCallId,
11144
10542
  output,
11145
10543
  errorText
11146
10544
  }) => this.jobExecutor.run(async () => {
11147
- var _a17, _b;
10545
+ var _a16, _b;
11148
10546
  const messages = this.state.messages;
11149
10547
  const lastMessage = messages[messages.length - 1];
11150
10548
  const updatePart = (part) => isToolOrDynamicToolUIPart(part) && part.toolCallId === toolCallId ? { ...part, state, output, errorText } : part;
@@ -11155,7 +10553,7 @@ var AbstractChat = class {
11155
10553
  if (this.activeResponse) {
11156
10554
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
11157
10555
  }
11158
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
10556
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
11159
10557
  this.makeRequest({
11160
10558
  trigger: "submit-message",
11161
10559
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11166,10 +10564,10 @@ var AbstractChat = class {
11166
10564
  * Abort the current request immediately, keep the generated tokens if any.
11167
10565
  */
11168
10566
  this.stop = async () => {
11169
- var _a17;
10567
+ var _a16;
11170
10568
  if (this.status !== "streaming" && this.status !== "submitted")
11171
10569
  return;
11172
- if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
10570
+ if ((_a16 = this.activeResponse) == null ? void 0 : _a16.abortController) {
11173
10571
  this.activeResponse.abortController.abort();
11174
10572
  }
11175
10573
  };
@@ -11224,7 +10622,7 @@ var AbstractChat = class {
11224
10622
  body,
11225
10623
  messageId
11226
10624
  }) {
11227
- var _a17, _b, _c;
10625
+ var _a16, _b, _c;
11228
10626
  this.setStatus({ status: "submitted", error: void 0 });
11229
10627
  const lastMessage = this.lastMessage;
11230
10628
  let isAbort = false;
@@ -11273,9 +10671,9 @@ var AbstractChat = class {
11273
10671
  () => job({
11274
10672
  state: activeResponse.state,
11275
10673
  write: () => {
11276
- var _a18;
10674
+ var _a17;
11277
10675
  this.setStatus({ status: "streaming" });
11278
- const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
10676
+ const replaceLastMessage = activeResponse.state.message.id === ((_a17 = this.lastMessage) == null ? void 0 : _a17.id);
11279
10677
  if (replaceLastMessage) {
11280
10678
  this.state.replaceMessage(
11281
10679
  this.state.messages.length - 1,
@@ -11321,7 +10719,7 @@ var AbstractChat = class {
11321
10719
  this.setStatus({ status: "error", error: err });
11322
10720
  } finally {
11323
10721
  try {
11324
- (_a17 = this.onFinish) == null ? void 0 : _a17.call(this, {
10722
+ (_a16 = this.onFinish) == null ? void 0 : _a16.call(this, {
11325
10723
  message: this.activeResponse.state.message,
11326
10724
  messages: this.state.messages,
11327
10725
  isAbort,
@@ -11424,7 +10822,7 @@ var TextStreamChatTransport = class extends HttpChatTransport {
11424
10822
  }
11425
10823
  };
11426
10824
  export {
11427
- AISDKError18 as AISDKError,
10825
+ AISDKError17 as AISDKError,
11428
10826
  APICallError,
11429
10827
  AbstractChat,
11430
10828
  DefaultChatTransport,
@@ -11443,7 +10841,6 @@ export {
11443
10841
  JsonToSseTransformStream,
11444
10842
  LoadAPIKeyError,
11445
10843
  LoadSettingError,
11446
- MCPClientError,
11447
10844
  MessageConversionError,
11448
10845
  NoContentGeneratedError,
11449
10846
  NoImageGeneratedError,
@@ -11488,10 +10885,9 @@ export {
11488
10885
  createUIMessageStreamResponse,
11489
10886
  customProvider,
11490
10887
  defaultSettingsMiddleware,
11491
- dynamicTool2 as dynamicTool,
10888
+ dynamicTool,
11492
10889
  embed,
11493
10890
  embedMany,
11494
- createMCPClient as experimental_createMCPClient,
11495
10891
  experimental_createProviderRegistry,
11496
10892
  experimental_customProvider,
11497
10893
  generateImage as experimental_generateImage,
@@ -11513,7 +10909,7 @@ export {
11513
10909
  isTextUIPart,
11514
10910
  isToolOrDynamicToolUIPart,
11515
10911
  isToolUIPart,
11516
- jsonSchema2 as jsonSchema,
10912
+ jsonSchema,
11517
10913
  lastAssistantMessageIsCompleteWithApprovalResponses,
11518
10914
  lastAssistantMessageIsCompleteWithToolCalls,
11519
10915
  modelMessageSchema,
@@ -11532,7 +10928,7 @@ export {
11532
10928
  streamObject,
11533
10929
  streamText,
11534
10930
  systemModelMessageSchema,
11535
- tool2 as tool,
10931
+ tool,
11536
10932
  toolModelMessageSchema,
11537
10933
  uiMessageChunkSchema,
11538
10934
  userModelMessageSchema,