ai 5.0.78 → 5.0.79

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,7 +1,7 @@
  var __defProp = Object.defineProperty;
  var __export = (target, all) => {
- for (var name17 in all)
- __defProp(target, name17, { get: all[name17], enumerable: true });
+ for (var name16 in all)
+ __defProp(target, name16, { get: all[name16], enumerable: true });
  };

  // src/index.ts
@@ -9,11 +9,11 @@ import { gateway as gateway2, createGateway } from "@ai-sdk/gateway";
  import {
  asSchema as asSchema5,
  createIdGenerator as createIdGenerator5,
- dynamicTool as dynamicTool2,
+ dynamicTool,
  generateId as generateId2,
- jsonSchema as jsonSchema2,
+ jsonSchema,
  parseJsonEventStream as parseJsonEventStream3,
- tool as tool2,
+ tool,
  zodSchema as zodSchema3
  } from "@ai-sdk/provider-utils";

@@ -98,7 +98,7 @@ import { gateway } from "@ai-sdk/gateway";

  // src/error/index.ts
  import {
- AISDKError as AISDKError18,
+ AISDKError as AISDKError17,
  APICallError,
  EmptyResponseBodyError,
  InvalidPromptError,
@@ -184,24 +184,21 @@ var InvalidToolInputError = class extends AISDKError4 {
  };
  _a4 = symbol4;

- // src/error/mcp-client-error.ts
+ // src/error/no-image-generated-error.ts
  import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
- var name5 = "AI_MCPClientError";
+ var name5 = "AI_NoImageGeneratedError";
  var marker5 = `vercel.ai.error.${name5}`;
  var symbol5 = Symbol.for(marker5);
  var _a5;
- var MCPClientError = class extends AISDKError5 {
+ var NoImageGeneratedError = class extends AISDKError5 {
  constructor({
- name: name17 = "MCPClientError",
- message,
+ message = "No image generated.",
  cause,
- data,
- code
+ responses
  }) {
- super({ name: name17, message, cause });
+ super({ name: name5, message, cause });
  this[_a5] = true;
- this.data = data;
- this.code = code;
+ this.responses = responses;
  }
  static isInstance(error) {
  return AISDKError5.hasMarker(error, marker5);
@@ -209,35 +206,13 @@ var MCPClientError = class extends AISDKError5 {
  };
  _a5 = symbol5;

- // src/error/no-image-generated-error.ts
+ // src/error/no-object-generated-error.ts
  import { AISDKError as AISDKError6 } from "@ai-sdk/provider";
- var name6 = "AI_NoImageGeneratedError";
+ var name6 = "AI_NoObjectGeneratedError";
  var marker6 = `vercel.ai.error.${name6}`;
  var symbol6 = Symbol.for(marker6);
  var _a6;
- var NoImageGeneratedError = class extends AISDKError6 {
- constructor({
- message = "No image generated.",
- cause,
- responses
- }) {
- super({ name: name6, message, cause });
- this[_a6] = true;
- this.responses = responses;
- }
- static isInstance(error) {
- return AISDKError6.hasMarker(error, marker6);
- }
- };
- _a6 = symbol6;
-
- // src/error/no-object-generated-error.ts
- import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
- var name7 = "AI_NoObjectGeneratedError";
- var marker7 = `vercel.ai.error.${name7}`;
- var symbol7 = Symbol.for(marker7);
- var _a7;
- var NoObjectGeneratedError = class extends AISDKError7 {
+ var NoObjectGeneratedError = class extends AISDKError6 {
  constructor({
  message = "No object generated.",
  cause,
@@ -246,43 +221,43 @@ var NoObjectGeneratedError = class extends AISDKError7 {
  usage,
  finishReason
  }) {
- super({ name: name7, message, cause });
- this[_a7] = true;
+ super({ name: name6, message, cause });
+ this[_a6] = true;
  this.text = text2;
  this.response = response;
  this.usage = usage;
  this.finishReason = finishReason;
  }
  static isInstance(error) {
- return AISDKError7.hasMarker(error, marker7);
+ return AISDKError6.hasMarker(error, marker6);
  }
  };
- _a7 = symbol7;
+ _a6 = symbol6;

  // src/error/no-output-generated-error.ts
- import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
- var name8 = "AI_NoOutputGeneratedError";
- var marker8 = `vercel.ai.error.${name8}`;
- var symbol8 = Symbol.for(marker8);
- var _a8;
- var NoOutputGeneratedError = class extends AISDKError8 {
+ import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
+ var name7 = "AI_NoOutputGeneratedError";
+ var marker7 = `vercel.ai.error.${name7}`;
+ var symbol7 = Symbol.for(marker7);
+ var _a7;
+ var NoOutputGeneratedError = class extends AISDKError7 {
  // used in isInstance
  constructor({
  message = "No output generated.",
  cause
  } = {}) {
- super({ name: name8, message, cause });
- this[_a8] = true;
+ super({ name: name7, message, cause });
+ this[_a7] = true;
  }
  static isInstance(error) {
- return AISDKError8.hasMarker(error, marker8);
+ return AISDKError7.hasMarker(error, marker7);
  }
  };
- _a8 = symbol8;
+ _a7 = symbol7;

  // src/error/no-speech-generated-error.ts
- import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
- var NoSpeechGeneratedError = class extends AISDKError9 {
+ import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
+ var NoSpeechGeneratedError = class extends AISDKError8 {
  constructor(options) {
  super({
  name: "AI_NoSpeechGeneratedError",
@@ -293,53 +268,53 @@ var NoSpeechGeneratedError = class extends AISDKError9 {
  };

  // src/error/no-such-tool-error.ts
- import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
- var name9 = "AI_NoSuchToolError";
- var marker9 = `vercel.ai.error.${name9}`;
- var symbol9 = Symbol.for(marker9);
- var _a9;
- var NoSuchToolError = class extends AISDKError10 {
+ import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
+ var name8 = "AI_NoSuchToolError";
+ var marker8 = `vercel.ai.error.${name8}`;
+ var symbol8 = Symbol.for(marker8);
+ var _a8;
+ var NoSuchToolError = class extends AISDKError9 {
  constructor({
  toolName,
  availableTools = void 0,
  message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
  }) {
- super({ name: name9, message });
- this[_a9] = true;
+ super({ name: name8, message });
+ this[_a8] = true;
  this.toolName = toolName;
  this.availableTools = availableTools;
  }
  static isInstance(error) {
- return AISDKError10.hasMarker(error, marker9);
+ return AISDKError9.hasMarker(error, marker8);
  }
  };
- _a9 = symbol9;
+ _a8 = symbol8;

  // src/error/tool-call-repair-error.ts
- import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
- var name10 = "AI_ToolCallRepairError";
- var marker10 = `vercel.ai.error.${name10}`;
- var symbol10 = Symbol.for(marker10);
- var _a10;
- var ToolCallRepairError = class extends AISDKError11 {
+ import { AISDKError as AISDKError10, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
+ var name9 = "AI_ToolCallRepairError";
+ var marker9 = `vercel.ai.error.${name9}`;
+ var symbol9 = Symbol.for(marker9);
+ var _a9;
+ var ToolCallRepairError = class extends AISDKError10 {
  constructor({
  cause,
  originalError,
  message = `Error repairing tool call: ${getErrorMessage2(cause)}`
  }) {
- super({ name: name10, message, cause });
- this[_a10] = true;
+ super({ name: name9, message, cause });
+ this[_a9] = true;
  this.originalError = originalError;
  }
  static isInstance(error) {
- return AISDKError11.hasMarker(error, marker10);
+ return AISDKError10.hasMarker(error, marker9);
  }
  };
- _a10 = symbol10;
+ _a9 = symbol9;

  // src/error/unsupported-model-version-error.ts
- import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
- var UnsupportedModelVersionError = class extends AISDKError12 {
+ import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
+ var UnsupportedModelVersionError = class extends AISDKError11 {
  constructor(options) {
  super({
  name: "AI_UnsupportedModelVersionError",
@@ -352,76 +327,76 @@ var UnsupportedModelVersionError = class extends AISDKError12 {
  };

  // src/prompt/invalid-data-content-error.ts
- import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
- var name11 = "AI_InvalidDataContentError";
- var marker11 = `vercel.ai.error.${name11}`;
- var symbol11 = Symbol.for(marker11);
- var _a11;
- var InvalidDataContentError = class extends AISDKError13 {
+ import { AISDKError as AISDKError12 } from "@ai-sdk/provider";
+ var name10 = "AI_InvalidDataContentError";
+ var marker10 = `vercel.ai.error.${name10}`;
+ var symbol10 = Symbol.for(marker10);
+ var _a10;
+ var InvalidDataContentError = class extends AISDKError12 {
  constructor({
  content,
  cause,
  message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
  }) {
- super({ name: name11, message, cause });
- this[_a11] = true;
+ super({ name: name10, message, cause });
+ this[_a10] = true;
  this.content = content;
  }
  static isInstance(error) {
- return AISDKError13.hasMarker(error, marker11);
+ return AISDKError12.hasMarker(error, marker10);
  }
  };
- _a11 = symbol11;
+ _a10 = symbol10;

  // src/prompt/invalid-message-role-error.ts
- import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
- var name12 = "AI_InvalidMessageRoleError";
- var marker12 = `vercel.ai.error.${name12}`;
- var symbol12 = Symbol.for(marker12);
- var _a12;
- var InvalidMessageRoleError = class extends AISDKError14 {
+ import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
+ var name11 = "AI_InvalidMessageRoleError";
+ var marker11 = `vercel.ai.error.${name11}`;
+ var symbol11 = Symbol.for(marker11);
+ var _a11;
+ var InvalidMessageRoleError = class extends AISDKError13 {
  constructor({
  role,
  message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
  }) {
- super({ name: name12, message });
- this[_a12] = true;
+ super({ name: name11, message });
+ this[_a11] = true;
  this.role = role;
  }
  static isInstance(error) {
- return AISDKError14.hasMarker(error, marker12);
+ return AISDKError13.hasMarker(error, marker11);
  }
  };
- _a12 = symbol12;
+ _a11 = symbol11;

  // src/prompt/message-conversion-error.ts
- import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
- var name13 = "AI_MessageConversionError";
- var marker13 = `vercel.ai.error.${name13}`;
- var symbol13 = Symbol.for(marker13);
- var _a13;
- var MessageConversionError = class extends AISDKError15 {
+ import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
+ var name12 = "AI_MessageConversionError";
+ var marker12 = `vercel.ai.error.${name12}`;
+ var symbol12 = Symbol.for(marker12);
+ var _a12;
+ var MessageConversionError = class extends AISDKError14 {
  constructor({
  originalMessage,
  message
  }) {
- super({ name: name13, message });
- this[_a13] = true;
+ super({ name: name12, message });
+ this[_a12] = true;
  this.originalMessage = originalMessage;
  }
  static isInstance(error) {
- return AISDKError15.hasMarker(error, marker13);
+ return AISDKError14.hasMarker(error, marker12);
  }
  };
- _a13 = symbol13;
+ _a12 = symbol12;

  // src/util/download/download-error.ts
- import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
- var name14 = "AI_DownloadError";
- var marker14 = `vercel.ai.error.${name14}`;
- var symbol14 = Symbol.for(marker14);
- var _a14;
- var DownloadError = class extends AISDKError16 {
+ import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
+ var name13 = "AI_DownloadError";
+ var marker13 = `vercel.ai.error.${name13}`;
+ var symbol13 = Symbol.for(marker13);
+ var _a13;
+ var DownloadError = class extends AISDKError15 {
  constructor({
  url,
  statusCode,
@@ -429,41 +404,41 @@ var DownloadError = class extends AISDKError16 {
  cause,
  message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
  }) {
- super({ name: name14, message, cause });
- this[_a14] = true;
+ super({ name: name13, message, cause });
+ this[_a13] = true;
  this.url = url;
  this.statusCode = statusCode;
  this.statusText = statusText;
  }
  static isInstance(error) {
- return AISDKError16.hasMarker(error, marker14);
+ return AISDKError15.hasMarker(error, marker13);
  }
  };
- _a14 = symbol14;
+ _a13 = symbol13;

  // src/util/retry-error.ts
- import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
- var name15 = "AI_RetryError";
- var marker15 = `vercel.ai.error.${name15}`;
- var symbol15 = Symbol.for(marker15);
- var _a15;
- var RetryError = class extends AISDKError17 {
+ import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
+ var name14 = "AI_RetryError";
+ var marker14 = `vercel.ai.error.${name14}`;
+ var symbol14 = Symbol.for(marker14);
+ var _a14;
+ var RetryError = class extends AISDKError16 {
  constructor({
  message,
  reason,
  errors
  }) {
- super({ name: name15, message });
- this[_a15] = true;
+ super({ name: name14, message });
+ this[_a14] = true;
  this.reason = reason;
  this.errors = errors;
  this.lastError = errors[errors.length - 1];
  }
  static isInstance(error) {
- return AISDKError17.hasMarker(error, marker15);
+ return AISDKError16.hasMarker(error, marker14);
  }
  };
- _a15 = symbol15;
+ _a14 = symbol14;

  // src/model/resolve-model.ts
  function resolveLanguageModel(model) {
@@ -495,8 +470,8 @@ function resolveEmbeddingModel(model) {
  );
  }
  function getGlobalProvider() {
- var _a17;
- return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : gateway;
+ var _a16;
+ return (_a16 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a16 : gateway;
  }

  // src/prompt/convert-to-language-model-prompt.ts
@@ -696,11 +671,11 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/version.ts
- var VERSION = true ? "5.0.78" : "0.0.0-test";
+ var VERSION = true ? "5.0.79" : "0.0.0-test";

  // src/util/download/download.ts
  var download = async ({ url }) => {
- var _a17;
+ var _a16;
  const urlText = url.toString();
  try {
  const response = await fetch(urlText, {
@@ -719,7 +694,7 @@ var download = async ({ url }) => {
  }
  return {
  data: new Uint8Array(await response.arrayBuffer()),
- mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
+ mediaType: (_a16 = response.headers.get("content-type")) != null ? _a16 : void 0
  };
  } catch (error) {
  if (DownloadError.isInstance(error)) {
@@ -737,7 +712,7 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
  );

  // src/prompt/data-content.ts
- import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
+ import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
  import {
  convertBase64ToUint8Array as convertBase64ToUint8Array2,
  convertUint8ArrayToBase64
@@ -768,8 +743,8 @@ var dataContentSchema = z.union([
  z.custom(
  // Buffer might not be available in some environments such as CloudFlare:
  (value) => {
- var _a17, _b;
- return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
+ var _a16, _b;
+ return (_b = (_a16 = globalThis.Buffer) == null ? void 0 : _a16.isBuffer(value)) != null ? _b : false;
  },
  { message: "Must be a Buffer" }
  )
@@ -792,7 +767,7 @@ function convertToLanguageModelV2DataContent(content) {
  content.toString()
  );
  if (dataUrlMediaType == null || base64Content == null) {
- throw new AISDKError19({
+ throw new AISDKError18({
  name: "InvalidDataContentError",
  message: `Invalid data URL format in content ${content.toString()}`
  });
@@ -967,8 +942,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
  ).flat().filter(
  (part) => part.type === "image" || part.type === "file"
  ).map((part) => {
- var _a17;
- const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
+ var _a16;
+ const mediaType = (_a16 = part.mediaType) != null ? _a16 : part.type === "image" ? "image/*" : void 0;
  let data = part.type === "image" ? part.image : part.data;
  if (typeof data === "string") {
  try {
@@ -998,7 +973,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
  );
  }
  function convertPartToLanguageModelPart(part, downloadedAssets) {
- var _a17;
+ var _a16;
  if (part.type === "text") {
  return {
  type: "text",
@@ -1031,7 +1006,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
  switch (type) {
  case "image": {
  if (data instanceof Uint8Array || typeof data === "string") {
- mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
+ mediaType = (_a16 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a16 : mediaType;
  }
  return {
  type: "file",
@@ -1171,28 +1146,28 @@ function prepareToolsAndToolChoice({
  };
  }
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
- ([name17]) => activeTools.includes(name17)
+ ([name16]) => activeTools.includes(name16)
  ) : Object.entries(tools);
  return {
- tools: filteredTools.map(([name17, tool3]) => {
- const toolType = tool3.type;
+ tools: filteredTools.map(([name16, tool2]) => {
+ const toolType = tool2.type;
  switch (toolType) {
  case void 0:
  case "dynamic":
  case "function":
  return {
  type: "function",
- name: name17,
- description: tool3.description,
- inputSchema: asSchema(tool3.inputSchema).jsonSchema,
- providerOptions: tool3.providerOptions
+ name: name16,
+ description: tool2.description,
+ inputSchema: asSchema(tool2.inputSchema).jsonSchema,
+ providerOptions: tool2.providerOptions
  };
  case "provider-defined":
  return {
  type: "provider-defined",
- name: name17,
- id: tool3.id,
- args: tool3.args
+ name: name16,
+ id: tool2.id,
+ args: tool2.args
  };
  default: {
  const exhaustiveCheck = toolType;
@@ -1419,10 +1394,10 @@ import {
  GatewayAuthenticationError,
  GatewayModelNotFoundError
  } from "@ai-sdk/gateway";
- import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
+ import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
  function wrapGatewayError(error) {
  if (GatewayAuthenticationError.isInstance(error) || GatewayModelNotFoundError.isInstance(error)) {
- return new AISDKError20({
+ return new AISDKError19({
  name: "GatewayError",
  message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
  cause: error
@@ -1453,7 +1428,7 @@ function getBaseTelemetryAttributes({
  telemetry,
  headers
  }) {
- var _a17;
+ var _a16;
  return {
  "ai.model.provider": model.provider,
  "ai.model.id": model.modelId,
@@ -1463,7 +1438,7 @@ function getBaseTelemetryAttributes({
  return attributes;
  }, {}),
  // add metadata as attributes:
- ...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
+ ...Object.entries((_a16 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a16 : {}).reduce(
  (attributes, [key, value]) => {
  attributes[`ai.telemetry.metadata.${key}`] = value;
  return attributes;
@@ -1488,7 +1463,7 @@ var noopTracer = {
  startSpan() {
  return noopSpan;
  },
- startActiveSpan(name17, arg1, arg2, arg3) {
+ startActiveSpan(name16, arg1, arg2, arg3) {
  if (typeof arg1 === "function") {
  return arg1(noopSpan);
  }
@@ -1558,13 +1533,13 @@ function getTracer({
  // src/telemetry/record-span.ts
  import { SpanStatusCode } from "@opentelemetry/api";
  function recordSpan({
- name: name17,
+ name: name16,
  tracer,
  attributes,
  fn,
  endWhenDone = true
  }) {
- return tracer.startActiveSpan(name17, { attributes }, async (span) => {
+ return tracer.startActiveSpan(name16, { attributes }, async (span) => {
  try {
  const result = await fn(span);
  if (endWhenDone) {
@@ -1910,14 +1885,14 @@ async function doParseToolCall({
  tools
  }) {
  const toolName = toolCall.toolName;
- const tool3 = tools[toolName];
- if (tool3 == null) {
+ const tool2 = tools[toolName];
+ if (tool2 == null) {
  throw new NoSuchToolError({
  toolName: toolCall.toolName,
  availableTools: Object.keys(tools)
  });
  }
- const schema = asSchema2(tool3.inputSchema);
+ const schema = asSchema2(tool2.inputSchema);
  const parseResult = toolCall.input.trim() === "" ? await safeValidateTypes2({ value: {}, schema }) : await safeParseJSON({ text: toolCall.input, schema });
  if (parseResult.success === false) {
  throw new InvalidToolInputError({
@@ -1926,7 +1901,7 @@ async function doParseToolCall({
  cause: parseResult.error
  });
  }
- return tool3.type === "dynamic" ? {
+ return tool2.type === "dynamic" ? {
  type: "tool-call",
  toolCallId: toolCall.toolCallId,
  toolName: toolCall.toolName,
@@ -2012,8 +1987,8 @@ function stepCountIs(stepCount) {
  }
  function hasToolCall(toolName) {
  return ({ steps }) => {
- var _a17, _b, _c;
- return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
+ var _a16, _b, _c;
+ return (_c = (_b = (_a16 = steps[steps.length - 1]) == null ? void 0 : _a16.toolCalls) == null ? void 0 : _b.some(
  (toolCall) => toolCall.toolName === toolName
  )) != null ? _c : false;
  };
@@ -2031,7 +2006,7 @@ import {
  } from "@ai-sdk/provider";
  function createToolModelOutput({
  output,
- tool: tool3,
+ tool: tool2,
  errorMode
  }) {
  if (errorMode === "text") {
@@ -2039,8 +2014,8 @@ function createToolModelOutput({
  } else if (errorMode === "json") {
  return { type: "error-json", value: toJSONValue(output) };
  }
- if (tool3 == null ? void 0 : tool3.toModelOutput) {
- return tool3.toModelOutput(output);
+ if (tool2 == null ? void 0 : tool2.toModelOutput) {
+ return tool2.toModelOutput(output);
  }
  return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: toJSONValue(output) };
  }
@@ -2216,7 +2191,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var _a17, _b, _c, _d, _e, _f, _g;
+ var _a16, _b, _c, _d, _e, _f, _g;
  const callSettings2 = prepareCallSettings(settings);
  let currentModelResponse;
  let clientToolCalls = [];
@@ -2235,7 +2210,7 @@ async function generateText({
  messages: stepInputMessages
  }));
  const stepModel = resolveLanguageModel(
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
  );
  const promptMessages = await convertToLanguageModelPrompt({
  prompt: {
@@ -2252,7 +2227,7 @@ async function generateText({
  });
  currentModelResponse = await retry(
  () => {
- var _a18;
+ var _a17;
  return recordSpan({
  name: "ai.generateText.doGenerate",
  attributes: selectTelemetryAttributes({
@@ -2272,7 +2247,7 @@ async function generateText({
  },
  "ai.prompt.tools": {
  // convert the language model level tools:
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
  },
  "ai.prompt.toolChoice": {
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -2284,14 +2259,14 @@ async function generateText({
  "gen_ai.request.max_tokens": settings.maxOutputTokens,
  "gen_ai.request.presence_penalty": settings.presencePenalty,
  "gen_ai.request.stop_sequences": settings.stopSequences,
- "gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
+ "gen_ai.request.temperature": (_a17 = settings.temperature) != null ? _a17 : void 0,
  "gen_ai.request.top_k": settings.topK,
  "gen_ai.request.top_p": settings.topP
  }
  }),
  tracer,
  fn: async (span2) => {
- var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
+ var _a18, _b2, _c2, _d2, _e2, _f2, _g2, _h;
  const result = await stepModel.doGenerate({
  ...callSettings2,
  tools: stepTools,
@@ -2303,7 +2278,7 @@ async function generateText({
  headers: headersWithUserAgent
  });
  const responseData = {
- id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
+ id: (_b2 = (_a18 = result.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
  headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
@@ -2363,9 +2338,9 @@ async function generateText({
  if (toolCall.invalid) {
  continue;
  }
- const tool3 = tools[toolCall.toolName];
- if ((tool3 == null ? void 0 : tool3.onInputAvailable) != null) {
- await tool3.onInputAvailable({
+ const tool2 = tools[toolCall.toolName];
+ if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
+ await tool2.onInputAvailable({
  input: toolCall.input,
  toolCallId: toolCall.toolCallId,
  messages: stepInputMessages,
@@ -2495,8 +2470,8 @@ async function executeTools({
  }) {
  const toolOutputs = await Promise.all(
  toolCalls.map(async ({ toolCallId, toolName, input }) => {
- const tool3 = tools[toolName];
- if ((tool3 == null ? void 0 : tool3.execute) == null) {
+ const tool2 = tools[toolName];
+ if ((tool2 == null ? void 0 : tool2.execute) == null) {
  return void 0;
  }
  return recordSpan({
@@ -2519,7 +2494,7 @@ async function executeTools({
  fn: async (span) => {
  try {
  const stream = executeTool({
- execute: tool3.execute.bind(tool3),
+ execute: tool2.execute.bind(tool2),
  input,
  options: {
  toolCallId,
@@ -2553,7 +2528,7 @@ async function executeTools({
  toolName,
  input,
  output,
- dynamic: tool3.type === "dynamic"
+ dynamic: tool2.type === "dynamic"
  };
  } catch (error) {
  recordErrorOnSpan(span, error);
@@ -2563,7 +2538,7 @@ async function executeTools({
  toolName,
  input,
  error,
- dynamic: tool3.type === "dynamic"
+ dynamic: tool2.type === "dynamic"
  };
  }
  }
@@ -3456,7 +3431,7 @@ function processUIMessageStream({
  new TransformStream({
  async transform(chunk, controller) {
  await runUpdateMessageJob(async ({ state, write }) => {
- var _a17, _b, _c, _d;
+ var _a16, _b, _c, _d;
  function getToolInvocation(toolCallId) {
  const toolInvocations = state.message.parts.filter(isToolUIPart);
  const toolInvocation = toolInvocations.find(
@@ -3484,7 +3459,7 @@ function processUIMessageStream({
  return toolInvocation;
  }
  function updateToolPart(options) {
- var _a18;
+ var _a17;
  const part = state.message.parts.find(
  (part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
  );
@@ -3497,7 +3472,7 @@ function processUIMessageStream({
  anyPart.errorText = anyOptions.errorText;
  anyPart.rawInput = anyOptions.rawInput;
  anyPart.preliminary = anyOptions.preliminary;
- anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
+ anyPart.providerExecuted = (_a17 = anyOptions.providerExecuted) != null ? _a17 : part.providerExecuted;
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
  part.callProviderMetadata = anyOptions.providerMetadata;
  }
@@ -3517,7 +3492,7 @@ function processUIMessageStream({
  }
  }
  function updateDynamicToolPart(options) {
- var _a18;
+ var _a17;
  const part = state.message.parts.find(
  (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
  );
@@ -3529,7 +3504,7 @@ function processUIMessageStream({
  anyPart.input = anyOptions.input;
  anyPart.output = anyOptions.output;
  anyPart.errorText = anyOptions.errorText;
- anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
+ anyPart.rawInput = (_a17 = anyOptions.rawInput) != null ? _a17 : anyPart.rawInput;
  anyPart.preliminary = anyOptions.preliminary;
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
  part.callProviderMetadata = anyOptions.providerMetadata;
@@ -3576,7 +3551,7 @@ function processUIMessageStream({
  case "text-delta": {
  const textPart = state.activeTextParts[chunk.id];
  textPart.text += chunk.delta;
- textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
+ textPart.providerMetadata = (_a16 = chunk.providerMetadata) != null ? _a16 : textPart.providerMetadata;
  write();
  break;
  }
@@ -3987,11 +3962,11 @@ function createAsyncIterableStream(source) {
  const reader = this.getReader();
  let finished = false;
  async function cleanup(cancelStream) {
- var _a17;
+ var _a16;
  finished = true;
  try {
  if (cancelStream) {
- await ((_a17 = reader.cancel) == null ? void 0 : _a17.call(reader));
+ await ((_a16 = reader.cancel) == null ? void 0 : _a16.call(reader));
  }
  } finally {
  try {
@@ -4178,25 +4153,25 @@ var DelayedPromise = class {
  return this._promise;
  }
  resolve(value) {
- var _a17;
+ var _a16;
  this.status = { type: "resolved", value };
  if (this._promise) {
- (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
+ (_a16 = this._resolve) == null ? void 0 : _a16.call(this, value);
  }
  }
  reject(error) {
- var _a17;
+ var _a16;
  this.status = { type: "rejected", error };
  if (this._promise) {
- (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
+ (_a16 = this._reject) == null ? void 0 : _a16.call(this, error);
  }
  }
  };

  // src/util/now.ts
  function now() {
- var _a17, _b;
- return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
+ var _a16, _b;
+ return (_b = (_a16 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a16.now()) != null ? _b : Date.now();
  }

  // src/generate-text/run-tools-transformation.ts
@@ -4295,10 +4270,10 @@ function runToolsTransformation({
  });
  break;
  }
- const tool3 = tools[toolCall.toolName];
+ const tool2 = tools[toolCall.toolName];
  toolInputs.set(toolCall.toolCallId, toolCall.input);
- if (tool3.onInputAvailable != null) {
- await tool3.onInputAvailable({
+ if (tool2.onInputAvailable != null) {
+ await tool2.onInputAvailable({
  input: toolCall.input,
  toolCallId: toolCall.toolCallId,
  messages,
@@ -4306,7 +4281,7 @@ function runToolsTransformation({
  experimental_context
  });
  }
- if (tool3.execute != null && toolCall.providerExecuted !== true) {
+ if (tool2.execute != null && toolCall.providerExecuted !== true) {
  const toolExecutionId = generateId();
  outstandingToolResults.add(toolExecutionId);
  recordSpan({
@@ -4330,7 +4305,7 @@ function runToolsTransformation({
  let output;
  try {
  const stream = executeTool2({
- execute: tool3.execute.bind(tool3),
+ execute: tool2.execute.bind(tool2),
  input: toolCall.input,
  options: {
  toolCallId: toolCall.toolCallId,
@@ -4636,7 +4611,7 @@ var DefaultStreamTextResult = class {
  let activeReasoningContent = {};
  const eventProcessor = new TransformStream({
  async transform(chunk, controller) {
- var _a17, _b, _c, _d;
+ var _a16, _b, _c, _d;
  controller.enqueue(chunk);
  const { part } = chunk;
  if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -4666,7 +4641,7 @@ var DefaultStreamTextResult = class {
  return;
  }
  activeText.text += part.text;
- activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
+ activeText.providerMetadata = (_a16 = part.providerMetadata) != null ? _a16 : activeText.providerMetadata;
  }
  if (part.type === "text-end") {
  const activeText = activeTextContent[part.id];
@@ -4822,8 +4797,8 @@ var DefaultStreamTextResult = class {
  "ai.response.text": { output: () => finalStep.text },
  "ai.response.toolCalls": {
  output: () => {
- var _a17;
- return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
+ var _a16;
+ return ((_a16 = finalStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
  }
  },
  "ai.response.providerMetadata": JSON.stringify(
@@ -4927,7 +4902,7 @@ var DefaultStreamTextResult = class {
  responseMessages,
  usage
  }) {
- var _a17, _b, _c, _d, _e;
+ var _a16, _b, _c, _d, _e;
  const includeRawChunks2 = self.includeRawChunks;
  stepFinish = new DelayedPromise();
  const initialPrompt = await standardizePrompt({
@@ -4946,7 +4921,7 @@ var DefaultStreamTextResult = class {
  messages: stepInputMessages
  }));
  const stepModel = resolveLanguageModel(
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
  );
  const promptMessages = await convertToLanguageModelPrompt({
  prompt: {
@@ -4985,7 +4960,7 @@ var DefaultStreamTextResult = class {
  },
  "ai.prompt.tools": {
  // convert the language model level tools:
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
  },
  "ai.prompt.toolChoice": {
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -5058,7 +5033,7 @@ var DefaultStreamTextResult = class {
  streamWithToolResults.pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- var _a18, _b2, _c2, _d2;
+ var _a17, _b2, _c2, _d2;
  if (chunk.type === "stream-start") {
  warnings = chunk.warnings;
  return;
@@ -5130,7 +5105,7 @@ var DefaultStreamTextResult = class {
  }
  case "response-metadata": {
  stepResponse = {
- id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
+ id: (_a17 = chunk.id) != null ? _a17 : stepResponse.id,
  timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
  modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
  };
@@ -5158,9 +5133,9 @@ var DefaultStreamTextResult = class {
  }
  case "tool-input-start": {
  activeToolCallToolNames[chunk.id] = chunk.toolName;
- const tool3 = tools == null ? void 0 : tools[chunk.toolName];
- if ((tool3 == null ? void 0 : tool3.onInputStart) != null) {
- await tool3.onInputStart({
+ const tool2 = tools == null ? void 0 : tools[chunk.toolName];
+ if ((tool2 == null ? void 0 : tool2.onInputStart) != null) {
+ await tool2.onInputStart({
  toolCallId: chunk.id,
  messages: stepInputMessages,
  abortSignal,
@@ -5169,7 +5144,7 @@ var DefaultStreamTextResult = class {
  }
  controller.enqueue({
  ...chunk,
- dynamic: (tool3 == null ? void 0 : tool3.type) === "dynamic"
+ dynamic: (tool2 == null ? void 0 : tool2.type) === "dynamic"
  });
  break;
  }
@@ -5180,9 +5155,9 @@ var DefaultStreamTextResult = class {
  }
  case "tool-input-delta": {
  const toolName = activeToolCallToolNames[chunk.id];
- const tool3 = tools == null ? void 0 : tools[toolName];
- if ((tool3 == null ? void 0 : tool3.onInputDelta) != null) {
- await tool3.onInputDelta({
+ const tool2 = tools == null ? void 0 : tools[toolName];
+ if ((tool2 == null ? void 0 : tool2.onInputDelta) != null) {
+ await tool2.onInputDelta({
  inputTextDelta: chunk.delta,
  toolCallId: chunk.id,
  messages: stepInputMessages,
@@ -5432,14 +5407,14 @@ var DefaultStreamTextResult = class {
  );
  }
  async consumeStream(options) {
- var _a17;
+ var _a16;
  try {
  await consumeStream({
  stream: this.fullStream,
  onError: options == null ? void 0 : options.onError
  });
  } catch (error) {
- (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
+ (_a16 = options == null ? void 0 : options.onError) == null ? void 0 : _a16.call(options, error);
  }
  }
  get experimental_partialOutputStream() {
@@ -5475,9 +5450,9 @@ var DefaultStreamTextResult = class {
  }) : void 0;
  const toolNamesByCallId = {};
  const isDynamic = (toolCallId) => {
- var _a17, _b;
+ var _a16, _b;
  const toolName = toolNamesByCallId[toolCallId];
- const dynamic = ((_b = (_a17 = this.tools) == null ? void 0 : _a17[toolName]) == null ? void 0 : _b.type) === "dynamic";
+ const dynamic = ((_b = (_a16 = this.tools) == null ? void 0 : _a16[toolName]) == null ? void 0 : _b.type) === "dynamic";
  return dynamic ? true : void 0;
  };
  const baseStream = this.fullStream.pipeThrough(
@@ -5810,7 +5785,7 @@ function convertToModelMessages(messages, options) {
  modelMessages.push({
  role: "user",
  content: message.parts.map((part) => {
- var _a17;
+ var _a16;
  if (isTextUIPart(part)) {
  return {
  type: "text",
@@ -5828,7 +5803,7 @@ function convertToModelMessages(messages, options) {
  };
  }
  if (isDataUIPart(part)) {
- return (_a17 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a17.call(
+ return (_a16 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a16.call(
  options,
  part
  );
@@ -5840,7 +5815,7 @@ function convertToModelMessages(messages, options) {
  case "assistant": {
  if (message.parts != null) {
  let processBlock2 = function() {
- var _a17, _b, _c;
+ var _a16, _b, _c;
  if (block.length === 0) {
  return;
  }
@@ -5883,7 +5858,7 @@ function convertToModelMessages(messages, options) {
  type: "tool-call",
  toolCallId: part.toolCallId,
  toolName,
- input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : part.rawInput : part.input,
+ input: part.state === "output-error" ? (_a16 = part.input) != null ? _a16 : part.rawInput : part.input,
  providerExecuted: part.providerExecuted,
  ...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
  });
@@ -5924,7 +5899,7 @@ function convertToModelMessages(messages, options) {
  modelMessages.push({
  role: "tool",
  content: toolParts.map((toolPart) => {
- var _a18;
+ var _a17;
  switch (toolPart.state) {
  case "output-error":
  case "output-available": {
@@ -5935,7 +5910,7 @@ function convertToModelMessages(messages, options) {
  toolName,
  output: createToolModelOutput({
  output: toolPart.state === "output-error" ? toolPart.errorText : toolPart.output,
- tool: (_a18 = options == null ? void 0 : options.tools) == null ? void 0 : _a18[toolName],
+ tool: (_a17 = options == null ? void 0 : options.tools) == null ? void 0 : _a17[toolName],
  errorMode: toolPart.state === "output-error" ? "text" : "none"
  })
  };
@@ -6060,7 +6035,7 @@ async function embed({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a17;
+ var _a16;
  const modelResponse = await model.doEmbed({
  values: [value],
  abortSignal,
@@ -6068,7 +6043,7 @@ async function embed({
  providerOptions
  });
  const embedding2 = modelResponse.embeddings[0];
- const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
+ const usage2 = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -6178,7 +6153,7 @@ async function embedMany({
  }),
  tracer,
  fn: async (span) => {
- var _a17;
+ var _a16;
  const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
  model.maxEmbeddingsPerCall,
  model.supportsParallelCalls
@@ -6204,7 +6179,7 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a18;
+ var _a17;
  const modelResponse = await model.doEmbed({
  values,
  abortSignal,
@@ -6212,7 +6187,7 @@ async function embedMany({
  providerOptions
  });
  const embeddings3 = modelResponse.embeddings;
- const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
+ const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -6286,7 +6261,7 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a18;
+ var _a17;
  const modelResponse = await model.doEmbed({
  values: chunk,
  abortSignal,
@@ -6294,7 +6269,7 @@ async function embedMany({
  providerOptions
  });
  const embeddings2 = modelResponse.embeddings;
- const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
+ const usage = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -6331,7 +6306,7 @@ async function embedMany({
  result.providerMetadata
  )) {
  providerMetadata[providerName] = {
- ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
+ ...(_a16 = providerMetadata[providerName]) != null ? _a16 : {},
  ...metadata
  };
  }
@@ -6385,7 +6360,7 @@ async function generateImage({
  abortSignal,
  headers
  }) {
- var _a17, _b;
+ var _a16, _b;
  if (model.specificationVersion !== "v2") {
  throw new UnsupportedModelVersionError({
  version: model.specificationVersion,
@@ -6401,7 +6376,7 @@ async function generateImage({
  maxRetries: maxRetriesArg,
  abortSignal
  });
- const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
+ const maxImagesPerCallWithDefault = (_a16 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a16 : 1;
  const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
  if (i < callCount - 1) {
@@ -6434,13 +6409,13 @@ async function generateImage({
  images.push(
  ...result.images.map(
  (image) => {
- var _a18;
+ var _a17;
  return new DefaultGeneratedFile({
  data: image,
- mediaType: (_a18 = detectMediaType({
+ mediaType: (_a17 = detectMediaType({
  data: image,
  signatures: imageMediaTypeSignatures
- })) != null ? _a18 : "image/png"
+ })) != null ? _a17 : "image/png"
  });
  }
  )
@@ -6581,7 +6556,7 @@ var arrayOutputStrategy = (schema) => {
  isFirstDelta,
  isFinalDelta
  }) {
- var _a17;
+ var _a16;
  if (!isJSONObject(value) || !isJSONArray(value.elements)) {
  return {
  success: false,
@@ -6604,7 +6579,7 @@ var arrayOutputStrategy = (schema) => {
  }
  resultArray.push(result.value);
  }
- const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
+ const publishedElementCount = (_a16 = latestObject == null ? void 0 : latestObject.length) != null ? _a16 : 0;
  let textDelta = "";
  if (isFirstDelta) {
  textDelta += "[";
@@ -7023,7 +6998,7 @@ async function generateObject(options) {
  }),
  tracer,
  fn: async (span) => {
- var _a17;
+ var _a16;
  let result;
  let finishReason;
  let usage;
@@ -7069,7 +7044,7 @@ async function generateObject(options) {
  }),
  tracer,
  fn: async (span2) => {
- var _a18, _b, _c, _d, _e, _f, _g, _h;
+ var _a17, _b, _c, _d, _e, _f, _g, _h;
  const result2 = await model.doGenerate({
  responseFormat: {
  type: "json",
@@ -7084,7 +7059,7 @@ async function generateObject(options) {
  headers: headersWithUserAgent
  });
  const responseData = {
- id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId3(),
+ id: (_b = (_a17 = result2.response) == null ? void 0 : _a17.id) != null ? _b : generateId3(),
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
  headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -7138,7 +7113,7 @@ async function generateObject(options) {
  usage = generateResult.usage;
  warnings = generateResult.warnings;
  resultProviderMetadata = generateResult.providerMetadata;
- request = (_a17 = generateResult.request) != null ? _a17 : {};
+ request = (_a16 = generateResult.request) != null ? _a16 : {};
  response = generateResult.responseData;
  reasoning = generateResult.reasoning;
  logWarnings(warnings);
@@ -7197,9 +7172,9 @@ var DefaultGenerateObjectResult = class {
  this.reasoning = options.reasoning;
  }
  toJsonResponse(init) {
- var _a17;
+ var _a16;
  return new Response(JSON.stringify(this.object), {
- status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
+ status: (_a16 = init == null ? void 0 : init.status) != null ? _a16 : 200,
  headers: prepareHeaders(init == null ? void 0 : init.headers, {
  "content-type": "application/json; charset=utf-8"
  })
@@ -7327,8 +7302,8 @@ function simulateReadableStream({
  chunkDelayInMs = 0,
  _internal
  }) {
- var _a17;
- const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : delayFunction;
+ var _a16;
+ const delay2 = (_a16 = _internal == null ? void 0 : _internal.delay) != null ? _a16 : delayFunction;
  let index = 0;
  return new ReadableStream({
  async pull(controller) {
@@ -7588,7 +7563,7 @@ var DefaultStreamObjectResult = class {
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- var _a17, _b, _c;
+ var _a16, _b, _c;
  if (typeof chunk === "object" && chunk.type === "stream-start") {
  warnings = chunk.warnings;
  return;
@@ -7638,7 +7613,7 @@ var DefaultStreamObjectResult = class {
  switch (chunk.type) {
  case "response-metadata": {
  fullResponse = {
- id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
+ id: (_a16 = chunk.id) != null ? _a16 : fullResponse.id,
  timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
  modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
  };
@@ -7905,7 +7880,7 @@ async function generateSpeech({
  abortSignal,
  headers
  }) {
- var _a17;
+ var _a16;
  if (model.specificationVersion !== "v2") {
  throw new UnsupportedModelVersionError({
  version: model.specificationVersion,
@@ -7941,10 +7916,10 @@ async function generateSpeech({
  return new DefaultSpeechResult({
  audio: new DefaultGeneratedAudioFile({
  data: result.audio,
- mediaType: (_a17 = detectMediaType({
+ mediaType: (_a16 = detectMediaType({
  data: result.audio,
  signatures: audioMediaTypeSignatures
- })) != null ? _a17 : "audio/mp3"
+ })) != null ? _a16 : "audio/mp3"
  }),
  warnings: result.warnings,
  responses: [result.response],
@@ -7953,11 +7928,11 @@ async function generateSpeech({
  }
  var DefaultSpeechResult = class {
  constructor(options) {
- var _a17;
+ var _a16;
  this.audio = options.audio;
  this.warnings = options.warnings;
  this.responses = options.responses;
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
  }
  };

@@ -8462,7 +8437,7 @@ var doWrap = ({
  modelId,
  providerId
  }) => {
- var _a17, _b, _c;
+ var _a16, _b, _c;
  async function doTransform({
  params,
  type
@@ -8471,7 +8446,7 @@ var doWrap = ({
  }
  return {
  specificationVersion: "v2",
- provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
+ provider: (_a16 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a16 : model.provider,
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
  supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
  async doGenerate(params) {
@@ -8579,11 +8554,11 @@ function customProvider({
  var experimental_customProvider = customProvider;

  // src/registry/no-such-provider-error.ts
- import { AISDKError as AISDKError21, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
- var name16 = "AI_NoSuchProviderError";
- var marker16 = `vercel.ai.error.${name16}`;
- var symbol16 = Symbol.for(marker16);
- var _a16;
+ import { AISDKError as AISDKError20, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
+ var name15 = "AI_NoSuchProviderError";
+ var marker15 = `vercel.ai.error.${name15}`;
+ var symbol15 = Symbol.for(marker15);
+ var _a15;
  var NoSuchProviderError = class extends NoSuchModelError3 {
  constructor({
  modelId,
@@ -8592,16 +8567,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
  availableProviders,
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
  }) {
- super({ errorName: name16, modelId, modelType, message });
- this[_a16] = true;
+ super({ errorName: name15, modelId, modelType, message });
+ this[_a15] = true;
  this.providerId = providerId;
  this.availableProviders = availableProviders;
  }
  static isInstance(error) {
- return AISDKError21.hasMarker(error, marker16);
+ return AISDKError20.hasMarker(error, marker15);
  }
  };
- _a16 = symbol16;
+ _a15 = symbol15;

  // src/registry/provider-registry.ts
  import {
@@ -8660,10 +8635,10 @@ var DefaultProviderRegistry = class {
8660
8635
  return [id.slice(0, index), id.slice(index + this.separator.length)];
8661
8636
  }
8662
8637
  languageModel(id) {
8663
- var _a17, _b;
8638
+ var _a16, _b;
8664
8639
  const [providerId, modelId] = this.splitId(id, "languageModel");
8665
- let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
8666
- _a17,
8640
+ let model = (_b = (_a16 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
8641
+ _a16,
8667
8642
  modelId
8668
8643
  );
8669
8644
  if (model == null) {
@@ -8678,10 +8653,10 @@ var DefaultProviderRegistry = class {
8678
8653
  return model;
8679
8654
  }
8680
8655
  textEmbeddingModel(id) {
8681
- var _a17;
8656
+ var _a16;
8682
8657
  const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
8683
8658
  const provider = this.getProvider(providerId, "textEmbeddingModel");
8684
- const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
8659
+ const model = (_a16 = provider.textEmbeddingModel) == null ? void 0 : _a16.call(provider, modelId);
8685
8660
  if (model == null) {
8686
8661
  throw new NoSuchModelError4({
8687
8662
  modelId: id,
@@ -8691,20 +8666,20 @@ var DefaultProviderRegistry = class {
8691
8666
  return model;
8692
8667
  }
8693
8668
  imageModel(id) {
8694
- var _a17;
8669
+ var _a16;
8695
8670
  const [providerId, modelId] = this.splitId(id, "imageModel");
8696
8671
  const provider = this.getProvider(providerId, "imageModel");
8697
- const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
8672
+ const model = (_a16 = provider.imageModel) == null ? void 0 : _a16.call(provider, modelId);
8698
8673
  if (model == null) {
8699
8674
  throw new NoSuchModelError4({ modelId: id, modelType: "imageModel" });
8700
8675
  }
8701
8676
  return model;
8702
8677
  }
8703
8678
  transcriptionModel(id) {
8704
- var _a17;
8679
+ var _a16;
8705
8680
  const [providerId, modelId] = this.splitId(id, "transcriptionModel");
8706
8681
  const provider = this.getProvider(providerId, "transcriptionModel");
8707
- const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
8682
+ const model = (_a16 = provider.transcriptionModel) == null ? void 0 : _a16.call(provider, modelId);
8708
8683
  if (model == null) {
8709
8684
  throw new NoSuchModelError4({
8710
8685
  modelId: id,
@@ -8714,10 +8689,10 @@ var DefaultProviderRegistry = class {
8714
8689
  return model;
8715
8690
  }
8716
8691
  speechModel(id) {
8717
- var _a17;
8692
+ var _a16;
8718
8693
  const [providerId, modelId] = this.splitId(id, "speechModel");
8719
8694
  const provider = this.getProvider(providerId, "speechModel");
8720
- const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
8695
+ const model = (_a16 = provider.speechModel) == null ? void 0 : _a16.call(provider, modelId);
8721
8696
  if (model == null) {
8722
8697
  throw new NoSuchModelError4({ modelId: id, modelType: "speechModel" });
8723
8698
  }
@@ -8725,597 +8700,12 @@ var DefaultProviderRegistry = class {
8725
8700
  }
8726
8701
  };
8727
8702
 
8728
- // src/tool/mcp/mcp-client.ts
8729
- import {
8730
- dynamicTool,
8731
- jsonSchema,
8732
- tool
8733
- } from "@ai-sdk/provider-utils";
8734
-
8735
- // src/tool/mcp/mcp-sse-transport.ts
8736
- import {
8737
- EventSourceParserStream,
8738
- withUserAgentSuffix as withUserAgentSuffix8,
8739
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent2
8740
- } from "@ai-sdk/provider-utils";
8741
-
8742
- // src/tool/mcp/json-rpc-message.ts
8743
- import { z as z9 } from "zod/v4";
8744
-
8745
- // src/tool/mcp/types.ts
8746
- import { z as z8 } from "zod/v4";
8747
- var LATEST_PROTOCOL_VERSION = "2025-06-18";
8748
- var SUPPORTED_PROTOCOL_VERSIONS = [
8749
- LATEST_PROTOCOL_VERSION,
8750
- "2025-03-26",
8751
- "2024-11-05"
8752
- ];
8753
- var ClientOrServerImplementationSchema = z8.looseObject({
8754
- name: z8.string(),
8755
- version: z8.string()
8756
- });
8757
- var BaseParamsSchema = z8.looseObject({
8758
- _meta: z8.optional(z8.object({}).loose())
8759
- });
8760
- var ResultSchema = BaseParamsSchema;
8761
- var RequestSchema = z8.object({
8762
- method: z8.string(),
8763
- params: z8.optional(BaseParamsSchema)
8764
- });
8765
- var ServerCapabilitiesSchema = z8.looseObject({
8766
- experimental: z8.optional(z8.object({}).loose()),
8767
- logging: z8.optional(z8.object({}).loose()),
8768
- prompts: z8.optional(
8769
- z8.looseObject({
8770
- listChanged: z8.optional(z8.boolean())
8771
- })
8772
- ),
8773
- resources: z8.optional(
8774
- z8.looseObject({
8775
- subscribe: z8.optional(z8.boolean()),
8776
- listChanged: z8.optional(z8.boolean())
8777
- })
8778
- ),
8779
- tools: z8.optional(
8780
- z8.looseObject({
8781
- listChanged: z8.optional(z8.boolean())
8782
- })
8783
- )
8784
- });
8785
- var InitializeResultSchema = ResultSchema.extend({
8786
- protocolVersion: z8.string(),
8787
- capabilities: ServerCapabilitiesSchema,
8788
- serverInfo: ClientOrServerImplementationSchema,
8789
- instructions: z8.optional(z8.string())
8790
- });
8791
- var PaginatedResultSchema = ResultSchema.extend({
8792
- nextCursor: z8.optional(z8.string())
8793
- });
8794
- var ToolSchema = z8.object({
8795
- name: z8.string(),
8796
- description: z8.optional(z8.string()),
8797
- inputSchema: z8.object({
8798
- type: z8.literal("object"),
8799
- properties: z8.optional(z8.object({}).loose())
8800
- }).loose()
8801
- }).loose();
8802
- var ListToolsResultSchema = PaginatedResultSchema.extend({
8803
- tools: z8.array(ToolSchema)
8804
- });
8805
- var TextContentSchema = z8.object({
8806
- type: z8.literal("text"),
8807
- text: z8.string()
8808
- }).loose();
8809
- var ImageContentSchema = z8.object({
8810
- type: z8.literal("image"),
8811
- data: z8.base64(),
8812
- mimeType: z8.string()
8813
- }).loose();
8814
- var ResourceContentsSchema = z8.object({
8815
- /**
8816
- * The URI of this resource.
8817
- */
8818
- uri: z8.string(),
8819
- /**
8820
- * The MIME type of this resource, if known.
8821
- */
8822
- mimeType: z8.optional(z8.string())
8823
- }).loose();
8824
- var TextResourceContentsSchema = ResourceContentsSchema.extend({
8825
- text: z8.string()
8826
- });
8827
- var BlobResourceContentsSchema = ResourceContentsSchema.extend({
8828
- blob: z8.base64()
8829
- });
8830
- var EmbeddedResourceSchema = z8.object({
8831
- type: z8.literal("resource"),
8832
- resource: z8.union([TextResourceContentsSchema, BlobResourceContentsSchema])
8833
- }).loose();
8834
- var CallToolResultSchema = ResultSchema.extend({
8835
- content: z8.array(
8836
- z8.union([TextContentSchema, ImageContentSchema, EmbeddedResourceSchema])
8837
- ),
8838
- isError: z8.boolean().default(false).optional()
8839
- }).or(
8840
- ResultSchema.extend({
8841
- toolResult: z8.unknown()
8842
- })
8843
- );
8844
-
8845
- // src/tool/mcp/json-rpc-message.ts
8846
- var JSONRPC_VERSION = "2.0";
8847
- var JSONRPCRequestSchema = z9.object({
8848
- jsonrpc: z9.literal(JSONRPC_VERSION),
8849
- id: z9.union([z9.string(), z9.number().int()])
8850
- }).merge(RequestSchema).strict();
8851
- var JSONRPCResponseSchema = z9.object({
8852
- jsonrpc: z9.literal(JSONRPC_VERSION),
8853
- id: z9.union([z9.string(), z9.number().int()]),
8854
- result: ResultSchema
8855
- }).strict();
8856
- var JSONRPCErrorSchema = z9.object({
8857
- jsonrpc: z9.literal(JSONRPC_VERSION),
8858
- id: z9.union([z9.string(), z9.number().int()]),
8859
- error: z9.object({
8860
- code: z9.number().int(),
8861
- message: z9.string(),
8862
- data: z9.optional(z9.unknown())
8863
- })
8864
- }).strict();
8865
- var JSONRPCNotificationSchema = z9.object({
8866
- jsonrpc: z9.literal(JSONRPC_VERSION)
8867
- }).merge(
8868
- z9.object({
8869
- method: z9.string(),
8870
- params: z9.optional(BaseParamsSchema)
8871
- })
8872
- ).strict();
8873
- var JSONRPCMessageSchema = z9.union([
8874
- JSONRPCRequestSchema,
8875
- JSONRPCNotificationSchema,
8876
- JSONRPCResponseSchema,
8877
- JSONRPCErrorSchema
8878
- ]);
8879
-
8880
- // src/tool/mcp/mcp-sse-transport.ts
8881
- var SseMCPTransport = class {
8882
- constructor({
8883
- url,
8884
- headers
8885
- }) {
8886
- this.connected = false;
8887
- this.url = new URL(url);
8888
- this.headers = headers;
8889
- }
8890
- async start() {
8891
- return new Promise((resolve2, reject) => {
8892
- if (this.connected) {
8893
- return resolve2();
8894
- }
8895
- this.abortController = new AbortController();
8896
- const establishConnection = async () => {
8897
- var _a17, _b, _c;
8898
- try {
8899
- const headers = withUserAgentSuffix8(
8900
- {
8901
- ...this.headers,
8902
- Accept: "text/event-stream"
8903
- },
8904
- `ai-sdk/${VERSION}`,
8905
- getRuntimeEnvironmentUserAgent2()
8906
- );
8907
- const response = await fetch(this.url.href, {
8908
- headers,
8909
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
8910
- });
8911
- if (!response.ok || !response.body) {
8912
- const error = new MCPClientError({
8913
- message: `MCP SSE Transport Error: ${response.status} ${response.statusText}`
8914
- });
8915
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
8916
- return reject(error);
8917
- }
8918
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream());
8919
- const reader = stream.getReader();
8920
- const processEvents = async () => {
8921
- var _a18, _b2, _c2;
8922
- try {
8923
- while (true) {
8924
- const { done, value } = await reader.read();
8925
- if (done) {
8926
- if (this.connected) {
8927
- this.connected = false;
8928
- throw new MCPClientError({
8929
- message: "MCP SSE Transport Error: Connection closed unexpectedly"
8930
- });
8931
- }
8932
- return;
8933
- }
8934
- const { event, data } = value;
8935
- if (event === "endpoint") {
8936
- this.endpoint = new URL(data, this.url);
8937
- if (this.endpoint.origin !== this.url.origin) {
8938
- throw new MCPClientError({
8939
- message: `MCP SSE Transport Error: Endpoint origin does not match connection origin: ${this.endpoint.origin}`
8940
- });
8941
- }
8942
- this.connected = true;
8943
- resolve2();
8944
- } else if (event === "message") {
8945
- try {
8946
- const message = JSONRPCMessageSchema.parse(
8947
- JSON.parse(data)
8948
- );
8949
- (_a18 = this.onmessage) == null ? void 0 : _a18.call(this, message);
8950
- } catch (error) {
8951
- const e = new MCPClientError({
8952
- message: "MCP SSE Transport Error: Failed to parse message",
8953
- cause: error
8954
- });
8955
- (_b2 = this.onerror) == null ? void 0 : _b2.call(this, e);
8956
- }
8957
- }
8958
- }
8959
- } catch (error) {
8960
- if (error instanceof Error && error.name === "AbortError") {
8961
- return;
8962
- }
8963
- (_c2 = this.onerror) == null ? void 0 : _c2.call(this, error);
8964
- reject(error);
8965
- }
8966
- };
8967
- this.sseConnection = {
8968
- close: () => reader.cancel()
8969
- };
8970
- processEvents();
8971
- } catch (error) {
8972
- if (error instanceof Error && error.name === "AbortError") {
8973
- return;
8974
- }
8975
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
8976
- reject(error);
8977
- }
8978
- };
8979
- establishConnection();
8980
- });
8981
- }
8982
- async close() {
8983
- var _a17, _b, _c;
8984
- this.connected = false;
8985
- (_a17 = this.sseConnection) == null ? void 0 : _a17.close();
8986
- (_b = this.abortController) == null ? void 0 : _b.abort();
8987
- (_c = this.onclose) == null ? void 0 : _c.call(this);
8988
- }
8989
- async send(message) {
8990
- var _a17, _b, _c;
8991
- if (!this.endpoint || !this.connected) {
8992
- throw new MCPClientError({
8993
- message: "MCP SSE Transport Error: Not connected"
8994
- });
8995
- }
8996
- try {
8997
- const headers = withUserAgentSuffix8(
8998
- {
8999
- ...this.headers,
9000
- "Content-Type": "application/json"
9001
- },
9002
- `ai-sdk/${VERSION}`,
9003
- getRuntimeEnvironmentUserAgent2()
9004
- );
9005
- const init = {
9006
- method: "POST",
9007
- headers,
9008
- body: JSON.stringify(message),
9009
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
9010
- };
9011
- const response = await fetch(this.endpoint, init);
9012
- if (!response.ok) {
9013
- const text2 = await response.text().catch(() => null);
9014
- const error = new MCPClientError({
9015
- message: `MCP SSE Transport Error: POSTing to endpoint (HTTP ${response.status}): ${text2}`
9016
- });
9017
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
9018
- return;
9019
- }
9020
- } catch (error) {
9021
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
9022
- return;
9023
- }
9024
- }
9025
- };
9026
-
9027
- // src/tool/mcp/mcp-transport.ts
9028
- function createMcpTransport(config) {
9029
- if (config.type !== "sse") {
9030
- throw new MCPClientError({
9031
- message: "Unsupported or invalid transport configuration. If you are using a custom transport, make sure it implements the MCPTransport interface."
9032
- });
9033
- }
9034
- return new SseMCPTransport(config);
9035
- }
9036
- function isCustomMcpTransport(transport) {
9037
- return "start" in transport && typeof transport.start === "function" && "send" in transport && typeof transport.send === "function" && "close" in transport && typeof transport.close === "function";
9038
- }
9039
-
9040
- // src/tool/mcp/mcp-client.ts
9041
- var CLIENT_VERSION = "1.0.0";
9042
- async function createMCPClient(config) {
9043
- const client = new DefaultMCPClient(config);
9044
- await client.init();
9045
- return client;
9046
- }
9047
- var DefaultMCPClient = class {
9048
- constructor({
9049
- transport: transportConfig,
9050
- name: name17 = "ai-sdk-mcp-client",
9051
- onUncaughtError
9052
- }) {
9053
- this.requestMessageId = 0;
9054
- this.responseHandlers = /* @__PURE__ */ new Map();
9055
- this.serverCapabilities = {};
9056
- this.isClosed = true;
9057
- this.onUncaughtError = onUncaughtError;
9058
- if (isCustomMcpTransport(transportConfig)) {
9059
- this.transport = transportConfig;
9060
- } else {
9061
- this.transport = createMcpTransport(transportConfig);
9062
- }
9063
- this.transport.onclose = () => this.onClose();
9064
- this.transport.onerror = (error) => this.onError(error);
9065
- this.transport.onmessage = (message) => {
9066
- if ("method" in message) {
9067
- this.onError(
9068
- new MCPClientError({
9069
- message: "Unsupported message type"
9070
- })
9071
- );
9072
- return;
9073
- }
9074
- this.onResponse(message);
9075
- };
9076
- this.clientInfo = {
9077
- name: name17,
9078
- version: CLIENT_VERSION
9079
- };
9080
- }
9081
- async init() {
9082
- try {
9083
- await this.transport.start();
9084
- this.isClosed = false;
9085
- const result = await this.request({
9086
- request: {
9087
- method: "initialize",
9088
- params: {
9089
- protocolVersion: LATEST_PROTOCOL_VERSION,
9090
- capabilities: {},
9091
- clientInfo: this.clientInfo
9092
- }
9093
- },
9094
- resultSchema: InitializeResultSchema
9095
- });
9096
- if (result === void 0) {
9097
- throw new MCPClientError({
9098
- message: "Server sent invalid initialize result"
9099
- });
9100
- }
9101
- if (!SUPPORTED_PROTOCOL_VERSIONS.includes(result.protocolVersion)) {
9102
- throw new MCPClientError({
9103
- message: `Server's protocol version is not supported: ${result.protocolVersion}`
9104
- });
9105
- }
9106
- this.serverCapabilities = result.capabilities;
9107
- await this.notification({
9108
- method: "notifications/initialized"
9109
- });
9110
- return this;
9111
- } catch (error) {
9112
- await this.close();
9113
- throw error;
9114
- }
9115
- }
9116
- async close() {
9117
- var _a17;
9118
- if (this.isClosed)
9119
- return;
9120
- await ((_a17 = this.transport) == null ? void 0 : _a17.close());
9121
- this.onClose();
9122
- }
9123
- assertCapability(method) {
9124
- switch (method) {
9125
- case "initialize":
9126
- break;
9127
- case "tools/list":
9128
- case "tools/call":
9129
- if (!this.serverCapabilities.tools) {
9130
- throw new MCPClientError({
9131
- message: `Server does not support tools`
9132
- });
9133
- }
9134
- break;
9135
- default:
9136
- throw new MCPClientError({
9137
- message: `Unsupported method: ${method}`
9138
- });
9139
- }
9140
- }
9141
- async request({
9142
- request,
9143
- resultSchema,
9144
- options
9145
- }) {
9146
- return new Promise((resolve2, reject) => {
9147
- if (this.isClosed) {
9148
- return reject(
9149
- new MCPClientError({
9150
- message: "Attempted to send a request from a closed client"
9151
- })
9152
- );
9153
- }
9154
- this.assertCapability(request.method);
9155
- const signal = options == null ? void 0 : options.signal;
9156
- signal == null ? void 0 : signal.throwIfAborted();
9157
- const messageId = this.requestMessageId++;
9158
- const jsonrpcRequest = {
9159
- ...request,
9160
- jsonrpc: "2.0",
9161
- id: messageId
9162
- };
9163
- const cleanup = () => {
9164
- this.responseHandlers.delete(messageId);
9165
- };
9166
- this.responseHandlers.set(messageId, (response) => {
9167
- if (signal == null ? void 0 : signal.aborted) {
9168
- return reject(
9169
- new MCPClientError({
9170
- message: "Request was aborted",
9171
- cause: signal.reason
9172
- })
9173
- );
9174
- }
9175
- if (response instanceof Error) {
9176
- return reject(response);
9177
- }
9178
- try {
9179
- const result = resultSchema.parse(response.result);
9180
- resolve2(result);
9181
- } catch (error) {
9182
- const parseError = new MCPClientError({
9183
- message: "Failed to parse server response",
9184
- cause: error
9185
- });
9186
- reject(parseError);
9187
- }
9188
- });
9189
- this.transport.send(jsonrpcRequest).catch((error) => {
9190
- cleanup();
9191
- reject(error);
9192
- });
9193
- });
9194
- }
9195
- async listTools({
9196
- params,
9197
- options
9198
- } = {}) {
9199
- try {
9200
- return this.request({
9201
- request: { method: "tools/list", params },
9202
- resultSchema: ListToolsResultSchema,
9203
- options
9204
- });
9205
- } catch (error) {
9206
- throw error;
9207
- }
9208
- }
9209
- async callTool({
9210
- name: name17,
9211
- args,
9212
- options
9213
- }) {
9214
- try {
9215
- return this.request({
9216
- request: { method: "tools/call", params: { name: name17, arguments: args } },
9217
- resultSchema: CallToolResultSchema,
9218
- options: {
9219
- signal: options == null ? void 0 : options.abortSignal
9220
- }
9221
- });
9222
- } catch (error) {
9223
- throw error;
9224
- }
9225
- }
9226
- async notification(notification) {
9227
- const jsonrpcNotification = {
9228
- ...notification,
9229
- jsonrpc: "2.0"
9230
- };
9231
- await this.transport.send(jsonrpcNotification);
9232
- }
9233
- /**
9234
- * Returns a set of AI SDK tools from the MCP server
9235
- * @returns A record of tool names to their implementations
9236
- */
9237
- async tools({
9238
- schemas = "automatic"
9239
- } = {}) {
9240
- var _a17;
9241
- const tools = {};
9242
- try {
9243
- const listToolsResult = await this.listTools();
9244
- for (const { name: name17, description, inputSchema } of listToolsResult.tools) {
9245
- if (schemas !== "automatic" && !(name17 in schemas)) {
9246
- continue;
9247
- }
9248
- const self = this;
9249
- const execute = async (args, options) => {
9250
- var _a18;
9251
- (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
9252
- return self.callTool({ name: name17, args, options });
9253
- };
9254
- const toolWithExecute = schemas === "automatic" ? dynamicTool({
9255
- description,
9256
- inputSchema: jsonSchema({
9257
- ...inputSchema,
9258
- properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
9259
- additionalProperties: false
9260
- }),
9261
- execute
9262
- }) : tool({
9263
- description,
9264
- inputSchema: schemas[name17].inputSchema,
9265
- execute
9266
- });
9267
- tools[name17] = toolWithExecute;
9268
- }
9269
- return tools;
9270
- } catch (error) {
9271
- throw error;
9272
- }
9273
- }
9274
- onClose() {
9275
- if (this.isClosed)
9276
- return;
9277
- this.isClosed = true;
9278
- const error = new MCPClientError({
9279
- message: "Connection closed"
9280
- });
9281
- for (const handler of this.responseHandlers.values()) {
9282
- handler(error);
9283
- }
9284
- this.responseHandlers.clear();
9285
- }
9286
- onError(error) {
9287
- if (this.onUncaughtError) {
9288
- this.onUncaughtError(error);
9289
- }
9290
- }
9291
- onResponse(response) {
9292
- const messageId = Number(response.id);
9293
- const handler = this.responseHandlers.get(messageId);
9294
- if (handler === void 0) {
9295
- throw new MCPClientError({
9296
- message: `Protocol error: Received a response for an unknown message ID: ${JSON.stringify(
9297
- response
9298
- )}`
9299
- });
9300
- }
9301
- this.responseHandlers.delete(messageId);
9302
- handler(
9303
- "result" in response ? response : new MCPClientError({
9304
- message: response.error.message,
9305
- code: response.error.code,
9306
- data: response.error.data,
9307
- cause: response.error
9308
- })
9309
- );
9310
- }
9311
- };
9312
-
9313
8703
  // src/transcribe/transcribe.ts
9314
- import { withUserAgentSuffix as withUserAgentSuffix9 } from "@ai-sdk/provider-utils";
8704
+ import { withUserAgentSuffix as withUserAgentSuffix8 } from "@ai-sdk/provider-utils";
9315
8705
 
9316
8706
  // src/error/no-transcript-generated-error.ts
9317
- import { AISDKError as AISDKError22 } from "@ai-sdk/provider";
9318
- var NoTranscriptGeneratedError = class extends AISDKError22 {
8707
+ import { AISDKError as AISDKError21 } from "@ai-sdk/provider";
8708
+ var NoTranscriptGeneratedError = class extends AISDKError21 {
9319
8709
  constructor(options) {
9320
8710
  super({
9321
8711
  name: "AI_NoTranscriptGeneratedError",
@@ -9345,23 +8735,23 @@ async function transcribe({
9345
8735
  maxRetries: maxRetriesArg,
9346
8736
  abortSignal
9347
8737
  });
9348
- const headersWithUserAgent = withUserAgentSuffix9(
8738
+ const headersWithUserAgent = withUserAgentSuffix8(
9349
8739
  headers != null ? headers : {},
9350
8740
  `ai/${VERSION}`
9351
8741
  );
9352
8742
  const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
9353
8743
  const result = await retry(
9354
8744
  () => {
9355
- var _a17;
8745
+ var _a16;
9356
8746
  return model.doGenerate({
9357
8747
  audio: audioData,
9358
8748
  abortSignal,
9359
8749
  headers: headersWithUserAgent,
9360
8750
  providerOptions,
9361
- mediaType: (_a17 = detectMediaType({
8751
+ mediaType: (_a16 = detectMediaType({
9362
8752
  data: audioData,
9363
8753
  signatures: audioMediaTypeSignatures
9364
- })) != null ? _a17 : "audio/wav"
8754
+ })) != null ? _a16 : "audio/wav"
9365
8755
  });
9366
8756
  }
9367
8757
  );
@@ -9381,22 +8771,22 @@ async function transcribe({
9381
8771
  }
9382
8772
  var DefaultTranscriptionResult = class {
9383
8773
  constructor(options) {
9384
- var _a17;
8774
+ var _a16;
9385
8775
  this.text = options.text;
9386
8776
  this.segments = options.segments;
9387
8777
  this.language = options.language;
9388
8778
  this.durationInSeconds = options.durationInSeconds;
9389
8779
  this.warnings = options.warnings;
9390
8780
  this.responses = options.responses;
9391
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
8781
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
9392
8782
  }
9393
8783
  };
9394
8784
 
9395
8785
  // src/ui/call-completion-api.ts
9396
8786
  import {
9397
8787
  parseJsonEventStream,
9398
- withUserAgentSuffix as withUserAgentSuffix10,
9399
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent3
8788
+ withUserAgentSuffix as withUserAgentSuffix9,
8789
+ getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent2
9400
8790
  } from "@ai-sdk/provider-utils";
9401
8791
 
9402
8792
  // src/ui/process-text-stream.ts
@@ -9431,7 +8821,7 @@ async function callCompletionApi({
9431
8821
  onError,
9432
8822
  fetch: fetch2 = getOriginalFetch()
9433
8823
  }) {
9434
- var _a17;
8824
+ var _a16;
9435
8825
  try {
9436
8826
  setLoading(true);
9437
8827
  setError(void 0);
@@ -9445,13 +8835,13 @@ async function callCompletionApi({
9445
8835
  ...body
9446
8836
  }),
9447
8837
  credentials,
9448
- headers: withUserAgentSuffix10(
8838
+ headers: withUserAgentSuffix9(
9449
8839
  {
9450
8840
  "Content-Type": "application/json",
9451
8841
  ...headers
9452
8842
  },
9453
8843
  `ai-sdk/${VERSION}`,
9454
- getRuntimeEnvironmentUserAgent3()
8844
+ getRuntimeEnvironmentUserAgent2()
9455
8845
  ),
9456
8846
  signal: abortController.signal
9457
8847
  }).catch((err) => {
@@ -9459,7 +8849,7 @@ async function callCompletionApi({
9459
8849
  });
9460
8850
  if (!response.ok) {
9461
8851
  throw new Error(
9462
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
8852
+ (_a16 = await response.text()) != null ? _a16 : "Failed to fetch the chat response."
9463
8853
  );
9464
8854
  }
9465
8855
  if (!response.body) {
@@ -9545,12 +8935,12 @@ async function convertFileListToFileUIParts(files) {
9545
8935
  }
9546
8936
  return Promise.all(
9547
8937
  Array.from(files).map(async (file) => {
9548
- const { name: name17, type } = file;
8938
+ const { name: name16, type } = file;
9549
8939
  const dataUrl = await new Promise((resolve2, reject) => {
9550
8940
  const reader = new FileReader();
9551
8941
  reader.onload = (readerEvent) => {
9552
- var _a17;
9553
- resolve2((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
8942
+ var _a16;
8943
+ resolve2((_a16 = readerEvent.target) == null ? void 0 : _a16.result);
9554
8944
  };
9555
8945
  reader.onerror = (error) => reject(error);
9556
8946
  reader.readAsDataURL(file);
@@ -9558,7 +8948,7 @@ async function convertFileListToFileUIParts(files) {
9558
8948
  return {
9559
8949
  type: "file",
9560
8950
  mediaType: type,
9561
- filename: name17,
8951
+ filename: name16,
9562
8952
  url: dataUrl
9563
8953
  };
9564
8954
  })
@@ -9571,8 +8961,8 @@ import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-
9571
8961
  // src/ui/http-chat-transport.ts
9572
8962
  import {
9573
8963
  resolve,
9574
- withUserAgentSuffix as withUserAgentSuffix11,
9575
- getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent4
8964
+ withUserAgentSuffix as withUserAgentSuffix10,
8965
+ getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent3
9576
8966
  } from "@ai-sdk/provider-utils";
9577
8967
  var HttpChatTransport = class {
9578
8968
  constructor({
@@ -9596,11 +8986,11 @@ var HttpChatTransport = class {
9596
8986
  abortSignal,
9597
8987
  ...options
9598
8988
  }) {
9599
- var _a17, _b, _c, _d, _e;
8989
+ var _a16, _b, _c, _d, _e;
9600
8990
  const resolvedBody = await resolve(this.body);
9601
8991
  const resolvedHeaders = await resolve(this.headers);
9602
8992
  const resolvedCredentials = await resolve(this.credentials);
9603
- const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
8993
+ const preparedRequest = await ((_a16 = this.prepareSendMessagesRequest) == null ? void 0 : _a16.call(this, {
9604
8994
  api: this.api,
9605
8995
  id: options.chatId,
9606
8996
  messages: options.messages,
@@ -9625,13 +9015,13 @@ var HttpChatTransport = class {
9625
9015
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
9626
9016
  const response = await fetch2(api, {
9627
9017
  method: "POST",
9628
- headers: withUserAgentSuffix11(
9018
+ headers: withUserAgentSuffix10(
9629
9019
  {
9630
9020
  "Content-Type": "application/json",
9631
9021
  ...headers
9632
9022
  },
9633
9023
  `ai-sdk/${VERSION}`,
9634
- getRuntimeEnvironmentUserAgent4()
9024
+ getRuntimeEnvironmentUserAgent3()
9635
9025
  ),
9636
9026
  body: JSON.stringify(body),
9637
9027
  credentials,
@@ -9648,11 +9038,11 @@ var HttpChatTransport = class {
9648
9038
  return this.processResponseStream(response.body);
9649
9039
  }
9650
9040
  async reconnectToStream(options) {
9651
- var _a17, _b, _c, _d, _e;
9041
+ var _a16, _b, _c, _d, _e;
9652
9042
  const resolvedBody = await resolve(this.body);
9653
9043
  const resolvedHeaders = await resolve(this.headers);
9654
9044
  const resolvedCredentials = await resolve(this.credentials);
9655
- const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
9045
+ const preparedRequest = await ((_a16 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a16.call(this, {
9656
9046
  api: this.api,
9657
9047
  id: options.chatId,
9658
9048
  body: { ...resolvedBody, ...options.body },
@@ -9666,10 +9056,10 @@ var HttpChatTransport = class {
9666
9056
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
9667
9057
  const response = await fetch2(api, {
9668
9058
  method: "GET",
9669
- headers: withUserAgentSuffix11(
9059
+ headers: withUserAgentSuffix10(
9670
9060
  headers,
9671
9061
  `ai-sdk/${VERSION}`,
9672
- getRuntimeEnvironmentUserAgent4()
9062
+ getRuntimeEnvironmentUserAgent3()
9673
9063
  ),
9674
9064
  credentials
9675
9065
  });
@@ -9734,11 +9124,11 @@ var AbstractChat = class {
9734
9124
  * If a messageId is provided, the message will be replaced.
9735
9125
  */
9736
9126
  this.sendMessage = async (message, options) => {
9737
- var _a17, _b, _c, _d;
9127
+ var _a16, _b, _c, _d;
9738
9128
  if (message == null) {
9739
9129
  await this.makeRequest({
9740
9130
  trigger: "submit-message",
9741
- messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
9131
+ messageId: (_a16 = this.lastMessage) == null ? void 0 : _a16.id,
9742
9132
  ...options
9743
9133
  });
9744
9134
  return;
@@ -9828,12 +9218,12 @@ var AbstractChat = class {
9828
9218
  };
9829
9219
  this.addToolResult = async ({
9830
9220
  state = "output-available",
9831
- tool: tool3,
9221
+ tool: tool2,
9832
9222
  toolCallId,
9833
9223
  output,
9834
9224
  errorText
9835
9225
  }) => this.jobExecutor.run(async () => {
9836
- var _a17, _b;
9226
+ var _a16, _b;
9837
9227
  const messages = this.state.messages;
9838
9228
  const lastMessage = messages[messages.length - 1];
9839
9229
  this.state.replaceMessage(messages.length - 1, {
@@ -9852,7 +9242,7 @@ var AbstractChat = class {
9852
9242
  } : part
9853
9243
  );
9854
9244
  }
9855
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
9245
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
9856
9246
  this.makeRequest({
9857
9247
  trigger: "submit-message",
9858
9248
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -9863,10 +9253,10 @@ var AbstractChat = class {
9863
9253
  * Abort the current request immediately, keep the generated tokens if any.
9864
9254
  */
9865
9255
  this.stop = async () => {
9866
- var _a17;
9256
+ var _a16;
9867
9257
  if (this.status !== "streaming" && this.status !== "submitted")
9868
9258
  return;
9869
- if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
9259
+ if ((_a16 = this.activeResponse) == null ? void 0 : _a16.abortController) {
9870
9260
  this.activeResponse.abortController.abort();
9871
9261
  }
9872
9262
  };
@@ -9921,7 +9311,7 @@ var AbstractChat = class {
9921
9311
  body,
9922
9312
  messageId
9923
9313
  }) {
9924
- var _a17, _b, _c;
9314
+ var _a16, _b, _c;
9925
9315
  this.setStatus({ status: "submitted", error: void 0 });
9926
9316
  const lastMessage = this.lastMessage;
9927
9317
  let isAbort = false;
@@ -9970,9 +9360,9 @@ var AbstractChat = class {
9970
9360
  () => job({
9971
9361
  state: activeResponse.state,
9972
9362
  write: () => {
9973
- var _a18;
9363
+ var _a17;
9974
9364
  this.setStatus({ status: "streaming" });
9975
- const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
9365
+ const replaceLastMessage = activeResponse.state.message.id === ((_a17 = this.lastMessage) == null ? void 0 : _a17.id);
9976
9366
  if (replaceLastMessage) {
9977
9367
  this.state.replaceMessage(
9978
9368
  this.state.messages.length - 1,
@@ -10018,7 +9408,7 @@ var AbstractChat = class {
10018
9408
  this.setStatus({ status: "error", error: err });
10019
9409
  } finally {
10020
9410
  try {
10021
- (_a17 = this.onFinish) == null ? void 0 : _a17.call(this, {
9411
+ (_a16 = this.onFinish) == null ? void 0 : _a16.call(this, {
10022
9412
  message: this.activeResponse.state.message,
10023
9413
  messages: this.state.messages,
10024
9414
  isAbort,
@@ -10104,193 +9494,193 @@ import {
10104
9494
  validateTypes as validateTypes2,
10105
9495
  zodSchema as zodSchema2
10106
9496
  } from "@ai-sdk/provider-utils";
10107
- import { z as z10 } from "zod/v4";
9497
+ import { z as z8 } from "zod/v4";
10108
9498
  var uiMessagesSchema = lazyValidator2(
10109
9499
  () => zodSchema2(
10110
- z10.array(
10111
- z10.object({
10112
- id: z10.string(),
10113
- role: z10.enum(["system", "user", "assistant"]),
10114
- metadata: z10.unknown().optional(),
10115
- parts: z10.array(
10116
- z10.union([
10117
- z10.object({
10118
- type: z10.literal("text"),
10119
- text: z10.string(),
10120
- state: z10.enum(["streaming", "done"]).optional(),
9500
+ z8.array(
9501
+ z8.object({
9502
+ id: z8.string(),
9503
+ role: z8.enum(["system", "user", "assistant"]),
9504
+ metadata: z8.unknown().optional(),
9505
+ parts: z8.array(
9506
+ z8.union([
9507
+ z8.object({
9508
+ type: z8.literal("text"),
9509
+ text: z8.string(),
9510
+ state: z8.enum(["streaming", "done"]).optional(),
10121
9511
  providerMetadata: providerMetadataSchema.optional()
10122
9512
  }),
10123
- z10.object({
10124
- type: z10.literal("reasoning"),
10125
- text: z10.string(),
10126
- state: z10.enum(["streaming", "done"]).optional(),
9513
+ z8.object({
9514
+ type: z8.literal("reasoning"),
9515
+ text: z8.string(),
9516
+ state: z8.enum(["streaming", "done"]).optional(),
10127
9517
  providerMetadata: providerMetadataSchema.optional()
10128
9518
  }),
10129
- z10.object({
10130
- type: z10.literal("source-url"),
10131
- sourceId: z10.string(),
10132
- url: z10.string(),
10133
- title: z10.string().optional(),
9519
+ z8.object({
9520
+ type: z8.literal("source-url"),
9521
+ sourceId: z8.string(),
9522
+ url: z8.string(),
9523
+ title: z8.string().optional(),
10134
9524
  providerMetadata: providerMetadataSchema.optional()
10135
9525
  }),
10136
- z10.object({
10137
- type: z10.literal("source-document"),
10138
- sourceId: z10.string(),
10139
- mediaType: z10.string(),
10140
- title: z10.string(),
10141
- filename: z10.string().optional(),
9526
+ z8.object({
9527
+ type: z8.literal("source-document"),
9528
+ sourceId: z8.string(),
9529
+ mediaType: z8.string(),
9530
+ title: z8.string(),
9531
+ filename: z8.string().optional(),
10142
9532
  providerMetadata: providerMetadataSchema.optional()
10143
9533
  }),
10144
- z10.object({
10145
- type: z10.literal("file"),
10146
- mediaType: z10.string(),
10147
- filename: z10.string().optional(),
10148
- url: z10.string(),
9534
+ z8.object({
9535
+ type: z8.literal("file"),
9536
+ mediaType: z8.string(),
9537
+ filename: z8.string().optional(),
9538
+ url: z8.string(),
10149
9539
  providerMetadata: providerMetadataSchema.optional()
10150
9540
  }),
10151
- z10.object({
10152
- type: z10.literal("step-start")
9541
+ z8.object({
9542
+ type: z8.literal("step-start")
10153
9543
  }),
10154
- z10.object({
10155
- type: z10.string().startsWith("data-"),
10156
- id: z10.string().optional(),
10157
- data: z10.unknown()
9544
+ z8.object({
9545
+ type: z8.string().startsWith("data-"),
9546
+ id: z8.string().optional(),
9547
+ data: z8.unknown()
10158
9548
  }),
10159
- z10.object({
10160
- type: z10.literal("dynamic-tool"),
10161
- toolName: z10.string(),
10162
- toolCallId: z10.string(),
10163
- state: z10.literal("input-streaming"),
10164
- input: z10.unknown().optional(),
10165
- output: z10.never().optional(),
10166
- errorText: z10.never().optional()
9549
+ z8.object({
9550
+ type: z8.literal("dynamic-tool"),
9551
+ toolName: z8.string(),
9552
+ toolCallId: z8.string(),
9553
+ state: z8.literal("input-streaming"),
9554
+ input: z8.unknown().optional(),
9555
+ output: z8.never().optional(),
9556
+ errorText: z8.never().optional()
10167
9557
  }),
10168
- z10.object({
10169
- type: z10.literal("dynamic-tool"),
10170
- toolName: z10.string(),
10171
- toolCallId: z10.string(),
10172
- state: z10.literal("input-available"),
10173
- input: z10.unknown(),
10174
- output: z10.never().optional(),
10175
- errorText: z10.never().optional(),
9558
+ z8.object({
9559
+ type: z8.literal("dynamic-tool"),
9560
+ toolName: z8.string(),
9561
+ toolCallId: z8.string(),
9562
+ state: z8.literal("input-available"),
9563
+ input: z8.unknown(),
9564
+ output: z8.never().optional(),
9565
+ errorText: z8.never().optional(),
10176
9566
  callProviderMetadata: providerMetadataSchema.optional()
10177
9567
  }),
10178
- z10.object({
10179
- type: z10.literal("dynamic-tool"),
10180
- toolName: z10.string(),
10181
- toolCallId: z10.string(),
10182
- state: z10.literal("output-available"),
10183
- input: z10.unknown(),
10184
- output: z10.unknown(),
10185
- errorText: z10.never().optional(),
9568
+ z8.object({
9569
+ type: z8.literal("dynamic-tool"),
9570
+ toolName: z8.string(),
9571
+ toolCallId: z8.string(),
9572
+ state: z8.literal("output-available"),
9573
+ input: z8.unknown(),
9574
+ output: z8.unknown(),
9575
+ errorText: z8.never().optional(),
10186
9576
  callProviderMetadata: providerMetadataSchema.optional(),
10187
- preliminary: z10.boolean().optional()
9577
+ preliminary: z8.boolean().optional()
10188
9578
  }),
10189
- z10.object({
10190
- type: z10.literal("dynamic-tool"),
10191
- toolName: z10.string(),
10192
- toolCallId: z10.string(),
10193
- state: z10.literal("output-error"),
10194
- input: z10.unknown(),
10195
- output: z10.never().optional(),
10196
- errorText: z10.string(),
9579
+ z8.object({
9580
+ type: z8.literal("dynamic-tool"),
9581
+ toolName: z8.string(),
9582
+ toolCallId: z8.string(),
9583
+ state: z8.literal("output-error"),
9584
+ input: z8.unknown(),
9585
+ output: z8.never().optional(),
9586
+ errorText: z8.string(),
10197
9587
  callProviderMetadata: providerMetadataSchema.optional()
10198
9588
  }),
10199
- z10.object({
10200
- type: z10.string().startsWith("tool-"),
10201
- toolCallId: z10.string(),
10202
- state: z10.literal("input-streaming"),
10203
- providerExecuted: z10.boolean().optional(),
10204
- input: z10.unknown().optional(),
10205
- output: z10.never().optional(),
10206
- errorText: z10.never().optional(),
10207
- approval: z10.never().optional()
9589
+ z8.object({
9590
+ type: z8.string().startsWith("tool-"),
9591
+ toolCallId: z8.string(),
9592
+ state: z8.literal("input-streaming"),
9593
+ providerExecuted: z8.boolean().optional(),
9594
+ input: z8.unknown().optional(),
9595
+ output: z8.never().optional(),
9596
+ errorText: z8.never().optional(),
9597
+ approval: z8.never().optional()
10208
9598
  }),
10209
- z10.object({
10210
- type: z10.string().startsWith("tool-"),
10211
- toolCallId: z10.string(),
10212
- state: z10.literal("input-available"),
10213
- providerExecuted: z10.boolean().optional(),
10214
- input: z10.unknown(),
10215
- output: z10.never().optional(),
10216
- errorText: z10.never().optional(),
9599
+ z8.object({
9600
+ type: z8.string().startsWith("tool-"),
9601
+ toolCallId: z8.string(),
9602
+ state: z8.literal("input-available"),
9603
+ providerExecuted: z8.boolean().optional(),
9604
+ input: z8.unknown(),
9605
+ output: z8.never().optional(),
9606
+ errorText: z8.never().optional(),
10217
9607
  callProviderMetadata: providerMetadataSchema.optional(),
10218
- approval: z10.never().optional()
9608
+ approval: z8.never().optional()
10219
9609
  }),
10220
- z10.object({
10221
- type: z10.string().startsWith("tool-"),
10222
- toolCallId: z10.string(),
10223
- state: z10.literal("approval-requested"),
10224
- input: z10.unknown(),
10225
- providerExecuted: z10.boolean().optional(),
10226
- output: z10.never().optional(),
10227
- errorText: z10.never().optional(),
9610
+ z8.object({
9611
+ type: z8.string().startsWith("tool-"),
9612
+ toolCallId: z8.string(),
9613
+ state: z8.literal("approval-requested"),
9614
+ input: z8.unknown(),
9615
+ providerExecuted: z8.boolean().optional(),
9616
+ output: z8.never().optional(),
9617
+ errorText: z8.never().optional(),
10228
9618
  callProviderMetadata: providerMetadataSchema.optional(),
10229
- approval: z10.object({
10230
- id: z10.string(),
10231
- approved: z10.never().optional(),
10232
- reason: z10.never().optional()
9619
+ approval: z8.object({
9620
+ id: z8.string(),
9621
+ approved: z8.never().optional(),
9622
+ reason: z8.never().optional()
10233
9623
  })
10234
9624
  }),
10235
- z10.object({
10236
- type: z10.string().startsWith("tool-"),
10237
- toolCallId: z10.string(),
10238
- state: z10.literal("approval-responded"),
10239
- input: z10.unknown(),
10240
- providerExecuted: z10.boolean().optional(),
10241
- output: z10.never().optional(),
10242
- errorText: z10.never().optional(),
9625
+ z8.object({
9626
+ type: z8.string().startsWith("tool-"),
9627
+ toolCallId: z8.string(),
9628
+ state: z8.literal("approval-responded"),
9629
+ input: z8.unknown(),
9630
+ providerExecuted: z8.boolean().optional(),
9631
+ output: z8.never().optional(),
9632
+ errorText: z8.never().optional(),
10243
9633
  callProviderMetadata: providerMetadataSchema.optional(),
10244
- approval: z10.object({
10245
- id: z10.string(),
10246
- approved: z10.boolean(),
10247
- reason: z10.string().optional()
9634
+ approval: z8.object({
9635
+ id: z8.string(),
9636
+ approved: z8.boolean(),
9637
+ reason: z8.string().optional()
10248
9638
  })
10249
9639
  }),
10250
- z10.object({
10251
- type: z10.string().startsWith("tool-"),
10252
- toolCallId: z10.string(),
10253
- state: z10.literal("output-available"),
10254
- providerExecuted: z10.boolean().optional(),
10255
- input: z10.unknown(),
10256
- output: z10.unknown(),
10257
- errorText: z10.never().optional(),
9640
+ z8.object({
9641
+ type: z8.string().startsWith("tool-"),
9642
+ toolCallId: z8.string(),
9643
+ state: z8.literal("output-available"),
9644
+ providerExecuted: z8.boolean().optional(),
9645
+ input: z8.unknown(),
9646
+ output: z8.unknown(),
9647
+ errorText: z8.never().optional(),
10258
9648
  callProviderMetadata: providerMetadataSchema.optional(),
10259
- preliminary: z10.boolean().optional(),
10260
- approval: z10.object({
10261
- id: z10.string(),
10262
- approved: z10.literal(true),
10263
- reason: z10.string().optional()
9649
+ preliminary: z8.boolean().optional(),
9650
+ approval: z8.object({
9651
+ id: z8.string(),
9652
+ approved: z8.literal(true),
9653
+ reason: z8.string().optional()
10264
9654
  }).optional()
10265
9655
  }),
10266
- z10.object({
10267
- type: z10.string().startsWith("tool-"),
10268
- toolCallId: z10.string(),
10269
- state: z10.literal("output-error"),
10270
- providerExecuted: z10.boolean().optional(),
10271
- input: z10.unknown(),
10272
- output: z10.never().optional(),
10273
- errorText: z10.string(),
9656
+ z8.object({
9657
+ type: z8.string().startsWith("tool-"),
9658
+ toolCallId: z8.string(),
9659
+ state: z8.literal("output-error"),
9660
+ providerExecuted: z8.boolean().optional(),
9661
+ input: z8.unknown(),
9662
+ output: z8.never().optional(),
9663
+ errorText: z8.string(),
10274
9664
  callProviderMetadata: providerMetadataSchema.optional(),
10275
- approval: z10.object({
10276
- id: z10.string(),
10277
- approved: z10.literal(true),
10278
- reason: z10.string().optional()
9665
+ approval: z8.object({
9666
+ id: z8.string(),
9667
+ approved: z8.literal(true),
9668
+ reason: z8.string().optional()
10279
9669
  }).optional()
10280
9670
  }),
10281
- z10.object({
10282
- type: z10.string().startsWith("tool-"),
10283
- toolCallId: z10.string(),
10284
- state: z10.literal("output-denied"),
10285
- providerExecuted: z10.boolean().optional(),
10286
- input: z10.unknown(),
10287
- output: z10.never().optional(),
10288
- errorText: z10.never().optional(),
9671
+ z8.object({
9672
+ type: z8.string().startsWith("tool-"),
9673
+ toolCallId: z8.string(),
9674
+ state: z8.literal("output-denied"),
9675
+ providerExecuted: z8.boolean().optional(),
9676
+ input: z8.unknown(),
9677
+ output: z8.never().optional(),
9678
+ errorText: z8.never().optional(),
10289
9679
  callProviderMetadata: providerMetadataSchema.optional(),
10290
- approval: z10.object({
10291
- id: z10.string(),
10292
- approved: z10.literal(false),
10293
- reason: z10.string().optional()
9680
+ approval: z8.object({
9681
+ id: z8.string(),
9682
+ approved: z8.literal(false),
9683
+ reason: z8.string().optional()
10294
9684
  })
10295
9685
  })
10296
9686
  ])
@@ -10359,8 +9749,8 @@ async function safeValidateUIMessages({
10359
9749
  );
10360
9750
  for (const toolPart of toolParts) {
10361
9751
  const toolName = toolPart.type.slice(5);
10362
- const tool3 = tools[toolName];
10363
- if (!tool3) {
9752
+ const tool2 = tools[toolName];
9753
+ if (!tool2) {
10364
9754
  return {
10365
9755
  success: false,
10366
9756
  error: new TypeValidationError4({
@@ -10372,13 +9762,13 @@ async function safeValidateUIMessages({
10372
9762
  if (toolPart.state === "input-available" || toolPart.state === "output-available" || toolPart.state === "output-error") {
10373
9763
  await validateTypes2({
10374
9764
  value: toolPart.input,
10375
- schema: tool3.inputSchema
9765
+ schema: tool2.inputSchema
10376
9766
  });
10377
9767
  }
10378
- if (toolPart.state === "output-available" && tool3.outputSchema) {
9768
+ if (toolPart.state === "output-available" && tool2.outputSchema) {
10379
9769
  await validateTypes2({
10380
9770
  value: toolPart.output,
10381
- schema: tool3.outputSchema
9771
+ schema: tool2.outputSchema
10382
9772
  });
10383
9773
  }
10384
9774
  }
@@ -10509,7 +9899,7 @@ function readUIMessageStream({
10509
9899
  onError,
10510
9900
  terminateOnError = false
10511
9901
  }) {
10512
- var _a17;
9902
+ var _a16;
10513
9903
  let controller;
10514
9904
  let hasErrored = false;
10515
9905
  const outputStream = new ReadableStream({
@@ -10518,7 +9908,7 @@ function readUIMessageStream({
10518
9908
  }
10519
9909
  });
10520
9910
  const state = createStreamingUIMessageState({
10521
- messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
9911
+ messageId: (_a16 = message == null ? void 0 : message.id) != null ? _a16 : "",
10522
9912
  lastMessage: message
10523
9913
  });
10524
9914
  const handleError = (error) => {
@@ -10550,7 +9940,7 @@ function readUIMessageStream({
10550
9940
  return createAsyncIterableStream(outputStream);
10551
9941
  }
10552
9942
  export {
10553
- AISDKError18 as AISDKError,
9943
+ AISDKError17 as AISDKError,
10554
9944
  APICallError,
10555
9945
  AbstractChat,
10556
9946
  DefaultChatTransport,
@@ -10569,7 +9959,6 @@ export {
10569
9959
  JsonToSseTransformStream,
10570
9960
  LoadAPIKeyError,
10571
9961
  LoadSettingError,
10572
- MCPClientError,
10573
9962
  MessageConversionError,
10574
9963
  NoContentGeneratedError,
10575
9964
  NoImageGeneratedError,
@@ -10611,10 +10000,9 @@ export {
10611
10000
  createUIMessageStreamResponse,
10612
10001
  customProvider,
10613
10002
  defaultSettingsMiddleware,
10614
- dynamicTool2 as dynamicTool,
10003
+ dynamicTool,
10615
10004
  embed,
10616
10005
  embedMany,
10617
- createMCPClient as experimental_createMCPClient,
10618
10006
  experimental_createProviderRegistry,
10619
10007
  experimental_customProvider,
10620
10008
  generateImage as experimental_generateImage,
@@ -10636,7 +10024,7 @@ export {
10636
10024
  isTextUIPart,
10637
10025
  isToolOrDynamicToolUIPart,
10638
10026
  isToolUIPart,
10639
- jsonSchema2 as jsonSchema,
10027
+ jsonSchema,
10640
10028
  lastAssistantMessageIsCompleteWithToolCalls,
10641
10029
  modelMessageSchema,
10642
10030
  parseJsonEventStream3 as parseJsonEventStream,
@@ -10653,7 +10041,7 @@ export {
10653
10041
  streamObject,
10654
10042
  streamText,
10655
10043
  systemModelMessageSchema,
10656
- tool2 as tool,
10044
+ tool,
10657
10045
  toolModelMessageSchema,
10658
10046
  uiMessageChunkSchema,
10659
10047
  userModelMessageSchema,