ai 6.0.0-beta.71 → 6.0.0-beta.73

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
- for (var name17 in all)
- __defProp(target, name17, { get: all[name17], enumerable: true });
+ for (var name16 in all)
+ __defProp(target, name16, { get: all[name16], enumerable: true });
  };
  var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
@@ -20,48 +20,47 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var src_exports = {};
  __export(src_exports, {
- AISDKError: () => import_provider18.AISDKError,
- APICallError: () => import_provider18.APICallError,
+ AISDKError: () => import_provider17.AISDKError,
+ APICallError: () => import_provider17.APICallError,
  AbstractChat: () => AbstractChat,
  DefaultChatTransport: () => DefaultChatTransport,
  DownloadError: () => DownloadError,
- EmptyResponseBodyError: () => import_provider18.EmptyResponseBodyError,
+ EmptyResponseBodyError: () => import_provider17.EmptyResponseBodyError,
  Experimental_Agent: () => ToolLoopAgent,
  HttpChatTransport: () => HttpChatTransport,
  InvalidArgumentError: () => InvalidArgumentError,
  InvalidDataContentError: () => InvalidDataContentError,
  InvalidMessageRoleError: () => InvalidMessageRoleError,
- InvalidPromptError: () => import_provider18.InvalidPromptError,
- InvalidResponseDataError: () => import_provider18.InvalidResponseDataError,
+ InvalidPromptError: () => import_provider17.InvalidPromptError,
+ InvalidResponseDataError: () => import_provider17.InvalidResponseDataError,
  InvalidStreamPartError: () => InvalidStreamPartError,
  InvalidToolInputError: () => InvalidToolInputError,
- JSONParseError: () => import_provider18.JSONParseError,
+ JSONParseError: () => import_provider17.JSONParseError,
  JsonToSseTransformStream: () => JsonToSseTransformStream,
- LoadAPIKeyError: () => import_provider18.LoadAPIKeyError,
- LoadSettingError: () => import_provider18.LoadSettingError,
- MCPClientError: () => MCPClientError,
+ LoadAPIKeyError: () => import_provider17.LoadAPIKeyError,
+ LoadSettingError: () => import_provider17.LoadSettingError,
  MessageConversionError: () => MessageConversionError,
- NoContentGeneratedError: () => import_provider18.NoContentGeneratedError,
+ NoContentGeneratedError: () => import_provider17.NoContentGeneratedError,
  NoImageGeneratedError: () => NoImageGeneratedError,
  NoObjectGeneratedError: () => NoObjectGeneratedError,
  NoOutputGeneratedError: () => NoOutputGeneratedError,
  NoOutputSpecifiedError: () => NoOutputSpecifiedError,
  NoSpeechGeneratedError: () => NoSpeechGeneratedError,
- NoSuchModelError: () => import_provider18.NoSuchModelError,
+ NoSuchModelError: () => import_provider17.NoSuchModelError,
  NoSuchProviderError: () => NoSuchProviderError,
  NoSuchToolError: () => NoSuchToolError,
  Output: () => output_exports,
  RetryError: () => RetryError,
  SerialJobExecutor: () => SerialJobExecutor,
  TextStreamChatTransport: () => TextStreamChatTransport,
- TooManyEmbeddingValuesForCallError: () => import_provider18.TooManyEmbeddingValuesForCallError,
+ TooManyEmbeddingValuesForCallError: () => import_provider17.TooManyEmbeddingValuesForCallError,
  ToolCallRepairError: () => ToolCallRepairError,
  ToolLoopAgent: () => ToolLoopAgent,
- TypeValidationError: () => import_provider18.TypeValidationError,
+ TypeValidationError: () => import_provider17.TypeValidationError,
  UI_MESSAGE_STREAM_HEADERS: () => UI_MESSAGE_STREAM_HEADERS,
- UnsupportedFunctionalityError: () => import_provider18.UnsupportedFunctionalityError,
+ UnsupportedFunctionalityError: () => import_provider17.UnsupportedFunctionalityError,
  UnsupportedModelVersionError: () => UnsupportedModelVersionError,
- asSchema: () => import_provider_utils38.asSchema,
+ asSchema: () => import_provider_utils36.asSchema,
  assistantModelMessageSchema: () => assistantModelMessageSchema,
  callCompletionApi: () => callCompletionApi,
  consumeStream: () => consumeStream,
@@ -77,17 +76,16 @@ __export(src_exports, {
  createAgentUIStream: () => createAgentUIStream,
  createAgentUIStreamResponse: () => createAgentUIStreamResponse,
  createGateway: () => import_gateway3.createGateway,
- createIdGenerator: () => import_provider_utils38.createIdGenerator,
+ createIdGenerator: () => import_provider_utils36.createIdGenerator,
  createProviderRegistry: () => createProviderRegistry,
  createTextStreamResponse: () => createTextStreamResponse,
  createUIMessageStream: () => createUIMessageStream,
  createUIMessageStreamResponse: () => createUIMessageStreamResponse,
  customProvider: () => customProvider,
  defaultSettingsMiddleware: () => defaultSettingsMiddleware,
- dynamicTool: () => import_provider_utils38.dynamicTool,
+ dynamicTool: () => import_provider_utils36.dynamicTool,
  embed: () => embed,
  embedMany: () => embedMany,
- experimental_createMCPClient: () => createMCPClient,
  experimental_createProviderRegistry: () => experimental_createProviderRegistry,
  experimental_customProvider: () => experimental_customProvider,
  experimental_generateImage: () => generateImage,
@@ -95,7 +93,7 @@ __export(src_exports, {
  experimental_transcribe: () => transcribe,
  extractReasoningMiddleware: () => extractReasoningMiddleware,
  gateway: () => import_gateway3.gateway,
- generateId: () => import_provider_utils38.generateId,
+ generateId: () => import_provider_utils36.generateId,
  generateObject: () => generateObject,
  generateText: () => generateText,
  getTextFromDataUrl: () => getTextFromDataUrl,
@@ -109,11 +107,11 @@ __export(src_exports, {
  isTextUIPart: () => isTextUIPart,
  isToolOrDynamicToolUIPart: () => isToolOrDynamicToolUIPart,
  isToolUIPart: () => isToolUIPart,
- jsonSchema: () => import_provider_utils38.jsonSchema,
+ jsonSchema: () => import_provider_utils36.jsonSchema,
  lastAssistantMessageIsCompleteWithApprovalResponses: () => lastAssistantMessageIsCompleteWithApprovalResponses,
  lastAssistantMessageIsCompleteWithToolCalls: () => lastAssistantMessageIsCompleteWithToolCalls,
  modelMessageSchema: () => modelMessageSchema,
- parseJsonEventStream: () => import_provider_utils38.parseJsonEventStream,
+ parseJsonEventStream: () => import_provider_utils36.parseJsonEventStream,
  parsePartialJson: () => parsePartialJson,
  pipeAgentUIStreamToResponse: () => pipeAgentUIStreamToResponse,
  pipeTextStreamToResponse: () => pipeTextStreamToResponse,
@@ -128,18 +126,18 @@ __export(src_exports, {
  streamObject: () => streamObject,
  streamText: () => streamText,
  systemModelMessageSchema: () => systemModelMessageSchema,
- tool: () => import_provider_utils38.tool,
+ tool: () => import_provider_utils36.tool,
  toolModelMessageSchema: () => toolModelMessageSchema,
  uiMessageChunkSchema: () => uiMessageChunkSchema,
  userModelMessageSchema: () => userModelMessageSchema,
  validateUIMessages: () => validateUIMessages,
  wrapLanguageModel: () => wrapLanguageModel,
  wrapProvider: () => wrapProvider,
- zodSchema: () => import_provider_utils38.zodSchema
+ zodSchema: () => import_provider_utils36.zodSchema
  });
  module.exports = __toCommonJS(src_exports);
  var import_gateway3 = require("@ai-sdk/gateway");
- var import_provider_utils38 = require("@ai-sdk/provider-utils");
+ var import_provider_utils36 = require("@ai-sdk/provider-utils");

  // src/generate-text/generate-text.ts
  var import_provider_utils11 = require("@ai-sdk/provider-utils");
@@ -216,7 +214,7 @@ var logWarnings = (warnings) => {
  var import_gateway = require("@ai-sdk/gateway");

  // src/error/index.ts
- var import_provider18 = require("@ai-sdk/provider");
+ var import_provider17 = require("@ai-sdk/provider");

  // src/error/invalid-argument-error.ts
  var import_provider2 = require("@ai-sdk/provider");
@@ -289,24 +287,21 @@ var InvalidToolInputError = class extends import_provider4.AISDKError {
  };
  _a4 = symbol4;

- // src/error/mcp-client-error.ts
+ // src/error/no-image-generated-error.ts
  var import_provider5 = require("@ai-sdk/provider");
- var name5 = "AI_MCPClientError";
+ var name5 = "AI_NoImageGeneratedError";
  var marker5 = `vercel.ai.error.${name5}`;
  var symbol5 = Symbol.for(marker5);
  var _a5;
- var MCPClientError = class extends import_provider5.AISDKError {
+ var NoImageGeneratedError = class extends import_provider5.AISDKError {
  constructor({
- name: name17 = "MCPClientError",
- message,
+ message = "No image generated.",
  cause,
- data,
- code
+ responses
  }) {
- super({ name: name17, message, cause });
+ super({ name: name5, message, cause });
  this[_a5] = true;
- this.data = data;
- this.code = code;
+ this.responses = responses;
  }
  static isInstance(error) {
  return import_provider5.AISDKError.hasMarker(error, marker5);
@@ -314,35 +309,13 @@ var MCPClientError = class extends import_provider5.AISDKError {
  };
  _a5 = symbol5;

- // src/error/no-image-generated-error.ts
+ // src/error/no-object-generated-error.ts
  var import_provider6 = require("@ai-sdk/provider");
- var name6 = "AI_NoImageGeneratedError";
+ var name6 = "AI_NoObjectGeneratedError";
  var marker6 = `vercel.ai.error.${name6}`;
  var symbol6 = Symbol.for(marker6);
  var _a6;
- var NoImageGeneratedError = class extends import_provider6.AISDKError {
- constructor({
- message = "No image generated.",
- cause,
- responses
- }) {
- super({ name: name6, message, cause });
- this[_a6] = true;
- this.responses = responses;
- }
- static isInstance(error) {
- return import_provider6.AISDKError.hasMarker(error, marker6);
- }
- };
- _a6 = symbol6;
-
- // src/error/no-object-generated-error.ts
- var import_provider7 = require("@ai-sdk/provider");
- var name7 = "AI_NoObjectGeneratedError";
- var marker7 = `vercel.ai.error.${name7}`;
- var symbol7 = Symbol.for(marker7);
- var _a7;
- var NoObjectGeneratedError = class extends import_provider7.AISDKError {
+ var NoObjectGeneratedError = class extends import_provider6.AISDKError {
  constructor({
  message = "No object generated.",
  cause,
@@ -351,43 +324,43 @@ var NoObjectGeneratedError = class extends import_provider7.AISDKError {
  usage,
  finishReason
  }) {
- super({ name: name7, message, cause });
- this[_a7] = true;
+ super({ name: name6, message, cause });
+ this[_a6] = true;
  this.text = text2;
  this.response = response;
  this.usage = usage;
  this.finishReason = finishReason;
  }
  static isInstance(error) {
- return import_provider7.AISDKError.hasMarker(error, marker7);
+ return import_provider6.AISDKError.hasMarker(error, marker6);
  }
  };
- _a7 = symbol7;
+ _a6 = symbol6;

  // src/error/no-output-generated-error.ts
- var import_provider8 = require("@ai-sdk/provider");
- var name8 = "AI_NoOutputGeneratedError";
- var marker8 = `vercel.ai.error.${name8}`;
- var symbol8 = Symbol.for(marker8);
- var _a8;
- var NoOutputGeneratedError = class extends import_provider8.AISDKError {
+ var import_provider7 = require("@ai-sdk/provider");
+ var name7 = "AI_NoOutputGeneratedError";
+ var marker7 = `vercel.ai.error.${name7}`;
+ var symbol7 = Symbol.for(marker7);
+ var _a7;
+ var NoOutputGeneratedError = class extends import_provider7.AISDKError {
  // used in isInstance
  constructor({
  message = "No output generated.",
  cause
  } = {}) {
- super({ name: name8, message, cause });
- this[_a8] = true;
+ super({ name: name7, message, cause });
+ this[_a7] = true;
  }
  static isInstance(error) {
- return import_provider8.AISDKError.hasMarker(error, marker8);
+ return import_provider7.AISDKError.hasMarker(error, marker7);
  }
  };
- _a8 = symbol8;
+ _a7 = symbol7;

  // src/error/no-speech-generated-error.ts
- var import_provider9 = require("@ai-sdk/provider");
- var NoSpeechGeneratedError = class extends import_provider9.AISDKError {
+ var import_provider8 = require("@ai-sdk/provider");
+ var NoSpeechGeneratedError = class extends import_provider8.AISDKError {
  constructor(options) {
  super({
  name: "AI_NoSpeechGeneratedError",
@@ -398,53 +371,53 @@ var NoSpeechGeneratedError = class extends import_provider9.AISDKError {
398
371
  };
399
372
 
400
373
  // src/error/no-such-tool-error.ts
401
- var import_provider10 = require("@ai-sdk/provider");
402
- var name9 = "AI_NoSuchToolError";
403
- var marker9 = `vercel.ai.error.${name9}`;
404
- var symbol9 = Symbol.for(marker9);
405
- var _a9;
406
- var NoSuchToolError = class extends import_provider10.AISDKError {
374
+ var import_provider9 = require("@ai-sdk/provider");
375
+ var name8 = "AI_NoSuchToolError";
376
+ var marker8 = `vercel.ai.error.${name8}`;
377
+ var symbol8 = Symbol.for(marker8);
378
+ var _a8;
379
+ var NoSuchToolError = class extends import_provider9.AISDKError {
407
380
  constructor({
408
381
  toolName,
409
382
  availableTools = void 0,
410
383
  message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
411
384
  }) {
412
- super({ name: name9, message });
413
- this[_a9] = true;
385
+ super({ name: name8, message });
386
+ this[_a8] = true;
414
387
  this.toolName = toolName;
415
388
  this.availableTools = availableTools;
416
389
  }
417
390
  static isInstance(error) {
418
- return import_provider10.AISDKError.hasMarker(error, marker9);
391
+ return import_provider9.AISDKError.hasMarker(error, marker8);
419
392
  }
420
393
  };
421
- _a9 = symbol9;
394
+ _a8 = symbol8;
422
395
 
423
396
  // src/error/tool-call-repair-error.ts
424
- var import_provider11 = require("@ai-sdk/provider");
425
- var name10 = "AI_ToolCallRepairError";
426
- var marker10 = `vercel.ai.error.${name10}`;
427
- var symbol10 = Symbol.for(marker10);
428
- var _a10;
429
- var ToolCallRepairError = class extends import_provider11.AISDKError {
397
+ var import_provider10 = require("@ai-sdk/provider");
398
+ var name9 = "AI_ToolCallRepairError";
399
+ var marker9 = `vercel.ai.error.${name9}`;
400
+ var symbol9 = Symbol.for(marker9);
401
+ var _a9;
402
+ var ToolCallRepairError = class extends import_provider10.AISDKError {
430
403
  constructor({
431
404
  cause,
432
405
  originalError,
433
- message = `Error repairing tool call: ${(0, import_provider11.getErrorMessage)(cause)}`
406
+ message = `Error repairing tool call: ${(0, import_provider10.getErrorMessage)(cause)}`
434
407
  }) {
435
- super({ name: name10, message, cause });
436
- this[_a10] = true;
408
+ super({ name: name9, message, cause });
409
+ this[_a9] = true;
437
410
  this.originalError = originalError;
438
411
  }
439
412
  static isInstance(error) {
440
- return import_provider11.AISDKError.hasMarker(error, marker10);
413
+ return import_provider10.AISDKError.hasMarker(error, marker9);
441
414
  }
442
415
  };
443
- _a10 = symbol10;
416
+ _a9 = symbol9;
444
417
 
445
418
  // src/error/unsupported-model-version-error.ts
446
- var import_provider12 = require("@ai-sdk/provider");
447
- var UnsupportedModelVersionError = class extends import_provider12.AISDKError {
419
+ var import_provider11 = require("@ai-sdk/provider");
420
+ var UnsupportedModelVersionError = class extends import_provider11.AISDKError {
448
421
  constructor(options) {
449
422
  super({
450
423
  name: "AI_UnsupportedModelVersionError",
@@ -457,76 +430,76 @@ var UnsupportedModelVersionError = class extends import_provider12.AISDKError {
457
430
  };
458
431
 
459
432
  // src/prompt/invalid-data-content-error.ts
460
- var import_provider13 = require("@ai-sdk/provider");
461
- var name11 = "AI_InvalidDataContentError";
462
- var marker11 = `vercel.ai.error.${name11}`;
463
- var symbol11 = Symbol.for(marker11);
464
- var _a11;
465
- var InvalidDataContentError = class extends import_provider13.AISDKError {
433
+ var import_provider12 = require("@ai-sdk/provider");
434
+ var name10 = "AI_InvalidDataContentError";
435
+ var marker10 = `vercel.ai.error.${name10}`;
436
+ var symbol10 = Symbol.for(marker10);
437
+ var _a10;
438
+ var InvalidDataContentError = class extends import_provider12.AISDKError {
466
439
  constructor({
467
440
  content,
468
441
  cause,
469
442
  message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
470
443
  }) {
471
- super({ name: name11, message, cause });
472
- this[_a11] = true;
444
+ super({ name: name10, message, cause });
445
+ this[_a10] = true;
473
446
  this.content = content;
474
447
  }
475
448
  static isInstance(error) {
476
- return import_provider13.AISDKError.hasMarker(error, marker11);
449
+ return import_provider12.AISDKError.hasMarker(error, marker10);
477
450
  }
478
451
  };
479
- _a11 = symbol11;
452
+ _a10 = symbol10;
480
453
 
481
454
  // src/prompt/invalid-message-role-error.ts
482
- var import_provider14 = require("@ai-sdk/provider");
483
- var name12 = "AI_InvalidMessageRoleError";
484
- var marker12 = `vercel.ai.error.${name12}`;
485
- var symbol12 = Symbol.for(marker12);
486
- var _a12;
487
- var InvalidMessageRoleError = class extends import_provider14.AISDKError {
455
+ var import_provider13 = require("@ai-sdk/provider");
456
+ var name11 = "AI_InvalidMessageRoleError";
457
+ var marker11 = `vercel.ai.error.${name11}`;
458
+ var symbol11 = Symbol.for(marker11);
459
+ var _a11;
460
+ var InvalidMessageRoleError = class extends import_provider13.AISDKError {
488
461
  constructor({
489
462
  role,
490
463
  message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
491
464
  }) {
492
- super({ name: name12, message });
493
- this[_a12] = true;
465
+ super({ name: name11, message });
466
+ this[_a11] = true;
494
467
  this.role = role;
495
468
  }
496
469
  static isInstance(error) {
497
- return import_provider14.AISDKError.hasMarker(error, marker12);
470
+ return import_provider13.AISDKError.hasMarker(error, marker11);
498
471
  }
499
472
  };
500
- _a12 = symbol12;
473
+ _a11 = symbol11;
501
474
 
502
475
  // src/prompt/message-conversion-error.ts
503
- var import_provider15 = require("@ai-sdk/provider");
504
- var name13 = "AI_MessageConversionError";
505
- var marker13 = `vercel.ai.error.${name13}`;
506
- var symbol13 = Symbol.for(marker13);
507
- var _a13;
508
- var MessageConversionError = class extends import_provider15.AISDKError {
476
+ var import_provider14 = require("@ai-sdk/provider");
477
+ var name12 = "AI_MessageConversionError";
478
+ var marker12 = `vercel.ai.error.${name12}`;
479
+ var symbol12 = Symbol.for(marker12);
480
+ var _a12;
481
+ var MessageConversionError = class extends import_provider14.AISDKError {
509
482
  constructor({
510
483
  originalMessage,
511
484
  message
512
485
  }) {
513
- super({ name: name13, message });
514
- this[_a13] = true;
486
+ super({ name: name12, message });
487
+ this[_a12] = true;
515
488
  this.originalMessage = originalMessage;
516
489
  }
517
490
  static isInstance(error) {
518
- return import_provider15.AISDKError.hasMarker(error, marker13);
491
+ return import_provider14.AISDKError.hasMarker(error, marker12);
519
492
  }
520
493
  };
521
- _a13 = symbol13;
494
+ _a12 = symbol12;
522
495
 
523
496
  // src/util/download/download-error.ts
524
- var import_provider16 = require("@ai-sdk/provider");
525
- var name14 = "AI_DownloadError";
526
- var marker14 = `vercel.ai.error.${name14}`;
527
- var symbol14 = Symbol.for(marker14);
528
- var _a14;
529
- var DownloadError = class extends import_provider16.AISDKError {
497
+ var import_provider15 = require("@ai-sdk/provider");
498
+ var name13 = "AI_DownloadError";
499
+ var marker13 = `vercel.ai.error.${name13}`;
500
+ var symbol13 = Symbol.for(marker13);
501
+ var _a13;
502
+ var DownloadError = class extends import_provider15.AISDKError {
530
503
  constructor({
531
504
  url,
532
505
  statusCode,
@@ -534,41 +507,41 @@ var DownloadError = class extends import_provider16.AISDKError {
534
507
  cause,
535
508
  message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
536
509
  }) {
537
- super({ name: name14, message, cause });
538
- this[_a14] = true;
510
+ super({ name: name13, message, cause });
511
+ this[_a13] = true;
539
512
  this.url = url;
540
513
  this.statusCode = statusCode;
541
514
  this.statusText = statusText;
542
515
  }
543
516
  static isInstance(error) {
544
- return import_provider16.AISDKError.hasMarker(error, marker14);
517
+ return import_provider15.AISDKError.hasMarker(error, marker13);
545
518
  }
546
519
  };
547
- _a14 = symbol14;
520
+ _a13 = symbol13;
548
521
 
549
522
  // src/util/retry-error.ts
550
- var import_provider17 = require("@ai-sdk/provider");
551
- var name15 = "AI_RetryError";
552
- var marker15 = `vercel.ai.error.${name15}`;
553
- var symbol15 = Symbol.for(marker15);
554
- var _a15;
555
- var RetryError = class extends import_provider17.AISDKError {
523
+ var import_provider16 = require("@ai-sdk/provider");
524
+ var name14 = "AI_RetryError";
525
+ var marker14 = `vercel.ai.error.${name14}`;
526
+ var symbol14 = Symbol.for(marker14);
527
+ var _a14;
528
+ var RetryError = class extends import_provider16.AISDKError {
556
529
  constructor({
557
530
  message,
558
531
  reason,
559
532
  errors
560
533
  }) {
561
- super({ name: name15, message });
562
- this[_a15] = true;
534
+ super({ name: name14, message });
535
+ this[_a14] = true;
563
536
  this.reason = reason;
564
537
  this.errors = errors;
565
538
  this.lastError = errors[errors.length - 1];
566
539
  }
567
540
  static isInstance(error) {
568
- return import_provider17.AISDKError.hasMarker(error, marker15);
541
+ return import_provider16.AISDKError.hasMarker(error, marker14);
569
542
  }
570
543
  };
571
- _a15 = symbol15;
544
+ _a14 = symbol14;
572
545
 
573
546
  // src/model/resolve-model.ts
574
547
  function transformToV3LanguageModel(model) {
@@ -644,7 +617,7 @@ function resolveEmbeddingModel(model) {
644
617
  );
645
618
  }
646
619
  function resolveTranscriptionModel(model) {
647
- var _a17, _b;
620
+ var _a16, _b;
648
621
  if (typeof model !== "string") {
649
622
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
650
623
  const unsupportedModel = model;
@@ -659,10 +632,10 @@ function resolveTranscriptionModel(model) {
659
632
  }
660
633
  return model;
661
634
  }
662
- return (_b = (_a17 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a17, model);
635
+ return (_b = (_a16 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a16, model);
663
636
  }
664
637
  function resolveSpeechModel(model) {
665
- var _a17, _b;
638
+ var _a16, _b;
666
639
  if (typeof model !== "string") {
667
640
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
668
641
  const unsupportedModel = model;
@@ -677,11 +650,11 @@ function resolveSpeechModel(model) {
677
650
  }
678
651
  return model;
679
652
  }
680
- return (_b = (_a17 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a17, model);
653
+ return (_b = (_a16 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a16, model);
681
654
  }
682
655
  function getGlobalProvider() {
683
- var _a17;
684
- return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : import_gateway.gateway;
656
+ var _a16;
657
+ return (_a16 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a16 : import_gateway.gateway;
685
658
  }
686
659
 
687
660
  // src/prompt/convert-to-language-model-prompt.ts
@@ -876,11 +849,11 @@ function detectMediaType({
876
849
  var import_provider_utils2 = require("@ai-sdk/provider-utils");
877
850
 
878
851
  // src/version.ts
879
- var VERSION = true ? "6.0.0-beta.71" : "0.0.0-test";
852
+ var VERSION = true ? "6.0.0-beta.73" : "0.0.0-test";
880
853
 
881
854
  // src/util/download/download.ts
882
855
  var download = async ({ url }) => {
883
- var _a17;
856
+ var _a16;
884
857
  const urlText = url.toString();
885
858
  try {
886
859
  const response = await fetch(urlText, {
@@ -899,7 +872,7 @@ var download = async ({ url }) => {
899
872
  }
900
873
  return {
901
874
  data: new Uint8Array(await response.arrayBuffer()),
902
- mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
875
+ mediaType: (_a16 = response.headers.get("content-type")) != null ? _a16 : void 0
903
876
  };
904
877
  } catch (error) {
905
878
  if (DownloadError.isInstance(error)) {
@@ -917,7 +890,7 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
917
890
  );
918
891
 
919
892
  // src/prompt/data-content.ts
920
- var import_provider19 = require("@ai-sdk/provider");
893
+ var import_provider18 = require("@ai-sdk/provider");
921
894
  var import_provider_utils3 = require("@ai-sdk/provider-utils");
922
895
  var import_v4 = require("zod/v4");
923
896
 
@@ -945,8 +918,8 @@ var dataContentSchema = import_v4.z.union([
945
918
  import_v4.z.custom(
946
919
  // Buffer might not be available in some environments such as CloudFlare:
947
920
  (value) => {
948
- var _a17, _b;
949
- return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
921
+ var _a16, _b;
922
+ return (_b = (_a16 = globalThis.Buffer) == null ? void 0 : _a16.isBuffer(value)) != null ? _b : false;
950
923
  },
951
924
  { message: "Must be a Buffer" }
952
925
  )
@@ -969,7 +942,7 @@ function convertToLanguageModelV3DataContent(content) {
969
942
  content.toString()
970
943
  );
971
944
  if (dataUrlMediaType == null || base64Content == null) {
972
- throw new import_provider19.AISDKError({
945
+ throw new import_provider18.AISDKError({
973
946
  name: "InvalidDataContentError",
974
947
  message: `Invalid data URL format in content ${content.toString()}`
975
948
  });
@@ -1160,8 +1133,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
1160
1133
  ).flat().filter(
1161
1134
  (part) => part.type === "image" || part.type === "file"
1162
1135
  ).map((part) => {
1163
- var _a17;
1164
- const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
1136
+ var _a16;
1137
+ const mediaType = (_a16 = part.mediaType) != null ? _a16 : part.type === "image" ? "image/*" : void 0;
1165
1138
  let data = part.type === "image" ? part.image : part.data;
1166
1139
  if (typeof data === "string") {
1167
1140
  try {
@@ -1191,7 +1164,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
1191
1164
  );
1192
1165
  }
1193
1166
  function convertPartToLanguageModelPart(part, downloadedAssets) {
1194
- var _a17;
1167
+ var _a16;
1195
1168
  if (part.type === "text") {
1196
1169
  return {
1197
1170
  type: "text",
@@ -1224,7 +1197,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1224
1197
  switch (type) {
1225
1198
  case "image": {
1226
1199
  if (data instanceof Uint8Array || typeof data === "string") {
1227
- mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
1200
+ mediaType = (_a16 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a16 : mediaType;
1228
1201
  }
1229
1202
  return {
1230
1203
  type: "file",
@@ -1276,19 +1249,19 @@ function mapToolResultOutput(output) {
1276
1249
  }
1277
1250
 
1278
1251
  // src/prompt/create-tool-model-output.ts
1279
- var import_provider20 = require("@ai-sdk/provider");
1252
+ var import_provider19 = require("@ai-sdk/provider");
1280
1253
  function createToolModelOutput({
1281
1254
  output,
1282
- tool: tool3,
1255
+ tool: tool2,
1283
1256
  errorMode
1284
1257
  }) {
1285
1258
  if (errorMode === "text") {
1286
- return { type: "error-text", value: (0, import_provider20.getErrorMessage)(output) };
1259
+ return { type: "error-text", value: (0, import_provider19.getErrorMessage)(output) };
1287
1260
  } else if (errorMode === "json") {
1288
1261
  return { type: "error-json", value: toJSONValue(output) };
1289
1262
  }
1290
- if (tool3 == null ? void 0 : tool3.toModelOutput) {
1291
- return tool3.toModelOutput(output);
1263
+ if (tool2 == null ? void 0 : tool2.toModelOutput) {
1264
+ return tool2.toModelOutput(output);
1292
1265
  }
1293
1266
  return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: toJSONValue(output) };
1294
1267
  }
@@ -1410,29 +1383,29 @@ async function prepareToolsAndToolChoice({
1410
1383
  };
1411
1384
  }
1412
1385
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
1413
- ([name17]) => activeTools.includes(name17)
1386
+ ([name16]) => activeTools.includes(name16)
1414
1387
  ) : Object.entries(tools);
1415
1388
  const languageModelTools = [];
1416
- for (const [name17, tool3] of filteredTools) {
1417
- const toolType = tool3.type;
1389
+ for (const [name16, tool2] of filteredTools) {
1390
+ const toolType = tool2.type;
1418
1391
  switch (toolType) {
1419
1392
  case void 0:
1420
1393
  case "dynamic":
1421
1394
  case "function":
1422
1395
  languageModelTools.push({
1423
1396
  type: "function",
1424
- name: name17,
1425
- description: tool3.description,
1426
- inputSchema: await (0, import_provider_utils5.asSchema)(tool3.inputSchema).jsonSchema,
1427
- providerOptions: tool3.providerOptions
1397
+ name: name16,
1398
+ description: tool2.description,
1399
+ inputSchema: await (0, import_provider_utils5.asSchema)(tool2.inputSchema).jsonSchema,
1400
+ providerOptions: tool2.providerOptions
1428
1401
  });
1429
1402
  break;
1430
1403
  case "provider-defined":
1431
1404
  languageModelTools.push({
1432
1405
  type: "provider-defined",
1433
- name: name17,
1434
- id: tool3.id,
1435
- args: tool3.args
1406
+ name: name16,
1407
+ id: tool2.id,
1408
+ args: tool2.args
1436
1409
  });
1437
1410
  break;
1438
1411
  default: {
@@ -1448,7 +1421,7 @@ async function prepareToolsAndToolChoice({
1448
1421
  }
1449
1422
 
1450
1423
  // src/prompt/standardize-prompt.ts
1451
- var import_provider21 = require("@ai-sdk/provider");
1424
+ var import_provider20 = require("@ai-sdk/provider");
1452
1425
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
1453
1426
  var import_v46 = require("zod/v4");
1454
1427
 
@@ -1666,19 +1639,19 @@ var coreMessageSchema = modelMessageSchema;
1666
1639
  // src/prompt/standardize-prompt.ts
1667
1640
  async function standardizePrompt(prompt) {
1668
1641
  if (prompt.prompt == null && prompt.messages == null) {
1669
- throw new import_provider21.InvalidPromptError({
1642
+ throw new import_provider20.InvalidPromptError({
1670
1643
  prompt,
1671
1644
  message: "prompt or messages must be defined"
1672
1645
  });
1673
1646
  }
1674
1647
  if (prompt.prompt != null && prompt.messages != null) {
1675
- throw new import_provider21.InvalidPromptError({
1648
+ throw new import_provider20.InvalidPromptError({
1676
1649
  prompt,
1677
1650
  message: "prompt and messages cannot be defined at the same time"
1678
1651
  });
1679
1652
  }
1680
1653
  if (prompt.system != null && typeof prompt.system !== "string") {
1681
- throw new import_provider21.InvalidPromptError({
1654
+ throw new import_provider20.InvalidPromptError({
1682
1655
  prompt,
1683
1656
  message: "system must be a string"
1684
1657
  });
@@ -1691,13 +1664,13 @@ async function standardizePrompt(prompt) {
1691
1664
  } else if (prompt.messages != null) {
1692
1665
  messages = prompt.messages;
1693
1666
  } else {
1694
- throw new import_provider21.InvalidPromptError({
1667
+ throw new import_provider20.InvalidPromptError({
1695
1668
  prompt,
1696
1669
  message: "prompt or messages must be defined"
1697
1670
  });
1698
1671
  }
1699
1672
  if (messages.length === 0) {
1700
- throw new import_provider21.InvalidPromptError({
1673
+ throw new import_provider20.InvalidPromptError({
1701
1674
  prompt,
1702
1675
  message: "messages must not be empty"
1703
1676
  });
@@ -1707,7 +1680,7 @@ async function standardizePrompt(prompt) {
1707
1680
  schema: import_v46.z.array(modelMessageSchema)
1708
1681
  });
1709
1682
  if (!validationResult.success) {
1710
- throw new import_provider21.InvalidPromptError({
1683
+ throw new import_provider20.InvalidPromptError({
1711
1684
  prompt,
1712
1685
  message: "The messages must be a ModelMessage[]. If you have passed a UIMessage[], you can use convertToModelMessages to convert them.",
1713
1686
  cause: validationResult.error
@@ -1721,10 +1694,10 @@ async function standardizePrompt(prompt) {
1721
1694
 
1722
1695
  // src/prompt/wrap-gateway-error.ts
1723
1696
  var import_gateway2 = require("@ai-sdk/gateway");
1724
- var import_provider22 = require("@ai-sdk/provider");
1697
+ var import_provider21 = require("@ai-sdk/provider");
1725
1698
  function wrapGatewayError(error) {
1726
1699
  if (import_gateway2.GatewayAuthenticationError.isInstance(error) || import_gateway2.GatewayModelNotFoundError.isInstance(error)) {
1727
- return new import_provider22.AISDKError({
1700
+ return new import_provider21.AISDKError({
1728
1701
  name: "GatewayError",
1729
1702
  message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
1730
1703
  cause: error
@@ -1755,7 +1728,7 @@ function getBaseTelemetryAttributes({
1755
1728
  telemetry,
1756
1729
  headers
1757
1730
  }) {
1758
- var _a17;
1731
+ var _a16;
1759
1732
  return {
1760
1733
  "ai.model.provider": model.provider,
1761
1734
  "ai.model.id": model.modelId,
@@ -1765,7 +1738,7 @@ function getBaseTelemetryAttributes({
1765
1738
  return attributes;
1766
1739
  }, {}),
1767
1740
  // add metadata as attributes:
1768
- ...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
1741
+ ...Object.entries((_a16 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a16 : {}).reduce(
1769
1742
  (attributes, [key, value]) => {
1770
1743
  attributes[`ai.telemetry.metadata.${key}`] = value;
1771
1744
  return attributes;
@@ -1790,7 +1763,7 @@ var noopTracer = {
1790
1763
  startSpan() {
1791
1764
  return noopSpan;
1792
1765
  },
1793
- startActiveSpan(name17, arg1, arg2, arg3) {
1766
+ startActiveSpan(name16, arg1, arg2, arg3) {
1794
1767
  if (typeof arg1 === "function") {
1795
1768
  return arg1(noopSpan);
1796
1769
  }
@@ -1860,14 +1833,14 @@ function getTracer({
1860
1833
  // src/telemetry/record-span.ts
1861
1834
  var import_api2 = require("@opentelemetry/api");
1862
1835
  async function recordSpan({
1863
- name: name17,
1836
+ name: name16,
1864
1837
  tracer,
1865
1838
  attributes,
1866
1839
  fn,
1867
1840
  endWhenDone = true
1868
1841
  }) {
1869
1842
  return tracer.startActiveSpan(
1870
- name17,
1843
+ name16,
1871
1844
  { attributes: await attributes },
1872
1845
  async (span) => {
1873
1846
  try {
@@ -1982,7 +1955,7 @@ function asArray(value) {
1982
1955
  }
1983
1956
 
1984
1957
  // src/util/retry-with-exponential-backoff.ts
1985
- var import_provider23 = require("@ai-sdk/provider");
1958
+ var import_provider22 = require("@ai-sdk/provider");
1986
1959
  var import_provider_utils7 = require("@ai-sdk/provider-utils");
1987
1960
  function getRetryDelayInMs({
1988
1961
  error,
@@ -2049,7 +2022,7 @@ async function _retryWithExponentialBackoff(f, {
2049
2022
  errors: newErrors
2050
2023
  });
2051
2024
  }
2052
- if (error instanceof Error && import_provider23.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
2025
+ if (error instanceof Error && import_provider22.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
2053
2026
  await (0, import_provider_utils7.delay)(
2054
2027
  getRetryDelayInMs({
2055
2028
  error,
@@ -2186,8 +2159,8 @@ async function executeToolCall({
2186
2159
  onPreliminaryToolResult
2187
2160
  }) {
2188
2161
  const { toolName, toolCallId, input } = toolCall;
2189
- const tool3 = tools == null ? void 0 : tools[toolName];
2190
- if ((tool3 == null ? void 0 : tool3.execute) == null) {
2162
+ const tool2 = tools == null ? void 0 : tools[toolName];
2163
+ if ((tool2 == null ? void 0 : tool2.execute) == null) {
2191
2164
  return void 0;
2192
2165
  }
2193
2166
  return recordSpan({
@@ -2211,7 +2184,7 @@ async function executeToolCall({
2211
2184
  let output;
2212
2185
  try {
2213
2186
  const stream = (0, import_provider_utils8.executeTool)({
2214
- execute: tool3.execute.bind(tool3),
2187
+ execute: tool2.execute.bind(tool2),
2215
2188
  input,
2216
2189
  options: {
2217
2190
  toolCallId,
@@ -2240,7 +2213,7 @@ async function executeToolCall({
2240
2213
  toolName,
2241
2214
  input,
2242
2215
  error,
2243
- dynamic: tool3.type === "dynamic"
2216
+ dynamic: tool2.type === "dynamic"
2244
2217
  };
2245
2218
  }
2246
2219
  try {
@@ -2262,7 +2235,7 @@ async function executeToolCall({
2262
2235
  toolName,
2263
2236
  input,
2264
2237
  output,
2265
- dynamic: tool3.type === "dynamic"
2238
+ dynamic: tool2.type === "dynamic"
2266
2239
  };
2267
2240
  }
2268
2241
  });
@@ -2315,18 +2288,18 @@ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
2315
2288
 
2316
2289
  // src/generate-text/is-approval-needed.ts
2317
2290
  async function isApprovalNeeded({
2318
- tool: tool3,
2291
+ tool: tool2,
2319
2292
  toolCall,
2320
2293
  messages,
2321
2294
  experimental_context
2322
2295
  }) {
2323
- if (tool3.needsApproval == null) {
2296
+ if (tool2.needsApproval == null) {
2324
2297
  return false;
2325
2298
  }
2326
- if (typeof tool3.needsApproval === "boolean") {
2327
- return tool3.needsApproval;
2299
+ if (typeof tool2.needsApproval === "boolean") {
2300
+ return tool2.needsApproval;
2328
2301
  }
2329
- return await tool3.needsApproval(toolCall.input, {
2302
+ return await tool2.needsApproval(toolCall.input, {
2330
2303
  toolCallId: toolCall.toolCallId,
2331
2304
  messages,
2332
2305
  experimental_context
@@ -2417,8 +2390,8 @@ async function doParseToolCall({
2417
2390
  tools
2418
2391
  }) {
2419
2392
  const toolName = toolCall.toolName;
2420
- const tool3 = tools[toolName];
2421
- if (tool3 == null) {
2393
+ const tool2 = tools[toolName];
2394
+ if (tool2 == null) {
2422
2395
  if (toolCall.providerExecuted && toolCall.dynamic) {
2423
2396
  return await parseProviderExecutedDynamicToolCall(toolCall);
2424
2397
  }
@@ -2427,7 +2400,7 @@ async function doParseToolCall({
2427
2400
  availableTools: Object.keys(tools)
2428
2401
  });
2429
2402
  }
2430
- const schema = (0, import_provider_utils10.asSchema)(tool3.inputSchema);
2403
+ const schema = (0, import_provider_utils10.asSchema)(tool2.inputSchema);
2431
2404
  const parseResult = toolCall.input.trim() === "" ? await (0, import_provider_utils10.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils10.safeParseJSON)({ text: toolCall.input, schema });
2432
2405
  if (parseResult.success === false) {
2433
2406
  throw new InvalidToolInputError({
@@ -2436,7 +2409,7 @@ async function doParseToolCall({
2436
2409
  cause: parseResult.error
2437
2410
  });
2438
2411
  }
2439
- return tool3.type === "dynamic" ? {
2412
+ return tool2.type === "dynamic" ? {
2440
2413
  type: "tool-call",
2441
2414
  toolCallId: toolCall.toolCallId,
2442
2415
  toolName: toolCall.toolName,
@@ -2522,8 +2495,8 @@ function stepCountIs(stepCount) {
2522
2495
  }
2523
2496
  function hasToolCall(toolName) {
2524
2497
  return ({ steps }) => {
2525
- var _a17, _b, _c;
2526
- return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
2498
+ var _a16, _b, _c;
2499
+ return (_c = (_b = (_a16 = steps[steps.length - 1]) == null ? void 0 : _a16.toolCalls) == null ? void 0 : _b.some(
2527
2500
  (toolCall) => toolCall.toolName === toolName
2528
2501
  )) != null ? _c : false;
2529
2502
  };
@@ -2711,7 +2684,7 @@ async function generateText({
2711
2684
  }),
2712
2685
  tracer,
2713
2686
  fn: async (span) => {
2714
- var _a17, _b, _c, _d, _e, _f, _g;
2687
+ var _a16, _b, _c, _d, _e, _f, _g;
2715
2688
  const initialMessages = initialPrompt.messages;
2716
2689
  const responseMessages = [];
2717
2690
  const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -2768,7 +2741,7 @@ async function generateText({
2768
2741
  messages: stepInputMessages
2769
2742
  }));
2770
2743
  const stepModel = resolveLanguageModel(
2771
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
2744
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
2772
2745
  );
2773
2746
  const promptMessages = await convertToLanguageModelPrompt({
2774
2747
  prompt: {
@@ -2785,7 +2758,7 @@ async function generateText({
2785
2758
  });
2786
2759
  currentModelResponse = await retry(
2787
2760
  () => {
2788
- var _a18;
2761
+ var _a17;
2789
2762
  return recordSpan({
2790
2763
  name: "ai.generateText.doGenerate",
2791
2764
  attributes: selectTelemetryAttributes({
@@ -2805,7 +2778,7 @@ async function generateText({
2805
2778
  },
2806
2779
  "ai.prompt.tools": {
2807
2780
  // convert the language model level tools:
2808
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
2781
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
2809
2782
  },
2810
2783
  "ai.prompt.toolChoice": {
2811
2784
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -2817,14 +2790,14 @@ async function generateText({
2817
2790
  "gen_ai.request.max_tokens": settings.maxOutputTokens,
2818
2791
  "gen_ai.request.presence_penalty": settings.presencePenalty,
2819
2792
  "gen_ai.request.stop_sequences": settings.stopSequences,
2820
- "gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
2793
+ "gen_ai.request.temperature": (_a17 = settings.temperature) != null ? _a17 : void 0,
2821
2794
  "gen_ai.request.top_k": settings.topK,
2822
2795
  "gen_ai.request.top_p": settings.topP
2823
2796
  }
2824
2797
  }),
2825
2798
  tracer,
2826
2799
  fn: async (span2) => {
2827
- var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
2800
+ var _a18, _b2, _c2, _d2, _e2, _f2, _g2, _h;
2828
2801
  const result = await stepModel.doGenerate({
2829
2802
  ...callSettings2,
2830
2803
  tools: stepTools,
@@ -2836,7 +2809,7 @@ async function generateText({
2836
2809
  headers: headersWithUserAgent
2837
2810
  });
2838
2811
  const responseData = {
2839
- id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId2(),
2812
+ id: (_b2 = (_a18 = result.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId2(),
2840
2813
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
2841
2814
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
2842
2815
  headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
@@ -2897,12 +2870,12 @@ async function generateText({
2897
2870
  if (toolCall.invalid) {
2898
2871
  continue;
2899
2872
  }
2900
- const tool3 = tools == null ? void 0 : tools[toolCall.toolName];
2901
- if (tool3 == null) {
2873
+ const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
2874
+ if (tool2 == null) {
2902
2875
  continue;
2903
2876
  }
2904
- if ((tool3 == null ? void 0 : tool3.onInputAvailable) != null) {
2905
- await tool3.onInputAvailable({
2877
+ if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
2878
+ await tool2.onInputAvailable({
2906
2879
  input: toolCall.input,
2907
2880
  toolCallId: toolCall.toolCallId,
2908
2881
  messages: stepInputMessages,
@@ -2911,7 +2884,7 @@ async function generateText({
2911
2884
  });
2912
2885
  }
2913
2886
  if (await isApprovalNeeded({
2914
- tool: tool3,
2887
+ tool: tool2,
2915
2888
  toolCall,
2916
2889
  messages: stepInputMessages,
2917
2890
  experimental_context
@@ -3241,7 +3214,7 @@ function asContent({
3241
3214
  }
3242
3215
 
3243
3216
  // src/generate-text/stream-text.ts
3244
- var import_provider24 = require("@ai-sdk/provider");
3217
+ var import_provider23 = require("@ai-sdk/provider");
3245
3218
  var import_provider_utils16 = require("@ai-sdk/provider-utils");
3246
3219
 
3247
3220
  // src/util/prepare-headers.ts
@@ -3971,7 +3944,7 @@ function processUIMessageStream({
3971
3944
  new TransformStream({
3972
3945
  async transform(chunk, controller) {
3973
3946
  await runUpdateMessageJob(async ({ state, write }) => {
3974
- var _a17, _b, _c, _d;
3947
+ var _a16, _b, _c, _d;
3975
3948
  function getToolInvocation(toolCallId) {
3976
3949
  const toolInvocations = state.message.parts.filter(
3977
3950
  isToolOrDynamicToolUIPart
@@ -3987,7 +3960,7 @@ function processUIMessageStream({
3987
3960
  return toolInvocation;
3988
3961
  }
3989
3962
  function updateToolPart(options) {
3990
- var _a18;
3963
+ var _a17;
3991
3964
  const part = state.message.parts.find(
3992
3965
  (part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
3993
3966
  );
@@ -4000,7 +3973,7 @@ function processUIMessageStream({
4000
3973
  anyPart.errorText = anyOptions.errorText;
4001
3974
  anyPart.rawInput = anyOptions.rawInput;
4002
3975
  anyPart.preliminary = anyOptions.preliminary;
4003
- anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
3976
+ anyPart.providerExecuted = (_a17 = anyOptions.providerExecuted) != null ? _a17 : part.providerExecuted;
4004
3977
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
4005
3978
  part.callProviderMetadata = anyOptions.providerMetadata;
4006
3979
  }
@@ -4020,7 +3993,7 @@ function processUIMessageStream({
4020
3993
  }
4021
3994
  }
4022
3995
  function updateDynamicToolPart(options) {
4023
- var _a18, _b2;
3996
+ var _a17, _b2;
4024
3997
  const part = state.message.parts.find(
4025
3998
  (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
4026
3999
  );
@@ -4032,7 +4005,7 @@ function processUIMessageStream({
4032
4005
  anyPart.input = anyOptions.input;
4033
4006
  anyPart.output = anyOptions.output;
4034
4007
  anyPart.errorText = anyOptions.errorText;
4035
- anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
4008
+ anyPart.rawInput = (_a17 = anyOptions.rawInput) != null ? _a17 : anyPart.rawInput;
4036
4009
  anyPart.preliminary = anyOptions.preliminary;
4037
4010
  anyPart.providerExecuted = (_b2 = anyOptions.providerExecuted) != null ? _b2 : part.providerExecuted;
4038
4011
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
@@ -4081,7 +4054,7 @@ function processUIMessageStream({
4081
4054
  case "text-delta": {
4082
4055
  const textPart = state.activeTextParts[chunk.id];
4083
4056
  textPart.text += chunk.delta;
4084
- textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
4057
+ textPart.providerMetadata = (_a16 = chunk.providerMetadata) != null ? _a16 : textPart.providerMetadata;
4085
4058
  write();
4086
4059
  break;
4087
4060
  }
@@ -4505,11 +4478,11 @@ function createAsyncIterableStream(source) {
4505
4478
  const reader = this.getReader();
4506
4479
  let finished = false;
4507
4480
  async function cleanup(cancelStream) {
4508
- var _a17;
4481
+ var _a16;
4509
4482
  finished = true;
4510
4483
  try {
4511
4484
  if (cancelStream) {
4512
- await ((_a17 = reader.cancel) == null ? void 0 : _a17.call(reader));
4485
+ await ((_a16 = reader.cancel) == null ? void 0 : _a16.call(reader));
4513
4486
  }
4514
4487
  } finally {
4515
4488
  try {
@@ -4696,25 +4669,25 @@ var DelayedPromise = class {
4696
4669
  return this._promise;
4697
4670
  }
4698
4671
  resolve(value) {
4699
- var _a17;
4672
+ var _a16;
4700
4673
  this.status = { type: "resolved", value };
4701
4674
  if (this._promise) {
4702
- (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
4675
+ (_a16 = this._resolve) == null ? void 0 : _a16.call(this, value);
4703
4676
  }
4704
4677
  }
4705
4678
  reject(error) {
4706
- var _a17;
4679
+ var _a16;
4707
4680
  this.status = { type: "rejected", error };
4708
4681
  if (this._promise) {
4709
- (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
4682
+ (_a16 = this._reject) == null ? void 0 : _a16.call(this, error);
4710
4683
  }
4711
4684
  }
4712
4685
  };
4713
4686
 
4714
4687
  // src/util/now.ts
4715
4688
  function now() {
4716
- var _a17, _b;
4717
- return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
4689
+ var _a16, _b;
4690
+ return (_b = (_a16 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a16.now()) != null ? _b : Date.now();
4718
4691
  }
4719
4692
 
4720
4693
  // src/generate-text/run-tools-transformation.ts
@@ -4810,12 +4783,12 @@ function runToolsTransformation({
4810
4783
  });
4811
4784
  break;
4812
4785
  }
4813
- const tool3 = tools == null ? void 0 : tools[toolCall.toolName];
4814
- if (tool3 == null) {
4786
+ const tool2 = tools == null ? void 0 : tools[toolCall.toolName];
4787
+ if (tool2 == null) {
4815
4788
  break;
4816
4789
  }
4817
- if (tool3.onInputAvailable != null) {
4818
- await tool3.onInputAvailable({
4790
+ if (tool2.onInputAvailable != null) {
4791
+ await tool2.onInputAvailable({
4819
4792
  input: toolCall.input,
4820
4793
  toolCallId: toolCall.toolCallId,
4821
4794
  messages,
@@ -4824,7 +4797,7 @@ function runToolsTransformation({
4824
4797
  });
4825
4798
  }
4826
4799
  if (await isApprovalNeeded({
4827
- tool: tool3,
4800
+ tool: tool2,
4828
4801
  toolCall,
4829
4802
  messages,
4830
4803
  experimental_context
@@ -4837,7 +4810,7 @@ function runToolsTransformation({
4837
4810
  break;
4838
4811
  }
4839
4812
  toolInputs.set(toolCall.toolCallId, toolCall.input);
4840
- if (tool3.execute != null && toolCall.providerExecuted !== true) {
4813
+ if (tool2.execute != null && toolCall.providerExecuted !== true) {
4841
4814
  const toolExecutionId = generateId2();
4842
4815
  outstandingToolResults.add(toolExecutionId);
4843
4816
  executeToolCall({
@@ -5115,7 +5088,7 @@ var DefaultStreamTextResult = class {
5115
5088
  let activeReasoningContent = {};
5116
5089
  const eventProcessor = new TransformStream({
5117
5090
  async transform(chunk, controller) {
5118
- var _a17, _b, _c, _d;
5091
+ var _a16, _b, _c, _d;
5119
5092
  controller.enqueue(chunk);
5120
5093
  const { part } = chunk;
5121
5094
  if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -5145,7 +5118,7 @@ var DefaultStreamTextResult = class {
5145
5118
  return;
5146
5119
  }
5147
5120
  activeText.text += part.text;
5148
- activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
5121
+ activeText.providerMetadata = (_a16 = part.providerMetadata) != null ? _a16 : activeText.providerMetadata;
5149
5122
  }
5150
5123
  if (part.type === "text-end") {
5151
5124
  const activeText = activeTextContent[part.id];
@@ -5304,8 +5277,8 @@ var DefaultStreamTextResult = class {
5304
5277
  "ai.response.text": { output: () => finalStep.text },
5305
5278
  "ai.response.toolCalls": {
5306
5279
  output: () => {
5307
- var _a17;
5308
- return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
5280
+ var _a16;
5281
+ return ((_a16 = finalStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
5309
5282
  }
5310
5283
  },
5311
5284
  "ai.response.providerMetadata": JSON.stringify(
@@ -5485,7 +5458,7 @@ var DefaultStreamTextResult = class {
5485
5458
  responseMessages,
5486
5459
  usage
5487
5460
  }) {
5488
- var _a17, _b, _c, _d, _e;
5461
+ var _a16, _b, _c, _d, _e;
5489
5462
  const includeRawChunks2 = self.includeRawChunks;
5490
5463
  stepFinish = new DelayedPromise();
5491
5464
  const stepInputMessages = [...initialMessages, ...responseMessages];
@@ -5496,7 +5469,7 @@ var DefaultStreamTextResult = class {
5496
5469
  messages: stepInputMessages
5497
5470
  }));
5498
5471
  const stepModel = resolveLanguageModel(
5499
- (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
5472
+ (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
5500
5473
  );
5501
5474
  const promptMessages = await convertToLanguageModelPrompt({
5502
5475
  prompt: {
@@ -5535,7 +5508,7 @@ var DefaultStreamTextResult = class {
5535
5508
  },
5536
5509
  "ai.prompt.tools": {
5537
5510
  // convert the language model level tools:
5538
- input: () => stepTools == null ? void 0 : stepTools.map((tool3) => JSON.stringify(tool3))
5511
+ input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
5539
5512
  },
5540
5513
  "ai.prompt.toolChoice": {
5541
5514
  input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
@@ -5607,7 +5580,7 @@ var DefaultStreamTextResult = class {
5607
5580
  streamWithToolResults.pipeThrough(
5608
5581
  new TransformStream({
5609
5582
  async transform(chunk, controller) {
5610
- var _a18, _b2, _c2, _d2, _e2;
5583
+ var _a17, _b2, _c2, _d2, _e2;
5611
5584
  if (chunk.type === "stream-start") {
5612
5585
  warnings = chunk.warnings;
5613
5586
  return;
@@ -5680,7 +5653,7 @@ var DefaultStreamTextResult = class {
5680
5653
  }
5681
5654
  case "response-metadata": {
5682
5655
  stepResponse = {
5683
- id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
5656
+ id: (_a17 = chunk.id) != null ? _a17 : stepResponse.id,
5684
5657
  timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
5685
5658
  modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
5686
5659
  };
@@ -5708,9 +5681,9 @@ var DefaultStreamTextResult = class {
5708
5681
  }
5709
5682
  case "tool-input-start": {
5710
5683
  activeToolCallToolNames[chunk.id] = chunk.toolName;
5711
- const tool3 = tools == null ? void 0 : tools[chunk.toolName];
5712
- if ((tool3 == null ? void 0 : tool3.onInputStart) != null) {
5713
- await tool3.onInputStart({
5684
+ const tool2 = tools == null ? void 0 : tools[chunk.toolName];
5685
+ if ((tool2 == null ? void 0 : tool2.onInputStart) != null) {
5686
+ await tool2.onInputStart({
5714
5687
  toolCallId: chunk.id,
5715
5688
  messages: stepInputMessages,
5716
5689
  abortSignal,
@@ -5719,7 +5692,7 @@ var DefaultStreamTextResult = class {
5719
5692
  }
5720
5693
  controller.enqueue({
5721
5694
  ...chunk,
5722
- dynamic: (_e2 = chunk.dynamic) != null ? _e2 : (tool3 == null ? void 0 : tool3.type) === "dynamic"
5695
+ dynamic: (_e2 = chunk.dynamic) != null ? _e2 : (tool2 == null ? void 0 : tool2.type) === "dynamic"
5723
5696
  });
5724
5697
  break;
5725
5698
  }
@@ -5730,9 +5703,9 @@ var DefaultStreamTextResult = class {
5730
5703
  }
5731
5704
  case "tool-input-delta": {
5732
5705
  const toolName = activeToolCallToolNames[chunk.id];
5733
- const tool3 = tools == null ? void 0 : tools[toolName];
5734
- if ((tool3 == null ? void 0 : tool3.onInputDelta) != null) {
5735
- await tool3.onInputDelta({
5706
+ const tool2 = tools == null ? void 0 : tools[toolName];
5707
+ if ((tool2 == null ? void 0 : tool2.onInputDelta) != null) {
5708
+ await tool2.onInputDelta({
5736
5709
  inputTextDelta: chunk.delta,
5737
5710
  toolCallId: chunk.id,
5738
5711
  messages: stepInputMessages,
@@ -5982,14 +5955,14 @@ var DefaultStreamTextResult = class {
5982
5955
  );
5983
5956
  }
5984
5957
  async consumeStream(options) {
5985
- var _a17;
5958
+ var _a16;
5986
5959
  try {
5987
5960
  await consumeStream({
5988
5961
  stream: this.fullStream,
5989
5962
  onError: options == null ? void 0 : options.onError
5990
5963
  });
5991
5964
  } catch (error) {
5992
- (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
5965
+ (_a16 = options == null ? void 0 : options.onError) == null ? void 0 : _a16.call(options, error);
5993
5966
  }
5994
5967
  }
5995
5968
  get experimental_partialOutputStream() {
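Aside: the hunk above only renames minified helpers inside `consumeStream(options)` on the streamText result, but it shows the `options?.onError` call path. A minimal sketch of how that method is typically used, assuming an `@ai-sdk/openai` provider import and model id (both assumptions, not part of this diff):

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const result = streamText({
  model: openai('gpt-4o-mini'), // assumed model id
  prompt: 'Write a haiku about diffs.',
});

// Drain the stream so the result promises settle even if nothing reads it;
// onError mirrors the options?.onError call visible in the hunk above.
await result.consumeStream({
  onError: error => console.error('stream error:', error),
});

console.log(await result.text);
```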
@@ -6020,19 +5993,19 @@ var DefaultStreamTextResult = class {
6020
5993
  sendSources = false,
6021
5994
  sendStart = true,
6022
5995
  sendFinish = true,
6023
- onError = import_provider24.getErrorMessage
5996
+ onError = import_provider23.getErrorMessage
6024
5997
  } = {}) {
6025
5998
  const responseMessageId = generateMessageId != null ? getResponseUIMessageId({
6026
5999
  originalMessages,
6027
6000
  responseMessageId: generateMessageId
6028
6001
  }) : void 0;
6029
6002
  const isDynamic = (part) => {
6030
- var _a17;
6031
- const tool3 = (_a17 = this.tools) == null ? void 0 : _a17[part.toolName];
6032
- if (tool3 == null) {
6003
+ var _a16;
6004
+ const tool2 = (_a16 = this.tools) == null ? void 0 : _a16[part.toolName];
6005
+ if (tool2 == null) {
6033
6006
  return part.dynamic;
6034
6007
  }
6035
- return (tool3 == null ? void 0 : tool3.type) === "dynamic" ? true : void 0;
6008
+ return (tool2 == null ? void 0 : tool2.type) === "dynamic" ? true : void 0;
6036
6009
  };
6037
6010
  const baseStream = this.fullStream.pipeThrough(
6038
6011
  new TransformStream({
@@ -6362,10 +6335,10 @@ var ToolLoopAgent = class {
6362
6335
  return this.settings.tools;
6363
6336
  }
6364
6337
  async prepareCall(options) {
6365
- var _a17, _b, _c, _d;
6338
+ var _a16, _b, _c, _d;
6366
6339
  const baseCallArgs = {
6367
6340
  ...this.settings,
6368
- stopWhen: (_a17 = this.settings.stopWhen) != null ? _a17 : stepCountIs(20),
6341
+ stopWhen: (_a16 = this.settings.stopWhen) != null ? _a16 : stepCountIs(20),
6369
6342
  ...options
6370
6343
  };
6371
6344
  const preparedCallArgs = (_d = await ((_c = (_b = this.settings).prepareCall) == null ? void 0 : _c.call(_b, baseCallArgs))) != null ? _d : baseCallArgs;
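Aside: this hunk shows `ToolLoopAgent.prepareCall` defaulting `stopWhen` to `stepCountIs(20)` when none is configured. A hedged sketch of constructing the class (exported as `Experimental_Agent`) with an explicit stop condition; the tool definition, model id, and the `generate` method name are illustrative assumptions, not taken from this hunk:

```ts
import { Experimental_Agent, stepCountIs, tool } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { z } from 'zod';

const agent = new Experimental_Agent({
  model: openai('gpt-4o-mini'), // assumed model id
  tools: {
    weather: tool({
      description: 'Get the weather for a city',
      inputSchema: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, tempC: 21 }), // stubbed result
    }),
  },
  // Overrides the stepCountIs(20) default applied in prepareCall above.
  stopWhen: stepCountIs(5),
});

// Method name assumed from the public agent API; not shown in this hunk.
const { text } = await agent.generate({ prompt: 'Weather in Berlin?' });
console.log(text);
```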
@@ -6483,7 +6456,7 @@ function readUIMessageStream({
6483
6456
  onError,
6484
6457
  terminateOnError = false
6485
6458
  }) {
6486
- var _a17;
6459
+ var _a16;
6487
6460
  let controller;
6488
6461
  let hasErrored = false;
6489
6462
  const outputStream = new ReadableStream({
@@ -6492,7 +6465,7 @@ function readUIMessageStream({
6492
6465
  }
6493
6466
  });
6494
6467
  const state = createStreamingUIMessageState({
6495
- messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
6468
+ messageId: (_a16 = message == null ? void 0 : message.id) != null ? _a16 : "",
6496
6469
  lastMessage: message
6497
6470
  });
6498
6471
  const handleError = (error) => {
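Aside: the hunks above renumber helpers inside `readUIMessageStream`. A small usage sketch, assuming the input stream comes from a `streamText` result converted to a UI message stream (provider import and model id are assumptions):

```ts
import { readUIMessageStream, streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const result = streamText({
  model: openai('gpt-4o-mini'), // assumed model id
  prompt: 'Explain SSE in one paragraph.',
});

// Each iteration yields the latest snapshot of the assembled UI message.
for await (const uiMessage of readUIMessageStream({
  stream: result.toUIMessageStream(),
})) {
  console.log(uiMessage.parts);
}
```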
@@ -6559,7 +6532,7 @@ function convertToModelMessages(messages, options) {
6559
6532
  modelMessages.push({
6560
6533
  role: "user",
6561
6534
  content: message.parts.map((part) => {
6562
- var _a17;
6535
+ var _a16;
6563
6536
  if (isTextUIPart(part)) {
6564
6537
  return {
6565
6538
  type: "text",
@@ -6577,7 +6550,7 @@ function convertToModelMessages(messages, options) {
6577
6550
  };
6578
6551
  }
6579
6552
  if (isDataUIPart(part)) {
6580
- return (_a17 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a17.call(
6553
+ return (_a16 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a16.call(
6581
6554
  options,
6582
6555
  part
6583
6556
  );
@@ -6589,7 +6562,7 @@ function convertToModelMessages(messages, options) {
6589
6562
  case "assistant": {
6590
6563
  if (message.parts != null) {
6591
6564
  let processBlock2 = function() {
6592
- var _a17, _b, _c;
6565
+ var _a16, _b, _c;
6593
6566
  if (block.length === 0) {
6594
6567
  return;
6595
6568
  }
@@ -6621,7 +6594,7 @@ function convertToModelMessages(messages, options) {
6621
6594
  type: "tool-call",
6622
6595
  toolCallId: part.toolCallId,
6623
6596
  toolName,
6624
- input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : "rawInput" in part ? part.rawInput : void 0 : part.input,
6597
+ input: part.state === "output-error" ? (_a16 = part.input) != null ? _a16 : "rawInput" in part ? part.rawInput : void 0 : part.input,
6625
6598
  providerExecuted: part.providerExecuted,
6626
6599
  ...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
6627
6600
  });
@@ -6670,9 +6643,9 @@ function convertToModelMessages(messages, options) {
6670
6643
  role: "tool",
6671
6644
  content: toolParts.flatMap(
6672
6645
  (toolPart) => {
6673
- var _a18, _b2, _c2;
6646
+ var _a17, _b2, _c2;
6674
6647
  const outputs = [];
6675
- if (((_a18 = toolPart.approval) == null ? void 0 : _a18.approved) != null) {
6648
+ if (((_a17 = toolPart.approval) == null ? void 0 : _a17.approved) != null) {
6676
6649
  outputs.push({
6677
6650
  type: "tool-approval-response",
6678
6651
  approvalId: toolPart.approval.id,
@@ -6744,7 +6717,7 @@ function convertToModelMessages(messages, options) {
6744
6717
  var convertToCoreMessages = convertToModelMessages;
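Aside: `convertToModelMessages` (aliased above as `convertToCoreMessages`) maps UI messages coming from the client into model messages. A typical route-handler sketch; the request shape, provider import, and model id are assumptions:

```ts
import { convertToModelMessages, streamText, type UIMessage } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

export async function POST(req: Request) {
  // Assumes the client posts { messages: UIMessage[] }.
  const { messages }: { messages: UIMessage[] } = await req.json();

  const result = streamText({
    model: openai('gpt-4o-mini'), // assumed model id
    messages: convertToModelMessages(messages),
  });

  return result.toUIMessageStreamResponse();
}
```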
6745
6718
 
6746
6719
  // src/ui/validate-ui-messages.ts
6747
- var import_provider25 = require("@ai-sdk/provider");
6720
+ var import_provider24 = require("@ai-sdk/provider");
6748
6721
  var import_provider_utils19 = require("@ai-sdk/provider-utils");
6749
6722
  var import_v48 = require("zod/v4");
6750
6723
  var uiMessagesSchema = (0, import_provider_utils19.lazySchema)(
@@ -7045,7 +7018,7 @@ async function safeValidateUIMessages({
7045
7018
  if (!dataSchema) {
7046
7019
  return {
7047
7020
  success: false,
7048
- error: new import_provider25.TypeValidationError({
7021
+ error: new import_provider24.TypeValidationError({
7049
7022
  value: dataPart.data,
7050
7023
  cause: `No data schema found for data part ${dataName}`
7051
7024
  })
@@ -7065,11 +7038,11 @@ async function safeValidateUIMessages({
7065
7038
  );
7066
7039
  for (const toolPart of toolParts) {
7067
7040
  const toolName = toolPart.type.slice(5);
7068
- const tool3 = tools[toolName];
7069
- if (!tool3) {
7041
+ const tool2 = tools[toolName];
7042
+ if (!tool2) {
7070
7043
  return {
7071
7044
  success: false,
7072
- error: new import_provider25.TypeValidationError({
7045
+ error: new import_provider24.TypeValidationError({
7073
7046
  value: toolPart.input,
7074
7047
  cause: `No tool schema found for tool part ${toolName}`
7075
7048
  })
@@ -7078,13 +7051,13 @@ async function safeValidateUIMessages({
7078
7051
  if (toolPart.state === "input-available" || toolPart.state === "output-available" || toolPart.state === "output-error") {
7079
7052
  await (0, import_provider_utils19.validateTypes)({
7080
7053
  value: toolPart.input,
7081
- schema: tool3.inputSchema
7054
+ schema: tool2.inputSchema
7082
7055
  });
7083
7056
  }
7084
- if (toolPart.state === "output-available" && tool3.outputSchema) {
7057
+ if (toolPart.state === "output-available" && tool2.outputSchema) {
7085
7058
  await (0, import_provider_utils19.validateTypes)({
7086
7059
  value: toolPart.output,
7087
- schema: tool3.outputSchema
7060
+ schema: tool2.outputSchema
7088
7061
  });
7089
7062
  }
7090
7063
  }
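Aside: the validation hunks above look up each tool part's schema and fail with a `TypeValidationError` when no matching tool or data schema is registered. A hedged sketch of calling `safeValidateUIMessages` with a tool map (assuming it is exported alongside `validateUIMessages`); the message payload and zod schemas are illustrative:

```ts
import { safeValidateUIMessages, tool } from 'ai';
import { z } from 'zod';

const tools = {
  weather: tool({
    inputSchema: z.object({ city: z.string() }),
    outputSchema: z.object({ tempC: z.number() }), // checked for output-available parts
  }),
};

// Stand-in for messages loaded from storage (illustrative).
const loadedMessages = [
  { id: '1', role: 'user' as const, parts: [{ type: 'text' as const, text: 'Hi' }] },
];

const result = await safeValidateUIMessages({
  messages: loadedMessages,
  tools,
});

if (!result.success) {
  console.error('invalid stored messages:', result.error);
}
```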
@@ -7234,7 +7207,7 @@ async function embed({
7234
7207
  }),
7235
7208
  tracer,
7236
7209
  fn: async (doEmbedSpan) => {
7237
- var _a17;
7210
+ var _a16;
7238
7211
  const modelResponse = await model.doEmbed({
7239
7212
  values: [value],
7240
7213
  abortSignal,
@@ -7242,7 +7215,7 @@ async function embed({
7242
7215
  providerOptions
7243
7216
  });
7244
7217
  const embedding2 = modelResponse.embeddings[0];
7245
- const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7218
+ const usage2 = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
7246
7219
  doEmbedSpan.setAttributes(
7247
7220
  await selectTelemetryAttributes({
7248
7221
  telemetry,
@@ -7352,7 +7325,7 @@ async function embedMany({
7352
7325
  }),
7353
7326
  tracer,
7354
7327
  fn: async (span) => {
7355
- var _a17;
7328
+ var _a16;
7356
7329
  const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
7357
7330
  model.maxEmbeddingsPerCall,
7358
7331
  model.supportsParallelCalls
@@ -7378,7 +7351,7 @@ async function embedMany({
7378
7351
  }),
7379
7352
  tracer,
7380
7353
  fn: async (doEmbedSpan) => {
7381
- var _a18;
7354
+ var _a17;
7382
7355
  const modelResponse = await model.doEmbed({
7383
7356
  values,
7384
7357
  abortSignal,
@@ -7386,7 +7359,7 @@ async function embedMany({
7386
7359
  providerOptions
7387
7360
  });
7388
7361
  const embeddings3 = modelResponse.embeddings;
7389
- const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
7362
+ const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7390
7363
  doEmbedSpan.setAttributes(
7391
7364
  await selectTelemetryAttributes({
7392
7365
  telemetry,
@@ -7460,7 +7433,7 @@ async function embedMany({
7460
7433
  }),
7461
7434
  tracer,
7462
7435
  fn: async (doEmbedSpan) => {
7463
- var _a18;
7436
+ var _a17;
7464
7437
  const modelResponse = await model.doEmbed({
7465
7438
  values: chunk,
7466
7439
  abortSignal,
@@ -7468,7 +7441,7 @@ async function embedMany({
7468
7441
  providerOptions
7469
7442
  });
7470
7443
  const embeddings2 = modelResponse.embeddings;
7471
- const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
7444
+ const usage = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
7472
7445
  doEmbedSpan.setAttributes(
7473
7446
  await selectTelemetryAttributes({
7474
7447
  telemetry,
@@ -7505,7 +7478,7 @@ async function embedMany({
7505
7478
  result.providerMetadata
7506
7479
  )) {
7507
7480
  providerMetadata[providerName] = {
7508
- ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
7481
+ ...(_a16 = providerMetadata[providerName]) != null ? _a16 : {},
7509
7482
  ...metadata
7510
7483
  };
7511
7484
  }
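Aside: `embedMany` above splits the values into chunks based on the model's `maxEmbeddingsPerCall` and falls back to `{ tokens: NaN }` when a call reports no usage. A minimal sketch (provider import and model id are assumptions):

```ts
import { embedMany } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const { embeddings, usage } = await embedMany({
  model: openai.textEmbeddingModel('text-embedding-3-small'), // assumed model id
  values: ['sunny day at the beach', 'rainy afternoon in the city'],
});

// One embedding vector per input value, in the same order.
console.log(embeddings.length, usage.tokens);
```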
@@ -7559,7 +7532,7 @@ async function generateImage({
7559
7532
  abortSignal,
7560
7533
  headers
7561
7534
  }) {
7562
- var _a17, _b;
7535
+ var _a16, _b;
7563
7536
  if (model.specificationVersion !== "v3") {
7564
7537
  throw new UnsupportedModelVersionError({
7565
7538
  version: model.specificationVersion,
@@ -7575,7 +7548,7 @@ async function generateImage({
7575
7548
  maxRetries: maxRetriesArg,
7576
7549
  abortSignal
7577
7550
  });
7578
- const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
7551
+ const maxImagesPerCallWithDefault = (_a16 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a16 : 1;
7579
7552
  const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
7580
7553
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
7581
7554
  if (i < callCount - 1) {
@@ -7608,13 +7581,13 @@ async function generateImage({
7608
7581
  images.push(
7609
7582
  ...result.images.map(
7610
7583
  (image) => {
7611
- var _a18;
7584
+ var _a17;
7612
7585
  return new DefaultGeneratedFile({
7613
7586
  data: image,
7614
- mediaType: (_a18 = detectMediaType({
7587
+ mediaType: (_a17 = detectMediaType({
7615
7588
  data: image,
7616
7589
  signatures: imageMediaTypeSignatures
7617
- })) != null ? _a18 : "image/png"
7590
+ })) != null ? _a17 : "image/png"
7618
7591
  });
7619
7592
  }
7620
7593
  )
@@ -7674,7 +7647,7 @@ function extractReasoningContent(content) {
7674
7647
  }
7675
7648
 
7676
7649
  // src/generate-object/output-strategy.ts
7677
- var import_provider26 = require("@ai-sdk/provider");
7650
+ var import_provider25 = require("@ai-sdk/provider");
7678
7651
  var import_provider_utils23 = require("@ai-sdk/provider-utils");
7679
7652
  var noSchemaOutputStrategy = {
7680
7653
  type: "no-schema",
@@ -7695,7 +7668,7 @@ var noSchemaOutputStrategy = {
7695
7668
  } : { success: true, value };
7696
7669
  },
7697
7670
  createElementStream() {
7698
- throw new import_provider26.UnsupportedFunctionalityError({
7671
+ throw new import_provider25.UnsupportedFunctionalityError({
7699
7672
  functionality: "element streams in no-schema mode"
7700
7673
  });
7701
7674
  }
@@ -7717,14 +7690,14 @@ var objectOutputStrategy = (schema) => ({
7717
7690
  return (0, import_provider_utils23.safeValidateTypes)({ value, schema });
7718
7691
  },
7719
7692
  createElementStream() {
7720
- throw new import_provider26.UnsupportedFunctionalityError({
7693
+ throw new import_provider25.UnsupportedFunctionalityError({
7721
7694
  functionality: "element streams in object mode"
7722
7695
  });
7723
7696
  }
7724
7697
  });
7725
7698
  var arrayOutputStrategy = (schema) => {
7726
7699
  return {
7727
- type: "enum",
7700
+ type: "array",
7728
7701
  // wrap in object that contains array of elements, since most LLMs will not
7729
7702
  // be able to generate an array directly:
7730
7703
  // possible future optimization: use arrays directly when model supports grammar-guided generation
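Aside: this hunk also corrects the array output strategy's `type` label from "enum" to "array". A sketch of the corresponding `generateObject` call with `output: 'array'`; the schema and prompt are illustrative, and the provider import is an assumption:

```ts
import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { z } from 'zod';

const { object: heroes } = await generateObject({
  model: openai('gpt-4o-mini'), // assumed model id
  output: 'array',
  schema: z.object({
    name: z.string(),
    class: z.string(),
  }),
  prompt: 'Generate three RPG party members.',
});

// `heroes` is an array of schema-shaped elements; internally the model is
// asked for an { elements: [...] } wrapper object, as the strategy above shows.
console.log(heroes.length);
```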
@@ -7746,11 +7719,11 @@ var arrayOutputStrategy = (schema) => {
7746
7719
  isFirstDelta,
7747
7720
  isFinalDelta
7748
7721
  }) {
7749
- var _a17;
7750
- if (!(0, import_provider26.isJSONObject)(value) || !(0, import_provider26.isJSONArray)(value.elements)) {
7722
+ var _a16;
7723
+ if (!(0, import_provider25.isJSONObject)(value) || !(0, import_provider25.isJSONArray)(value.elements)) {
7751
7724
  return {
7752
7725
  success: false,
7753
- error: new import_provider26.TypeValidationError({
7726
+ error: new import_provider25.TypeValidationError({
7754
7727
  value,
7755
7728
  cause: "value must be an object that contains an array of elements"
7756
7729
  })
@@ -7769,7 +7742,7 @@ var arrayOutputStrategy = (schema) => {
7769
7742
  }
7770
7743
  resultArray.push(result.value);
7771
7744
  }
7772
- const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
7745
+ const publishedElementCount = (_a16 = latestObject == null ? void 0 : latestObject.length) != null ? _a16 : 0;
7773
7746
  let textDelta = "";
7774
7747
  if (isFirstDelta) {
7775
7748
  textDelta += "[";
@@ -7790,10 +7763,10 @@ var arrayOutputStrategy = (schema) => {
7790
7763
  };
7791
7764
  },
7792
7765
  async validateFinalResult(value) {
7793
- if (!(0, import_provider26.isJSONObject)(value) || !(0, import_provider26.isJSONArray)(value.elements)) {
7766
+ if (!(0, import_provider25.isJSONObject)(value) || !(0, import_provider25.isJSONArray)(value.elements)) {
7794
7767
  return {
7795
7768
  success: false,
7796
- error: new import_provider26.TypeValidationError({
7769
+ error: new import_provider25.TypeValidationError({
7797
7770
  value,
7798
7771
  cause: "value must be an object that contains an array of elements"
7799
7772
  })
@@ -7856,10 +7829,10 @@ var enumOutputStrategy = (enumValues) => {
7856
7829
  additionalProperties: false
7857
7830
  }),
7858
7831
  async validateFinalResult(value) {
7859
- if (!(0, import_provider26.isJSONObject)(value) || typeof value.result !== "string") {
7832
+ if (!(0, import_provider25.isJSONObject)(value) || typeof value.result !== "string") {
7860
7833
  return {
7861
7834
  success: false,
7862
- error: new import_provider26.TypeValidationError({
7835
+ error: new import_provider25.TypeValidationError({
7863
7836
  value,
7864
7837
  cause: 'value must be an object that contains a string in the "result" property.'
7865
7838
  })
@@ -7868,17 +7841,17 @@ var enumOutputStrategy = (enumValues) => {
7868
7841
  const result = value.result;
7869
7842
  return enumValues.includes(result) ? { success: true, value: result } : {
7870
7843
  success: false,
7871
- error: new import_provider26.TypeValidationError({
7844
+ error: new import_provider25.TypeValidationError({
7872
7845
  value,
7873
7846
  cause: "value must be a string in the enum"
7874
7847
  })
7875
7848
  };
7876
7849
  },
7877
7850
  async validatePartialResult({ value, textDelta }) {
7878
- if (!(0, import_provider26.isJSONObject)(value) || typeof value.result !== "string") {
7851
+ if (!(0, import_provider25.isJSONObject)(value) || typeof value.result !== "string") {
7879
7852
  return {
7880
7853
  success: false,
7881
- error: new import_provider26.TypeValidationError({
7854
+ error: new import_provider25.TypeValidationError({
7882
7855
  value,
7883
7856
  cause: 'value must be an object that contains a string in the "result" property.'
7884
7857
  })
@@ -7891,7 +7864,7 @@ var enumOutputStrategy = (enumValues) => {
7891
7864
  if (value.result.length === 0 || possibleEnumValues.length === 0) {
7892
7865
  return {
7893
7866
  success: false,
7894
- error: new import_provider26.TypeValidationError({
7867
+ error: new import_provider25.TypeValidationError({
7895
7868
  value,
7896
7869
  cause: "value must be a string in the enum"
7897
7870
  })
@@ -7906,7 +7879,7 @@ var enumOutputStrategy = (enumValues) => {
7906
7879
  };
7907
7880
  },
7908
7881
  createElementStream() {
7909
- throw new import_provider26.UnsupportedFunctionalityError({
7882
+ throw new import_provider25.UnsupportedFunctionalityError({
7910
7883
  functionality: "element streams in enum mode"
7911
7884
  });
7912
7885
  }
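Aside: the enum output strategy above expects the model to answer with `{ result: <enum value> }` and rejects anything outside the allowed set. A matching caller-side sketch with `output: 'enum'`; the categories, prompt, and provider import are assumptions:

```ts
import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const { object: genre } = await generateObject({
  model: openai('gpt-4o-mini'), // assumed model id
  output: 'enum',
  enum: ['action', 'comedy', 'drama', 'horror'],
  prompt: 'Classify: "A ghost haunts a family in their new home."',
});

// `genre` is validated against the listed enum values, per the strategy above.
console.log(genre);
```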
@@ -7934,7 +7907,7 @@ function getOutputStrategy({
7934
7907
  }
7935
7908
 
7936
7909
  // src/generate-object/parse-and-validate-object-result.ts
7937
- var import_provider27 = require("@ai-sdk/provider");
7910
+ var import_provider26 = require("@ai-sdk/provider");
7938
7911
  var import_provider_utils24 = require("@ai-sdk/provider-utils");
7939
7912
  async function parseAndValidateObjectResult(result, outputStrategy, context) {
7940
7913
  const parseResult = await (0, import_provider_utils24.safeParseJSON)({ text: result });
@@ -7972,7 +7945,7 @@ async function parseAndValidateObjectResultWithRepair(result, outputStrategy, re
7972
7945
  try {
7973
7946
  return await parseAndValidateObjectResult(result, outputStrategy, context);
7974
7947
  } catch (error) {
7975
- if (repairText != null && NoObjectGeneratedError.isInstance(error) && (import_provider27.JSONParseError.isInstance(error.cause) || import_provider27.TypeValidationError.isInstance(error.cause))) {
7948
+ if (repairText != null && NoObjectGeneratedError.isInstance(error) && (import_provider26.JSONParseError.isInstance(error.cause) || import_provider26.TypeValidationError.isInstance(error.cause))) {
7976
7949
  const repairedText = await repairText({
7977
7950
  text: result,
7978
7951
  error: error.cause
@@ -8165,7 +8138,7 @@ async function generateObject(options) {
8165
8138
  settings: { ...callSettings, maxRetries }
8166
8139
  });
8167
8140
  const tracer = getTracer(telemetry);
8168
- const jsonSchema3 = await outputStrategy.jsonSchema();
8141
+ const jsonSchema2 = await outputStrategy.jsonSchema();
8169
8142
  try {
8170
8143
  return await recordSpan({
8171
8144
  name: "ai.generateObject",
@@ -8181,7 +8154,7 @@ async function generateObject(options) {
8181
8154
  "ai.prompt": {
8182
8155
  input: () => JSON.stringify({ system, prompt, messages })
8183
8156
  },
8184
- "ai.schema": jsonSchema3 != null ? { input: () => JSON.stringify(jsonSchema3) } : void 0,
8157
+ "ai.schema": jsonSchema2 != null ? { input: () => JSON.stringify(jsonSchema2) } : void 0,
8185
8158
  "ai.schema.name": schemaName,
8186
8159
  "ai.schema.description": schemaDescription,
8187
8160
  "ai.settings.output": outputStrategy.type
@@ -8189,7 +8162,7 @@ async function generateObject(options) {
8189
8162
  }),
8190
8163
  tracer,
8191
8164
  fn: async (span) => {
8192
- var _a17;
8165
+ var _a16;
8193
8166
  let result;
8194
8167
  let finishReason;
8195
8168
  let usage;
@@ -8235,11 +8208,11 @@ async function generateObject(options) {
8235
8208
  }),
8236
8209
  tracer,
8237
8210
  fn: async (span2) => {
8238
- var _a18, _b, _c, _d, _e, _f, _g, _h;
8211
+ var _a17, _b, _c, _d, _e, _f, _g, _h;
8239
8212
  const result2 = await model.doGenerate({
8240
8213
  responseFormat: {
8241
8214
  type: "json",
8242
- schema: jsonSchema3,
8215
+ schema: jsonSchema2,
8243
8216
  name: schemaName,
8244
8217
  description: schemaDescription
8245
8218
  },
@@ -8250,7 +8223,7 @@ async function generateObject(options) {
8250
8223
  headers: headersWithUserAgent
8251
8224
  });
8252
8225
  const responseData = {
8253
- id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId2(),
8226
+ id: (_b = (_a17 = result2.response) == null ? void 0 : _a17.id) != null ? _b : generateId2(),
8254
8227
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
8255
8228
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
8256
8229
  headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -8304,7 +8277,7 @@ async function generateObject(options) {
8304
8277
  usage = generateResult.usage;
8305
8278
  warnings = generateResult.warnings;
8306
8279
  resultProviderMetadata = generateResult.providerMetadata;
8307
- request = (_a17 = generateResult.request) != null ? _a17 : {};
8280
+ request = (_a16 = generateResult.request) != null ? _a16 : {};
8308
8281
  response = generateResult.responseData;
8309
8282
  reasoning = generateResult.reasoning;
8310
8283
  logWarnings(warnings);
@@ -8363,9 +8336,9 @@ var DefaultGenerateObjectResult = class {
8363
8336
  this.reasoning = options.reasoning;
8364
8337
  }
8365
8338
  toJsonResponse(init) {
8366
- var _a17;
8339
+ var _a16;
8367
8340
  return new Response(JSON.stringify(this.object), {
8368
- status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
8341
+ status: (_a16 = init == null ? void 0 : init.status) != null ? _a16 : 200,
8369
8342
  headers: prepareHeaders(init == null ? void 0 : init.headers, {
8370
8343
  "content-type": "application/json; charset=utf-8"
8371
8344
  })
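Aside: `toJsonResponse` above wraps the generated object in a JSON `Response`, defaulting the status to 200. A short route-handler sketch; the provider import, model id, and schema are assumptions:

```ts
import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { z } from 'zod';

export async function GET() {
  const result = await generateObject({
    model: openai('gpt-4o-mini'), // assumed model id
    schema: z.object({ summary: z.string() }),
    prompt: 'Summarize the AI SDK in one sentence.',
  });

  // Uses status 200 and a JSON content-type by default, per the class above.
  return result.toJsonResponse();
}
```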
@@ -8491,8 +8464,8 @@ function simulateReadableStream({
8491
8464
  chunkDelayInMs = 0,
8492
8465
  _internal
8493
8466
  }) {
8494
- var _a17;
8495
- const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils26.delay;
8467
+ var _a16;
8468
+ const delay2 = (_a16 = _internal == null ? void 0 : _internal.delay) != null ? _a16 : import_provider_utils26.delay;
8496
8469
  let index = 0;
8497
8470
  return new ReadableStream({
8498
8471
  async pull(controller) {
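Aside: `simulateReadableStream` above emits the provided chunks one by one, waiting `chunkDelayInMs` between them, which is mainly useful for tests and demos. A self-contained sketch that reads the simulated stream with a standard reader:

```ts
import { simulateReadableStream } from 'ai';

// Emits three text chunks, waiting 50 ms between chunks.
const stream = simulateReadableStream({
  chunks: ['Hello', ', ', 'world!'],
  chunkDelayInMs: 50,
});

const reader = stream.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  process.stdout.write(value);
}
```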
@@ -8754,7 +8727,7 @@ var DefaultStreamObjectResult = class {
8754
8727
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
8755
8728
  new TransformStream({
8756
8729
  async transform(chunk, controller) {
8757
- var _a17, _b, _c;
8730
+ var _a16, _b, _c;
8758
8731
  if (typeof chunk === "object" && chunk.type === "stream-start") {
8759
8732
  warnings = chunk.warnings;
8760
8733
  return;
@@ -8804,7 +8777,7 @@ var DefaultStreamObjectResult = class {
8804
8777
  switch (chunk.type) {
8805
8778
  case "response-metadata": {
8806
8779
  fullResponse = {
8807
- id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
8780
+ id: (_a16 = chunk.id) != null ? _a16 : fullResponse.id,
8808
8781
  timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
8809
8782
  modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
8810
8783
  };
@@ -9071,7 +9044,7 @@ async function generateSpeech({
9071
9044
  abortSignal,
9072
9045
  headers
9073
9046
  }) {
9074
- var _a17;
9047
+ var _a16;
9075
9048
  const resolvedModel = resolveSpeechModel(model);
9076
9049
  if (!resolvedModel) {
9077
9050
  throw new Error("Model could not be resolved");
@@ -9104,10 +9077,10 @@ async function generateSpeech({
9104
9077
  return new DefaultSpeechResult({
9105
9078
  audio: new DefaultGeneratedAudioFile({
9106
9079
  data: result.audio,
9107
- mediaType: (_a17 = detectMediaType({
9080
+ mediaType: (_a16 = detectMediaType({
9108
9081
  data: result.audio,
9109
9082
  signatures: audioMediaTypeSignatures
9110
- })) != null ? _a17 : "audio/mp3"
9083
+ })) != null ? _a16 : "audio/mp3"
9111
9084
  }),
9112
9085
  warnings: result.warnings,
9113
9086
  responses: [result.response],
@@ -9116,11 +9089,11 @@ async function generateSpeech({
9116
9089
  }
9117
9090
  var DefaultSpeechResult = class {
9118
9091
  constructor(options) {
9119
- var _a17;
9092
+ var _a16;
9120
9093
  this.audio = options.audio;
9121
9094
  this.warnings = options.warnings;
9122
9095
  this.responses = options.responses;
9123
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
9096
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
9124
9097
  }
9125
9098
  };
9126
9099
 
@@ -9132,7 +9105,7 @@ __export(output_exports, {
9132
9105
  object: () => object,
9133
9106
  text: () => text
9134
9107
  });
9135
- var import_provider28 = require("@ai-sdk/provider");
9108
+ var import_provider27 = require("@ai-sdk/provider");
9136
9109
  var import_provider_utils29 = require("@ai-sdk/provider-utils");
9137
9110
  var text = () => ({
9138
9111
  type: "text",
@@ -9150,9 +9123,9 @@ var object = ({
9150
9123
  const schema = (0, import_provider_utils29.asSchema)(inputSchema);
9151
9124
  return {
9152
9125
  type: "object",
9153
- responseFormat: (0, import_provider_utils29.resolve)(schema.jsonSchema).then((jsonSchema3) => ({
9126
+ responseFormat: (0, import_provider_utils29.resolve)(schema.jsonSchema).then((jsonSchema2) => ({
9154
9127
  type: "json",
9155
- schema: jsonSchema3
9128
+ schema: jsonSchema2
9156
9129
  })),
9157
9130
  async parseOutput({ text: text2 }, context) {
9158
9131
  const parseResult = await (0, import_provider_utils29.safeParseJSON)({ text: text2 });
@@ -9211,8 +9184,8 @@ var array = ({
9211
9184
  return {
9212
9185
  type: "object",
9213
9186
  // JSON schema that describes an array of elements:
9214
- responseFormat: (0, import_provider_utils29.resolve)(elementSchema.jsonSchema).then((jsonSchema3) => {
9215
- const { $schema, ...itemSchema } = jsonSchema3;
9187
+ responseFormat: (0, import_provider_utils29.resolve)(elementSchema.jsonSchema).then((jsonSchema2) => {
9188
+ const { $schema, ...itemSchema } = jsonSchema2;
9216
9189
  return {
9217
9190
  type: "json",
9218
9191
  schema: {
@@ -9242,7 +9215,7 @@ var array = ({
9242
9215
  if (outerValue == null || typeof outerValue !== "object" || !("elements" in outerValue) || !Array.isArray(outerValue.elements)) {
9243
9216
  throw new NoObjectGeneratedError({
9244
9217
  message: "No object generated: response did not match schema.",
9245
- cause: new import_provider28.TypeValidationError({
9218
+ cause: new import_provider27.TypeValidationError({
9246
9219
  value: outerValue,
9247
9220
  cause: "response must be an object with an elements array"
9248
9221
  }),
@@ -9338,7 +9311,7 @@ var choice = ({
9338
9311
  if (outerValue == null || typeof outerValue !== "object" || !("result" in outerValue) || typeof outerValue.result !== "string" || !choiceOptions.includes(outerValue.result)) {
9339
9312
  throw new NoObjectGeneratedError({
9340
9313
  message: "No object generated: response did not match schema.",
9341
- cause: new import_provider28.TypeValidationError({
9314
+ cause: new import_provider27.TypeValidationError({
9342
9315
  value: outerValue,
9343
9316
  cause: "response must be an object that contains a choice value."
9344
9317
  }),
@@ -9462,7 +9435,7 @@ function pruneMessages({
9462
9435
 
9463
9436
  // src/generate-text/smooth-stream.ts
9464
9437
  var import_provider_utils30 = require("@ai-sdk/provider-utils");
9465
- var import_provider29 = require("@ai-sdk/provider");
9438
+ var import_provider28 = require("@ai-sdk/provider");
9466
9439
  var CHUNKING_REGEXPS = {
9467
9440
  word: /\S+\s+/m,
9468
9441
  line: /\n+/m
@@ -9492,7 +9465,7 @@ function smoothStream({
9492
9465
  } else {
9493
9466
  const chunkingRegex = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking;
9494
9467
  if (chunkingRegex == null) {
9495
- throw new import_provider29.InvalidArgumentError({
9468
+ throw new import_provider28.InvalidArgumentError({
9496
9469
  argument: "chunking",
9497
9470
  message: `Chunking must be "word" or "line" or a RegExp. Received: ${chunking}`
9498
9471
  });
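Aside: `smoothStream` above accepts chunking of `'word'`, `'line'`, or a RegExp and otherwise throws the `InvalidArgumentError` shown. It is typically passed as a stream transform; the provider import and model id are assumptions:

```ts
import { smoothStream, streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const result = streamText({
  model: openai('gpt-4o-mini'), // assumed model id
  prompt: 'Tell a short story.',
  // Re-chunk the raw deltas into word-sized pieces ('line' or a RegExp also work).
  experimental_transform: smoothStream({ chunking: 'word' }),
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```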
@@ -9809,7 +9782,7 @@ var doWrap = ({
9809
9782
  modelId,
9810
9783
  providerId
9811
9784
  }) => {
9812
- var _a17, _b, _c;
9785
+ var _a16, _b, _c;
9813
9786
  async function doTransform({
9814
9787
  params,
9815
9788
  type
@@ -9818,7 +9791,7 @@ var doWrap = ({
9818
9791
  }
9819
9792
  return {
9820
9793
  specificationVersion: "v3",
9821
- provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
9794
+ provider: (_a16 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a16 : model.provider,
9822
9795
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
9823
9796
  supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
9824
9797
  async doGenerate(params) {
@@ -9864,7 +9837,7 @@ function wrapProvider({
9864
9837
  }
9865
9838
 
9866
9839
  // src/registry/custom-provider.ts
9867
- var import_provider30 = require("@ai-sdk/provider");
9840
+ var import_provider29 = require("@ai-sdk/provider");
9868
9841
  function customProvider({
9869
9842
  languageModels,
9870
9843
  textEmbeddingModels,
@@ -9881,7 +9854,7 @@ function customProvider({
9881
9854
  if (fallbackProvider) {
9882
9855
  return fallbackProvider.languageModel(modelId);
9883
9856
  }
9884
- throw new import_provider30.NoSuchModelError({ modelId, modelType: "languageModel" });
9857
+ throw new import_provider29.NoSuchModelError({ modelId, modelType: "languageModel" });
9885
9858
  },
9886
9859
  textEmbeddingModel(modelId) {
9887
9860
  if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
@@ -9890,7 +9863,7 @@ function customProvider({
9890
9863
  if (fallbackProvider) {
9891
9864
  return fallbackProvider.textEmbeddingModel(modelId);
9892
9865
  }
9893
- throw new import_provider30.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
9866
+ throw new import_provider29.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
9894
9867
  },
9895
9868
  imageModel(modelId) {
9896
9869
  if (imageModels != null && modelId in imageModels) {
@@ -9899,7 +9872,7 @@ function customProvider({
9899
9872
  if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {
9900
9873
  return fallbackProvider.imageModel(modelId);
9901
9874
  }
9902
- throw new import_provider30.NoSuchModelError({ modelId, modelType: "imageModel" });
9875
+ throw new import_provider29.NoSuchModelError({ modelId, modelType: "imageModel" });
9903
9876
  },
9904
9877
  transcriptionModel(modelId) {
9905
9878
  if (transcriptionModels != null && modelId in transcriptionModels) {
@@ -9908,7 +9881,7 @@ function customProvider({
9908
9881
  if (fallbackProvider == null ? void 0 : fallbackProvider.transcriptionModel) {
9909
9882
  return fallbackProvider.transcriptionModel(modelId);
9910
9883
  }
9911
- throw new import_provider30.NoSuchModelError({ modelId, modelType: "transcriptionModel" });
9884
+ throw new import_provider29.NoSuchModelError({ modelId, modelType: "transcriptionModel" });
9912
9885
  },
9913
9886
  speechModel(modelId) {
9914
9887
  if (speechModels != null && modelId in speechModels) {
@@ -9917,19 +9890,19 @@ function customProvider({
9917
9890
  if (fallbackProvider == null ? void 0 : fallbackProvider.speechModel) {
9918
9891
  return fallbackProvider.speechModel(modelId);
9919
9892
  }
9920
- throw new import_provider30.NoSuchModelError({ modelId, modelType: "speechModel" });
9893
+ throw new import_provider29.NoSuchModelError({ modelId, modelType: "speechModel" });
9921
9894
  }
9922
9895
  };
9923
9896
  }
9924
9897
  var experimental_customProvider = customProvider;
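Aside: `customProvider` above resolves model ids from the supplied maps and defers to `fallbackProvider` before throwing `NoSuchModelError`. A hedged configuration sketch; the aliases, model ids, and provider import are assumptions:

```ts
import { customProvider } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

export const myProvider = customProvider({
  languageModels: {
    // Friendly aliases mapped onto concrete models (ids are assumptions).
    fast: openai('gpt-4o-mini'),
    smart: openai('gpt-4o'),
  },
  // Unknown ids fall through to the wrapped provider instead of throwing.
  fallbackProvider: openai,
});

const model = myProvider.languageModel('fast');
```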
9925
9898
 
9926
9899
  // src/registry/no-such-provider-error.ts
9927
- var import_provider31 = require("@ai-sdk/provider");
9928
- var name16 = "AI_NoSuchProviderError";
9929
- var marker16 = `vercel.ai.error.${name16}`;
9930
- var symbol16 = Symbol.for(marker16);
9931
- var _a16;
9932
- var NoSuchProviderError = class extends import_provider31.NoSuchModelError {
9900
+ var import_provider30 = require("@ai-sdk/provider");
9901
+ var name15 = "AI_NoSuchProviderError";
9902
+ var marker15 = `vercel.ai.error.${name15}`;
9903
+ var symbol15 = Symbol.for(marker15);
9904
+ var _a15;
9905
+ var NoSuchProviderError = class extends import_provider30.NoSuchModelError {
9933
9906
  constructor({
9934
9907
  modelId,
9935
9908
  modelType,
@@ -9937,19 +9910,19 @@ var NoSuchProviderError = class extends import_provider31.NoSuchModelError {
9937
9910
  availableProviders,
9938
9911
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
9939
9912
  }) {
9940
- super({ errorName: name16, modelId, modelType, message });
9941
- this[_a16] = true;
9913
+ super({ errorName: name15, modelId, modelType, message });
9914
+ this[_a15] = true;
9942
9915
  this.providerId = providerId;
9943
9916
  this.availableProviders = availableProviders;
9944
9917
  }
9945
9918
  static isInstance(error) {
9946
- return import_provider31.AISDKError.hasMarker(error, marker16);
9919
+ return import_provider30.AISDKError.hasMarker(error, marker15);
9947
9920
  }
9948
9921
  };
9949
- _a16 = symbol16;
9922
+ _a15 = symbol15;
9950
9923
 
9951
9924
  // src/registry/provider-registry.ts
9952
- var import_provider32 = require("@ai-sdk/provider");
9925
+ var import_provider31 = require("@ai-sdk/provider");
9953
9926
  function createProviderRegistry(providers, {
9954
9927
  separator = ":",
9955
9928
  languageModelMiddleware
@@ -9994,7 +9967,7 @@ var DefaultProviderRegistry = class {
9994
9967
  splitId(id, modelType) {
9995
9968
  const index = id.indexOf(this.separator);
9996
9969
  if (index === -1) {
9997
- throw new import_provider32.NoSuchModelError({
9970
+ throw new import_provider31.NoSuchModelError({
9998
9971
  modelId: id,
9999
9972
  modelType,
10000
9973
  message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId${this.separator}modelId")`
@@ -10003,14 +9976,14 @@ var DefaultProviderRegistry = class {
10003
9976
  return [id.slice(0, index), id.slice(index + this.separator.length)];
10004
9977
  }
10005
9978
  languageModel(id) {
10006
- var _a17, _b;
9979
+ var _a16, _b;
10007
9980
  const [providerId, modelId] = this.splitId(id, "languageModel");
10008
- let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
10009
- _a17,
9981
+ let model = (_b = (_a16 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
9982
+ _a16,
10010
9983
  modelId
10011
9984
  );
10012
9985
  if (model == null) {
10013
- throw new import_provider32.NoSuchModelError({ modelId: id, modelType: "languageModel" });
9986
+ throw new import_provider31.NoSuchModelError({ modelId: id, modelType: "languageModel" });
10014
9987
  }
10015
9988
  if (this.languageModelMiddleware != null) {
10016
9989
  model = wrapLanguageModel({
@@ -10021,12 +9994,12 @@ var DefaultProviderRegistry = class {
10021
9994
  return model;
10022
9995
  }
10023
9996
  textEmbeddingModel(id) {
10024
- var _a17;
9997
+ var _a16;
10025
9998
  const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
10026
9999
  const provider = this.getProvider(providerId, "textEmbeddingModel");
10027
- const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
10000
+ const model = (_a16 = provider.textEmbeddingModel) == null ? void 0 : _a16.call(provider, modelId);
10028
10001
  if (model == null) {
10029
- throw new import_provider32.NoSuchModelError({
10002
+ throw new import_provider31.NoSuchModelError({
10030
10003
  modelId: id,
10031
10004
  modelType: "textEmbeddingModel"
10032
10005
  });
@@ -10034,22 +10007,22 @@ var DefaultProviderRegistry = class {
10034
10007
  return model;
10035
10008
  }
10036
10009
  imageModel(id) {
10037
- var _a17;
10010
+ var _a16;
10038
10011
  const [providerId, modelId] = this.splitId(id, "imageModel");
10039
10012
  const provider = this.getProvider(providerId, "imageModel");
10040
- const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
10013
+ const model = (_a16 = provider.imageModel) == null ? void 0 : _a16.call(provider, modelId);
10041
10014
  if (model == null) {
10042
- throw new import_provider32.NoSuchModelError({ modelId: id, modelType: "imageModel" });
10015
+ throw new import_provider31.NoSuchModelError({ modelId: id, modelType: "imageModel" });
10043
10016
  }
10044
10017
  return model;
10045
10018
  }
10046
10019
  transcriptionModel(id) {
10047
- var _a17;
10020
+ var _a16;
10048
10021
  const [providerId, modelId] = this.splitId(id, "transcriptionModel");
10049
10022
  const provider = this.getProvider(providerId, "transcriptionModel");
10050
- const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
10023
+ const model = (_a16 = provider.transcriptionModel) == null ? void 0 : _a16.call(provider, modelId);
10051
10024
  if (model == null) {
10052
- throw new import_provider32.NoSuchModelError({
10025
+ throw new import_provider31.NoSuchModelError({
10053
10026
  modelId: id,
10054
10027
  modelType: "transcriptionModel"
10055
10028
  });
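Aside: `DefaultProviderRegistry` above splits ids on the configured separator and throws `NoSuchModelError` when the provider prefix or model cannot be resolved. A registry sketch; the provider packages and model ids are assumptions:

```ts
import { createProviderRegistry } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider packages
import { anthropic } from '@ai-sdk/anthropic';

const registry = createProviderRegistry(
  { openai, anthropic },
  { separator: ':' }, // ':' is also the default, per the code above
);

// Ids must be "providerId:modelId", matching the splitId check above.
const model = registry.languageModel('openai:gpt-4o-mini');
const embedder = registry.textEmbeddingModel('openai:text-embedding-3-small');
```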
@@ -10057,600 +10030,23 @@ var DefaultProviderRegistry = class {
10057
10030
  return model;
10058
10031
  }
10059
10032
  speechModel(id) {
10060
- var _a17;
10033
+ var _a16;
10061
10034
  const [providerId, modelId] = this.splitId(id, "speechModel");
10062
10035
  const provider = this.getProvider(providerId, "speechModel");
10063
- const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
10036
+ const model = (_a16 = provider.speechModel) == null ? void 0 : _a16.call(provider, modelId);
10064
10037
  if (model == null) {
10065
- throw new import_provider32.NoSuchModelError({ modelId: id, modelType: "speechModel" });
10038
+ throw new import_provider31.NoSuchModelError({ modelId: id, modelType: "speechModel" });
10066
10039
  }
10067
10040
  return model;
10068
10041
  }
10069
10042
  };
10070
10043
 
10071
- // src/tool/mcp/mcp-client.ts
10072
- var import_provider_utils32 = require("@ai-sdk/provider-utils");
10073
-
10074
- // src/tool/mcp/mcp-sse-transport.ts
10075
- var import_provider_utils31 = require("@ai-sdk/provider-utils");
10076
-
10077
- // src/tool/mcp/json-rpc-message.ts
10078
- var import_v410 = require("zod/v4");
10079
-
10080
- // src/tool/mcp/types.ts
10081
- var import_v49 = require("zod/v4");
10082
- var LATEST_PROTOCOL_VERSION = "2025-06-18";
10083
- var SUPPORTED_PROTOCOL_VERSIONS = [
10084
- LATEST_PROTOCOL_VERSION,
10085
- "2025-03-26",
10086
- "2024-11-05"
10087
- ];
10088
- var ClientOrServerImplementationSchema = import_v49.z.looseObject({
10089
- name: import_v49.z.string(),
10090
- version: import_v49.z.string()
10091
- });
10092
- var BaseParamsSchema = import_v49.z.looseObject({
10093
- _meta: import_v49.z.optional(import_v49.z.object({}).loose())
10094
- });
10095
- var ResultSchema = BaseParamsSchema;
10096
- var RequestSchema = import_v49.z.object({
10097
- method: import_v49.z.string(),
10098
- params: import_v49.z.optional(BaseParamsSchema)
10099
- });
10100
- var ServerCapabilitiesSchema = import_v49.z.looseObject({
10101
- experimental: import_v49.z.optional(import_v49.z.object({}).loose()),
10102
- logging: import_v49.z.optional(import_v49.z.object({}).loose()),
10103
- prompts: import_v49.z.optional(
10104
- import_v49.z.looseObject({
10105
- listChanged: import_v49.z.optional(import_v49.z.boolean())
10106
- })
10107
- ),
10108
- resources: import_v49.z.optional(
10109
- import_v49.z.looseObject({
10110
- subscribe: import_v49.z.optional(import_v49.z.boolean()),
10111
- listChanged: import_v49.z.optional(import_v49.z.boolean())
10112
- })
10113
- ),
10114
- tools: import_v49.z.optional(
10115
- import_v49.z.looseObject({
10116
- listChanged: import_v49.z.optional(import_v49.z.boolean())
10117
- })
10118
- )
10119
- });
10120
- var InitializeResultSchema = ResultSchema.extend({
10121
- protocolVersion: import_v49.z.string(),
10122
- capabilities: ServerCapabilitiesSchema,
10123
- serverInfo: ClientOrServerImplementationSchema,
10124
- instructions: import_v49.z.optional(import_v49.z.string())
10125
- });
10126
- var PaginatedResultSchema = ResultSchema.extend({
10127
- nextCursor: import_v49.z.optional(import_v49.z.string())
10128
- });
10129
- var ToolSchema = import_v49.z.object({
10130
- name: import_v49.z.string(),
10131
- description: import_v49.z.optional(import_v49.z.string()),
10132
- inputSchema: import_v49.z.object({
10133
- type: import_v49.z.literal("object"),
10134
- properties: import_v49.z.optional(import_v49.z.object({}).loose())
10135
- }).loose()
10136
- }).loose();
10137
- var ListToolsResultSchema = PaginatedResultSchema.extend({
10138
- tools: import_v49.z.array(ToolSchema)
10139
- });
10140
- var TextContentSchema = import_v49.z.object({
10141
- type: import_v49.z.literal("text"),
10142
- text: import_v49.z.string()
10143
- }).loose();
10144
- var ImageContentSchema = import_v49.z.object({
10145
- type: import_v49.z.literal("image"),
10146
- data: import_v49.z.base64(),
10147
- mimeType: import_v49.z.string()
10148
- }).loose();
10149
- var ResourceContentsSchema = import_v49.z.object({
10150
- /**
10151
- * The URI of this resource.
10152
- */
10153
- uri: import_v49.z.string(),
10154
- /**
10155
- * The MIME type of this resource, if known.
10156
- */
10157
- mimeType: import_v49.z.optional(import_v49.z.string())
10158
- }).loose();
10159
- var TextResourceContentsSchema = ResourceContentsSchema.extend({
10160
- text: import_v49.z.string()
10161
- });
10162
- var BlobResourceContentsSchema = ResourceContentsSchema.extend({
10163
- blob: import_v49.z.base64()
10164
- });
10165
- var EmbeddedResourceSchema = import_v49.z.object({
10166
- type: import_v49.z.literal("resource"),
10167
- resource: import_v49.z.union([TextResourceContentsSchema, BlobResourceContentsSchema])
10168
- }).loose();
10169
- var CallToolResultSchema = ResultSchema.extend({
10170
- content: import_v49.z.array(
10171
- import_v49.z.union([TextContentSchema, ImageContentSchema, EmbeddedResourceSchema])
10172
- ),
10173
- isError: import_v49.z.boolean().default(false).optional()
10174
- }).or(
10175
- ResultSchema.extend({
10176
- toolResult: import_v49.z.unknown()
10177
- })
10178
- );
10179
-
10180
- // src/tool/mcp/json-rpc-message.ts
10181
- var JSONRPC_VERSION = "2.0";
10182
- var JSONRPCRequestSchema = import_v410.z.object({
10183
- jsonrpc: import_v410.z.literal(JSONRPC_VERSION),
10184
- id: import_v410.z.union([import_v410.z.string(), import_v410.z.number().int()])
10185
- }).merge(RequestSchema).strict();
10186
- var JSONRPCResponseSchema = import_v410.z.object({
10187
- jsonrpc: import_v410.z.literal(JSONRPC_VERSION),
10188
- id: import_v410.z.union([import_v410.z.string(), import_v410.z.number().int()]),
10189
- result: ResultSchema
10190
- }).strict();
10191
- var JSONRPCErrorSchema = import_v410.z.object({
10192
- jsonrpc: import_v410.z.literal(JSONRPC_VERSION),
10193
- id: import_v410.z.union([import_v410.z.string(), import_v410.z.number().int()]),
10194
- error: import_v410.z.object({
10195
- code: import_v410.z.number().int(),
10196
- message: import_v410.z.string(),
10197
- data: import_v410.z.optional(import_v410.z.unknown())
10198
- })
10199
- }).strict();
10200
- var JSONRPCNotificationSchema = import_v410.z.object({
10201
- jsonrpc: import_v410.z.literal(JSONRPC_VERSION)
10202
- }).merge(
10203
- import_v410.z.object({
10204
- method: import_v410.z.string(),
10205
- params: import_v410.z.optional(BaseParamsSchema)
10206
- })
10207
- ).strict();
10208
- var JSONRPCMessageSchema = import_v410.z.union([
10209
- JSONRPCRequestSchema,
10210
- JSONRPCNotificationSchema,
10211
- JSONRPCResponseSchema,
10212
- JSONRPCErrorSchema
10213
- ]);
10214
-
10215
- // src/tool/mcp/mcp-sse-transport.ts
10216
- var SseMCPTransport = class {
10217
- constructor({
10218
- url,
10219
- headers
10220
- }) {
10221
- this.connected = false;
10222
- this.url = new URL(url);
10223
- this.headers = headers;
10224
- }
10225
- async start() {
10226
- return new Promise((resolve3, reject) => {
10227
- if (this.connected) {
10228
- return resolve3();
10229
- }
10230
- this.abortController = new AbortController();
10231
- const establishConnection = async () => {
10232
- var _a17, _b, _c;
10233
- try {
10234
- const headers = (0, import_provider_utils31.withUserAgentSuffix)(
10235
- {
10236
- ...this.headers,
10237
- Accept: "text/event-stream"
10238
- },
10239
- `ai-sdk/${VERSION}`,
10240
- (0, import_provider_utils31.getRuntimeEnvironmentUserAgent)()
10241
- );
10242
- const response = await fetch(this.url.href, {
10243
- headers,
10244
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
10245
- });
10246
- if (!response.ok || !response.body) {
10247
- const error = new MCPClientError({
10248
- message: `MCP SSE Transport Error: ${response.status} ${response.statusText}`
10249
- });
10250
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
10251
- return reject(error);
10252
- }
10253
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_provider_utils31.EventSourceParserStream());
10254
- const reader = stream.getReader();
10255
- const processEvents = async () => {
10256
- var _a18, _b2, _c2;
10257
- try {
10258
- while (true) {
10259
- const { done, value } = await reader.read();
10260
- if (done) {
10261
- if (this.connected) {
10262
- this.connected = false;
10263
- throw new MCPClientError({
10264
- message: "MCP SSE Transport Error: Connection closed unexpectedly"
10265
- });
10266
- }
10267
- return;
10268
- }
10269
- const { event, data } = value;
10270
- if (event === "endpoint") {
10271
- this.endpoint = new URL(data, this.url);
10272
- if (this.endpoint.origin !== this.url.origin) {
10273
- throw new MCPClientError({
10274
- message: `MCP SSE Transport Error: Endpoint origin does not match connection origin: ${this.endpoint.origin}`
10275
- });
10276
- }
10277
- this.connected = true;
10278
- resolve3();
10279
- } else if (event === "message") {
10280
- try {
10281
- const message = JSONRPCMessageSchema.parse(
10282
- JSON.parse(data)
10283
- );
10284
- (_a18 = this.onmessage) == null ? void 0 : _a18.call(this, message);
10285
- } catch (error) {
10286
- const e = new MCPClientError({
10287
- message: "MCP SSE Transport Error: Failed to parse message",
10288
- cause: error
10289
- });
10290
- (_b2 = this.onerror) == null ? void 0 : _b2.call(this, e);
10291
- }
10292
- }
10293
- }
10294
- } catch (error) {
10295
- if (error instanceof Error && error.name === "AbortError") {
10296
- return;
10297
- }
10298
- (_c2 = this.onerror) == null ? void 0 : _c2.call(this, error);
10299
- reject(error);
10300
- }
10301
- };
10302
- this.sseConnection = {
10303
- close: () => reader.cancel()
10304
- };
10305
- processEvents();
10306
- } catch (error) {
10307
- if (error instanceof Error && error.name === "AbortError") {
10308
- return;
10309
- }
10310
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
10311
- reject(error);
10312
- }
10313
- };
10314
- establishConnection();
10315
- });
10316
- }
10317
- async close() {
10318
- var _a17, _b, _c;
10319
- this.connected = false;
10320
- (_a17 = this.sseConnection) == null ? void 0 : _a17.close();
10321
- (_b = this.abortController) == null ? void 0 : _b.abort();
10322
- (_c = this.onclose) == null ? void 0 : _c.call(this);
10323
- }
10324
- async send(message) {
10325
- var _a17, _b, _c;
10326
- if (!this.endpoint || !this.connected) {
10327
- throw new MCPClientError({
10328
- message: "MCP SSE Transport Error: Not connected"
10329
- });
10330
- }
10331
- try {
10332
- const headers = (0, import_provider_utils31.withUserAgentSuffix)(
10333
- {
10334
- ...this.headers,
10335
- "Content-Type": "application/json"
10336
- },
10337
- `ai-sdk/${VERSION}`,
10338
- (0, import_provider_utils31.getRuntimeEnvironmentUserAgent)()
10339
- );
10340
- const init = {
10341
- method: "POST",
10342
- headers,
10343
- body: JSON.stringify(message),
10344
- signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
10345
- };
10346
- const response = await fetch(this.endpoint, init);
10347
- if (!response.ok) {
10348
- const text2 = await response.text().catch(() => null);
10349
- const error = new MCPClientError({
10350
- message: `MCP SSE Transport Error: POSTing to endpoint (HTTP ${response.status}): ${text2}`
10351
- });
10352
- (_b = this.onerror) == null ? void 0 : _b.call(this, error);
10353
- return;
10354
- }
10355
- } catch (error) {
10356
- (_c = this.onerror) == null ? void 0 : _c.call(this, error);
10357
- return;
10358
- }
10359
- }
10360
- };
10361
-
10362
- // src/tool/mcp/mcp-transport.ts
10363
- function createMcpTransport(config) {
10364
- if (config.type !== "sse") {
10365
- throw new MCPClientError({
10366
- message: "Unsupported or invalid transport configuration. If you are using a custom transport, make sure it implements the MCPTransport interface."
10367
- });
10368
- }
10369
- return new SseMCPTransport(config);
10370
- }
10371
- function isCustomMcpTransport(transport) {
10372
- return "start" in transport && typeof transport.start === "function" && "send" in transport && typeof transport.send === "function" && "close" in transport && typeof transport.close === "function";
10373
- }
10374
-
10375
- // src/tool/mcp/mcp-client.ts
10376
- var CLIENT_VERSION = "1.0.0";
10377
- async function createMCPClient(config) {
10378
- const client = new DefaultMCPClient(config);
10379
- await client.init();
10380
- return client;
10381
- }
10382
- var DefaultMCPClient = class {
10383
- constructor({
10384
- transport: transportConfig,
10385
- name: name17 = "ai-sdk-mcp-client",
10386
- onUncaughtError
10387
- }) {
10388
- this.requestMessageId = 0;
10389
- this.responseHandlers = /* @__PURE__ */ new Map();
10390
- this.serverCapabilities = {};
10391
- this.isClosed = true;
10392
- this.onUncaughtError = onUncaughtError;
10393
- if (isCustomMcpTransport(transportConfig)) {
10394
- this.transport = transportConfig;
10395
- } else {
10396
- this.transport = createMcpTransport(transportConfig);
10397
- }
10398
- this.transport.onclose = () => this.onClose();
10399
- this.transport.onerror = (error) => this.onError(error);
10400
- this.transport.onmessage = (message) => {
10401
- if ("method" in message) {
10402
- this.onError(
10403
- new MCPClientError({
10404
- message: "Unsupported message type"
10405
- })
10406
- );
10407
- return;
10408
- }
10409
- this.onResponse(message);
10410
- };
10411
- this.clientInfo = {
10412
- name: name17,
10413
- version: CLIENT_VERSION
10414
- };
10415
- }
10416
- async init() {
10417
- try {
10418
- await this.transport.start();
10419
- this.isClosed = false;
10420
- const result = await this.request({
10421
- request: {
10422
- method: "initialize",
10423
- params: {
10424
- protocolVersion: LATEST_PROTOCOL_VERSION,
10425
- capabilities: {},
10426
- clientInfo: this.clientInfo
10427
- }
10428
- },
10429
- resultSchema: InitializeResultSchema
10430
- });
10431
- if (result === void 0) {
10432
- throw new MCPClientError({
10433
- message: "Server sent invalid initialize result"
10434
- });
10435
- }
10436
- if (!SUPPORTED_PROTOCOL_VERSIONS.includes(result.protocolVersion)) {
10437
- throw new MCPClientError({
10438
- message: `Server's protocol version is not supported: ${result.protocolVersion}`
10439
- });
10440
- }
10441
- this.serverCapabilities = result.capabilities;
10442
- await this.notification({
10443
- method: "notifications/initialized"
10444
- });
10445
- return this;
10446
- } catch (error) {
10447
- await this.close();
10448
- throw error;
10449
- }
10450
- }
10451
- async close() {
10452
- var _a17;
10453
- if (this.isClosed)
10454
- return;
10455
- await ((_a17 = this.transport) == null ? void 0 : _a17.close());
- this.onClose();
- }
- assertCapability(method) {
- switch (method) {
- case "initialize":
- break;
- case "tools/list":
- case "tools/call":
- if (!this.serverCapabilities.tools) {
- throw new MCPClientError({
- message: `Server does not support tools`
- });
- }
- break;
- default:
- throw new MCPClientError({
- message: `Unsupported method: ${method}`
- });
- }
- }
- async request({
- request,
- resultSchema,
- options
- }) {
- return new Promise((resolve3, reject) => {
- if (this.isClosed) {
- return reject(
- new MCPClientError({
- message: "Attempted to send a request from a closed client"
- })
- );
- }
- this.assertCapability(request.method);
- const signal = options == null ? void 0 : options.signal;
- signal == null ? void 0 : signal.throwIfAborted();
- const messageId = this.requestMessageId++;
- const jsonrpcRequest = {
- ...request,
- jsonrpc: "2.0",
- id: messageId
- };
- const cleanup = () => {
- this.responseHandlers.delete(messageId);
- };
- this.responseHandlers.set(messageId, (response) => {
- if (signal == null ? void 0 : signal.aborted) {
- return reject(
- new MCPClientError({
- message: "Request was aborted",
- cause: signal.reason
- })
- );
- }
- if (response instanceof Error) {
- return reject(response);
- }
- try {
- const result = resultSchema.parse(response.result);
- resolve3(result);
- } catch (error) {
- const parseError = new MCPClientError({
- message: "Failed to parse server response",
- cause: error
- });
- reject(parseError);
- }
- });
- this.transport.send(jsonrpcRequest).catch((error) => {
- cleanup();
- reject(error);
- });
- });
- }
- async listTools({
- params,
- options
- } = {}) {
- try {
- return this.request({
- request: { method: "tools/list", params },
- resultSchema: ListToolsResultSchema,
- options
- });
- } catch (error) {
- throw error;
- }
- }
- async callTool({
- name: name17,
- args,
- options
- }) {
- try {
- return this.request({
- request: { method: "tools/call", params: { name: name17, arguments: args } },
- resultSchema: CallToolResultSchema,
- options: {
- signal: options == null ? void 0 : options.abortSignal
- }
- });
- } catch (error) {
- throw error;
- }
- }
- async notification(notification) {
- const jsonrpcNotification = {
- ...notification,
- jsonrpc: "2.0"
- };
- await this.transport.send(jsonrpcNotification);
- }
- /**
- * Returns a set of AI SDK tools from the MCP server
- * @returns A record of tool names to their implementations
- */
- async tools({
- schemas = "automatic"
- } = {}) {
- var _a17;
- const tools = {};
- try {
- const listToolsResult = await this.listTools();
- for (const { name: name17, description, inputSchema } of listToolsResult.tools) {
- if (schemas !== "automatic" && !(name17 in schemas)) {
- continue;
- }
- const self = this;
- const execute = async (args, options) => {
- var _a18;
- (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
- return self.callTool({ name: name17, args, options });
- };
- const toolWithExecute = schemas === "automatic" ? (0, import_provider_utils32.dynamicTool)({
- description,
- inputSchema: (0, import_provider_utils32.jsonSchema)({
- ...inputSchema,
- properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
- additionalProperties: false
- }),
- execute
- }) : (0, import_provider_utils32.tool)({
- description,
- inputSchema: schemas[name17].inputSchema,
- execute
- });
- tools[name17] = toolWithExecute;
- }
- return tools;
- } catch (error) {
- throw error;
- }
- }
- onClose() {
- if (this.isClosed)
- return;
- this.isClosed = true;
- const error = new MCPClientError({
- message: "Connection closed"
- });
- for (const handler of this.responseHandlers.values()) {
- handler(error);
- }
- this.responseHandlers.clear();
- }
- onError(error) {
- if (this.onUncaughtError) {
- this.onUncaughtError(error);
- }
- }
- onResponse(response) {
- const messageId = Number(response.id);
- const handler = this.responseHandlers.get(messageId);
- if (handler === void 0) {
- throw new MCPClientError({
- message: `Protocol error: Received a response for an unknown message ID: ${JSON.stringify(
- response
- )}`
- });
- }
- this.responseHandlers.delete(messageId);
- handler(
- "result" in response ? response : new MCPClientError({
- message: response.error.message,
- code: response.error.code,
- data: response.error.data,
- cause: response.error
- })
- );
- }
- };
-
  // src/transcribe/transcribe.ts
- var import_provider_utils33 = require("@ai-sdk/provider-utils");
+ var import_provider_utils31 = require("@ai-sdk/provider-utils");

  // src/error/no-transcript-generated-error.ts
- var import_provider33 = require("@ai-sdk/provider");
- var NoTranscriptGeneratedError = class extends import_provider33.AISDKError {
+ var import_provider32 = require("@ai-sdk/provider");
+ var NoTranscriptGeneratedError = class extends import_provider32.AISDKError {
  constructor(options) {
  super({
  name: "AI_NoTranscriptGeneratedError",
@@ -10677,23 +10073,23 @@ async function transcribe({
  maxRetries: maxRetriesArg,
  abortSignal
  });
- const headersWithUserAgent = (0, import_provider_utils33.withUserAgentSuffix)(
+ const headersWithUserAgent = (0, import_provider_utils31.withUserAgentSuffix)(
  headers != null ? headers : {},
  `ai/${VERSION}`
  );
  const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
  const result = await retry(
  () => {
- var _a17;
+ var _a16;
  return resolvedModel.doGenerate({
  audio: audioData,
  abortSignal,
  headers: headersWithUserAgent,
  providerOptions,
- mediaType: (_a17 = detectMediaType({
+ mediaType: (_a16 = detectMediaType({
  data: audioData,
  signatures: audioMediaTypeSignatures
- })) != null ? _a17 : "audio/wav"
+ })) != null ? _a16 : "audio/wav"
  });
  }
  );
@@ -10713,19 +10109,19 @@ async function transcribe({
  }
  var DefaultTranscriptionResult = class {
  constructor(options) {
- var _a17;
+ var _a16;
  this.text = options.text;
  this.segments = options.segments;
  this.language = options.language;
  this.durationInSeconds = options.durationInSeconds;
  this.warnings = options.warnings;
  this.responses = options.responses;
- this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
+ this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
  }
  };

  // src/ui/call-completion-api.ts
- var import_provider_utils34 = require("@ai-sdk/provider-utils");
+ var import_provider_utils32 = require("@ai-sdk/provider-utils");

  // src/ui/process-text-stream.ts
  async function processTextStream({
@@ -10759,7 +10155,7 @@ async function callCompletionApi({
  onError,
  fetch: fetch2 = getOriginalFetch()
  }) {
- var _a17;
+ var _a16;
  try {
  setLoading(true);
  setError(void 0);
@@ -10773,13 +10169,13 @@ async function callCompletionApi({
  ...body
  }),
  credentials,
- headers: (0, import_provider_utils34.withUserAgentSuffix)(
+ headers: (0, import_provider_utils32.withUserAgentSuffix)(
  {
  "Content-Type": "application/json",
  ...headers
  },
  `ai-sdk/${VERSION}`,
- (0, import_provider_utils34.getRuntimeEnvironmentUserAgent)()
+ (0, import_provider_utils32.getRuntimeEnvironmentUserAgent)()
  ),
  signal: abortController.signal
  }).catch((err) => {
@@ -10787,7 +10183,7 @@ async function callCompletionApi({
  });
  if (!response.ok) {
  throw new Error(
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
+ (_a16 = await response.text()) != null ? _a16 : "Failed to fetch the chat response."
  );
  }
  if (!response.body) {
@@ -10807,7 +10203,7 @@ async function callCompletionApi({
  }
  case "data": {
  await consumeStream({
- stream: (0, import_provider_utils34.parseJsonEventStream)({
+ stream: (0, import_provider_utils32.parseJsonEventStream)({
  stream: response.body,
  schema: uiMessageChunkSchema
  }).pipeThrough(
@@ -10859,7 +10255,7 @@ async function callCompletionApi({
  }

  // src/ui/chat.ts
- var import_provider_utils37 = require("@ai-sdk/provider-utils");
+ var import_provider_utils35 = require("@ai-sdk/provider-utils");

  // src/ui/convert-file-list-to-file-ui-parts.ts
  async function convertFileListToFileUIParts(files) {
@@ -10871,12 +10267,12 @@ async function convertFileListToFileUIParts(files) {
  }
  return Promise.all(
  Array.from(files).map(async (file) => {
- const { name: name17, type } = file;
+ const { name: name16, type } = file;
  const dataUrl = await new Promise((resolve3, reject) => {
  const reader = new FileReader();
  reader.onload = (readerEvent) => {
- var _a17;
- resolve3((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
+ var _a16;
+ resolve3((_a16 = readerEvent.target) == null ? void 0 : _a16.result);
  };
  reader.onerror = (error) => reject(error);
  reader.readAsDataURL(file);
@@ -10884,7 +10280,7 @@ async function convertFileListToFileUIParts(files) {
  return {
  type: "file",
  mediaType: type,
- filename: name17,
+ filename: name16,
  url: dataUrl
  };
  })
@@ -10892,10 +10288,10 @@ async function convertFileListToFileUIParts(files) {
  }

  // src/ui/default-chat-transport.ts
- var import_provider_utils36 = require("@ai-sdk/provider-utils");
+ var import_provider_utils34 = require("@ai-sdk/provider-utils");

  // src/ui/http-chat-transport.ts
- var import_provider_utils35 = require("@ai-sdk/provider-utils");
+ var import_provider_utils33 = require("@ai-sdk/provider-utils");
  var HttpChatTransport = class {
  constructor({
  api = "/api/chat",
@@ -10918,11 +10314,11 @@ var HttpChatTransport = class {
  abortSignal,
  ...options
  }) {
- var _a17, _b, _c, _d, _e;
- const resolvedBody = await (0, import_provider_utils35.resolve)(this.body);
- const resolvedHeaders = await (0, import_provider_utils35.resolve)(this.headers);
- const resolvedCredentials = await (0, import_provider_utils35.resolve)(this.credentials);
- const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
+ var _a16, _b, _c, _d, _e;
+ const resolvedBody = await (0, import_provider_utils33.resolve)(this.body);
+ const resolvedHeaders = await (0, import_provider_utils33.resolve)(this.headers);
+ const resolvedCredentials = await (0, import_provider_utils33.resolve)(this.credentials);
+ const preparedRequest = await ((_a16 = this.prepareSendMessagesRequest) == null ? void 0 : _a16.call(this, {
  api: this.api,
  id: options.chatId,
  messages: options.messages,
@@ -10947,13 +10343,13 @@ var HttpChatTransport = class {
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
  const response = await fetch2(api, {
  method: "POST",
- headers: (0, import_provider_utils35.withUserAgentSuffix)(
+ headers: (0, import_provider_utils33.withUserAgentSuffix)(
  {
  "Content-Type": "application/json",
  ...headers
  },
  `ai-sdk/${VERSION}`,
- (0, import_provider_utils35.getRuntimeEnvironmentUserAgent)()
+ (0, import_provider_utils33.getRuntimeEnvironmentUserAgent)()
  ),
  body: JSON.stringify(body),
  credentials,
@@ -10970,11 +10366,11 @@ var HttpChatTransport = class {
  return this.processResponseStream(response.body);
  }
  async reconnectToStream(options) {
- var _a17, _b, _c, _d, _e;
- const resolvedBody = await (0, import_provider_utils35.resolve)(this.body);
- const resolvedHeaders = await (0, import_provider_utils35.resolve)(this.headers);
- const resolvedCredentials = await (0, import_provider_utils35.resolve)(this.credentials);
- const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
+ var _a16, _b, _c, _d, _e;
+ const resolvedBody = await (0, import_provider_utils33.resolve)(this.body);
+ const resolvedHeaders = await (0, import_provider_utils33.resolve)(this.headers);
+ const resolvedCredentials = await (0, import_provider_utils33.resolve)(this.credentials);
+ const preparedRequest = await ((_a16 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a16.call(this, {
  api: this.api,
  id: options.chatId,
  body: { ...resolvedBody, ...options.body },
@@ -10988,10 +10384,10 @@ var HttpChatTransport = class {
  const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
  const response = await fetch2(api, {
  method: "GET",
- headers: (0, import_provider_utils35.withUserAgentSuffix)(
+ headers: (0, import_provider_utils33.withUserAgentSuffix)(
  headers,
  `ai-sdk/${VERSION}`,
- (0, import_provider_utils35.getRuntimeEnvironmentUserAgent)()
+ (0, import_provider_utils33.getRuntimeEnvironmentUserAgent)()
  ),
  credentials
  });
@@ -11016,7 +10412,7 @@ var DefaultChatTransport = class extends HttpChatTransport {
  super(options);
  }
  processResponseStream(stream) {
- return (0, import_provider_utils36.parseJsonEventStream)({
+ return (0, import_provider_utils34.parseJsonEventStream)({
  stream,
  schema: uiMessageChunkSchema
  }).pipeThrough(
@@ -11035,7 +10431,7 @@ var DefaultChatTransport = class extends HttpChatTransport {
  // src/ui/chat.ts
  var AbstractChat = class {
  constructor({
- generateId: generateId2 = import_provider_utils37.generateId,
+ generateId: generateId2 = import_provider_utils35.generateId,
  id = generateId2(),
  transport = new DefaultChatTransport(),
  messageMetadataSchema,
@@ -11056,11 +10452,11 @@ var AbstractChat = class {
  * If a messageId is provided, the message will be replaced.
  */
  this.sendMessage = async (message, options) => {
- var _a17, _b, _c, _d;
+ var _a16, _b, _c, _d;
  if (message == null) {
  await this.makeRequest({
  trigger: "submit-message",
- messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
+ messageId: (_a16 = this.lastMessage) == null ? void 0 : _a16.id,
  ...options
  });
  return;
@@ -11153,7 +10549,7 @@ var AbstractChat = class {
  approved,
  reason
  }) => this.jobExecutor.run(async () => {
- var _a17, _b;
+ var _a16, _b;
  const messages = this.state.messages;
  const lastMessage = messages[messages.length - 1];
  const updatePart = (part) => isToolOrDynamicToolUIPart(part) && part.state === "approval-requested" && part.approval.id === id ? {
@@ -11168,7 +10564,7 @@ var AbstractChat = class {
  if (this.activeResponse) {
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
  }
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
  this.makeRequest({
  trigger: "submit-message",
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11177,12 +10573,12 @@ var AbstractChat = class {
  });
  this.addToolResult = async ({
  state = "output-available",
- tool: tool3,
+ tool: tool2,
  toolCallId,
  output,
  errorText
  }) => this.jobExecutor.run(async () => {
- var _a17, _b;
+ var _a16, _b;
  const messages = this.state.messages;
  const lastMessage = messages[messages.length - 1];
  const updatePart = (part) => isToolOrDynamicToolUIPart(part) && part.toolCallId === toolCallId ? { ...part, state, output, errorText } : part;
@@ -11193,7 +10589,7 @@ var AbstractChat = class {
  if (this.activeResponse) {
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
  }
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
  this.makeRequest({
  trigger: "submit-message",
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11204,10 +10600,10 @@ var AbstractChat = class {
  * Abort the current request immediately, keep the generated tokens if any.
  */
  this.stop = async () => {
- var _a17;
+ var _a16;
  if (this.status !== "streaming" && this.status !== "submitted")
  return;
- if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
+ if ((_a16 = this.activeResponse) == null ? void 0 : _a16.abortController) {
  this.activeResponse.abortController.abort();
  }
  };
@@ -11262,7 +10658,7 @@ var AbstractChat = class {
  body,
  messageId
  }) {
- var _a17, _b, _c;
+ var _a16, _b, _c;
  this.setStatus({ status: "submitted", error: void 0 });
  const lastMessage = this.lastMessage;
  let isAbort = false;
@@ -11311,9 +10707,9 @@ var AbstractChat = class {
  () => job({
  state: activeResponse.state,
  write: () => {
- var _a18;
+ var _a17;
  this.setStatus({ status: "streaming" });
- const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
+ const replaceLastMessage = activeResponse.state.message.id === ((_a17 = this.lastMessage) == null ? void 0 : _a17.id);
  if (replaceLastMessage) {
  this.state.replaceMessage(
  this.state.messages.length - 1,
@@ -11359,7 +10755,7 @@ var AbstractChat = class {
  this.setStatus({ status: "error", error: err });
  } finally {
  try {
- (_a17 = this.onFinish) == null ? void 0 : _a17.call(this, {
+ (_a16 = this.onFinish) == null ? void 0 : _a16.call(this, {
  message: this.activeResponse.state.message,
  messages: this.state.messages,
  isAbort,
@@ -11482,7 +10878,6 @@ var TextStreamChatTransport = class extends HttpChatTransport {
  JsonToSseTransformStream,
  LoadAPIKeyError,
  LoadSettingError,
- MCPClientError,
  MessageConversionError,
  NoContentGeneratedError,
  NoImageGeneratedError,
@@ -11530,7 +10925,6 @@ var TextStreamChatTransport = class extends HttpChatTransport {
  dynamicTool,
  embed,
  embedMany,
- experimental_createMCPClient,
  experimental_createProviderRegistry,
  experimental_customProvider,
  experimental_generateImage,