ai 5.0.0-canary.3 → 5.0.0-canary.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,7 +1,9 @@
1
1
  "use strict";
2
+ var __create = Object.create;
2
3
  var __defProp = Object.defineProperty;
3
4
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
5
  var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
5
7
  var __hasOwnProp = Object.prototype.hasOwnProperty;
6
8
  var __export = (target, all) => {
7
9
  for (var name17 in all)
@@ -15,6 +17,14 @@ var __copyProps = (to, from, except, desc) => {
15
17
  }
16
18
  return to;
17
19
  };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
18
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
29
 
20
30
  // streams/index.ts
@@ -53,6 +63,9 @@ __export(streams_exports, {
53
63
  UnsupportedFunctionalityError: () => import_provider20.UnsupportedFunctionalityError,
54
64
  appendClientMessage: () => appendClientMessage,
55
65
  appendResponseMessages: () => appendResponseMessages,
66
+ asSchema: () => asSchema,
67
+ callChatApi: () => callChatApi,
68
+ callCompletionApi: () => callCompletionApi,
56
69
  convertToCoreMessages: () => convertToCoreMessages,
57
70
  coreAssistantMessageSchema: () => coreAssistantMessageSchema,
58
71
  coreMessageSchema: () => coreMessageSchema,
@@ -62,7 +75,7 @@ __export(streams_exports, {
62
75
  cosineSimilarity: () => cosineSimilarity,
63
76
  createDataStream: () => createDataStream,
64
77
  createDataStreamResponse: () => createDataStreamResponse,
65
- createIdGenerator: () => import_provider_utils15.createIdGenerator,
78
+ createIdGenerator: () => import_provider_utils21.createIdGenerator,
66
79
  createProviderRegistry: () => createProviderRegistry,
67
80
  customProvider: () => customProvider,
68
81
  defaultSettingsMiddleware: () => defaultSettingsMiddleware,
@@ -72,34 +85,1515 @@ __export(streams_exports, {
72
85
  experimental_createProviderRegistry: () => experimental_createProviderRegistry,
73
86
  experimental_customProvider: () => experimental_customProvider,
74
87
  experimental_generateImage: () => generateImage,
88
+ experimental_transcribe: () => transcribe,
75
89
  experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
90
+ extractMaxToolInvocationStep: () => extractMaxToolInvocationStep,
76
91
  extractReasoningMiddleware: () => extractReasoningMiddleware,
77
- formatDataStreamPart: () => import_ui_utils11.formatDataStreamPart,
78
- generateId: () => import_provider_utils15.generateId,
92
+ fillMessageParts: () => fillMessageParts,
93
+ formatDataStreamPart: () => formatDataStreamPart,
94
+ generateId: () => import_provider_utils21.generateId,
79
95
  generateObject: () => generateObject,
80
96
  generateText: () => generateText,
81
- jsonSchema: () => import_ui_utils11.jsonSchema,
82
- parseDataStreamPart: () => import_ui_utils11.parseDataStreamPart,
97
+ getMessageParts: () => getMessageParts,
98
+ getTextFromDataUrl: () => getTextFromDataUrl,
99
+ isAssistantMessageWithCompletedToolCalls: () => isAssistantMessageWithCompletedToolCalls,
100
+ isDeepEqualData: () => isDeepEqualData,
101
+ jsonSchema: () => jsonSchema,
102
+ parseDataStreamPart: () => parseDataStreamPart,
103
+ parsePartialJson: () => parsePartialJson,
83
104
  pipeDataStreamToResponse: () => pipeDataStreamToResponse,
84
- processDataStream: () => import_ui_utils11.processDataStream,
85
- processTextStream: () => import_ui_utils11.processTextStream,
105
+ prepareAttachmentsForRequest: () => prepareAttachmentsForRequest,
106
+ processDataStream: () => processDataStream,
107
+ processTextStream: () => processTextStream,
108
+ shouldResubmitMessages: () => shouldResubmitMessages,
86
109
  simulateReadableStream: () => simulateReadableStream,
87
110
  simulateStreamingMiddleware: () => simulateStreamingMiddleware,
88
111
  smoothStream: () => smoothStream,
89
112
  streamObject: () => streamObject,
90
113
  streamText: () => streamText,
91
114
  tool: () => tool,
115
+ updateToolCallResult: () => updateToolCallResult,
92
116
  wrapLanguageModel: () => wrapLanguageModel,
93
- zodSchema: () => import_ui_utils11.zodSchema
117
+ zodSchema: () => zodSchema
94
118
  });
95
119
  module.exports = __toCommonJS(streams_exports);
96
120
 
97
121
  // core/index.ts
98
- var import_provider_utils15 = require("@ai-sdk/provider-utils");
99
- var import_ui_utils11 = require("@ai-sdk/ui-utils");
122
+ var import_provider_utils21 = require("@ai-sdk/provider-utils");
123
+
124
+ // core/util/index.ts
125
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
126
+
127
+ // core/util/process-chat-response.ts
128
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
129
+
130
+ // core/types/duplicated/usage.ts
131
+ function calculateLanguageModelUsage({
132
+ promptTokens,
133
+ completionTokens
134
+ }) {
135
+ return {
136
+ promptTokens,
137
+ completionTokens,
138
+ totalTokens: promptTokens + completionTokens
139
+ };
140
+ }
141
+
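Illustrative sketch only (not part of the published bundle): the inlined usage helper simply totals the two token counts.

// hypothetical values, for illustration
calculateLanguageModelUsage({ promptTokens: 12, completionTokens: 30 });
// => { promptTokens: 12, completionTokens: 30, totalTokens: 42 }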
142
+ // core/util/parse-partial-json.ts
143
+ var import_provider_utils = require("@ai-sdk/provider-utils");
144
+
145
+ // core/util/fix-json.ts
146
+ function fixJson(input) {
147
+ const stack = ["ROOT"];
148
+ let lastValidIndex = -1;
149
+ let literalStart = null;
150
+ function processValueStart(char, i, swapState) {
151
+ {
152
+ switch (char) {
153
+ case '"': {
154
+ lastValidIndex = i;
155
+ stack.pop();
156
+ stack.push(swapState);
157
+ stack.push("INSIDE_STRING");
158
+ break;
159
+ }
160
+ case "f":
161
+ case "t":
162
+ case "n": {
163
+ lastValidIndex = i;
164
+ literalStart = i;
165
+ stack.pop();
166
+ stack.push(swapState);
167
+ stack.push("INSIDE_LITERAL");
168
+ break;
169
+ }
170
+ case "-": {
171
+ stack.pop();
172
+ stack.push(swapState);
173
+ stack.push("INSIDE_NUMBER");
174
+ break;
175
+ }
176
+ case "0":
177
+ case "1":
178
+ case "2":
179
+ case "3":
180
+ case "4":
181
+ case "5":
182
+ case "6":
183
+ case "7":
184
+ case "8":
185
+ case "9": {
186
+ lastValidIndex = i;
187
+ stack.pop();
188
+ stack.push(swapState);
189
+ stack.push("INSIDE_NUMBER");
190
+ break;
191
+ }
192
+ case "{": {
193
+ lastValidIndex = i;
194
+ stack.pop();
195
+ stack.push(swapState);
196
+ stack.push("INSIDE_OBJECT_START");
197
+ break;
198
+ }
199
+ case "[": {
200
+ lastValidIndex = i;
201
+ stack.pop();
202
+ stack.push(swapState);
203
+ stack.push("INSIDE_ARRAY_START");
204
+ break;
205
+ }
206
+ }
207
+ }
208
+ }
209
+ function processAfterObjectValue(char, i) {
210
+ switch (char) {
211
+ case ",": {
212
+ stack.pop();
213
+ stack.push("INSIDE_OBJECT_AFTER_COMMA");
214
+ break;
215
+ }
216
+ case "}": {
217
+ lastValidIndex = i;
218
+ stack.pop();
219
+ break;
220
+ }
221
+ }
222
+ }
223
+ function processAfterArrayValue(char, i) {
224
+ switch (char) {
225
+ case ",": {
226
+ stack.pop();
227
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
228
+ break;
229
+ }
230
+ case "]": {
231
+ lastValidIndex = i;
232
+ stack.pop();
233
+ break;
234
+ }
235
+ }
236
+ }
237
+ for (let i = 0; i < input.length; i++) {
238
+ const char = input[i];
239
+ const currentState = stack[stack.length - 1];
240
+ switch (currentState) {
241
+ case "ROOT":
242
+ processValueStart(char, i, "FINISH");
243
+ break;
244
+ case "INSIDE_OBJECT_START": {
245
+ switch (char) {
246
+ case '"': {
247
+ stack.pop();
248
+ stack.push("INSIDE_OBJECT_KEY");
249
+ break;
250
+ }
251
+ case "}": {
252
+ lastValidIndex = i;
253
+ stack.pop();
254
+ break;
255
+ }
256
+ }
257
+ break;
258
+ }
259
+ case "INSIDE_OBJECT_AFTER_COMMA": {
260
+ switch (char) {
261
+ case '"': {
262
+ stack.pop();
263
+ stack.push("INSIDE_OBJECT_KEY");
264
+ break;
265
+ }
266
+ }
267
+ break;
268
+ }
269
+ case "INSIDE_OBJECT_KEY": {
270
+ switch (char) {
271
+ case '"': {
272
+ stack.pop();
273
+ stack.push("INSIDE_OBJECT_AFTER_KEY");
274
+ break;
275
+ }
276
+ }
277
+ break;
278
+ }
279
+ case "INSIDE_OBJECT_AFTER_KEY": {
280
+ switch (char) {
281
+ case ":": {
282
+ stack.pop();
283
+ stack.push("INSIDE_OBJECT_BEFORE_VALUE");
284
+ break;
285
+ }
286
+ }
287
+ break;
288
+ }
289
+ case "INSIDE_OBJECT_BEFORE_VALUE": {
290
+ processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
291
+ break;
292
+ }
293
+ case "INSIDE_OBJECT_AFTER_VALUE": {
294
+ processAfterObjectValue(char, i);
295
+ break;
296
+ }
297
+ case "INSIDE_STRING": {
298
+ switch (char) {
299
+ case '"': {
300
+ stack.pop();
301
+ lastValidIndex = i;
302
+ break;
303
+ }
304
+ case "\\": {
305
+ stack.push("INSIDE_STRING_ESCAPE");
306
+ break;
307
+ }
308
+ default: {
309
+ lastValidIndex = i;
310
+ }
311
+ }
312
+ break;
313
+ }
314
+ case "INSIDE_ARRAY_START": {
315
+ switch (char) {
316
+ case "]": {
317
+ lastValidIndex = i;
318
+ stack.pop();
319
+ break;
320
+ }
321
+ default: {
322
+ lastValidIndex = i;
323
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
324
+ break;
325
+ }
326
+ }
327
+ break;
328
+ }
329
+ case "INSIDE_ARRAY_AFTER_VALUE": {
330
+ switch (char) {
331
+ case ",": {
332
+ stack.pop();
333
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
334
+ break;
335
+ }
336
+ case "]": {
337
+ lastValidIndex = i;
338
+ stack.pop();
339
+ break;
340
+ }
341
+ default: {
342
+ lastValidIndex = i;
343
+ break;
344
+ }
345
+ }
346
+ break;
347
+ }
348
+ case "INSIDE_ARRAY_AFTER_COMMA": {
349
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
350
+ break;
351
+ }
352
+ case "INSIDE_STRING_ESCAPE": {
353
+ stack.pop();
354
+ lastValidIndex = i;
355
+ break;
356
+ }
357
+ case "INSIDE_NUMBER": {
358
+ switch (char) {
359
+ case "0":
360
+ case "1":
361
+ case "2":
362
+ case "3":
363
+ case "4":
364
+ case "5":
365
+ case "6":
366
+ case "7":
367
+ case "8":
368
+ case "9": {
369
+ lastValidIndex = i;
370
+ break;
371
+ }
372
+ case "e":
373
+ case "E":
374
+ case "-":
375
+ case ".": {
376
+ break;
377
+ }
378
+ case ",": {
379
+ stack.pop();
380
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
381
+ processAfterArrayValue(char, i);
382
+ }
383
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
384
+ processAfterObjectValue(char, i);
385
+ }
386
+ break;
387
+ }
388
+ case "}": {
389
+ stack.pop();
390
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
391
+ processAfterObjectValue(char, i);
392
+ }
393
+ break;
394
+ }
395
+ case "]": {
396
+ stack.pop();
397
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
398
+ processAfterArrayValue(char, i);
399
+ }
400
+ break;
401
+ }
402
+ default: {
403
+ stack.pop();
404
+ break;
405
+ }
406
+ }
407
+ break;
408
+ }
409
+ case "INSIDE_LITERAL": {
410
+ const partialLiteral = input.substring(literalStart, i + 1);
411
+ if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
412
+ stack.pop();
413
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
414
+ processAfterObjectValue(char, i);
415
+ } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
416
+ processAfterArrayValue(char, i);
417
+ }
418
+ } else {
419
+ lastValidIndex = i;
420
+ }
421
+ break;
422
+ }
423
+ }
424
+ }
425
+ let result = input.slice(0, lastValidIndex + 1);
426
+ for (let i = stack.length - 1; i >= 0; i--) {
427
+ const state = stack[i];
428
+ switch (state) {
429
+ case "INSIDE_STRING": {
430
+ result += '"';
431
+ break;
432
+ }
433
+ case "INSIDE_OBJECT_KEY":
434
+ case "INSIDE_OBJECT_AFTER_KEY":
435
+ case "INSIDE_OBJECT_AFTER_COMMA":
436
+ case "INSIDE_OBJECT_START":
437
+ case "INSIDE_OBJECT_BEFORE_VALUE":
438
+ case "INSIDE_OBJECT_AFTER_VALUE": {
439
+ result += "}";
440
+ break;
441
+ }
442
+ case "INSIDE_ARRAY_START":
443
+ case "INSIDE_ARRAY_AFTER_COMMA":
444
+ case "INSIDE_ARRAY_AFTER_VALUE": {
445
+ result += "]";
446
+ break;
447
+ }
448
+ case "INSIDE_LITERAL": {
449
+ const partialLiteral = input.substring(literalStart, input.length);
450
+ if ("true".startsWith(partialLiteral)) {
451
+ result += "true".slice(partialLiteral.length);
452
+ } else if ("false".startsWith(partialLiteral)) {
453
+ result += "false".slice(partialLiteral.length);
454
+ } else if ("null".startsWith(partialLiteral)) {
455
+ result += "null".slice(partialLiteral.length);
456
+ }
457
+ }
458
+ }
459
+ }
460
+ return result;
461
+ }
462
+
463
+ // core/util/parse-partial-json.ts
464
+ function parsePartialJson(jsonText) {
465
+ if (jsonText === void 0) {
466
+ return { value: void 0, state: "undefined-input" };
467
+ }
468
+ let result = (0, import_provider_utils.safeParseJSON)({ text: jsonText });
469
+ if (result.success) {
470
+ return { value: result.value, state: "successful-parse" };
471
+ }
472
+ result = (0, import_provider_utils.safeParseJSON)({ text: fixJson(jsonText) });
473
+ if (result.success) {
474
+ return { value: result.value, state: "repaired-parse" };
475
+ }
476
+ return { value: void 0, state: "failed-parse" };
477
+ }
478
+
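A minimal usage sketch of the inlined partial-JSON repair (illustrative only; the repaired value follows from fixJson closing the open string and object):

parsePartialJson('{"city": "San Fran');
// => { value: { city: "San Fran" }, state: "repaired-parse" }
parsePartialJson('{"city": "San Francisco"}');
// => { value: { city: "San Francisco" }, state: "successful-parse" }
parsePartialJson(undefined);
// => { value: undefined, state: "undefined-input" }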
479
+ // core/util/data-stream-parts.ts
480
+ var textStreamPart = {
481
+ code: "0",
482
+ name: "text",
483
+ parse: (value) => {
484
+ if (typeof value !== "string") {
485
+ throw new Error('"text" parts expect a string value.');
486
+ }
487
+ return { type: "text", value };
488
+ }
489
+ };
490
+ var dataStreamPart = {
491
+ code: "2",
492
+ name: "data",
493
+ parse: (value) => {
494
+ if (!Array.isArray(value)) {
495
+ throw new Error('"data" parts expect an array value.');
496
+ }
497
+ return { type: "data", value };
498
+ }
499
+ };
500
+ var errorStreamPart = {
501
+ code: "3",
502
+ name: "error",
503
+ parse: (value) => {
504
+ if (typeof value !== "string") {
505
+ throw new Error('"error" parts expect a string value.');
506
+ }
507
+ return { type: "error", value };
508
+ }
509
+ };
510
+ var messageAnnotationsStreamPart = {
511
+ code: "8",
512
+ name: "message_annotations",
513
+ parse: (value) => {
514
+ if (!Array.isArray(value)) {
515
+ throw new Error('"message_annotations" parts expect an array value.');
516
+ }
517
+ return { type: "message_annotations", value };
518
+ }
519
+ };
520
+ var toolCallStreamPart = {
521
+ code: "9",
522
+ name: "tool_call",
523
+ parse: (value) => {
524
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object") {
525
+ throw new Error(
526
+ '"tool_call" parts expect an object with a "toolCallId", "toolName", and "args" property.'
527
+ );
528
+ }
529
+ return {
530
+ type: "tool_call",
531
+ value
532
+ };
533
+ }
534
+ };
535
+ var toolResultStreamPart = {
536
+ code: "a",
537
+ name: "tool_result",
538
+ parse: (value) => {
539
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("result" in value)) {
540
+ throw new Error(
541
+ '"tool_result" parts expect an object with a "toolCallId" and a "result" property.'
542
+ );
543
+ }
544
+ return {
545
+ type: "tool_result",
546
+ value
547
+ };
548
+ }
549
+ };
550
+ var toolCallStreamingStartStreamPart = {
551
+ code: "b",
552
+ name: "tool_call_streaming_start",
553
+ parse: (value) => {
554
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string") {
555
+ throw new Error(
556
+ '"tool_call_streaming_start" parts expect an object with a "toolCallId" and "toolName" property.'
557
+ );
558
+ }
559
+ return {
560
+ type: "tool_call_streaming_start",
561
+ value
562
+ };
563
+ }
564
+ };
565
+ var toolCallDeltaStreamPart = {
566
+ code: "c",
567
+ name: "tool_call_delta",
568
+ parse: (value) => {
569
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("argsTextDelta" in value) || typeof value.argsTextDelta !== "string") {
570
+ throw new Error(
571
+ '"tool_call_delta" parts expect an object with a "toolCallId" and "argsTextDelta" property.'
572
+ );
573
+ }
574
+ return {
575
+ type: "tool_call_delta",
576
+ value
577
+ };
578
+ }
579
+ };
580
+ var finishMessageStreamPart = {
581
+ code: "d",
582
+ name: "finish_message",
583
+ parse: (value) => {
584
+ if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
585
+ throw new Error(
586
+ '"finish_message" parts expect an object with a "finishReason" property.'
587
+ );
588
+ }
589
+ const result = {
590
+ finishReason: value.finishReason
591
+ };
592
+ if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
593
+ result.usage = {
594
+ promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
595
+ completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
596
+ };
597
+ }
598
+ return {
599
+ type: "finish_message",
600
+ value: result
601
+ };
602
+ }
603
+ };
604
+ var finishStepStreamPart = {
605
+ code: "e",
606
+ name: "finish_step",
607
+ parse: (value) => {
608
+ if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
609
+ throw new Error(
610
+ '"finish_step" parts expect an object with a "finishReason" property.'
611
+ );
612
+ }
613
+ const result = {
614
+ finishReason: value.finishReason,
615
+ isContinued: false
616
+ };
617
+ if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
618
+ result.usage = {
619
+ promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
620
+ completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
621
+ };
622
+ }
623
+ if ("isContinued" in value && typeof value.isContinued === "boolean") {
624
+ result.isContinued = value.isContinued;
625
+ }
626
+ return {
627
+ type: "finish_step",
628
+ value: result
629
+ };
630
+ }
631
+ };
632
+ var startStepStreamPart = {
633
+ code: "f",
634
+ name: "start_step",
635
+ parse: (value) => {
636
+ if (value == null || typeof value !== "object" || !("messageId" in value) || typeof value.messageId !== "string") {
637
+ throw new Error(
638
+ '"start_step" parts expect an object with an "id" property.'
639
+ );
640
+ }
641
+ return {
642
+ type: "start_step",
643
+ value: {
644
+ messageId: value.messageId
645
+ }
646
+ };
647
+ }
648
+ };
649
+ var reasoningStreamPart = {
650
+ code: "g",
651
+ name: "reasoning",
652
+ parse: (value) => {
653
+ if (typeof value !== "string") {
654
+ throw new Error('"reasoning" parts expect a string value.');
655
+ }
656
+ return { type: "reasoning", value };
657
+ }
658
+ };
659
+ var sourcePart = {
660
+ code: "h",
661
+ name: "source",
662
+ parse: (value) => {
663
+ if (value == null || typeof value !== "object") {
664
+ throw new Error('"source" parts expect a Source object.');
665
+ }
666
+ return {
667
+ type: "source",
668
+ value
669
+ };
670
+ }
671
+ };
672
+ var redactedReasoningStreamPart = {
673
+ code: "i",
674
+ name: "redacted_reasoning",
675
+ parse: (value) => {
676
+ if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string") {
677
+ throw new Error(
678
+ '"redacted_reasoning" parts expect an object with a "data" property.'
679
+ );
680
+ }
681
+ return { type: "redacted_reasoning", value: { data: value.data } };
682
+ }
683
+ };
684
+ var reasoningSignatureStreamPart = {
685
+ code: "j",
686
+ name: "reasoning_signature",
687
+ parse: (value) => {
688
+ if (value == null || typeof value !== "object" || !("signature" in value) || typeof value.signature !== "string") {
689
+ throw new Error(
690
+ '"reasoning_signature" parts expect an object with a "signature" property.'
691
+ );
692
+ }
693
+ return {
694
+ type: "reasoning_signature",
695
+ value: { signature: value.signature }
696
+ };
697
+ }
698
+ };
699
+ var fileStreamPart = {
700
+ code: "k",
701
+ name: "file",
702
+ parse: (value) => {
703
+ if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string" || !("mimeType" in value) || typeof value.mimeType !== "string") {
704
+ throw new Error(
705
+ '"file" parts expect an object with a "data" and "mimeType" property.'
706
+ );
707
+ }
708
+ return { type: "file", value };
709
+ }
710
+ };
711
+ var dataStreamParts = [
712
+ textStreamPart,
713
+ dataStreamPart,
714
+ errorStreamPart,
715
+ messageAnnotationsStreamPart,
716
+ toolCallStreamPart,
717
+ toolResultStreamPart,
718
+ toolCallStreamingStartStreamPart,
719
+ toolCallDeltaStreamPart,
720
+ finishMessageStreamPart,
721
+ finishStepStreamPart,
722
+ startStepStreamPart,
723
+ reasoningStreamPart,
724
+ sourcePart,
725
+ redactedReasoningStreamPart,
726
+ reasoningSignatureStreamPart,
727
+ fileStreamPart
728
+ ];
729
+ var dataStreamPartsByCode = Object.fromEntries(
730
+ dataStreamParts.map((part) => [part.code, part])
731
+ );
732
+ var DataStreamStringPrefixes = Object.fromEntries(
733
+ dataStreamParts.map((part) => [part.name, part.code])
734
+ );
735
+ var validCodes = dataStreamParts.map((part) => part.code);
736
+ var parseDataStreamPart = (line) => {
737
+ const firstSeparatorIndex = line.indexOf(":");
738
+ if (firstSeparatorIndex === -1) {
739
+ throw new Error("Failed to parse stream string. No separator found.");
740
+ }
741
+ const prefix = line.slice(0, firstSeparatorIndex);
742
+ if (!validCodes.includes(prefix)) {
743
+ throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
744
+ }
745
+ const code = prefix;
746
+ const textValue = line.slice(firstSeparatorIndex + 1);
747
+ const jsonValue = JSON.parse(textValue);
748
+ return dataStreamPartsByCode[code].parse(jsonValue);
749
+ };
750
+ function formatDataStreamPart(type, value) {
751
+ const streamPart = dataStreamParts.find((part) => part.name === type);
752
+ if (!streamPart) {
753
+ throw new Error(`Invalid stream part type: ${type}`);
754
+ }
755
+ return `${streamPart.code}:${JSON.stringify(value)}
756
+ `;
757
+ }
758
+
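Illustrative round trip through the inlined data-stream wire format, where each part is encoded as `<code>:<json>` plus a trailing newline:

formatDataStreamPart("text", "Hello");
// => '0:"Hello"\n'
parseDataStreamPart('0:"Hello"');
// => { type: "text", value: "Hello" }
parseDataStreamPart('d:{"finishReason":"stop","usage":{"promptTokens":5,"completionTokens":7}}');
// => { type: "finish_message",
//      value: { finishReason: "stop", usage: { promptTokens: 5, completionTokens: 7 } } }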
759
+ // core/util/process-data-stream.ts
760
+ var NEWLINE = "\n".charCodeAt(0);
761
+ function concatChunks(chunks, totalLength) {
762
+ const concatenatedChunks = new Uint8Array(totalLength);
763
+ let offset = 0;
764
+ for (const chunk of chunks) {
765
+ concatenatedChunks.set(chunk, offset);
766
+ offset += chunk.length;
767
+ }
768
+ chunks.length = 0;
769
+ return concatenatedChunks;
770
+ }
771
+ async function processDataStream({
772
+ stream,
773
+ onTextPart,
774
+ onReasoningPart,
775
+ onReasoningSignaturePart,
776
+ onRedactedReasoningPart,
777
+ onSourcePart,
778
+ onFilePart,
779
+ onDataPart,
780
+ onErrorPart,
781
+ onToolCallStreamingStartPart,
782
+ onToolCallDeltaPart,
783
+ onToolCallPart,
784
+ onToolResultPart,
785
+ onMessageAnnotationsPart,
786
+ onFinishMessagePart,
787
+ onFinishStepPart,
788
+ onStartStepPart
789
+ }) {
790
+ const reader = stream.getReader();
791
+ const decoder = new TextDecoder();
792
+ const chunks = [];
793
+ let totalLength = 0;
794
+ while (true) {
795
+ const { value } = await reader.read();
796
+ if (value) {
797
+ chunks.push(value);
798
+ totalLength += value.length;
799
+ if (value[value.length - 1] !== NEWLINE) {
800
+ continue;
801
+ }
802
+ }
803
+ if (chunks.length === 0) {
804
+ break;
805
+ }
806
+ const concatenatedChunks = concatChunks(chunks, totalLength);
807
+ totalLength = 0;
808
+ const streamParts = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseDataStreamPart);
809
+ for (const { type, value: value2 } of streamParts) {
810
+ switch (type) {
811
+ case "text":
812
+ await (onTextPart == null ? void 0 : onTextPart(value2));
813
+ break;
814
+ case "reasoning":
815
+ await (onReasoningPart == null ? void 0 : onReasoningPart(value2));
816
+ break;
817
+ case "reasoning_signature":
818
+ await (onReasoningSignaturePart == null ? void 0 : onReasoningSignaturePart(value2));
819
+ break;
820
+ case "redacted_reasoning":
821
+ await (onRedactedReasoningPart == null ? void 0 : onRedactedReasoningPart(value2));
822
+ break;
823
+ case "file":
824
+ await (onFilePart == null ? void 0 : onFilePart(value2));
825
+ break;
826
+ case "source":
827
+ await (onSourcePart == null ? void 0 : onSourcePart(value2));
828
+ break;
829
+ case "data":
830
+ await (onDataPart == null ? void 0 : onDataPart(value2));
831
+ break;
832
+ case "error":
833
+ await (onErrorPart == null ? void 0 : onErrorPart(value2));
834
+ break;
835
+ case "message_annotations":
836
+ await (onMessageAnnotationsPart == null ? void 0 : onMessageAnnotationsPart(value2));
837
+ break;
838
+ case "tool_call_streaming_start":
839
+ await (onToolCallStreamingStartPart == null ? void 0 : onToolCallStreamingStartPart(value2));
840
+ break;
841
+ case "tool_call_delta":
842
+ await (onToolCallDeltaPart == null ? void 0 : onToolCallDeltaPart(value2));
843
+ break;
844
+ case "tool_call":
845
+ await (onToolCallPart == null ? void 0 : onToolCallPart(value2));
846
+ break;
847
+ case "tool_result":
848
+ await (onToolResultPart == null ? void 0 : onToolResultPart(value2));
849
+ break;
850
+ case "finish_message":
851
+ await (onFinishMessagePart == null ? void 0 : onFinishMessagePart(value2));
852
+ break;
853
+ case "finish_step":
854
+ await (onFinishStepPart == null ? void 0 : onFinishStepPart(value2));
855
+ break;
856
+ case "start_step":
857
+ await (onStartStepPart == null ? void 0 : onStartStepPart(value2));
858
+ break;
859
+ default: {
860
+ const exhaustiveCheck = type;
861
+ throw new Error(`Unknown stream part type: ${exhaustiveCheck}`);
862
+ }
863
+ }
864
+ }
865
+ }
866
+ }
867
+
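A small runnable sketch (illustrative only, run inside an async context) that feeds two encoded text parts through processDataStream:

const stream = new ReadableStream({
  start(controller) {
    controller.enqueue(new TextEncoder().encode('0:"Hello, "\n0:"world!"\n'));
    controller.close();
  }
});
await processDataStream({
  stream,
  onTextPart(text) {
    console.log(text); // "Hello, " then "world!"
  }
});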
868
+ // core/util/process-chat-response.ts
869
+ async function processChatResponse({
870
+ stream,
871
+ update,
872
+ onToolCall,
873
+ onFinish,
874
+ generateId: generateId3 = import_provider_utils2.generateId,
875
+ getCurrentDate = () => /* @__PURE__ */ new Date(),
876
+ lastMessage
877
+ }) {
878
+ var _a17, _b;
879
+ const replaceLastMessage = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
880
+ let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
881
+ ((_b = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.reduce((max, toolInvocation) => {
882
+ var _a18;
883
+ return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);
884
+ }, 0)) != null ? _b : 0) : 0;
885
+ const message = replaceLastMessage ? structuredClone(lastMessage) : {
886
+ id: generateId3(),
887
+ createdAt: getCurrentDate(),
888
+ role: "assistant",
889
+ content: "",
890
+ parts: []
891
+ };
892
+ let currentTextPart = void 0;
893
+ let currentReasoningPart = void 0;
894
+ let currentReasoningTextDetail = void 0;
895
+ function updateToolInvocationPart(toolCallId, invocation) {
896
+ const part = message.parts.find(
897
+ (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
898
+ );
899
+ if (part != null) {
900
+ part.toolInvocation = invocation;
901
+ } else {
902
+ message.parts.push({
903
+ type: "tool-invocation",
904
+ toolInvocation: invocation
905
+ });
906
+ }
907
+ }
908
+ const data = [];
909
+ let messageAnnotations = replaceLastMessage ? lastMessage == null ? void 0 : lastMessage.annotations : void 0;
910
+ const partialToolCalls = {};
911
+ let usage = {
912
+ completionTokens: NaN,
913
+ promptTokens: NaN,
914
+ totalTokens: NaN
915
+ };
916
+ let finishReason = "unknown";
917
+ function execUpdate() {
918
+ const copiedData = [...data];
919
+ if (messageAnnotations == null ? void 0 : messageAnnotations.length) {
920
+ message.annotations = messageAnnotations;
921
+ }
922
+ const copiedMessage = {
923
+ // deep copy the message to ensure that deep changes (msg attachments) are updated
924
+ // with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
925
+ ...structuredClone(message),
926
+ // add a revision id to ensure that the message is updated with SWR. SWR uses a
927
+ // hashing approach by default to detect changes, but it only works for shallow
928
+ // changes. This is why we need to add a revision id to ensure that the message
929
+ // is updated with SWR (without it, the changes get stuck in SWR and are not
930
+ // forwarded to rendering):
931
+ revisionId: generateId3()
932
+ };
933
+ update({
934
+ message: copiedMessage,
935
+ data: copiedData,
936
+ replaceLastMessage
937
+ });
938
+ }
939
+ await processDataStream({
940
+ stream,
941
+ onTextPart(value) {
942
+ if (currentTextPart == null) {
943
+ currentTextPart = {
944
+ type: "text",
945
+ text: value
946
+ };
947
+ message.parts.push(currentTextPart);
948
+ } else {
949
+ currentTextPart.text += value;
950
+ }
951
+ message.content += value;
952
+ execUpdate();
953
+ },
954
+ onReasoningPart(value) {
955
+ var _a18;
956
+ if (currentReasoningTextDetail == null) {
957
+ currentReasoningTextDetail = { type: "text", text: value };
958
+ if (currentReasoningPart != null) {
959
+ currentReasoningPart.details.push(currentReasoningTextDetail);
960
+ }
961
+ } else {
962
+ currentReasoningTextDetail.text += value;
963
+ }
964
+ if (currentReasoningPart == null) {
965
+ currentReasoningPart = {
966
+ type: "reasoning",
967
+ reasoning: value,
968
+ details: [currentReasoningTextDetail]
969
+ };
970
+ message.parts.push(currentReasoningPart);
971
+ } else {
972
+ currentReasoningPart.reasoning += value;
973
+ }
974
+ message.reasoning = ((_a18 = message.reasoning) != null ? _a18 : "") + value;
975
+ execUpdate();
976
+ },
977
+ onReasoningSignaturePart(value) {
978
+ if (currentReasoningTextDetail != null) {
979
+ currentReasoningTextDetail.signature = value.signature;
980
+ }
981
+ },
982
+ onRedactedReasoningPart(value) {
983
+ if (currentReasoningPart == null) {
984
+ currentReasoningPart = {
985
+ type: "reasoning",
986
+ reasoning: "",
987
+ details: []
988
+ };
989
+ message.parts.push(currentReasoningPart);
990
+ }
991
+ currentReasoningPart.details.push({
992
+ type: "redacted",
993
+ data: value.data
994
+ });
995
+ currentReasoningTextDetail = void 0;
996
+ execUpdate();
997
+ },
998
+ onFilePart(value) {
999
+ message.parts.push({
1000
+ type: "file",
1001
+ mediaType: value.mimeType,
1002
+ data: value.data
1003
+ });
1004
+ execUpdate();
1005
+ },
1006
+ onSourcePart(value) {
1007
+ message.parts.push({
1008
+ type: "source",
1009
+ source: value
1010
+ });
1011
+ execUpdate();
1012
+ },
1013
+ onToolCallStreamingStartPart(value) {
1014
+ if (message.toolInvocations == null) {
1015
+ message.toolInvocations = [];
1016
+ }
1017
+ partialToolCalls[value.toolCallId] = {
1018
+ text: "",
1019
+ step,
1020
+ toolName: value.toolName,
1021
+ index: message.toolInvocations.length
1022
+ };
1023
+ const invocation = {
1024
+ state: "partial-call",
1025
+ step,
1026
+ toolCallId: value.toolCallId,
1027
+ toolName: value.toolName,
1028
+ args: void 0
1029
+ };
1030
+ message.toolInvocations.push(invocation);
1031
+ updateToolInvocationPart(value.toolCallId, invocation);
1032
+ execUpdate();
1033
+ },
1034
+ onToolCallDeltaPart(value) {
1035
+ const partialToolCall = partialToolCalls[value.toolCallId];
1036
+ partialToolCall.text += value.argsTextDelta;
1037
+ const { value: partialArgs } = parsePartialJson(partialToolCall.text);
1038
+ const invocation = {
1039
+ state: "partial-call",
1040
+ step: partialToolCall.step,
1041
+ toolCallId: value.toolCallId,
1042
+ toolName: partialToolCall.toolName,
1043
+ args: partialArgs
1044
+ };
1045
+ message.toolInvocations[partialToolCall.index] = invocation;
1046
+ updateToolInvocationPart(value.toolCallId, invocation);
1047
+ execUpdate();
1048
+ },
1049
+ async onToolCallPart(value) {
1050
+ const invocation = {
1051
+ state: "call",
1052
+ step,
1053
+ ...value
1054
+ };
1055
+ if (partialToolCalls[value.toolCallId] != null) {
1056
+ message.toolInvocations[partialToolCalls[value.toolCallId].index] = invocation;
1057
+ } else {
1058
+ if (message.toolInvocations == null) {
1059
+ message.toolInvocations = [];
1060
+ }
1061
+ message.toolInvocations.push(invocation);
1062
+ }
1063
+ updateToolInvocationPart(value.toolCallId, invocation);
1064
+ execUpdate();
1065
+ if (onToolCall) {
1066
+ const result = await onToolCall({ toolCall: value });
1067
+ if (result != null) {
1068
+ const invocation2 = {
1069
+ state: "result",
1070
+ step,
1071
+ ...value,
1072
+ result
1073
+ };
1074
+ message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
1075
+ updateToolInvocationPart(value.toolCallId, invocation2);
1076
+ execUpdate();
1077
+ }
1078
+ }
1079
+ },
1080
+ onToolResultPart(value) {
1081
+ const toolInvocations = message.toolInvocations;
1082
+ if (toolInvocations == null) {
1083
+ throw new Error("tool_result must be preceded by a tool_call");
1084
+ }
1085
+ const toolInvocationIndex = toolInvocations.findIndex(
1086
+ (invocation2) => invocation2.toolCallId === value.toolCallId
1087
+ );
1088
+ if (toolInvocationIndex === -1) {
1089
+ throw new Error(
1090
+ "tool_result must be preceded by a tool_call with the same toolCallId"
1091
+ );
1092
+ }
1093
+ const invocation = {
1094
+ ...toolInvocations[toolInvocationIndex],
1095
+ state: "result",
1096
+ ...value
1097
+ };
1098
+ toolInvocations[toolInvocationIndex] = invocation;
1099
+ updateToolInvocationPart(value.toolCallId, invocation);
1100
+ execUpdate();
1101
+ },
1102
+ onDataPart(value) {
1103
+ data.push(...value);
1104
+ execUpdate();
1105
+ },
1106
+ onMessageAnnotationsPart(value) {
1107
+ if (messageAnnotations == null) {
1108
+ messageAnnotations = [...value];
1109
+ } else {
1110
+ messageAnnotations.push(...value);
1111
+ }
1112
+ execUpdate();
1113
+ },
1114
+ onFinishStepPart(value) {
1115
+ step += 1;
1116
+ currentTextPart = value.isContinued ? currentTextPart : void 0;
1117
+ currentReasoningPart = void 0;
1118
+ currentReasoningTextDetail = void 0;
1119
+ },
1120
+ onStartStepPart(value) {
1121
+ if (!replaceLastMessage) {
1122
+ message.id = value.messageId;
1123
+ }
1124
+ message.parts.push({ type: "step-start" });
1125
+ execUpdate();
1126
+ },
1127
+ onFinishMessagePart(value) {
1128
+ finishReason = value.finishReason;
1129
+ if (value.usage != null) {
1130
+ usage = calculateLanguageModelUsage(value.usage);
1131
+ }
1132
+ },
1133
+ onErrorPart(error) {
1134
+ throw new Error(error);
1135
+ }
1136
+ });
1137
+ onFinish == null ? void 0 : onFinish({ message, finishReason, usage });
1138
+ }
1139
+
1140
+ // core/util/process-chat-text-response.ts
1141
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
1142
+
1143
+ // core/util/process-text-stream.ts
1144
+ async function processTextStream({
1145
+ stream,
1146
+ onTextPart
1147
+ }) {
1148
+ const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
1149
+ while (true) {
1150
+ const { done, value } = await reader.read();
1151
+ if (done) {
1152
+ break;
1153
+ }
1154
+ await onTextPart(value);
1155
+ }
1156
+ }
1157
+
1158
+ // core/util/process-chat-text-response.ts
1159
+ async function processChatTextResponse({
1160
+ stream,
1161
+ update,
1162
+ onFinish,
1163
+ getCurrentDate = () => /* @__PURE__ */ new Date(),
1164
+ generateId: generateId3 = import_provider_utils3.generateId
1165
+ }) {
1166
+ const textPart = { type: "text", text: "" };
1167
+ const resultMessage = {
1168
+ id: generateId3(),
1169
+ createdAt: getCurrentDate(),
1170
+ role: "assistant",
1171
+ content: "",
1172
+ parts: [textPart]
1173
+ };
1174
+ await processTextStream({
1175
+ stream,
1176
+ onTextPart: (chunk) => {
1177
+ resultMessage.content += chunk;
1178
+ textPart.text += chunk;
1179
+ update({
1180
+ message: { ...resultMessage },
1181
+ data: [],
1182
+ replaceLastMessage: false
1183
+ });
1184
+ }
1185
+ });
1186
+ onFinish == null ? void 0 : onFinish(resultMessage, {
1187
+ usage: { completionTokens: NaN, promptTokens: NaN, totalTokens: NaN },
1188
+ finishReason: "unknown"
1189
+ });
1190
+ }
1191
+
1192
+ // core/util/call-chat-api.ts
1193
+ var getOriginalFetch = () => fetch;
1194
+ async function callChatApi({
1195
+ api,
1196
+ body,
1197
+ streamProtocol = "data",
1198
+ credentials,
1199
+ headers,
1200
+ abortController,
1201
+ restoreMessagesOnFailure,
1202
+ onResponse,
1203
+ onUpdate,
1204
+ onFinish,
1205
+ onToolCall,
1206
+ generateId: generateId3,
1207
+ fetch: fetch2 = getOriginalFetch(),
1208
+ lastMessage
1209
+ }) {
1210
+ var _a17, _b;
1211
+ const response = await fetch2(api, {
1212
+ method: "POST",
1213
+ body: JSON.stringify(body),
1214
+ headers: {
1215
+ "Content-Type": "application/json",
1216
+ ...headers
1217
+ },
1218
+ signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
1219
+ credentials
1220
+ }).catch((err) => {
1221
+ restoreMessagesOnFailure();
1222
+ throw err;
1223
+ });
1224
+ if (onResponse) {
1225
+ try {
1226
+ await onResponse(response);
1227
+ } catch (err) {
1228
+ throw err;
1229
+ }
1230
+ }
1231
+ if (!response.ok) {
1232
+ restoreMessagesOnFailure();
1233
+ throw new Error(
1234
+ (_b = await response.text()) != null ? _b : "Failed to fetch the chat response."
1235
+ );
1236
+ }
1237
+ if (!response.body) {
1238
+ throw new Error("The response body is empty.");
1239
+ }
1240
+ switch (streamProtocol) {
1241
+ case "text": {
1242
+ await processChatTextResponse({
1243
+ stream: response.body,
1244
+ update: onUpdate,
1245
+ onFinish,
1246
+ generateId: generateId3
1247
+ });
1248
+ return;
1249
+ }
1250
+ case "data": {
1251
+ await processChatResponse({
1252
+ stream: response.body,
1253
+ update: onUpdate,
1254
+ lastMessage,
1255
+ onToolCall,
1256
+ onFinish({ message, finishReason, usage }) {
1257
+ if (onFinish && message != null) {
1258
+ onFinish(message, { usage, finishReason });
1259
+ }
1260
+ },
1261
+ generateId: generateId3
1262
+ });
1263
+ return;
1264
+ }
1265
+ default: {
1266
+ const exhaustiveCheck = streamProtocol;
1267
+ throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
1268
+ }
1269
+ }
1270
+ }
1271
+
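A hedged sketch of driving the inlined chat transport directly; the endpoint URL, body shape, and handlers below are hypothetical, and in practice the useChat-style hooks call this for you:

// run inside an async context
await callChatApi({
  api: "/api/chat",                       // hypothetical endpoint
  body: { messages: [{ role: "user", content: "Hi" }] },
  streamProtocol: "data",
  headers: {},
  abortController: () => undefined,
  restoreMessagesOnFailure: () => {},     // invoked when the fetch fails or returns !ok
  onUpdate({ message }) {
    console.log(message.content);         // assistant text streamed so far
  },
  onFinish(message, { usage, finishReason }) {
    console.log(finishReason, usage);
  },
  generateId: () => crypto.randomUUID(),  // any id generator works here
  lastMessage: undefined
});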
1272
+ // core/util/call-completion-api.ts
1273
+ var getOriginalFetch2 = () => fetch;
1274
+ async function callCompletionApi({
1275
+ api,
1276
+ prompt,
1277
+ credentials,
1278
+ headers,
1279
+ body,
1280
+ streamProtocol = "data",
1281
+ setCompletion,
1282
+ setLoading,
1283
+ setError,
1284
+ setAbortController,
1285
+ onResponse,
1286
+ onFinish,
1287
+ onError,
1288
+ onData,
1289
+ fetch: fetch2 = getOriginalFetch2()
1290
+ }) {
1291
+ var _a17;
1292
+ try {
1293
+ setLoading(true);
1294
+ setError(void 0);
1295
+ const abortController = new AbortController();
1296
+ setAbortController(abortController);
1297
+ setCompletion("");
1298
+ const response = await fetch2(api, {
1299
+ method: "POST",
1300
+ body: JSON.stringify({
1301
+ prompt,
1302
+ ...body
1303
+ }),
1304
+ credentials,
1305
+ headers: {
1306
+ "Content-Type": "application/json",
1307
+ ...headers
1308
+ },
1309
+ signal: abortController.signal
1310
+ }).catch((err) => {
1311
+ throw err;
1312
+ });
1313
+ if (onResponse) {
1314
+ try {
1315
+ await onResponse(response);
1316
+ } catch (err) {
1317
+ throw err;
1318
+ }
1319
+ }
1320
+ if (!response.ok) {
1321
+ throw new Error(
1322
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
1323
+ );
1324
+ }
1325
+ if (!response.body) {
1326
+ throw new Error("The response body is empty.");
1327
+ }
1328
+ let result = "";
1329
+ switch (streamProtocol) {
1330
+ case "text": {
1331
+ await processTextStream({
1332
+ stream: response.body,
1333
+ onTextPart: (chunk) => {
1334
+ result += chunk;
1335
+ setCompletion(result);
1336
+ }
1337
+ });
1338
+ break;
1339
+ }
1340
+ case "data": {
1341
+ await processDataStream({
1342
+ stream: response.body,
1343
+ onTextPart(value) {
1344
+ result += value;
1345
+ setCompletion(result);
1346
+ },
1347
+ onDataPart(value) {
1348
+ onData == null ? void 0 : onData(value);
1349
+ },
1350
+ onErrorPart(value) {
1351
+ throw new Error(value);
1352
+ }
1353
+ });
1354
+ break;
1355
+ }
1356
+ default: {
1357
+ const exhaustiveCheck = streamProtocol;
1358
+ throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
1359
+ }
1360
+ }
1361
+ if (onFinish) {
1362
+ onFinish(prompt, result);
1363
+ }
1364
+ setAbortController(null);
1365
+ return result;
1366
+ } catch (err) {
1367
+ if (err.name === "AbortError") {
1368
+ setAbortController(null);
1369
+ return null;
1370
+ }
1371
+ if (err instanceof Error) {
1372
+ if (onError) {
1373
+ onError(err);
1374
+ }
1375
+ }
1376
+ setError(err);
1377
+ } finally {
1378
+ setLoading(false);
1379
+ }
1380
+ }
1381
+
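Similarly, a hedged sketch for the completion transport; the setter callbacks mirror React-style state setters, and the endpoint and prompt are hypothetical:

let completion = "";
const text = await callCompletionApi({
  api: "/api/completion",                 // hypothetical endpoint
  prompt: "Write a haiku about diffs.",
  body: {},
  streamProtocol: "data",
  setCompletion: (value) => { completion = value; },
  setLoading: (loading) => {},
  setError: (error) => {},
  setAbortController: (controller) => {},
  onFinish: (prompt, result) => console.log(result)
});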
1382
+ // core/util/data-url.ts
1383
+ function getTextFromDataUrl(dataUrl) {
1384
+ const [header, base64Content] = dataUrl.split(",");
1385
+ const mediaType = header.split(";")[0].split(":")[1];
1386
+ if (mediaType == null || base64Content == null) {
1387
+ throw new Error("Invalid data URL format");
1388
+ }
1389
+ try {
1390
+ return window.atob(base64Content);
1391
+ } catch (error) {
1392
+ throw new Error(`Error decoding data URL`);
1393
+ }
1394
+ }
1395
+
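Illustrative only: this helper decodes a base64 text data URL via window.atob, so it assumes a browser-like environment.

getTextFromDataUrl("data:text/plain;base64,SGVsbG8gd29ybGQ=");
// => "Hello world"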
1396
+ // core/util/extract-max-tool-invocation-step.ts
1397
+ function extractMaxToolInvocationStep(toolInvocations) {
1398
+ return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
1399
+ var _a17;
1400
+ return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
1401
+ }, 0);
1402
+ }
1403
+
1404
+ // core/util/get-message-parts.ts
1405
+ function getMessageParts(message) {
1406
+ var _a17;
1407
+ return (_a17 = message.parts) != null ? _a17 : [
1408
+ ...message.toolInvocations ? message.toolInvocations.map((toolInvocation) => ({
1409
+ type: "tool-invocation",
1410
+ toolInvocation
1411
+ })) : [],
1412
+ ...message.reasoning ? [
1413
+ {
1414
+ type: "reasoning",
1415
+ reasoning: message.reasoning,
1416
+ details: [{ type: "text", text: message.reasoning }]
1417
+ }
1418
+ ] : [],
1419
+ ...message.content ? [{ type: "text", text: message.content }] : []
1420
+ ];
1421
+ }
1422
+
1423
+ // core/util/fill-message-parts.ts
1424
+ function fillMessageParts(messages) {
1425
+ return messages.map((message) => ({
1426
+ ...message,
1427
+ parts: getMessageParts(message)
1428
+ }));
1429
+ }
1430
+
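Illustrative sketch (message values hypothetical) of how legacy message fields are lifted into the parts array:

fillMessageParts([
  { id: "1", role: "user", content: "What is the weather?" }
]);
// => [{ id: "1", role: "user", content: "What is the weather?",
//       parts: [{ type: "text", text: "What is the weather?" }] }]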
1431
+ // core/util/is-deep-equal-data.ts
1432
+ function isDeepEqualData(obj1, obj2) {
1433
+ if (obj1 === obj2)
1434
+ return true;
1435
+ if (obj1 == null || obj2 == null)
1436
+ return false;
1437
+ if (typeof obj1 !== "object" && typeof obj2 !== "object")
1438
+ return obj1 === obj2;
1439
+ if (obj1.constructor !== obj2.constructor)
1440
+ return false;
1441
+ if (obj1 instanceof Date && obj2 instanceof Date) {
1442
+ return obj1.getTime() === obj2.getTime();
1443
+ }
1444
+ if (Array.isArray(obj1)) {
1445
+ if (obj1.length !== obj2.length)
1446
+ return false;
1447
+ for (let i = 0; i < obj1.length; i++) {
1448
+ if (!isDeepEqualData(obj1[i], obj2[i]))
1449
+ return false;
1450
+ }
1451
+ return true;
1452
+ }
1453
+ const keys1 = Object.keys(obj1);
1454
+ const keys2 = Object.keys(obj2);
1455
+ if (keys1.length !== keys2.length)
1456
+ return false;
1457
+ for (const key of keys1) {
1458
+ if (!keys2.includes(key))
1459
+ return false;
1460
+ if (!isDeepEqualData(obj1[key], obj2[key]))
1461
+ return false;
1462
+ }
1463
+ return true;
1464
+ }
1465
+
1466
+ // core/util/prepare-attachments-for-request.ts
1467
+ async function prepareAttachmentsForRequest(attachmentsFromOptions) {
1468
+ if (!attachmentsFromOptions) {
1469
+ return [];
1470
+ }
1471
+ if (attachmentsFromOptions instanceof FileList) {
1472
+ return Promise.all(
1473
+ Array.from(attachmentsFromOptions).map(async (attachment) => {
1474
+ const { name: name17, type } = attachment;
1475
+ const dataUrl = await new Promise((resolve, reject) => {
1476
+ const reader = new FileReader();
1477
+ reader.onload = (readerEvent) => {
1478
+ var _a17;
1479
+ resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
1480
+ };
1481
+ reader.onerror = (error) => reject(error);
1482
+ reader.readAsDataURL(attachment);
1483
+ });
1484
+ return {
1485
+ name: name17,
1486
+ contentType: type,
1487
+ url: dataUrl
1488
+ };
1489
+ })
1490
+ );
1491
+ }
1492
+ if (Array.isArray(attachmentsFromOptions)) {
1493
+ return attachmentsFromOptions;
1494
+ }
1495
+ throw new Error("Invalid attachments type");
1496
+ }
1497
+
1498
+ // core/util/schema.ts
1499
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
1500
+
1501
+ // core/util/zod-schema.ts
1502
+ var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"));
1503
+ function zodSchema(zodSchema2, options) {
1504
+ var _a17;
1505
+ const useReferences = (_a17 = options == null ? void 0 : options.useReferences) != null ? _a17 : false;
1506
+ return jsonSchema(
1507
+ (0, import_zod_to_json_schema.default)(zodSchema2, {
1508
+ $refStrategy: useReferences ? "root" : "none",
1509
+ target: "jsonSchema7"
1510
+ // note: openai mode breaks various gemini conversions
1511
+ }),
1512
+ {
1513
+ validate: (value) => {
1514
+ const result = zodSchema2.safeParse(value);
1515
+ return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
1516
+ }
1517
+ }
1518
+ );
1519
+ }
1520
+
1521
+ // core/util/schema.ts
1522
+ var schemaSymbol = Symbol.for("vercel.ai.schema");
1523
+ function jsonSchema(jsonSchema2, {
1524
+ validate
1525
+ } = {}) {
1526
+ return {
1527
+ [schemaSymbol]: true,
1528
+ _type: void 0,
1529
+ // should never be used directly
1530
+ [import_provider_utils4.validatorSymbol]: true,
1531
+ jsonSchema: jsonSchema2,
1532
+ validate
1533
+ };
1534
+ }
1535
+ function isSchema(value) {
1536
+ return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
1537
+ }
1538
+ function asSchema(schema) {
1539
+ return isSchema(schema) ? schema : zodSchema(schema);
1540
+ }
1541
+
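Hedged sketch: asSchema passes an existing Schema object through unchanged and wraps anything else (for example a Zod schema) via zodSchema; the zod dependency is assumed for this example.

const { z } = require("zod"); // assumed dependency, illustration only

const schema = asSchema(z.object({ city: z.string() }));
schema.jsonSchema;                    // JSON Schema 7 produced by zod-to-json-schema
schema.validate({ city: "Berlin" });  // => { success: true, value: { city: "Berlin" } }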
1542
+ // core/util/should-resubmit-messages.ts
1543
+ function shouldResubmitMessages({
1544
+ originalMaxToolInvocationStep,
1545
+ originalMessageCount,
1546
+ maxSteps,
1547
+ messages
1548
+ }) {
1549
+ var _a17;
1550
+ const lastMessage = messages[messages.length - 1];
1551
+ return (
1552
+ // check if the feature is enabled:
1553
+ maxSteps > 1 && // ensure there is a last message:
1554
+ lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
1555
+ (messages.length > originalMessageCount || extractMaxToolInvocationStep(lastMessage.toolInvocations) !== originalMaxToolInvocationStep) && // check that next step is possible:
1556
+ isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
1557
+ ((_a17 = extractMaxToolInvocationStep(lastMessage.toolInvocations)) != null ? _a17 : 0) < maxSteps
1558
+ );
1559
+ }
1560
+ function isAssistantMessageWithCompletedToolCalls(message) {
1561
+ if (message.role !== "assistant") {
1562
+ return false;
1563
+ }
1564
+ const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
1565
+ return part.type === "step-start" ? index : lastIndex;
1566
+ }, -1);
1567
+ const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
1568
+ return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
1569
+ }
1570
+
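Illustrative check (message shape hypothetical): an assistant message qualifies only when every tool invocation in its last step already carries a result, which is what lets shouldResubmitMessages trigger another automatic step.

isAssistantMessageWithCompletedToolCalls({
  role: "assistant",
  parts: [
    { type: "step-start" },
    {
      type: "tool-invocation",
      toolInvocation: { state: "result", toolCallId: "call-1", toolName: "weather", args: {}, result: "sunny" }
    }
  ]
});
// => true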
1571
+ // core/util/update-tool-call-result.ts
1572
+ function updateToolCallResult({
1573
+ messages,
1574
+ toolCallId,
1575
+ toolResult: result
1576
+ }) {
1577
+ var _a17;
1578
+ const lastMessage = messages[messages.length - 1];
1579
+ const invocationPart = lastMessage.parts.find(
1580
+ (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
1581
+ );
1582
+ if (invocationPart == null) {
1583
+ return;
1584
+ }
1585
+ const toolResult = {
1586
+ ...invocationPart.toolInvocation,
1587
+ state: "result",
1588
+ result
1589
+ };
1590
+ invocationPart.toolInvocation = toolResult;
1591
+ lastMessage.toolInvocations = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.map(
1592
+ (toolInvocation) => toolInvocation.toolCallId === toolCallId ? toolResult : toolInvocation
1593
+ );
1594
+ }
100
1595
 
101
1596
  // core/data-stream/create-data-stream.ts
102
- var import_ui_utils = require("@ai-sdk/ui-utils");
103
1597
  function createDataStream({
104
1598
  execute,
105
1599
  onError = () => "An error occurred."
@@ -124,13 +1618,13 @@ function createDataStream({
124
1618
  safeEnqueue(data);
125
1619
  },
126
1620
  writeData(data) {
127
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("data", [data]));
1621
+ safeEnqueue(formatDataStreamPart("data", [data]));
128
1622
  },
129
1623
  writeMessageAnnotation(annotation) {
130
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("message_annotations", [annotation]));
1624
+ safeEnqueue(formatDataStreamPart("message_annotations", [annotation]));
131
1625
  },
132
1626
  writeSource(source) {
133
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("source", source));
1627
+ safeEnqueue(formatDataStreamPart("source", source));
134
1628
  },
135
1629
  merge(streamArg) {
136
1630
  ongoingStreamPromises.push(
@@ -143,7 +1637,7 @@ function createDataStream({
143
1637
  safeEnqueue(value);
144
1638
  }
145
1639
  })().catch((error) => {
146
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
1640
+ safeEnqueue(formatDataStreamPart("error", onError(error)));
147
1641
  })
148
1642
  );
149
1643
  },
@@ -152,12 +1646,12 @@ function createDataStream({
152
1646
  if (result) {
153
1647
  ongoingStreamPromises.push(
154
1648
  result.catch((error) => {
155
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
1649
+ safeEnqueue(formatDataStreamPart("error", onError(error)));
156
1650
  })
157
1651
  );
158
1652
  }
159
1653
  } catch (error) {
160
- safeEnqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
1654
+ safeEnqueue(formatDataStreamPart("error", onError(error)));
161
1655
  }
162
1656
  const waitForStreams = new Promise(async (resolve) => {
163
1657
  while (ongoingStreamPromises.length > 0) {
@@ -307,7 +1801,7 @@ _a = symbol;
307
1801
 
308
1802
  // util/retry-with-exponential-backoff.ts
309
1803
  var import_provider3 = require("@ai-sdk/provider");
310
- var import_provider_utils = require("@ai-sdk/provider-utils");
1804
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
311
1805
 
312
1806
  // util/retry-error.ts
313
1807
  var import_provider2 = require("@ai-sdk/provider");
@@ -351,13 +1845,13 @@ async function _retryWithExponentialBackoff(f, {
351
1845
  try {
352
1846
  return await f();
353
1847
  } catch (error) {
354
- if ((0, import_provider_utils.isAbortError)(error)) {
1848
+ if ((0, import_provider_utils6.isAbortError)(error)) {
355
1849
  throw error;
356
1850
  }
357
1851
  if (maxRetries === 0) {
358
1852
  throw error;
359
1853
  }
360
- const errorMessage = (0, import_provider_utils.getErrorMessage)(error);
1854
+ const errorMessage = (0, import_provider_utils6.getErrorMessage)(error);
361
1855
  const newErrors = [...errors, error];
362
1856
  const tryNumber = newErrors.length;
363
1857
  if (tryNumber > maxRetries) {
@@ -368,7 +1862,7 @@ async function _retryWithExponentialBackoff(f, {
368
1862
  });
369
1863
  }
370
1864
  if (error instanceof Error && import_provider3.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
371
- await (0, import_provider_utils.delay)(delayInMs);
1865
+ await (0, import_provider_utils6.delay)(delayInMs);
372
1866
  return _retryWithExponentialBackoff(
373
1867
  f,
374
1868
  { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
@@ -906,28 +2400,28 @@ var NoImageGeneratedError = class extends import_provider4.AISDKError {
906
2400
  _a3 = symbol3;
907
2401
 
908
2402
  // core/generate-text/generated-file.ts
909
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
2403
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
910
2404
  var DefaultGeneratedFile = class {
911
2405
  constructor({
912
2406
  data,
913
- mimeType
2407
+ mediaType
914
2408
  }) {
915
2409
  const isUint8Array = data instanceof Uint8Array;
916
2410
  this.base64Data = isUint8Array ? void 0 : data;
917
2411
  this.uint8ArrayData = isUint8Array ? data : void 0;
918
- this.mimeType = mimeType;
2412
+ this.mediaType = mediaType;
919
2413
  }
920
2414
  // lazy conversion with caching to avoid unnecessary conversion overhead:
921
2415
  get base64() {
922
2416
  if (this.base64Data == null) {
923
- this.base64Data = (0, import_provider_utils2.convertUint8ArrayToBase64)(this.uint8ArrayData);
2417
+ this.base64Data = (0, import_provider_utils7.convertUint8ArrayToBase64)(this.uint8ArrayData);
924
2418
  }
925
2419
  return this.base64Data;
926
2420
  }
927
2421
  // lazy conversion with caching to avoid unnecessary conversion overhead:
928
2422
  get uint8Array() {
929
2423
  if (this.uint8ArrayData == null) {
930
- this.uint8ArrayData = (0, import_provider_utils2.convertBase64ToUint8Array)(this.base64Data);
2424
+ this.uint8ArrayData = (0, import_provider_utils7.convertBase64ToUint8Array)(this.base64Data);
931
2425
  }
932
2426
  return this.uint8ArrayData;
933
2427
  }
@@ -939,45 +2433,45 @@ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
939
2433
  }
940
2434
  };
941
2435
 
942
- // core/util/detect-image-mimetype.ts
943
- var mimeTypeSignatures = [
2436
+ // core/util/detect-media-type.ts
2437
+ var imageMediaTypeSignatures = [
944
2438
  {
945
- mimeType: "image/gif",
2439
+ mediaType: "image/gif",
946
2440
  bytesPrefix: [71, 73, 70],
947
2441
  base64Prefix: "R0lG"
948
2442
  },
949
2443
  {
950
- mimeType: "image/png",
2444
+ mediaType: "image/png",
951
2445
  bytesPrefix: [137, 80, 78, 71],
952
2446
  base64Prefix: "iVBORw"
953
2447
  },
954
2448
  {
955
- mimeType: "image/jpeg",
2449
+ mediaType: "image/jpeg",
956
2450
  bytesPrefix: [255, 216],
957
2451
  base64Prefix: "/9j/"
958
2452
  },
959
2453
  {
960
- mimeType: "image/webp",
2454
+ mediaType: "image/webp",
961
2455
  bytesPrefix: [82, 73, 70, 70],
962
2456
  base64Prefix: "UklGRg"
963
2457
  },
964
2458
  {
965
- mimeType: "image/bmp",
2459
+ mediaType: "image/bmp",
966
2460
  bytesPrefix: [66, 77],
967
2461
  base64Prefix: "Qk"
968
2462
  },
969
2463
  {
970
- mimeType: "image/tiff",
2464
+ mediaType: "image/tiff",
971
2465
  bytesPrefix: [73, 73, 42, 0],
972
2466
  base64Prefix: "SUkqAA"
973
2467
  },
974
2468
  {
975
- mimeType: "image/tiff",
2469
+ mediaType: "image/tiff",
976
2470
  bytesPrefix: [77, 77, 0, 42],
977
2471
  base64Prefix: "TU0AKg"
978
2472
  },
979
2473
  {
980
- mimeType: "image/avif",
2474
+ mediaType: "image/avif",
981
2475
  bytesPrefix: [
982
2476
  0,
983
2477
  0,
@@ -995,7 +2489,7 @@ var mimeTypeSignatures = [
995
2489
  base64Prefix: "AAAAIGZ0eXBhdmlm"
996
2490
  },
997
2491
  {
998
- mimeType: "image/heic",
2492
+ mediaType: "image/heic",
999
2493
  bytesPrefix: [
1000
2494
  0,
1001
2495
  0,
@@ -1013,10 +2507,45 @@ var mimeTypeSignatures = [
1013
2507
  base64Prefix: "AAAAIGZ0eXBoZWlj"
1014
2508
  }
1015
2509
  ];
1016
- function detectImageMimeType(image) {
1017
- for (const signature of mimeTypeSignatures) {
1018
- if (typeof image === "string" ? image.startsWith(signature.base64Prefix) : image.length >= signature.bytesPrefix.length && signature.bytesPrefix.every((byte, index) => image[index] === byte)) {
1019
- return signature.mimeType;
2510
+ var audioMediaTypeSignatures = [
2511
+ {
2512
+ mediaType: "audio/mpeg",
2513
+ bytesPrefix: [255, 251],
2514
+ base64Prefix: "//s="
2515
+ },
2516
+ {
2517
+ mediaType: "audio/wav",
2518
+ bytesPrefix: [82, 73, 70, 70],
2519
+ base64Prefix: "UklGR"
2520
+ },
2521
+ {
2522
+ mediaType: "audio/ogg",
2523
+ bytesPrefix: [79, 103, 103, 83],
2524
+ base64Prefix: "T2dnUw"
2525
+ },
2526
+ {
2527
+ mediaType: "audio/flac",
2528
+ bytesPrefix: [102, 76, 97, 67],
2529
+ base64Prefix: "ZkxhQw"
2530
+ },
2531
+ {
2532
+ mediaType: "audio/aac",
2533
+ bytesPrefix: [64, 21, 0, 0],
2534
+ base64Prefix: "QBUA"
2535
+ },
2536
+ {
2537
+ mediaType: "audio/mp4",
2538
+ bytesPrefix: [102, 116, 121, 112],
2539
+ base64Prefix: "ZnR5cA"
2540
+ }
2541
+ ];
2542
+ function detectMediaType({
2543
+ data,
2544
+ signatures
2545
+ }) {
2546
+ for (const signature of signatures) {
2547
+ if (typeof data === "string" ? data.startsWith(signature.base64Prefix) : data.length >= signature.bytesPrefix.length && signature.bytesPrefix.every((byte, index) => data[index] === byte)) {
2548
+ return signature.mediaType;
1020
2549
  }
1021
2550
  }
1022
2551
  return void 0;
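A minimal sketch of how the new detectMediaType helper behaves (illustrative only; the helper and the signature tables above are internal to this bundle and not exported):

// Prefix-match raw bytes (or a base64 string) against the signature tables above.
const pngBytes = new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10]); // PNG magic number
const detected = detectMediaType({ data: pngBytes, signatures: imageMediaTypeSignatures });
// detected === "image/png"; when nothing matches, detectMediaType returns undefined and
// callers fall back to a default ("image/png" in generateImage, "audio/wav" in transcribe).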
@@ -1072,7 +2601,10 @@ async function generateImage({
1072
2601
  var _a18;
1073
2602
  return new DefaultGeneratedFile({
1074
2603
  data: image,
1075
- mimeType: (_a18 = detectImageMimeType(image)) != null ? _a18 : "image/png"
2604
+ mediaType: (_a18 = detectMediaType({
2605
+ data: image,
2606
+ signatures: imageMediaTypeSignatures
2607
+ })) != null ? _a18 : "image/png"
1076
2608
  });
1077
2609
  }
1078
2610
  )
@@ -1098,7 +2630,7 @@ var DefaultGenerateImageResult = class {
1098
2630
 
1099
2631
  // core/generate-object/generate-object.ts
1100
2632
  var import_provider12 = require("@ai-sdk/provider");
1101
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
2633
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
1102
2634
 
1103
2635
  // errors/no-object-generated-error.ts
1104
2636
  var import_provider5 = require("@ai-sdk/provider");
@@ -1128,6 +2660,9 @@ var NoObjectGeneratedError = class extends import_provider5.AISDKError {
1128
2660
  };
1129
2661
  _a4 = symbol4;
1130
2662
 
2663
+ // core/prompt/convert-to-language-model-prompt.ts
2664
+ var import_provider_utils9 = require("@ai-sdk/provider-utils");
2665
+
1131
2666
  // util/download-error.ts
1132
2667
  var import_provider6 = require("@ai-sdk/provider");
1133
2668
  var name5 = "AI_DownloadError";
@@ -1169,7 +2704,7 @@ async function download({ url }) {
1169
2704
  }
1170
2705
  return {
1171
2706
  data: new Uint8Array(await response.arrayBuffer()),
1172
- mimeType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
2707
+ mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
1173
2708
  };
1174
2709
  } catch (error) {
1175
2710
  if (DownloadError.isInstance(error)) {
@@ -1180,7 +2715,7 @@ async function download({ url }) {
1180
2715
  }
1181
2716
 
1182
2717
  // core/prompt/data-content.ts
1183
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
2718
+ var import_provider_utils8 = require("@ai-sdk/provider-utils");
1184
2719
 
1185
2720
  // core/prompt/invalid-data-content-error.ts
1186
2721
  var import_provider7 = require("@ai-sdk/provider");
@@ -1224,9 +2759,9 @@ function convertDataContentToBase64String(content) {
1224
2759
  return content;
1225
2760
  }
1226
2761
  if (content instanceof ArrayBuffer) {
1227
- return (0, import_provider_utils3.convertUint8ArrayToBase64)(new Uint8Array(content));
2762
+ return (0, import_provider_utils8.convertUint8ArrayToBase64)(new Uint8Array(content));
1228
2763
  }
1229
- return (0, import_provider_utils3.convertUint8ArrayToBase64)(content);
2764
+ return (0, import_provider_utils8.convertUint8ArrayToBase64)(content);
1230
2765
  }
1231
2766
  function convertDataContentToUint8Array(content) {
1232
2767
  if (content instanceof Uint8Array) {
@@ -1234,7 +2769,7 @@ function convertDataContentToUint8Array(content) {
1234
2769
  }
1235
2770
  if (typeof content === "string") {
1236
2771
  try {
1237
- return (0, import_provider_utils3.convertBase64ToUint8Array)(content);
2772
+ return (0, import_provider_utils8.convertBase64ToUint8Array)(content);
1238
2773
  } catch (error) {
1239
2774
  throw new InvalidDataContentError({
1240
2775
  message: "Invalid data content. Content string is not a base64-encoded media.",
@@ -1282,12 +2817,12 @@ function splitDataUrl(dataUrl) {
1282
2817
  try {
1283
2818
  const [header, base64Content] = dataUrl.split(",");
1284
2819
  return {
1285
- mimeType: header.split(";")[0].split(":")[1],
2820
+ mediaType: header.split(";")[0].split(":")[1],
1286
2821
  base64Content
1287
2822
  };
1288
2823
  } catch (error) {
1289
2824
  return {
1290
- mimeType: void 0,
2825
+ mediaType: void 0,
1291
2826
  base64Content: void 0
1292
2827
  };
1293
2828
  }
@@ -1352,7 +2887,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
1352
2887
  // remove empty text parts:
1353
2888
  (part) => part.type !== "text" || part.text !== ""
1354
2889
  ).map((part) => {
1355
- var _a18;
2890
+ var _a18, _b2;
1356
2891
  const providerOptions = (_a18 = part.providerOptions) != null ? _a18 : part.experimental_providerMetadata;
1357
2892
  switch (part.type) {
1358
2893
  case "file": {
@@ -1360,7 +2895,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
1360
2895
  type: "file",
1361
2896
  data: part.data instanceof URL ? part.data : convertDataContentToBase64String(part.data),
1362
2897
  filename: part.filename,
1363
- mimeType: part.mimeType,
2898
+ mediaType: (_b2 = part.mediaType) != null ? _b2 : part.mimeType,
1364
2899
  providerOptions
1365
2900
  };
1366
2901
  }
@@ -1448,7 +2983,7 @@ async function downloadAssets(messages, downloadImplementation, modelSupportsIma
1448
2983
  );
1449
2984
  }
1450
2985
  function convertPartToLanguageModelPart(part, downloadedAssets) {
1451
- var _a17, _b, _c, _d;
2986
+ var _a17, _b, _c, _d, _e;
1452
2987
  if (part.type === "text") {
1453
2988
  return {
1454
2989
  type: "text",
@@ -1456,7 +2991,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1456
2991
  providerOptions: (_a17 = part.providerOptions) != null ? _a17 : part.experimental_providerMetadata
1457
2992
  };
1458
2993
  }
1459
- let mimeType = part.mimeType;
2994
+ let mediaType = (_b = part.mediaType) != null ? _b : part.mimeType;
1460
2995
  let data;
1461
2996
  let content;
1462
2997
  let normalizedData;
@@ -1478,19 +3013,19 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1478
3013
  }
1479
3014
  if (content instanceof URL) {
1480
3015
  if (content.protocol === "data:") {
1481
- const { mimeType: dataUrlMimeType, base64Content } = splitDataUrl(
3016
+ const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
1482
3017
  content.toString()
1483
3018
  );
1484
- if (dataUrlMimeType == null || base64Content == null) {
3019
+ if (dataUrlMediaType == null || base64Content == null) {
1485
3020
  throw new Error(`Invalid data URL format in part ${type}`);
1486
3021
  }
1487
- mimeType = dataUrlMimeType;
3022
+ mediaType = dataUrlMediaType;
1488
3023
  normalizedData = convertDataContentToUint8Array(base64Content);
1489
3024
  } else {
1490
3025
  const downloadedFile = downloadedAssets[content.toString()];
1491
3026
  if (downloadedFile) {
1492
3027
  normalizedData = downloadedFile.data;
1493
- mimeType != null ? mimeType : mimeType = downloadedFile.mimeType;
3028
+ mediaType != null ? mediaType : mediaType = downloadedFile.mediaType;
1494
3029
  } else {
1495
3030
  normalizedData = content;
1496
3031
  }
@@ -1501,25 +3036,30 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
1501
3036
  switch (type) {
1502
3037
  case "image": {
1503
3038
  if (normalizedData instanceof Uint8Array) {
1504
- mimeType = (_b = detectImageMimeType(normalizedData)) != null ? _b : mimeType;
3039
+ mediaType = (_c = detectMediaType({
3040
+ data: normalizedData,
3041
+ signatures: imageMediaTypeSignatures
3042
+ })) != null ? _c : mediaType;
1505
3043
  }
1506
3044
  return {
1507
- type: "image",
1508
- image: normalizedData,
1509
- mimeType,
1510
- providerOptions: (_c = part.providerOptions) != null ? _c : part.experimental_providerMetadata
3045
+ type: "file",
3046
+ mediaType: mediaType != null ? mediaType : "image/*",
3047
+ // any image
3048
+ filename: void 0,
3049
+ data: normalizedData instanceof Uint8Array ? (0, import_provider_utils9.convertUint8ArrayToBase64)(normalizedData) : normalizedData,
3050
+ providerOptions: (_d = part.providerOptions) != null ? _d : part.experimental_providerMetadata
1511
3051
  };
1512
3052
  }
1513
3053
  case "file": {
1514
- if (mimeType == null) {
1515
- throw new Error(`Mime type is missing for file part`);
3054
+ if (mediaType == null) {
3055
+ throw new Error(`Media type is missing for file part`);
1516
3056
  }
1517
3057
  return {
1518
3058
  type: "file",
1519
- data: normalizedData instanceof Uint8Array ? convertDataContentToBase64String(normalizedData) : normalizedData,
3059
+ mediaType,
1520
3060
  filename: part.filename,
1521
- mimeType,
1522
- providerOptions: (_d = part.providerOptions) != null ? _d : part.experimental_providerMetadata
3061
+ data: normalizedData instanceof Uint8Array ? convertDataContentToBase64String(normalizedData) : normalizedData,
3062
+ providerOptions: (_e = part.providerOptions) != null ? _e : part.experimental_providerMetadata
1523
3063
  };
1524
3064
  }
1525
3065
  }
@@ -1621,7 +3161,7 @@ function prepareCallSettings({
1621
3161
 
1622
3162
  // core/prompt/standardize-prompt.ts
1623
3163
  var import_provider10 = require("@ai-sdk/provider");
1624
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
3164
+ var import_provider_utils10 = require("@ai-sdk/provider-utils");
1625
3165
  var import_zod7 = require("zod");
1626
3166
 
1627
3167
  // core/prompt/attachments-to-parts.ts
@@ -1649,7 +3189,7 @@ function attachmentsToParts(attachments) {
1649
3189
  parts.push({
1650
3190
  type: "file",
1651
3191
  data: url,
1652
- mimeType: attachment.contentType
3192
+ mediaType: attachment.contentType
1653
3193
  });
1654
3194
  }
1655
3195
  break;
@@ -1657,14 +3197,14 @@ function attachmentsToParts(attachments) {
1657
3197
  case "data:": {
1658
3198
  let header;
1659
3199
  let base64Content;
1660
- let mimeType;
3200
+ let mediaType;
1661
3201
  try {
1662
3202
  [header, base64Content] = attachment.url.split(",");
1663
- mimeType = header.split(";")[0].split(":")[1];
3203
+ mediaType = header.split(";")[0].split(":")[1];
1664
3204
  } catch (error) {
1665
3205
  throw new Error(`Error processing data URL: ${attachment.url}`);
1666
3206
  }
1667
- if (mimeType == null || base64Content == null) {
3207
+ if (mediaType == null || base64Content == null) {
1668
3208
  throw new Error(`Invalid data URL format: ${attachment.url}`);
1669
3209
  }
1670
3210
  if ((_b = attachment.contentType) == null ? void 0 : _b.startsWith("image/")) {
@@ -1688,7 +3228,7 @@ function attachmentsToParts(attachments) {
1688
3228
  parts.push({
1689
3229
  type: "file",
1690
3230
  data: base64Content,
1691
- mimeType: attachment.contentType
3231
+ mediaType: attachment.contentType
1692
3232
  });
1693
3233
  }
1694
3234
  break;
@@ -1763,14 +3303,23 @@ function convertToCoreMessages(messages, options) {
1763
3303
  case "assistant": {
1764
3304
  if (message.parts != null) {
1765
3305
  let processBlock2 = function() {
3306
+ var _a18;
1766
3307
  const content2 = [];
1767
3308
  for (const part of block) {
1768
3309
  switch (part.type) {
1769
- case "file":
1770
3310
  case "text": {
1771
3311
  content2.push(part);
1772
3312
  break;
1773
3313
  }
3314
+ case "file": {
3315
+ content2.push({
3316
+ type: "file",
3317
+ data: part.data,
3318
+ mediaType: (_a18 = part.mediaType) != null ? _a18 : part.mimeType
3319
+ // TODO migration, remove
3320
+ });
3321
+ break;
3322
+ }
1774
3323
  case "reasoning": {
1775
3324
  for (const detail of part.details) {
1776
3325
  switch (detail.type) {
@@ -2027,7 +3576,7 @@ var toolResultContentSchema = import_zod4.z.array(
2027
3576
  import_zod4.z.object({
2028
3577
  type: import_zod4.z.literal("image"),
2029
3578
  data: import_zod4.z.string(),
2030
- mimeType: import_zod4.z.string().optional()
3579
+ mediaType: import_zod4.z.string().optional()
2031
3580
  })
2032
3581
  ])
2033
3582
  );
@@ -2042,6 +3591,7 @@ var textPartSchema = import_zod5.z.object({
2042
3591
  var imagePartSchema = import_zod5.z.object({
2043
3592
  type: import_zod5.z.literal("image"),
2044
3593
  image: import_zod5.z.union([dataContentSchema, import_zod5.z.instanceof(URL)]),
3594
+ mediaType: import_zod5.z.string().optional(),
2045
3595
  mimeType: import_zod5.z.string().optional(),
2046
3596
  providerOptions: providerMetadataSchema.optional(),
2047
3597
  experimental_providerMetadata: providerMetadataSchema.optional()
@@ -2050,7 +3600,8 @@ var filePartSchema = import_zod5.z.object({
2050
3600
  type: import_zod5.z.literal("file"),
2051
3601
  data: import_zod5.z.union([dataContentSchema, import_zod5.z.instanceof(URL)]),
2052
3602
  filename: import_zod5.z.string().optional(),
2053
- mimeType: import_zod5.z.string(),
3603
+ mediaType: import_zod5.z.string(),
3604
+ mimeType: import_zod5.z.string().optional(),
2054
3605
  providerOptions: providerMetadataSchema.optional(),
2055
3606
  experimental_providerMetadata: providerMetadataSchema.optional()
2056
3607
  });
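A hedged example of a core message file part under the updated zod schemas (filePartSchema now requires mediaType, while mimeType remains an optional legacy field; pdfBytes is a placeholder Uint8Array, not from this diff):

const fileMessage = {
  role: "user",
  content: [
    { type: "text", text: "Summarize this document." },
    {
      type: "file",
      data: pdfBytes,                // Uint8Array, ArrayBuffer, base64 string, or URL
      filename: "report.pdf",        // optional
      mediaType: "application/pdf",  // required going forward
    },
  ],
};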
@@ -2189,7 +3740,7 @@ function standardizePrompt({
2189
3740
  message: "messages must not be empty"
2190
3741
  });
2191
3742
  }
2192
- const validationResult = (0, import_provider_utils4.safeValidateTypes)({
3743
+ const validationResult = (0, import_provider_utils10.safeValidateTypes)({
2193
3744
  value: messages,
2194
3745
  schema: import_zod7.z.array(coreMessageSchema)
2195
3746
  });
@@ -2210,7 +3761,7 @@ function standardizePrompt({
2210
3761
  }
2211
3762
 
2212
3763
  // core/types/usage.ts
2213
- function calculateLanguageModelUsage({
3764
+ function calculateLanguageModelUsage2({
2214
3765
  promptTokens,
2215
3766
  completionTokens
2216
3767
  }) {
@@ -2250,8 +3801,7 @@ function injectJsonInstruction({
2250
3801
 
2251
3802
  // core/generate-object/output-strategy.ts
2252
3803
  var import_provider11 = require("@ai-sdk/provider");
2253
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
2254
- var import_ui_utils2 = require("@ai-sdk/ui-utils");
3804
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");
2255
3805
 
2256
3806
  // core/util/async-iterable-stream.ts
2257
3807
  function createAsyncIterableStream(source) {
@@ -2307,7 +3857,7 @@ var objectOutputStrategy = (schema) => ({
2307
3857
  };
2308
3858
  },
2309
3859
  validateFinalResult(value) {
2310
- return (0, import_provider_utils5.safeValidateTypes)({ value, schema });
3860
+ return (0, import_provider_utils11.safeValidateTypes)({ value, schema });
2311
3861
  },
2312
3862
  createElementStream() {
2313
3863
  throw new import_provider11.UnsupportedFunctionalityError({
@@ -2346,7 +3896,7 @@ var arrayOutputStrategy = (schema) => {
2346
3896
  const resultArray = [];
2347
3897
  for (let i = 0; i < inputArray.length; i++) {
2348
3898
  const element = inputArray[i];
2349
- const result = (0, import_provider_utils5.safeValidateTypes)({ value: element, schema });
3899
+ const result = (0, import_provider_utils11.safeValidateTypes)({ value: element, schema });
2350
3900
  if (i === inputArray.length - 1 && !isFinalDelta) {
2351
3901
  continue;
2352
3902
  }
@@ -2387,7 +3937,7 @@ var arrayOutputStrategy = (schema) => {
2387
3937
  }
2388
3938
  const inputArray = value.elements;
2389
3939
  for (const element of inputArray) {
2390
- const result = (0, import_provider_utils5.safeValidateTypes)({ value: element, schema });
3940
+ const result = (0, import_provider_utils11.safeValidateTypes)({ value: element, schema });
2391
3941
  if (!result.success) {
2392
3942
  return result;
2393
3943
  }
@@ -2479,9 +4029,9 @@ function getOutputStrategy({
2479
4029
  }) {
2480
4030
  switch (output) {
2481
4031
  case "object":
2482
- return objectOutputStrategy((0, import_ui_utils2.asSchema)(schema));
4032
+ return objectOutputStrategy(asSchema(schema));
2483
4033
  case "array":
2484
- return arrayOutputStrategy((0, import_ui_utils2.asSchema)(schema));
4034
+ return arrayOutputStrategy(asSchema(schema));
2485
4035
  case "enum":
2486
4036
  return enumOutputStrategy(enumValues);
2487
4037
  case "no-schema":
@@ -2620,7 +4170,7 @@ function validateObjectGenerationInput({
2620
4170
  }
2621
4171
 
2622
4172
  // core/generate-object/generate-object.ts
2623
- var originalGenerateId = (0, import_provider_utils6.createIdGenerator)({ prefix: "aiobj", size: 24 });
4173
+ var originalGenerateId = (0, import_provider_utils12.createIdGenerator)({ prefix: "aiobj", size: 24 });
2624
4174
  async function generateObject({
2625
4175
  model,
2626
4176
  enum: enumValues,
@@ -2701,7 +4251,6 @@ async function generateObject({
2701
4251
  let finishReason;
2702
4252
  let usage;
2703
4253
  let warnings;
2704
- let rawResponse;
2705
4254
  let response;
2706
4255
  let request;
2707
4256
  let logprobs;
@@ -2756,7 +4305,7 @@ async function generateObject({
2756
4305
  }),
2757
4306
  tracer,
2758
4307
  fn: async (span2) => {
2759
- var _a18, _b2, _c2, _d2, _e, _f;
4308
+ var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
2760
4309
  const result2 = await model.doGenerate({
2761
4310
  responseFormat: {
2762
4311
  type: "json",
@@ -2774,13 +4323,15 @@ async function generateObject({
2774
4323
  const responseData = {
2775
4324
  id: (_b2 = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),
2776
4325
  timestamp: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
2777
- modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
4326
+ modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
4327
+ headers: (_g = result2.response) == null ? void 0 : _g.headers,
4328
+ body: (_h = result2.response) == null ? void 0 : _h.body
2778
4329
  };
2779
4330
  if (result2.text === void 0) {
2780
4331
  throw new NoObjectGeneratedError({
2781
4332
  message: "No object generated: the model did not return a response.",
2782
4333
  response: responseData,
2783
- usage: calculateLanguageModelUsage(result2.usage),
4334
+ usage: calculateLanguageModelUsage2(result2.usage),
2784
4335
  finishReason: result2.finishReason
2785
4336
  });
2786
4337
  }
@@ -2812,7 +4363,6 @@ async function generateObject({
2812
4363
  finishReason = generateResult.finishReason;
2813
4364
  usage = generateResult.usage;
2814
4365
  warnings = generateResult.warnings;
2815
- rawResponse = generateResult.rawResponse;
2816
4366
  logprobs = generateResult.logprobs;
2817
4367
  resultProviderMetadata = generateResult.providerMetadata;
2818
4368
  request = (_b = generateResult.request) != null ? _b : {};
@@ -2862,7 +4412,7 @@ async function generateObject({
2862
4412
  }),
2863
4413
  tracer,
2864
4414
  fn: async (span2) => {
2865
- var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
4415
+ var _a18, _b2, _c2, _d2, _e, _f, _g, _h, _i, _j;
2866
4416
  const result2 = await model.doGenerate({
2867
4417
  tools: [
2868
4418
  {
@@ -2884,13 +4434,15 @@ async function generateObject({
2884
4434
  const responseData = {
2885
4435
  id: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.id) != null ? _d2 : generateId3(),
2886
4436
  timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
2887
- modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId
4437
+ modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId,
4438
+ headers: (_i = result2.response) == null ? void 0 : _i.headers,
4439
+ body: (_j = result2.response) == null ? void 0 : _j.body
2888
4440
  };
2889
4441
  if (objectText === void 0) {
2890
4442
  throw new NoObjectGeneratedError({
2891
4443
  message: "No object generated: the tool was not called.",
2892
4444
  response: responseData,
2893
- usage: calculateLanguageModelUsage(result2.usage),
4445
+ usage: calculateLanguageModelUsage2(result2.usage),
2894
4446
  finishReason: result2.finishReason
2895
4447
  });
2896
4448
  }
@@ -2922,7 +4474,6 @@ async function generateObject({
2922
4474
  finishReason = generateResult.finishReason;
2923
4475
  usage = generateResult.usage;
2924
4476
  warnings = generateResult.warnings;
2925
- rawResponse = generateResult.rawResponse;
2926
4477
  logprobs = generateResult.logprobs;
2927
4478
  resultProviderMetadata = generateResult.providerMetadata;
2928
4479
  request = (_d = generateResult.request) != null ? _d : {};
@@ -2940,14 +4491,14 @@ async function generateObject({
2940
4491
  }
2941
4492
  }
2942
4493
  function processResult(result2) {
2943
- const parseResult = (0, import_provider_utils6.safeParseJSON)({ text: result2 });
4494
+ const parseResult = (0, import_provider_utils12.safeParseJSON)({ text: result2 });
2944
4495
  if (!parseResult.success) {
2945
4496
  throw new NoObjectGeneratedError({
2946
4497
  message: "No object generated: could not parse the response.",
2947
4498
  cause: parseResult.error,
2948
4499
  text: result2,
2949
4500
  response,
2950
- usage: calculateLanguageModelUsage(usage),
4501
+ usage: calculateLanguageModelUsage2(usage),
2951
4502
  finishReason
2952
4503
  });
2953
4504
  }
@@ -2956,7 +4507,7 @@ async function generateObject({
2956
4507
  {
2957
4508
  text: result2,
2958
4509
  response,
2959
- usage: calculateLanguageModelUsage(usage)
4510
+ usage: calculateLanguageModelUsage2(usage)
2960
4511
  }
2961
4512
  );
2962
4513
  if (!validationResult.success) {
@@ -2965,7 +4516,7 @@ async function generateObject({
2965
4516
  cause: validationResult.error,
2966
4517
  text: result2,
2967
4518
  response,
2968
- usage: calculateLanguageModelUsage(usage),
4519
+ usage: calculateLanguageModelUsage2(usage),
2969
4520
  finishReason
2970
4521
  });
2971
4522
  }
@@ -3004,14 +4555,10 @@ async function generateObject({
3004
4555
  return new DefaultGenerateObjectResult({
3005
4556
  object: object2,
3006
4557
  finishReason,
3007
- usage: calculateLanguageModelUsage(usage),
4558
+ usage: calculateLanguageModelUsage2(usage),
3008
4559
  warnings,
3009
4560
  request,
3010
- response: {
3011
- ...response,
3012
- headers: rawResponse == null ? void 0 : rawResponse.headers,
3013
- body: rawResponse == null ? void 0 : rawResponse.body
3014
- },
4561
+ response,
3015
4562
  logprobs,
3016
4563
  providerMetadata: resultProviderMetadata
3017
4564
  });
@@ -3042,8 +4589,7 @@ var DefaultGenerateObjectResult = class {
3042
4589
  };
3043
4590
 
3044
4591
  // core/generate-object/stream-object.ts
3045
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
3046
- var import_ui_utils3 = require("@ai-sdk/ui-utils");
4592
+ var import_provider_utils13 = require("@ai-sdk/provider-utils");
3047
4593
 
3048
4594
  // util/delayed-promise.ts
3049
4595
  var DelayedPromise = class {
@@ -3187,7 +4733,7 @@ function now() {
3187
4733
  }
3188
4734
 
3189
4735
  // core/generate-object/stream-object.ts
3190
- var originalGenerateId2 = (0, import_provider_utils7.createIdGenerator)({ prefix: "aiobj", size: 24 });
4736
+ var originalGenerateId2 = (0, import_provider_utils13.createIdGenerator)({ prefix: "aiobj", size: 24 });
3191
4737
  function streamObject({
3192
4738
  model,
3193
4739
  schema: inputSchema,
@@ -3427,7 +4973,7 @@ var DefaultStreamObjectResult = class {
3427
4973
  }
3428
4974
  }
3429
4975
  const {
3430
- result: { stream, warnings, rawResponse, request },
4976
+ result: { stream, warnings, response, request },
3431
4977
  doStreamSpan,
3432
4978
  startTimestampMs
3433
4979
  } = await retry(
@@ -3476,7 +5022,7 @@ var DefaultStreamObjectResult = class {
3476
5022
  let error;
3477
5023
  let accumulatedText = "";
3478
5024
  let textDelta = "";
3479
- let response = {
5025
+ let fullResponse = {
3480
5026
  id: generateId3(),
3481
5027
  timestamp: currentDate(),
3482
5028
  modelId: model.modelId
@@ -3502,8 +5048,8 @@ var DefaultStreamObjectResult = class {
3502
5048
  if (typeof chunk === "string") {
3503
5049
  accumulatedText += chunk;
3504
5050
  textDelta += chunk;
3505
- const { value: currentObjectJson, state: parseState } = (0, import_ui_utils3.parsePartialJson)(accumulatedText);
3506
- if (currentObjectJson !== void 0 && !(0, import_ui_utils3.isDeepEqualData)(latestObjectJson, currentObjectJson)) {
5051
+ const { value: currentObjectJson, state: parseState } = parsePartialJson(accumulatedText);
5052
+ if (currentObjectJson !== void 0 && !isDeepEqualData(latestObjectJson, currentObjectJson)) {
3507
5053
  const validationResult = outputStrategy.validatePartialResult({
3508
5054
  value: currentObjectJson,
3509
5055
  textDelta,
@@ -3511,7 +5057,7 @@ var DefaultStreamObjectResult = class {
3511
5057
  isFirstDelta,
3512
5058
  isFinalDelta: parseState === "successful-parse"
3513
5059
  });
3514
- if (validationResult.success && !(0, import_ui_utils3.isDeepEqualData)(
5060
+ if (validationResult.success && !isDeepEqualData(
3515
5061
  latestObject,
3516
5062
  validationResult.value.partial
3517
5063
  )) {
@@ -3533,10 +5079,10 @@ var DefaultStreamObjectResult = class {
3533
5079
  }
3534
5080
  switch (chunk.type) {
3535
5081
  case "response-metadata": {
3536
- response = {
3537
- id: (_a18 = chunk.id) != null ? _a18 : response.id,
3538
- timestamp: (_b2 = chunk.timestamp) != null ? _b2 : response.timestamp,
3539
- modelId: (_c = chunk.modelId) != null ? _c : response.modelId
5082
+ fullResponse = {
5083
+ id: (_a18 = chunk.id) != null ? _a18 : fullResponse.id,
5084
+ timestamp: (_b2 = chunk.timestamp) != null ? _b2 : fullResponse.timestamp,
5085
+ modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
3540
5086
  };
3541
5087
  break;
3542
5088
  }
@@ -3545,20 +5091,24 @@ var DefaultStreamObjectResult = class {
3545
5091
  controller.enqueue({ type: "text-delta", textDelta });
3546
5092
  }
3547
5093
  finishReason = chunk.finishReason;
3548
- usage = calculateLanguageModelUsage(chunk.usage);
5094
+ usage = calculateLanguageModelUsage2(chunk.usage);
3549
5095
  providerMetadata = chunk.providerMetadata;
3550
- controller.enqueue({ ...chunk, usage, response });
5096
+ controller.enqueue({
5097
+ ...chunk,
5098
+ usage,
5099
+ response: fullResponse
5100
+ });
3551
5101
  self.usagePromise.resolve(usage);
3552
5102
  self.providerMetadataPromise.resolve(providerMetadata);
3553
5103
  self.responsePromise.resolve({
3554
- ...response,
3555
- headers: rawResponse == null ? void 0 : rawResponse.headers
5104
+ ...fullResponse,
5105
+ headers: response == null ? void 0 : response.headers
3556
5106
  });
3557
5107
  const validationResult = outputStrategy.validateFinalResult(
3558
5108
  latestObjectJson,
3559
5109
  {
3560
5110
  text: accumulatedText,
3561
- response,
5111
+ response: fullResponse,
3562
5112
  usage
3563
5113
  }
3564
5114
  );
@@ -3570,7 +5120,7 @@ var DefaultStreamObjectResult = class {
3570
5120
  message: "No object generated: response did not match schema.",
3571
5121
  cause: validationResult.error,
3572
5122
  text: accumulatedText,
3573
- response,
5123
+ response: fullResponse,
3574
5124
  usage,
3575
5125
  finishReason
3576
5126
  });
@@ -3600,15 +5150,15 @@ var DefaultStreamObjectResult = class {
3600
5150
  "ai.response.object": {
3601
5151
  output: () => JSON.stringify(object2)
3602
5152
  },
3603
- "ai.response.id": response.id,
3604
- "ai.response.model": response.modelId,
3605
- "ai.response.timestamp": response.timestamp.toISOString(),
5153
+ "ai.response.id": fullResponse.id,
5154
+ "ai.response.model": fullResponse.modelId,
5155
+ "ai.response.timestamp": fullResponse.timestamp.toISOString(),
3606
5156
  "ai.usage.promptTokens": finalUsage.promptTokens,
3607
5157
  "ai.usage.completionTokens": finalUsage.completionTokens,
3608
5158
  // standardized gen-ai llm span attributes:
3609
5159
  "gen_ai.response.finish_reasons": [finishReason],
3610
- "gen_ai.response.id": response.id,
3611
- "gen_ai.response.model": response.modelId,
5160
+ "gen_ai.response.id": fullResponse.id,
5161
+ "gen_ai.response.model": fullResponse.modelId,
3612
5162
  "gen_ai.usage.input_tokens": finalUsage.promptTokens,
3613
5163
  "gen_ai.usage.output_tokens": finalUsage.completionTokens
3614
5164
  }
@@ -3632,8 +5182,8 @@ var DefaultStreamObjectResult = class {
3632
5182
  object: object2,
3633
5183
  error,
3634
5184
  response: {
3635
- ...response,
3636
- headers: rawResponse == null ? void 0 : rawResponse.headers
5185
+ ...fullResponse,
5186
+ headers: response == null ? void 0 : response.headers
3637
5187
  },
3638
5188
  warnings,
3639
5189
  providerMetadata,
@@ -3759,7 +5309,7 @@ var DefaultStreamObjectResult = class {
3759
5309
  };
3760
5310
 
3761
5311
  // core/generate-text/generate-text.ts
3762
- var import_provider_utils9 = require("@ai-sdk/provider-utils");
5312
+ var import_provider_utils15 = require("@ai-sdk/provider-utils");
3763
5313
 
3764
5314
  // errors/no-output-specified-error.ts
3765
5315
  var import_provider13 = require("@ai-sdk/provider");
@@ -3805,9 +5355,6 @@ var ToolExecutionError = class extends import_provider14.AISDKError {
3805
5355
  };
3806
5356
  _a10 = symbol10;
3807
5357
 
3808
- // core/prompt/prepare-tools-and-tool-choice.ts
3809
- var import_ui_utils4 = require("@ai-sdk/ui-utils");
3810
-
3811
5358
  // core/util/is-non-empty-object.ts
3812
5359
  function isNonEmptyObject(object2) {
3813
5360
  return object2 != null && Object.keys(object2).length > 0;
@@ -3838,7 +5385,7 @@ function prepareToolsAndToolChoice({
3838
5385
  type: "function",
3839
5386
  name: name17,
3840
5387
  description: tool2.description,
3841
- parameters: (0, import_ui_utils4.asSchema)(tool2.parameters).jsonSchema
5388
+ parameters: asSchema(tool2.parameters).jsonSchema
3842
5389
  };
3843
5390
  case "provider-defined":
3844
5391
  return {
@@ -3871,8 +5418,7 @@ function removeTextAfterLastWhitespace(text2) {
3871
5418
  }
3872
5419
 
3873
5420
  // core/generate-text/parse-tool-call.ts
3874
- var import_provider_utils8 = require("@ai-sdk/provider-utils");
3875
- var import_ui_utils5 = require("@ai-sdk/ui-utils");
5421
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
3876
5422
 
3877
5423
  // errors/invalid-tool-arguments-error.ts
3878
5424
  var import_provider15 = require("@ai-sdk/provider");
@@ -3967,7 +5513,7 @@ async function parseToolCall({
3967
5513
  repairedToolCall = await repairToolCall({
3968
5514
  toolCall,
3969
5515
  tools,
3970
- parameterSchema: ({ toolName }) => (0, import_ui_utils5.asSchema)(tools[toolName].parameters).jsonSchema,
5516
+ parameterSchema: ({ toolName }) => asSchema(tools[toolName].parameters).jsonSchema,
3971
5517
  system,
3972
5518
  messages,
3973
5519
  error
@@ -3996,8 +5542,8 @@ async function doParseToolCall({
3996
5542
  availableTools: Object.keys(tools)
3997
5543
  });
3998
5544
  }
3999
- const schema = (0, import_ui_utils5.asSchema)(tool2.parameters);
4000
- const parseResult = toolCall.args.trim() === "" ? (0, import_provider_utils8.safeValidateTypes)({ value: {}, schema }) : (0, import_provider_utils8.safeParseJSON)({ text: toolCall.args, schema });
5545
+ const schema = asSchema(tool2.parameters);
5546
+ const parseResult = toolCall.args.trim() === "" ? (0, import_provider_utils14.safeValidateTypes)({ value: {}, schema }) : (0, import_provider_utils14.safeParseJSON)({ text: toolCall.args, schema });
4001
5547
  if (parseResult.success === false) {
4002
5548
  throw new InvalidToolArgumentsError({
4003
5549
  toolName,
@@ -4041,7 +5587,7 @@ function toResponseMessages({
4041
5587
  ...files.map((file) => ({
4042
5588
  type: "file",
4043
5589
  data: file.base64,
4044
- mimeType: file.mimeType
5590
+ mediaType: file.mediaType
4045
5591
  })),
4046
5592
  { type: "text", text: text2 },
4047
5593
  ...toolCalls
@@ -4075,11 +5621,11 @@ function toResponseMessages({
4075
5621
  }
4076
5622
 
4077
5623
  // core/generate-text/generate-text.ts
4078
- var originalGenerateId3 = (0, import_provider_utils9.createIdGenerator)({
5624
+ var originalGenerateId3 = (0, import_provider_utils15.createIdGenerator)({
4079
5625
  prefix: "aitxt",
4080
5626
  size: 24
4081
5627
  });
4082
- var originalGenerateMessageId = (0, import_provider_utils9.createIdGenerator)({
5628
+ var originalGenerateMessageId = (0, import_provider_utils15.createIdGenerator)({
4083
5629
  prefix: "msg",
4084
5630
  size: 24
4085
5631
  });
@@ -4152,7 +5698,7 @@ async function generateText({
4152
5698
  }),
4153
5699
  tracer,
4154
5700
  fn: async (span) => {
4155
- var _a18, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
5701
+ var _a18, _b, _c, _d, _e, _f, _g;
4156
5702
  const toolsAndToolChoice = {
4157
5703
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
4158
5704
  };
@@ -4227,7 +5773,7 @@ async function generateText({
4227
5773
  }),
4228
5774
  tracer,
4229
5775
  fn: async (span2) => {
4230
- var _a19, _b2, _c2, _d2, _e2, _f2;
5776
+ var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
4231
5777
  const result = await model.doGenerate({
4232
5778
  ...callSettings,
4233
5779
  ...toolsAndToolChoice,
@@ -4241,7 +5787,9 @@ async function generateText({
4241
5787
  const responseData = {
4242
5788
  id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
4243
5789
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
4244
- modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
5790
+ modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId,
5791
+ headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
5792
+ body: (_h = result.response) == null ? void 0 : _h.body
4245
5793
  };
4246
5794
  span2.setAttributes(
4247
5795
  selectTelemetryAttributes({
@@ -4291,7 +5839,7 @@ async function generateText({
4291
5839
  messages: stepInputMessages,
4292
5840
  abortSignal
4293
5841
  });
4294
- const currentUsage = calculateLanguageModelUsage(
5842
+ const currentUsage = calculateLanguageModelUsage2(
4295
5843
  currentModelResponse.usage
4296
5844
  );
4297
5845
  usage = addLanguageModelUsage(usage, currentUsage);
@@ -4358,8 +5906,6 @@ async function generateText({
4358
5906
  request: (_f = currentModelResponse.request) != null ? _f : {},
4359
5907
  response: {
4360
5908
  ...currentModelResponse.response,
4361
- headers: (_g = currentModelResponse.rawResponse) == null ? void 0 : _g.headers,
4362
- body: (_h = currentModelResponse.rawResponse) == null ? void 0 : _h.body,
4363
5909
  // deep clone msgs to avoid mutating past messages in multi-step:
4364
5910
  messages: structuredClone(responseMessages)
4365
5911
  },
@@ -4411,11 +5957,9 @@ async function generateText({
4411
5957
  finishReason: currentModelResponse.finishReason,
4412
5958
  usage,
4413
5959
  warnings: currentModelResponse.warnings,
4414
- request: (_i = currentModelResponse.request) != null ? _i : {},
5960
+ request: (_g = currentModelResponse.request) != null ? _g : {},
4415
5961
  response: {
4416
5962
  ...currentModelResponse.response,
4417
- headers: (_j = currentModelResponse.rawResponse) == null ? void 0 : _j.headers,
4418
- body: (_k = currentModelResponse.rawResponse) == null ? void 0 : _k.body,
4419
5963
  messages: responseMessages
4420
5964
  },
4421
5965
  logprobs: currentModelResponse.logprobs,
@@ -4544,8 +6088,7 @@ __export(output_exports, {
4544
6088
  object: () => object,
4545
6089
  text: () => text
4546
6090
  });
4547
- var import_provider_utils10 = require("@ai-sdk/provider-utils");
4548
- var import_ui_utils6 = require("@ai-sdk/ui-utils");
6091
+ var import_provider_utils16 = require("@ai-sdk/provider-utils");
4549
6092
 
4550
6093
  // errors/index.ts
4551
6094
  var import_provider20 = require("@ai-sdk/provider");
@@ -4609,7 +6152,7 @@ var text = () => ({
4609
6152
  var object = ({
4610
6153
  schema: inputSchema
4611
6154
  }) => {
4612
- const schema = (0, import_ui_utils6.asSchema)(inputSchema);
6155
+ const schema = asSchema(inputSchema);
4613
6156
  return {
4614
6157
  type: "object",
4615
6158
  responseFormat: ({ model }) => ({
@@ -4623,7 +6166,7 @@ var object = ({
4623
6166
  });
4624
6167
  },
4625
6168
  parsePartial({ text: text2 }) {
4626
- const result = (0, import_ui_utils6.parsePartialJson)(text2);
6169
+ const result = parsePartialJson(text2);
4627
6170
  switch (result.state) {
4628
6171
  case "failed-parse":
4629
6172
  case "undefined-input":
@@ -4641,7 +6184,7 @@ var object = ({
4641
6184
  }
4642
6185
  },
4643
6186
  parseOutput({ text: text2 }, context) {
4644
- const parseResult = (0, import_provider_utils10.safeParseJSON)({ text: text2 });
6187
+ const parseResult = (0, import_provider_utils16.safeParseJSON)({ text: text2 });
4645
6188
  if (!parseResult.success) {
4646
6189
  throw new NoObjectGeneratedError({
4647
6190
  message: "No object generated: could not parse the response.",
@@ -4652,7 +6195,7 @@ var object = ({
4652
6195
  finishReason: context.finishReason
4653
6196
  });
4654
6197
  }
4655
- const validationResult = (0, import_provider_utils10.safeValidateTypes)({
6198
+ const validationResult = (0, import_provider_utils16.safeValidateTypes)({
4656
6199
  value: parseResult.value,
4657
6200
  schema
4658
6201
  });
@@ -4672,7 +6215,7 @@ var object = ({
4672
6215
  };
4673
6216
 
4674
6217
  // core/generate-text/smooth-stream.ts
4675
- var import_provider_utils11 = require("@ai-sdk/provider-utils");
6218
+ var import_provider_utils17 = require("@ai-sdk/provider-utils");
4676
6219
  var import_provider21 = require("@ai-sdk/provider");
4677
6220
  var CHUNKING_REGEXPS = {
4678
6221
  word: /\S+\s+/m,
@@ -4681,7 +6224,7 @@ var CHUNKING_REGEXPS = {
4681
6224
  function smoothStream({
4682
6225
  delayInMs = 10,
4683
6226
  chunking = "word",
4684
- _internal: { delay: delay2 = import_provider_utils11.delay } = {}
6227
+ _internal: { delay: delay2 = import_provider_utils17.delay } = {}
4685
6228
  } = {}) {
4686
6229
  let detectChunk;
4687
6230
  if (typeof chunking === "function") {
@@ -4742,8 +6285,7 @@ function smoothStream({
4742
6285
 
4743
6286
  // core/generate-text/stream-text.ts
4744
6287
  var import_provider22 = require("@ai-sdk/provider");
4745
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
4746
- var import_ui_utils8 = require("@ai-sdk/ui-utils");
6288
+ var import_provider_utils18 = require("@ai-sdk/provider-utils");
4747
6289
 
4748
6290
  // util/as-array.ts
4749
6291
  function asArray(value) {
@@ -4858,7 +6400,6 @@ function mergeStreams(stream1, stream2) {
4858
6400
  }
4859
6401
 
4860
6402
  // core/generate-text/run-tools-transformation.ts
4861
- var import_ui_utils7 = require("@ai-sdk/ui-utils");
4862
6403
  function runToolsTransformation({
4863
6404
  tools,
4864
6405
  generatorStream,
@@ -4906,7 +6447,7 @@ function runToolsTransformation({
4906
6447
  controller.enqueue(
4907
6448
  new DefaultGeneratedFileWithType({
4908
6449
  data: chunk.data,
4909
- mimeType: chunk.mimeType
6450
+ mediaType: chunk.mediaType
4910
6451
  })
4911
6452
  );
4912
6453
  break;
@@ -4942,7 +6483,7 @@ function runToolsTransformation({
4942
6483
  controller.enqueue(toolCall);
4943
6484
  const tool2 = tools[toolCall.toolName];
4944
6485
  if (tool2.execute != null) {
4945
- const toolExecutionId = (0, import_ui_utils7.generateId)();
6486
+ const toolExecutionId = (0, import_provider_utils5.generateId)();
4946
6487
  outstandingToolResults.add(toolExecutionId);
4947
6488
  recordSpan({
4948
6489
  name: "ai.toolCall",
@@ -5017,7 +6558,7 @@ function runToolsTransformation({
5017
6558
  type: "finish",
5018
6559
  finishReason: chunk.finishReason,
5019
6560
  logprobs: chunk.logprobs,
5020
- usage: calculateLanguageModelUsage(chunk.usage),
6561
+ usage: calculateLanguageModelUsage2(chunk.usage),
5021
6562
  experimental_providerMetadata: chunk.providerMetadata
5022
6563
  };
5023
6564
  break;
@@ -5061,11 +6602,11 @@ function runToolsTransformation({
5061
6602
  }
5062
6603
 
5063
6604
  // core/generate-text/stream-text.ts
5064
- var originalGenerateId4 = (0, import_provider_utils12.createIdGenerator)({
6605
+ var originalGenerateId4 = (0, import_provider_utils18.createIdGenerator)({
5065
6606
  prefix: "aitxt",
5066
6607
  size: 24
5067
6608
  });
5068
- var originalGenerateMessageId2 = (0, import_provider_utils12.createIdGenerator)({
6609
+ var originalGenerateMessageId2 = (0, import_provider_utils18.createIdGenerator)({
5069
6610
  prefix: "msg",
5070
6611
  size: 24
5071
6612
  });
@@ -5528,7 +7069,7 @@ var DefaultStreamTextResult = class {
5528
7069
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
5529
7070
  };
5530
7071
  const {
5531
- result: { stream: stream2, warnings, rawResponse, request },
7072
+ result: { stream: stream2, warnings, response, request },
5532
7073
  doStreamSpan,
5533
7074
  startTimestampMs
5534
7075
  } = await retry(
@@ -5839,7 +7380,7 @@ var DefaultStreamTextResult = class {
5839
7380
  request: stepRequest,
5840
7381
  response: {
5841
7382
  ...stepResponse,
5842
- headers: rawResponse == null ? void 0 : rawResponse.headers
7383
+ headers: response == null ? void 0 : response.headers
5843
7384
  },
5844
7385
  warnings,
5845
7386
  isContinued: nextStepType === "continue",
@@ -5856,7 +7397,7 @@ var DefaultStreamTextResult = class {
5856
7397
  logprobs: stepLogProbs,
5857
7398
  response: {
5858
7399
  ...stepResponse,
5859
- headers: rawResponse == null ? void 0 : rawResponse.headers
7400
+ headers: response == null ? void 0 : response.headers
5860
7401
  }
5861
7402
  });
5862
7403
  self.closeStream();
@@ -6052,13 +7593,13 @@ var DefaultStreamTextResult = class {
6052
7593
  const chunkType = chunk.type;
6053
7594
  switch (chunkType) {
6054
7595
  case "text-delta": {
6055
- controller.enqueue((0, import_ui_utils8.formatDataStreamPart)("text", chunk.textDelta));
7596
+ controller.enqueue(formatDataStreamPart("text", chunk.textDelta));
6056
7597
  break;
6057
7598
  }
6058
7599
  case "reasoning": {
6059
7600
  if (sendReasoning) {
6060
7601
  controller.enqueue(
6061
- (0, import_ui_utils8.formatDataStreamPart)("reasoning", chunk.textDelta)
7602
+ formatDataStreamPart("reasoning", chunk.textDelta)
6062
7603
  );
6063
7604
  }
6064
7605
  break;
@@ -6066,7 +7607,7 @@ var DefaultStreamTextResult = class {
6066
7607
  case "redacted-reasoning": {
6067
7608
  if (sendReasoning) {
6068
7609
  controller.enqueue(
6069
- (0, import_ui_utils8.formatDataStreamPart)("redacted_reasoning", {
7610
+ formatDataStreamPart("redacted_reasoning", {
6070
7611
  data: chunk.data
6071
7612
  })
6072
7613
  );
@@ -6076,7 +7617,7 @@ var DefaultStreamTextResult = class {
6076
7617
  case "reasoning-signature": {
6077
7618
  if (sendReasoning) {
6078
7619
  controller.enqueue(
6079
- (0, import_ui_utils8.formatDataStreamPart)("reasoning_signature", {
7620
+ formatDataStreamPart("reasoning_signature", {
6080
7621
  signature: chunk.signature
6081
7622
  })
6082
7623
  );
@@ -6085,8 +7626,8 @@ var DefaultStreamTextResult = class {
6085
7626
  }
6086
7627
  case "file": {
6087
7628
  controller.enqueue(
6088
- (0, import_ui_utils8.formatDataStreamPart)("file", {
6089
- mimeType: chunk.mimeType,
7629
+ formatDataStreamPart("file", {
7630
+ mimeType: chunk.mediaType,
6090
7631
  data: chunk.base64
6091
7632
  })
6092
7633
  );
@@ -6095,14 +7636,14 @@ var DefaultStreamTextResult = class {
6095
7636
  case "source": {
6096
7637
  if (sendSources) {
6097
7638
  controller.enqueue(
6098
- (0, import_ui_utils8.formatDataStreamPart)("source", chunk.source)
7639
+ formatDataStreamPart("source", chunk.source)
6099
7640
  );
6100
7641
  }
6101
7642
  break;
6102
7643
  }
6103
7644
  case "tool-call-streaming-start": {
6104
7645
  controller.enqueue(
6105
- (0, import_ui_utils8.formatDataStreamPart)("tool_call_streaming_start", {
7646
+ formatDataStreamPart("tool_call_streaming_start", {
6106
7647
  toolCallId: chunk.toolCallId,
6107
7648
  toolName: chunk.toolName
6108
7649
  })
@@ -6111,7 +7652,7 @@ var DefaultStreamTextResult = class {
6111
7652
  }
6112
7653
  case "tool-call-delta": {
6113
7654
  controller.enqueue(
6114
- (0, import_ui_utils8.formatDataStreamPart)("tool_call_delta", {
7655
+ formatDataStreamPart("tool_call_delta", {
6115
7656
  toolCallId: chunk.toolCallId,
6116
7657
  argsTextDelta: chunk.argsTextDelta
6117
7658
  })
@@ -6120,7 +7661,7 @@ var DefaultStreamTextResult = class {
6120
7661
  }
6121
7662
  case "tool-call": {
6122
7663
  controller.enqueue(
6123
- (0, import_ui_utils8.formatDataStreamPart)("tool_call", {
7664
+ formatDataStreamPart("tool_call", {
6124
7665
  toolCallId: chunk.toolCallId,
6125
7666
  toolName: chunk.toolName,
6126
7667
  args: chunk.args
@@ -6130,7 +7671,7 @@ var DefaultStreamTextResult = class {
6130
7671
  }
6131
7672
  case "tool-result": {
6132
7673
  controller.enqueue(
6133
- (0, import_ui_utils8.formatDataStreamPart)("tool_result", {
7674
+ formatDataStreamPart("tool_result", {
6134
7675
  toolCallId: chunk.toolCallId,
6135
7676
  result: chunk.result
6136
7677
  })
@@ -6139,13 +7680,13 @@ var DefaultStreamTextResult = class {
6139
7680
  }
6140
7681
  case "error": {
6141
7682
  controller.enqueue(
6142
- (0, import_ui_utils8.formatDataStreamPart)("error", getErrorMessage5(chunk.error))
7683
+ formatDataStreamPart("error", getErrorMessage5(chunk.error))
6143
7684
  );
6144
7685
  break;
6145
7686
  }
6146
7687
  case "step-start": {
6147
7688
  controller.enqueue(
6148
- (0, import_ui_utils8.formatDataStreamPart)("start_step", {
7689
+ formatDataStreamPart("start_step", {
6149
7690
  messageId: chunk.messageId
6150
7691
  })
6151
7692
  );
@@ -6153,7 +7694,7 @@ var DefaultStreamTextResult = class {
6153
7694
  }
6154
7695
  case "step-finish": {
6155
7696
  controller.enqueue(
6156
- (0, import_ui_utils8.formatDataStreamPart)("finish_step", {
7697
+ formatDataStreamPart("finish_step", {
6157
7698
  finishReason: chunk.finishReason,
6158
7699
  usage: sendUsage ? {
6159
7700
  promptTokens: chunk.usage.promptTokens,
@@ -6167,7 +7708,7 @@ var DefaultStreamTextResult = class {
6167
7708
  case "finish": {
6168
7709
  if (experimental_sendFinish) {
6169
7710
  controller.enqueue(
6170
- (0, import_ui_utils8.formatDataStreamPart)("finish_message", {
7711
+ formatDataStreamPart("finish_message", {
6171
7712
  finishReason: chunk.finishReason,
6172
7713
  usage: sendUsage ? {
6173
7714
  promptTokens: chunk.usage.promptTokens,
@@ -6290,6 +7831,70 @@ var DefaultStreamTextResult = class {
6290
7831
  }
6291
7832
  };
6292
7833
 
7834
+ // errors/no-transcript-generated-error.ts
7835
+ var import_provider23 = require("@ai-sdk/provider");
7836
+ var NoTranscriptGeneratedError = class extends import_provider23.AISDKError {
7837
+ constructor(options) {
7838
+ super({
7839
+ name: "AI_NoTranscriptGeneratedError",
7840
+ message: "No transcript generated."
7841
+ });
7842
+ this.responses = options.responses;
7843
+ }
7844
+ };
7845
+
7846
+ // core/transcribe/transcribe.ts
7847
+ async function transcribe({
7848
+ model,
7849
+ audio,
7850
+ providerOptions = {},
7851
+ maxRetries: maxRetriesArg,
7852
+ abortSignal,
7853
+ headers
7854
+ }) {
7855
+ const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
7856
+ const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
7857
+ const result = await retry(
7858
+ () => {
7859
+ var _a17;
7860
+ return model.doGenerate({
7861
+ audio: audioData,
7862
+ abortSignal,
7863
+ headers,
7864
+ providerOptions,
7865
+ mediaType: (_a17 = detectMediaType({
7866
+ data: audioData,
7867
+ signatures: audioMediaTypeSignatures
7868
+ })) != null ? _a17 : "audio/wav"
7869
+ });
7870
+ }
7871
+ );
7872
+ if (!result.text) {
7873
+ throw new NoTranscriptGeneratedError({ responses: [result.response] });
7874
+ }
7875
+ return new DefaultTranscriptionResult({
7876
+ text: result.text,
7877
+ segments: result.segments,
7878
+ language: result.language,
7879
+ durationInSeconds: result.durationInSeconds,
7880
+ warnings: result.warnings,
7881
+ responses: [result.response],
7882
+ providerMetadata: result.providerMetadata
7883
+ });
7884
+ }
7885
+ var DefaultTranscriptionResult = class {
7886
+ constructor(options) {
7887
+ var _a17;
7888
+ this.text = options.text;
7889
+ this.segments = options.segments;
7890
+ this.language = options.language;
7891
+ this.durationInSeconds = options.durationInSeconds;
7892
+ this.warnings = options.warnings;
7893
+ this.responses = options.responses;
7894
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
7895
+ }
7896
+ };
7897
+
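A usage sketch for the new experimental_transcribe export (hedged: the provider import and the transcription model id below are assumptions for illustration, not taken from this diff):

import { experimental_transcribe as transcribe } from "ai";
import { openai } from "@ai-sdk/openai"; // assumption: a provider exposing a transcription model

const result = await transcribe({
  model: openai.transcription("whisper-1"),          // hypothetical model id
  audio: new URL("https://example.com/meeting.mp3"), // or a Uint8Array / base64 string
});

console.log(result.text, result.language, result.durationInSeconds);
// Throws AI_NoTranscriptGeneratedError when the model returns no transcript text.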
6293
7898
  // core/util/merge-objects.ts
6294
7899
  function mergeObjects(target, source) {
6295
7900
  if (target === void 0 && source === void 0) {
@@ -6532,7 +8137,7 @@ function simulateStreamingMiddleware() {
6532
8137
  return {
6533
8138
  stream: simulatedStream,
6534
8139
  rawCall: result.rawCall,
6535
- rawResponse: result.rawResponse,
8140
+ rawResponse: result.response,
6536
8141
  warnings: result.warnings
6537
8142
  };
6538
8143
  }
@@ -6604,14 +8209,13 @@ function appendClientMessage({
6604
8209
  }
6605
8210
 
6606
8211
  // core/prompt/append-response-messages.ts
6607
- var import_ui_utils9 = require("@ai-sdk/ui-utils");
6608
- var import_provider23 = require("@ai-sdk/provider");
8212
+ var import_provider24 = require("@ai-sdk/provider");
6609
8213
  function appendResponseMessages({
6610
8214
  messages,
6611
8215
  responseMessages,
6612
8216
  _internal: { currentDate = () => /* @__PURE__ */ new Date() } = {}
6613
8217
  }) {
6614
- var _a17, _b, _c, _d;
8218
+ var _a17, _b, _c, _d, _e;
6615
8219
  const clonedMessages = structuredClone(messages);
6616
8220
  for (const message of responseMessages) {
6617
8221
  const role = message.role;
@@ -6688,14 +8292,14 @@ function appendResponseMessages({
6688
8292
  break;
6689
8293
  case "file":
6690
8294
  if (part.data instanceof URL) {
6691
- throw new import_provider23.AISDKError({
8295
+ throw new import_provider24.AISDKError({
6692
8296
  name: "InvalidAssistantFileData",
6693
8297
  message: "File data cannot be a URL"
6694
8298
  });
6695
8299
  }
6696
8300
  parts.push({
6697
8301
  type: "file",
6698
- mimeType: part.mimeType,
8302
+ mediaType: (_a17 = part.mediaType) != null ? _a17 : part.mimeType,
6699
8303
  data: convertDataContentToBase64String(part.data)
6700
8304
  });
6701
8305
  break;
@@ -6703,15 +8307,15 @@ function appendResponseMessages({
6703
8307
  }
6704
8308
  }
6705
8309
  if (isLastMessageAssistant) {
6706
- const maxStep = (0, import_ui_utils9.extractMaxToolInvocationStep)(
8310
+ const maxStep = extractMaxToolInvocationStep(
6707
8311
  lastMessage.toolInvocations
6708
8312
  );
6709
- (_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];
8313
+ (_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
6710
8314
  lastMessage.content = textContent;
6711
8315
  lastMessage.reasoning = reasoningTextContent;
6712
8316
  lastMessage.parts.push(...parts);
6713
8317
  lastMessage.toolInvocations = [
6714
- ...(_b = lastMessage.toolInvocations) != null ? _b : [],
8318
+ ...(_c = lastMessage.toolInvocations) != null ? _c : [],
6715
8319
  ...getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1)
6716
8320
  ];
6717
8321
  getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({
@@ -6741,13 +8345,13 @@ function appendResponseMessages({
6741
8345
  break;
6742
8346
  }
6743
8347
  case "tool": {
6744
- (_c = lastMessage.toolInvocations) != null ? _c : lastMessage.toolInvocations = [];
8348
+ (_d = lastMessage.toolInvocations) != null ? _d : lastMessage.toolInvocations = [];
6745
8349
  if (lastMessage.role !== "assistant") {
6746
8350
  throw new Error(
6747
8351
  `Tool result must follow an assistant message: ${lastMessage.role}`
6748
8352
  );
6749
8353
  }
6750
- (_d = lastMessage.parts) != null ? _d : lastMessage.parts = [];
8354
+ (_e = lastMessage.parts) != null ? _e : lastMessage.parts = [];
6751
8355
  for (const contentPart of message.content) {
6752
8356
  const toolCall = lastMessage.toolInvocations.find(
6753
8357
  (call) => call.toolCallId === contentPart.toolCallId
@@ -6782,7 +8386,7 @@ function appendResponseMessages({
6782
8386
  }
6783
8387
 
6784
8388
  // core/registry/custom-provider.ts
6785
- var import_provider24 = require("@ai-sdk/provider");
8389
+ var import_provider25 = require("@ai-sdk/provider");
6786
8390
  function customProvider({
6787
8391
  languageModels,
6788
8392
  textEmbeddingModels,
@@ -6797,7 +8401,7 @@ function customProvider({
6797
8401
  if (fallbackProvider) {
6798
8402
  return fallbackProvider.languageModel(modelId);
6799
8403
  }
6800
- throw new import_provider24.NoSuchModelError({ modelId, modelType: "languageModel" });
8404
+ throw new import_provider25.NoSuchModelError({ modelId, modelType: "languageModel" });
6801
8405
  },
6802
8406
  textEmbeddingModel(modelId) {
6803
8407
  if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
@@ -6806,7 +8410,7 @@ function customProvider({
6806
8410
  if (fallbackProvider) {
6807
8411
  return fallbackProvider.textEmbeddingModel(modelId);
6808
8412
  }
6809
- throw new import_provider24.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
8413
+ throw new import_provider25.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
6810
8414
  },
6811
8415
  imageModel(modelId) {
6812
8416
  if (imageModels != null && modelId in imageModels) {
@@ -6815,19 +8419,19 @@ function customProvider({
6815
8419
  if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {
6816
8420
  return fallbackProvider.imageModel(modelId);
6817
8421
  }
6818
- throw new import_provider24.NoSuchModelError({ modelId, modelType: "imageModel" });
8422
+ throw new import_provider25.NoSuchModelError({ modelId, modelType: "imageModel" });
6819
8423
  }
6820
8424
  };
6821
8425
  }
6822
8426
  var experimental_customProvider = customProvider;
6823
8427
 
6824
8428
  // core/registry/no-such-provider-error.ts
6825
- var import_provider25 = require("@ai-sdk/provider");
8429
+ var import_provider26 = require("@ai-sdk/provider");
6826
8430
  var name16 = "AI_NoSuchProviderError";
6827
8431
  var marker16 = `vercel.ai.error.${name16}`;
6828
8432
  var symbol16 = Symbol.for(marker16);
6829
8433
  var _a16;
6830
- var NoSuchProviderError = class extends import_provider25.NoSuchModelError {
8434
+ var NoSuchProviderError = class extends import_provider26.NoSuchModelError {
6831
8435
  constructor({
6832
8436
  modelId,
6833
8437
  modelType,
@@ -6841,13 +8445,13 @@ var NoSuchProviderError = class extends import_provider25.NoSuchModelError {
6841
8445
  this.availableProviders = availableProviders;
6842
8446
  }
6843
8447
  static isInstance(error) {
6844
- return import_provider25.AISDKError.hasMarker(error, marker16);
8448
+ return import_provider26.AISDKError.hasMarker(error, marker16);
6845
8449
  }
6846
8450
  };
6847
8451
  _a16 = symbol16;
6848
8452
 
6849
8453
  // core/registry/provider-registry.ts
6850
- var import_provider26 = require("@ai-sdk/provider");
8454
+ var import_provider27 = require("@ai-sdk/provider");
6851
8455
  function createProviderRegistry(providers, {
6852
8456
  separator = ":"
6853
8457
  } = {}) {
@@ -6886,7 +8490,7 @@ var DefaultProviderRegistry = class {
6886
8490
  splitId(id, modelType) {
6887
8491
  const index = id.indexOf(this.separator);
6888
8492
  if (index === -1) {
6889
- throw new import_provider26.NoSuchModelError({
8493
+ throw new import_provider27.NoSuchModelError({
6890
8494
  modelId: id,
6891
8495
  modelType,
6892
8496
  message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId${this.separator}modelId")`
@@ -6899,7 +8503,7 @@ var DefaultProviderRegistry = class {
6899
8503
  const [providerId, modelId] = this.splitId(id, "languageModel");
6900
8504
  const model = (_b = (_a17 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a17, modelId);
6901
8505
  if (model == null) {
6902
- throw new import_provider26.NoSuchModelError({ modelId: id, modelType: "languageModel" });
8506
+ throw new import_provider27.NoSuchModelError({ modelId: id, modelType: "languageModel" });
6903
8507
  }
6904
8508
  return model;
6905
8509
  }
@@ -6909,7 +8513,7 @@ var DefaultProviderRegistry = class {
6909
8513
  const provider = this.getProvider(providerId);
6910
8514
  const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
6911
8515
  if (model == null) {
6912
- throw new import_provider26.NoSuchModelError({
8516
+ throw new import_provider27.NoSuchModelError({
6913
8517
  modelId: id,
6914
8518
  modelType: "textEmbeddingModel"
6915
8519
  });
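The registry lookups above expect ids of the form providerId:modelId, with a configurable separator. A short sketch using the API shown in this hunk; the provider instances and model ids are placeholders:

  const { createProviderRegistry } = require("ai");
  const { openai } = require("@ai-sdk/openai");
  const { anthropic } = require("@ai-sdk/anthropic");

  const registry = createProviderRegistry({ openai, anthropic }, { separator: ":" });

  // resolved via splitId(); an id without the separator throws NoSuchModelError
  const chat = registry.languageModel("anthropic:claude-3-5-sonnet-latest");
  const embedder = registry.textEmbeddingModel("openai:text-embedding-3-small");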
@@ -6922,22 +8526,19 @@ var DefaultProviderRegistry = class {
6922
8526
  const provider = this.getProvider(providerId);
6923
8527
  const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
6924
8528
  if (model == null) {
6925
- throw new import_provider26.NoSuchModelError({ modelId: id, modelType: "imageModel" });
8529
+ throw new import_provider27.NoSuchModelError({ modelId: id, modelType: "imageModel" });
6926
8530
  }
6927
8531
  return model;
6928
8532
  }
6929
8533
  };
6930
8534
 
6931
- // core/tool/mcp/mcp-client.ts
6932
- var import_ui_utils10 = require("@ai-sdk/ui-utils");
6933
-
6934
8535
  // core/tool/tool.ts
6935
8536
  function tool(tool2) {
6936
8537
  return tool2;
6937
8538
  }
6938
8539
 
6939
8540
  // core/tool/mcp/mcp-sse-transport.ts
6940
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
8541
+ var import_provider_utils19 = require("@ai-sdk/provider-utils");
6941
8542
 
6942
8543
  // core/tool/mcp/json-rpc-message.ts
6943
8544
  var import_zod9 = require("zod");
@@ -7108,7 +8709,7 @@ var SseMCPTransport = class {
7108
8709
  (_b = this.onerror) == null ? void 0 : _b.call(this, error);
7109
8710
  return reject(error);
7110
8711
  }
7111
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils13.createEventSourceParserStream)());
8712
+ const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils19.createEventSourceParserStream)());
7112
8713
  const reader = stream.getReader();
7113
8714
  const processEvents = async () => {
7114
8715
  var _a18, _b2, _c2;
@@ -7422,7 +9023,7 @@ var MCPClient = class {
7422
9023
  if (schemas !== "automatic" && !(name17 in schemas)) {
7423
9024
  continue;
7424
9025
  }
7425
- const parameters = schemas === "automatic" ? (0, import_ui_utils10.jsonSchema)(inputSchema) : schemas[name17].parameters;
9026
+ const parameters = schemas === "automatic" ? jsonSchema(inputSchema) : schemas[name17].parameters;
7426
9027
  const self = this;
7427
9028
  const toolWithExecute = tool({
7428
9029
  description,
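The schema handling above wraps the server-reported inputSchema with the bundled jsonSchema() when schemas is "automatic", and otherwise exposes only the tools listed in the schemas map, using their explicit parameters. A hedged sketch of how that map might look on the consumer side; the client factory signature, transport URL, and tool name are assumptions, not part of this diff:

  const { experimental_createMCPClient } = require("ai");
  const { z } = require("zod");

  async function loadMcpTools() {
    const client = await experimental_createMCPClient({
      transport: { type: "sse", url: "https://example.com/mcp" }, // placeholder endpoint
    });
    // only "searchDocs" is exposed, with zod parameters used instead of the
    // JSON schema reported by the MCP server
    return client.tools({
      schemas: {
        searchDocs: { parameters: z.object({ query: z.string() }) },
      },
    });
  }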
@@ -7515,7 +9116,7 @@ function cosineSimilarity(vector1, vector2, options) {
7515
9116
  }
7516
9117
 
7517
9118
  // core/util/simulate-readable-stream.ts
7518
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
9119
+ var import_provider_utils20 = require("@ai-sdk/provider-utils");
7519
9120
  function simulateReadableStream({
7520
9121
  chunks,
7521
9122
  initialDelayInMs = 0,
@@ -7523,7 +9124,7 @@ function simulateReadableStream({
7523
9124
  _internal
7524
9125
  }) {
7525
9126
  var _a17;
7526
- const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils14.delay;
9127
+ const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils20.delay;
7527
9128
  let index = 0;
7528
9129
  return new ReadableStream({
7529
9130
  async pull(controller) {
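Only the import of the delay helper changes above; the observable behaviour of simulateReadableStream stays the same. A small sketch limited to the parameters visible in this hunk (chunk values are illustrative):

  const stream = simulateReadableStream({
    chunks: ["Hello, ", "world", "!"],
    initialDelayInMs: 100,
  });

  // reader.read() yields the chunks in order, after the configured delays
  const reader = stream.getReader();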
@@ -7544,7 +9145,6 @@ __export(langchain_adapter_exports, {
7544
9145
  toDataStream: () => toDataStream,
7545
9146
  toDataStreamResponse: () => toDataStreamResponse
7546
9147
  });
7547
- var import_ui_utils12 = require("@ai-sdk/ui-utils");
7548
9148
 
7549
9149
  // streams/stream-callbacks.ts
7550
9150
  function createCallbacksTransformer(callbacks = {}) {
@@ -7600,7 +9200,7 @@ function toDataStreamInternal(stream, callbacks) {
7600
9200
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
7601
9201
  new TransformStream({
7602
9202
  transform: async (chunk, controller) => {
7603
- controller.enqueue((0, import_ui_utils12.formatDataStreamPart)("text", chunk));
9203
+ controller.enqueue(formatDataStreamPart("text", chunk));
7604
9204
  }
7605
9205
  })
7606
9206
  );
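Each text chunk from the LangChain stream is now wrapped with the locally bundled formatDataStreamPart instead of the ui-utils import. A hedged consumer sketch, assuming the adapter namespace is exposed as LangChainAdapter and that `chain` is a LangChain runnable (both outside this diff):

  const { LangChainAdapter } = require("ai");

  async function POST(request) {
    const { prompt } = await request.json();
    const stream = await chain.stream(prompt); // LangChain runnable, placeholder
    return LangChainAdapter.toDataStreamResponse(stream);
  }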
@@ -7651,11 +9251,10 @@ __export(llamaindex_adapter_exports, {
7651
9251
  toDataStream: () => toDataStream2,
7652
9252
  toDataStreamResponse: () => toDataStreamResponse2
7653
9253
  });
7654
- var import_provider_utils16 = require("@ai-sdk/provider-utils");
7655
- var import_ui_utils13 = require("@ai-sdk/ui-utils");
9254
+ var import_provider_utils22 = require("@ai-sdk/provider-utils");
7656
9255
  function toDataStreamInternal2(stream, callbacks) {
7657
9256
  const trimStart = trimStartOfStream();
7658
- return (0, import_provider_utils16.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
9257
+ return (0, import_provider_utils22.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
7659
9258
  new TransformStream({
7660
9259
  async transform(message, controller) {
7661
9260
  controller.enqueue(trimStart(message.delta));
@@ -7664,7 +9263,7 @@ function toDataStreamInternal2(stream, callbacks) {
7664
9263
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
7665
9264
  new TransformStream({
7666
9265
  transform: async (chunk, controller) => {
7667
- controller.enqueue((0, import_ui_utils13.formatDataStreamPart)("text", chunk));
9266
+ controller.enqueue(formatDataStreamPart("text", chunk));
7668
9267
  }
7669
9268
  })
7670
9269
  );
@@ -7705,9 +9304,6 @@ function trimStartOfStream() {
7705
9304
  };
7706
9305
  }
7707
9306
 
7708
- // streams/stream-data.ts
7709
- var import_ui_utils14 = require("@ai-sdk/ui-utils");
7710
-
7711
9307
  // util/constants.ts
7712
9308
  var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
7713
9309
 
@@ -7758,7 +9354,7 @@ var StreamData = class {
7758
9354
  throw new Error("Stream controller is not initialized.");
7759
9355
  }
7760
9356
  this.controller.enqueue(
7761
- this.encoder.encode((0, import_ui_utils14.formatDataStreamPart)("data", [value]))
9357
+ this.encoder.encode(formatDataStreamPart("data", [value]))
7762
9358
  );
7763
9359
  }
7764
9360
  appendMessageAnnotation(value) {
@@ -7769,7 +9365,7 @@ var StreamData = class {
7769
9365
  throw new Error("Stream controller is not initialized.");
7770
9366
  }
7771
9367
  this.controller.enqueue(
7772
- this.encoder.encode((0, import_ui_utils14.formatDataStreamPart)("message_annotations", [value]))
9368
+ this.encoder.encode(formatDataStreamPart("message_annotations", [value]))
7773
9369
  );
7774
9370
  }
7775
9371
  };
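StreamData.append and appendMessageAnnotation now emit their parts through the bundled formatDataStreamPart as well. A brief sketch; payload values are illustrative and close() is assumed from the rest of the class, which is not shown in this hunk:

  const data = new StreamData();
  data.append({ status: "searching" });              // emits a "data" stream part
  data.appendMessageAnnotation({ source: "docs" });  // emits a "message_annotations" part
  await data.close();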
@@ -7808,6 +9404,9 @@ var StreamData = class {
7808
9404
  UnsupportedFunctionalityError,
7809
9405
  appendClientMessage,
7810
9406
  appendResponseMessages,
9407
+ asSchema,
9408
+ callChatApi,
9409
+ callCompletionApi,
7811
9410
  convertToCoreMessages,
7812
9411
  coreAssistantMessageSchema,
7813
9412
  coreMessageSchema,
@@ -7827,23 +9426,34 @@ var StreamData = class {
7827
9426
  experimental_createProviderRegistry,
7828
9427
  experimental_customProvider,
7829
9428
  experimental_generateImage,
9429
+ experimental_transcribe,
7830
9430
  experimental_wrapLanguageModel,
9431
+ extractMaxToolInvocationStep,
7831
9432
  extractReasoningMiddleware,
9433
+ fillMessageParts,
7832
9434
  formatDataStreamPart,
7833
9435
  generateId,
7834
9436
  generateObject,
7835
9437
  generateText,
9438
+ getMessageParts,
9439
+ getTextFromDataUrl,
9440
+ isAssistantMessageWithCompletedToolCalls,
9441
+ isDeepEqualData,
7836
9442
  jsonSchema,
7837
9443
  parseDataStreamPart,
9444
+ parsePartialJson,
7838
9445
  pipeDataStreamToResponse,
9446
+ prepareAttachmentsForRequest,
7839
9447
  processDataStream,
7840
9448
  processTextStream,
9449
+ shouldResubmitMessages,
7841
9450
  simulateReadableStream,
7842
9451
  simulateStreamingMiddleware,
7843
9452
  smoothStream,
7844
9453
  streamObject,
7845
9454
  streamText,
7846
9455
  tool,
9456
+ updateToolCallResult,
7847
9457
  wrapLanguageModel,
7848
9458
  zodSchema
7849
9459
  });
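The additions to the export map mirror the import changes throughout this file: helpers that previously came from @ai-sdk/ui-utils (asSchema, callChatApi, callCompletionApi, formatDataStreamPart, parsePartialJson, and related message-part utilities) are now bundled and re-exported from the package itself. A hedged consumer-side sketch; the schema shape is illustrative:

  const { formatDataStreamPart, jsonSchema } = require("ai");

  const part = formatDataStreamPart("text", "Hello");
  const citySchema = jsonSchema({
    type: "object",
    properties: { city: { type: "string" } },
    required: ["city"],
  });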