ai 5.0.0-canary.3 → 5.0.0-canary.5

This diff represents the content of publicly available package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in that registry.
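Most of the additions in this diff inline utilities that were previously imported from @ai-sdk/ui-utils (data stream part parsing, chat and text stream processing, parsePartialJson, zodSchema/jsonSchema) directly into the ai core bundle, and rename mimeType to mediaType in file and image handling. As an orientation sketch only (not part of the published code), the inlined parsePartialJson behaves as follows:

// Illustrative sketch based on the added core/util code below; not part of the diff itself.
// parsePartialJson first tries a strict parse, then repairs truncated JSON via fixJson.
const { value, state } = parsePartialJson('{"city": "Ber');
// value -> { city: "Ber" }
// state -> "repaired-parse" (one of "successful-parse" | "repaired-parse" | "failed-parse" | "undefined-input")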
package/dist/index.mjs CHANGED
@@ -6,17 +6,1480 @@ var __export = (target, all) => {
 
 // core/index.ts
 import { createIdGenerator as createIdGenerator5, generateId as generateId2 } from "@ai-sdk/provider-utils";
- import {
- formatDataStreamPart as formatDataStreamPart3,
- jsonSchema as jsonSchema2,
- parseDataStreamPart,
- processDataStream,
- processTextStream,
- zodSchema
- } from "@ai-sdk/ui-utils";
9
+
10
+ // core/util/index.ts
11
+ import { generateId } from "@ai-sdk/provider-utils";
12
+
13
+ // core/util/process-chat-response.ts
14
+ import { generateId as generateIdFunction } from "@ai-sdk/provider-utils";
15
+
16
+ // core/types/duplicated/usage.ts
17
+ function calculateLanguageModelUsage({
18
+ promptTokens,
19
+ completionTokens
20
+ }) {
21
+ return {
22
+ promptTokens,
23
+ completionTokens,
24
+ totalTokens: promptTokens + completionTokens
25
+ };
26
+ }
27
+
28
+ // core/util/parse-partial-json.ts
29
+ import { safeParseJSON } from "@ai-sdk/provider-utils";
30
+
31
+ // core/util/fix-json.ts
32
+ function fixJson(input) {
33
+ const stack = ["ROOT"];
34
+ let lastValidIndex = -1;
35
+ let literalStart = null;
36
+ function processValueStart(char, i, swapState) {
37
+ {
38
+ switch (char) {
39
+ case '"': {
40
+ lastValidIndex = i;
41
+ stack.pop();
42
+ stack.push(swapState);
43
+ stack.push("INSIDE_STRING");
44
+ break;
45
+ }
46
+ case "f":
47
+ case "t":
48
+ case "n": {
49
+ lastValidIndex = i;
50
+ literalStart = i;
51
+ stack.pop();
52
+ stack.push(swapState);
53
+ stack.push("INSIDE_LITERAL");
54
+ break;
55
+ }
56
+ case "-": {
57
+ stack.pop();
58
+ stack.push(swapState);
59
+ stack.push("INSIDE_NUMBER");
60
+ break;
61
+ }
62
+ case "0":
63
+ case "1":
64
+ case "2":
65
+ case "3":
66
+ case "4":
67
+ case "5":
68
+ case "6":
69
+ case "7":
70
+ case "8":
71
+ case "9": {
72
+ lastValidIndex = i;
73
+ stack.pop();
74
+ stack.push(swapState);
75
+ stack.push("INSIDE_NUMBER");
76
+ break;
77
+ }
78
+ case "{": {
79
+ lastValidIndex = i;
80
+ stack.pop();
81
+ stack.push(swapState);
82
+ stack.push("INSIDE_OBJECT_START");
83
+ break;
84
+ }
85
+ case "[": {
86
+ lastValidIndex = i;
87
+ stack.pop();
88
+ stack.push(swapState);
89
+ stack.push("INSIDE_ARRAY_START");
90
+ break;
91
+ }
92
+ }
93
+ }
94
+ }
95
+ function processAfterObjectValue(char, i) {
96
+ switch (char) {
97
+ case ",": {
98
+ stack.pop();
99
+ stack.push("INSIDE_OBJECT_AFTER_COMMA");
100
+ break;
101
+ }
102
+ case "}": {
103
+ lastValidIndex = i;
104
+ stack.pop();
105
+ break;
106
+ }
107
+ }
108
+ }
109
+ function processAfterArrayValue(char, i) {
110
+ switch (char) {
111
+ case ",": {
112
+ stack.pop();
113
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
114
+ break;
115
+ }
116
+ case "]": {
117
+ lastValidIndex = i;
118
+ stack.pop();
119
+ break;
120
+ }
121
+ }
122
+ }
123
+ for (let i = 0; i < input.length; i++) {
124
+ const char = input[i];
125
+ const currentState = stack[stack.length - 1];
126
+ switch (currentState) {
127
+ case "ROOT":
128
+ processValueStart(char, i, "FINISH");
129
+ break;
130
+ case "INSIDE_OBJECT_START": {
131
+ switch (char) {
132
+ case '"': {
133
+ stack.pop();
134
+ stack.push("INSIDE_OBJECT_KEY");
135
+ break;
136
+ }
137
+ case "}": {
138
+ lastValidIndex = i;
139
+ stack.pop();
140
+ break;
141
+ }
142
+ }
143
+ break;
144
+ }
145
+ case "INSIDE_OBJECT_AFTER_COMMA": {
146
+ switch (char) {
147
+ case '"': {
148
+ stack.pop();
149
+ stack.push("INSIDE_OBJECT_KEY");
150
+ break;
151
+ }
152
+ }
153
+ break;
154
+ }
155
+ case "INSIDE_OBJECT_KEY": {
156
+ switch (char) {
157
+ case '"': {
158
+ stack.pop();
159
+ stack.push("INSIDE_OBJECT_AFTER_KEY");
160
+ break;
161
+ }
162
+ }
163
+ break;
164
+ }
165
+ case "INSIDE_OBJECT_AFTER_KEY": {
166
+ switch (char) {
167
+ case ":": {
168
+ stack.pop();
169
+ stack.push("INSIDE_OBJECT_BEFORE_VALUE");
170
+ break;
171
+ }
172
+ }
173
+ break;
174
+ }
175
+ case "INSIDE_OBJECT_BEFORE_VALUE": {
176
+ processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
177
+ break;
178
+ }
179
+ case "INSIDE_OBJECT_AFTER_VALUE": {
180
+ processAfterObjectValue(char, i);
181
+ break;
182
+ }
183
+ case "INSIDE_STRING": {
184
+ switch (char) {
185
+ case '"': {
186
+ stack.pop();
187
+ lastValidIndex = i;
188
+ break;
189
+ }
190
+ case "\\": {
191
+ stack.push("INSIDE_STRING_ESCAPE");
192
+ break;
193
+ }
194
+ default: {
195
+ lastValidIndex = i;
196
+ }
197
+ }
198
+ break;
199
+ }
200
+ case "INSIDE_ARRAY_START": {
201
+ switch (char) {
202
+ case "]": {
203
+ lastValidIndex = i;
204
+ stack.pop();
205
+ break;
206
+ }
207
+ default: {
208
+ lastValidIndex = i;
209
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
210
+ break;
211
+ }
212
+ }
213
+ break;
214
+ }
215
+ case "INSIDE_ARRAY_AFTER_VALUE": {
216
+ switch (char) {
217
+ case ",": {
218
+ stack.pop();
219
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
220
+ break;
221
+ }
222
+ case "]": {
223
+ lastValidIndex = i;
224
+ stack.pop();
225
+ break;
226
+ }
227
+ default: {
228
+ lastValidIndex = i;
229
+ break;
230
+ }
231
+ }
232
+ break;
233
+ }
234
+ case "INSIDE_ARRAY_AFTER_COMMA": {
235
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
236
+ break;
237
+ }
238
+ case "INSIDE_STRING_ESCAPE": {
239
+ stack.pop();
240
+ lastValidIndex = i;
241
+ break;
242
+ }
243
+ case "INSIDE_NUMBER": {
244
+ switch (char) {
245
+ case "0":
246
+ case "1":
247
+ case "2":
248
+ case "3":
249
+ case "4":
250
+ case "5":
251
+ case "6":
252
+ case "7":
253
+ case "8":
254
+ case "9": {
255
+ lastValidIndex = i;
256
+ break;
257
+ }
258
+ case "e":
259
+ case "E":
260
+ case "-":
261
+ case ".": {
262
+ break;
263
+ }
264
+ case ",": {
265
+ stack.pop();
266
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
267
+ processAfterArrayValue(char, i);
268
+ }
269
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
270
+ processAfterObjectValue(char, i);
271
+ }
272
+ break;
273
+ }
274
+ case "}": {
275
+ stack.pop();
276
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
277
+ processAfterObjectValue(char, i);
278
+ }
279
+ break;
280
+ }
281
+ case "]": {
282
+ stack.pop();
283
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
284
+ processAfterArrayValue(char, i);
285
+ }
286
+ break;
287
+ }
288
+ default: {
289
+ stack.pop();
290
+ break;
291
+ }
292
+ }
293
+ break;
294
+ }
295
+ case "INSIDE_LITERAL": {
296
+ const partialLiteral = input.substring(literalStart, i + 1);
297
+ if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
298
+ stack.pop();
299
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
300
+ processAfterObjectValue(char, i);
301
+ } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
302
+ processAfterArrayValue(char, i);
303
+ }
304
+ } else {
305
+ lastValidIndex = i;
306
+ }
307
+ break;
308
+ }
309
+ }
310
+ }
311
+ let result = input.slice(0, lastValidIndex + 1);
312
+ for (let i = stack.length - 1; i >= 0; i--) {
313
+ const state = stack[i];
314
+ switch (state) {
315
+ case "INSIDE_STRING": {
316
+ result += '"';
317
+ break;
318
+ }
319
+ case "INSIDE_OBJECT_KEY":
320
+ case "INSIDE_OBJECT_AFTER_KEY":
321
+ case "INSIDE_OBJECT_AFTER_COMMA":
322
+ case "INSIDE_OBJECT_START":
323
+ case "INSIDE_OBJECT_BEFORE_VALUE":
324
+ case "INSIDE_OBJECT_AFTER_VALUE": {
325
+ result += "}";
326
+ break;
327
+ }
328
+ case "INSIDE_ARRAY_START":
329
+ case "INSIDE_ARRAY_AFTER_COMMA":
330
+ case "INSIDE_ARRAY_AFTER_VALUE": {
331
+ result += "]";
332
+ break;
333
+ }
334
+ case "INSIDE_LITERAL": {
335
+ const partialLiteral = input.substring(literalStart, input.length);
336
+ if ("true".startsWith(partialLiteral)) {
337
+ result += "true".slice(partialLiteral.length);
338
+ } else if ("false".startsWith(partialLiteral)) {
339
+ result += "false".slice(partialLiteral.length);
340
+ } else if ("null".startsWith(partialLiteral)) {
341
+ result += "null".slice(partialLiteral.length);
342
+ }
343
+ }
344
+ }
345
+ }
346
+ return result;
347
+ }
348
+
349
+ // core/util/parse-partial-json.ts
350
+ function parsePartialJson(jsonText) {
351
+ if (jsonText === void 0) {
352
+ return { value: void 0, state: "undefined-input" };
353
+ }
354
+ let result = safeParseJSON({ text: jsonText });
355
+ if (result.success) {
356
+ return { value: result.value, state: "successful-parse" };
357
+ }
358
+ result = safeParseJSON({ text: fixJson(jsonText) });
359
+ if (result.success) {
360
+ return { value: result.value, state: "repaired-parse" };
361
+ }
362
+ return { value: void 0, state: "failed-parse" };
363
+ }
364
+
365
+ // core/util/data-stream-parts.ts
366
+ var textStreamPart = {
367
+ code: "0",
368
+ name: "text",
369
+ parse: (value) => {
370
+ if (typeof value !== "string") {
371
+ throw new Error('"text" parts expect a string value.');
372
+ }
373
+ return { type: "text", value };
374
+ }
375
+ };
376
+ var dataStreamPart = {
377
+ code: "2",
378
+ name: "data",
379
+ parse: (value) => {
380
+ if (!Array.isArray(value)) {
381
+ throw new Error('"data" parts expect an array value.');
382
+ }
383
+ return { type: "data", value };
384
+ }
385
+ };
386
+ var errorStreamPart = {
387
+ code: "3",
388
+ name: "error",
389
+ parse: (value) => {
390
+ if (typeof value !== "string") {
391
+ throw new Error('"error" parts expect a string value.');
392
+ }
393
+ return { type: "error", value };
394
+ }
395
+ };
396
+ var messageAnnotationsStreamPart = {
397
+ code: "8",
398
+ name: "message_annotations",
399
+ parse: (value) => {
400
+ if (!Array.isArray(value)) {
401
+ throw new Error('"message_annotations" parts expect an array value.');
402
+ }
403
+ return { type: "message_annotations", value };
404
+ }
405
+ };
406
+ var toolCallStreamPart = {
407
+ code: "9",
408
+ name: "tool_call",
409
+ parse: (value) => {
410
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object") {
411
+ throw new Error(
412
+ '"tool_call" parts expect an object with a "toolCallId", "toolName", and "args" property.'
413
+ );
414
+ }
415
+ return {
416
+ type: "tool_call",
417
+ value
418
+ };
419
+ }
420
+ };
421
+ var toolResultStreamPart = {
422
+ code: "a",
423
+ name: "tool_result",
424
+ parse: (value) => {
425
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("result" in value)) {
426
+ throw new Error(
427
+ '"tool_result" parts expect an object with a "toolCallId" and a "result" property.'
428
+ );
429
+ }
430
+ return {
431
+ type: "tool_result",
432
+ value
433
+ };
434
+ }
435
+ };
436
+ var toolCallStreamingStartStreamPart = {
437
+ code: "b",
438
+ name: "tool_call_streaming_start",
439
+ parse: (value) => {
440
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string") {
441
+ throw new Error(
442
+ '"tool_call_streaming_start" parts expect an object with a "toolCallId" and "toolName" property.'
443
+ );
444
+ }
445
+ return {
446
+ type: "tool_call_streaming_start",
447
+ value
448
+ };
449
+ }
450
+ };
451
+ var toolCallDeltaStreamPart = {
452
+ code: "c",
453
+ name: "tool_call_delta",
454
+ parse: (value) => {
455
+ if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("argsTextDelta" in value) || typeof value.argsTextDelta !== "string") {
456
+ throw new Error(
457
+ '"tool_call_delta" parts expect an object with a "toolCallId" and "argsTextDelta" property.'
458
+ );
459
+ }
460
+ return {
461
+ type: "tool_call_delta",
462
+ value
463
+ };
464
+ }
465
+ };
466
+ var finishMessageStreamPart = {
467
+ code: "d",
468
+ name: "finish_message",
469
+ parse: (value) => {
470
+ if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
471
+ throw new Error(
472
+ '"finish_message" parts expect an object with a "finishReason" property.'
473
+ );
474
+ }
475
+ const result = {
476
+ finishReason: value.finishReason
477
+ };
478
+ if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
479
+ result.usage = {
480
+ promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
481
+ completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
482
+ };
483
+ }
484
+ return {
485
+ type: "finish_message",
486
+ value: result
487
+ };
488
+ }
489
+ };
490
+ var finishStepStreamPart = {
491
+ code: "e",
492
+ name: "finish_step",
493
+ parse: (value) => {
494
+ if (value == null || typeof value !== "object" || !("finishReason" in value) || typeof value.finishReason !== "string") {
495
+ throw new Error(
496
+ '"finish_step" parts expect an object with a "finishReason" property.'
497
+ );
498
+ }
499
+ const result = {
500
+ finishReason: value.finishReason,
501
+ isContinued: false
502
+ };
503
+ if ("usage" in value && value.usage != null && typeof value.usage === "object" && "promptTokens" in value.usage && "completionTokens" in value.usage) {
504
+ result.usage = {
505
+ promptTokens: typeof value.usage.promptTokens === "number" ? value.usage.promptTokens : Number.NaN,
506
+ completionTokens: typeof value.usage.completionTokens === "number" ? value.usage.completionTokens : Number.NaN
507
+ };
508
+ }
509
+ if ("isContinued" in value && typeof value.isContinued === "boolean") {
510
+ result.isContinued = value.isContinued;
511
+ }
512
+ return {
513
+ type: "finish_step",
514
+ value: result
515
+ };
516
+ }
517
+ };
518
+ var startStepStreamPart = {
519
+ code: "f",
520
+ name: "start_step",
521
+ parse: (value) => {
522
+ if (value == null || typeof value !== "object" || !("messageId" in value) || typeof value.messageId !== "string") {
523
+ throw new Error(
524
+ '"start_step" parts expect an object with an "id" property.'
525
+ );
526
+ }
527
+ return {
528
+ type: "start_step",
529
+ value: {
530
+ messageId: value.messageId
531
+ }
532
+ };
533
+ }
534
+ };
535
+ var reasoningStreamPart = {
536
+ code: "g",
537
+ name: "reasoning",
538
+ parse: (value) => {
539
+ if (typeof value !== "string") {
540
+ throw new Error('"reasoning" parts expect a string value.');
541
+ }
542
+ return { type: "reasoning", value };
543
+ }
544
+ };
545
+ var sourcePart = {
546
+ code: "h",
547
+ name: "source",
548
+ parse: (value) => {
549
+ if (value == null || typeof value !== "object") {
550
+ throw new Error('"source" parts expect a Source object.');
551
+ }
552
+ return {
553
+ type: "source",
554
+ value
555
+ };
556
+ }
557
+ };
558
+ var redactedReasoningStreamPart = {
559
+ code: "i",
560
+ name: "redacted_reasoning",
561
+ parse: (value) => {
562
+ if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string") {
563
+ throw new Error(
564
+ '"redacted_reasoning" parts expect an object with a "data" property.'
565
+ );
566
+ }
567
+ return { type: "redacted_reasoning", value: { data: value.data } };
568
+ }
569
+ };
570
+ var reasoningSignatureStreamPart = {
571
+ code: "j",
572
+ name: "reasoning_signature",
573
+ parse: (value) => {
574
+ if (value == null || typeof value !== "object" || !("signature" in value) || typeof value.signature !== "string") {
575
+ throw new Error(
576
+ '"reasoning_signature" parts expect an object with a "signature" property.'
577
+ );
578
+ }
579
+ return {
580
+ type: "reasoning_signature",
581
+ value: { signature: value.signature }
582
+ };
583
+ }
584
+ };
585
+ var fileStreamPart = {
586
+ code: "k",
587
+ name: "file",
588
+ parse: (value) => {
589
+ if (value == null || typeof value !== "object" || !("data" in value) || typeof value.data !== "string" || !("mimeType" in value) || typeof value.mimeType !== "string") {
590
+ throw new Error(
591
+ '"file" parts expect an object with a "data" and "mimeType" property.'
592
+ );
593
+ }
594
+ return { type: "file", value };
595
+ }
596
+ };
597
+ var dataStreamParts = [
598
+ textStreamPart,
599
+ dataStreamPart,
600
+ errorStreamPart,
601
+ messageAnnotationsStreamPart,
602
+ toolCallStreamPart,
603
+ toolResultStreamPart,
604
+ toolCallStreamingStartStreamPart,
605
+ toolCallDeltaStreamPart,
606
+ finishMessageStreamPart,
607
+ finishStepStreamPart,
608
+ startStepStreamPart,
609
+ reasoningStreamPart,
610
+ sourcePart,
611
+ redactedReasoningStreamPart,
612
+ reasoningSignatureStreamPart,
613
+ fileStreamPart
614
+ ];
615
+ var dataStreamPartsByCode = Object.fromEntries(
616
+ dataStreamParts.map((part) => [part.code, part])
617
+ );
618
+ var DataStreamStringPrefixes = Object.fromEntries(
619
+ dataStreamParts.map((part) => [part.name, part.code])
620
+ );
621
+ var validCodes = dataStreamParts.map((part) => part.code);
622
+ var parseDataStreamPart = (line) => {
623
+ const firstSeparatorIndex = line.indexOf(":");
624
+ if (firstSeparatorIndex === -1) {
625
+ throw new Error("Failed to parse stream string. No separator found.");
626
+ }
627
+ const prefix = line.slice(0, firstSeparatorIndex);
628
+ if (!validCodes.includes(prefix)) {
629
+ throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
630
+ }
631
+ const code = prefix;
632
+ const textValue = line.slice(firstSeparatorIndex + 1);
633
+ const jsonValue = JSON.parse(textValue);
634
+ return dataStreamPartsByCode[code].parse(jsonValue);
635
+ };
636
+ function formatDataStreamPart(type, value) {
637
+ const streamPart = dataStreamParts.find((part) => part.name === type);
638
+ if (!streamPart) {
639
+ throw new Error(`Invalid stream part type: ${type}`);
640
+ }
641
+ return `${streamPart.code}:${JSON.stringify(value)}
642
+ `;
643
+ }
644
+
645
+ // core/util/process-data-stream.ts
646
+ var NEWLINE = "\n".charCodeAt(0);
647
+ function concatChunks(chunks, totalLength) {
648
+ const concatenatedChunks = new Uint8Array(totalLength);
649
+ let offset = 0;
650
+ for (const chunk of chunks) {
651
+ concatenatedChunks.set(chunk, offset);
652
+ offset += chunk.length;
653
+ }
654
+ chunks.length = 0;
655
+ return concatenatedChunks;
656
+ }
657
+ async function processDataStream({
658
+ stream,
659
+ onTextPart,
660
+ onReasoningPart,
661
+ onReasoningSignaturePart,
662
+ onRedactedReasoningPart,
663
+ onSourcePart,
664
+ onFilePart,
665
+ onDataPart,
666
+ onErrorPart,
667
+ onToolCallStreamingStartPart,
668
+ onToolCallDeltaPart,
669
+ onToolCallPart,
670
+ onToolResultPart,
671
+ onMessageAnnotationsPart,
672
+ onFinishMessagePart,
673
+ onFinishStepPart,
674
+ onStartStepPart
675
+ }) {
676
+ const reader = stream.getReader();
677
+ const decoder = new TextDecoder();
678
+ const chunks = [];
679
+ let totalLength = 0;
680
+ while (true) {
681
+ const { value } = await reader.read();
682
+ if (value) {
683
+ chunks.push(value);
684
+ totalLength += value.length;
685
+ if (value[value.length - 1] !== NEWLINE) {
686
+ continue;
687
+ }
688
+ }
689
+ if (chunks.length === 0) {
690
+ break;
691
+ }
692
+ const concatenatedChunks = concatChunks(chunks, totalLength);
693
+ totalLength = 0;
694
+ const streamParts = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseDataStreamPart);
695
+ for (const { type, value: value2 } of streamParts) {
696
+ switch (type) {
697
+ case "text":
698
+ await (onTextPart == null ? void 0 : onTextPart(value2));
699
+ break;
700
+ case "reasoning":
701
+ await (onReasoningPart == null ? void 0 : onReasoningPart(value2));
702
+ break;
703
+ case "reasoning_signature":
704
+ await (onReasoningSignaturePart == null ? void 0 : onReasoningSignaturePart(value2));
705
+ break;
706
+ case "redacted_reasoning":
707
+ await (onRedactedReasoningPart == null ? void 0 : onRedactedReasoningPart(value2));
708
+ break;
709
+ case "file":
710
+ await (onFilePart == null ? void 0 : onFilePart(value2));
711
+ break;
712
+ case "source":
713
+ await (onSourcePart == null ? void 0 : onSourcePart(value2));
714
+ break;
715
+ case "data":
716
+ await (onDataPart == null ? void 0 : onDataPart(value2));
717
+ break;
718
+ case "error":
719
+ await (onErrorPart == null ? void 0 : onErrorPart(value2));
720
+ break;
721
+ case "message_annotations":
722
+ await (onMessageAnnotationsPart == null ? void 0 : onMessageAnnotationsPart(value2));
723
+ break;
724
+ case "tool_call_streaming_start":
725
+ await (onToolCallStreamingStartPart == null ? void 0 : onToolCallStreamingStartPart(value2));
726
+ break;
727
+ case "tool_call_delta":
728
+ await (onToolCallDeltaPart == null ? void 0 : onToolCallDeltaPart(value2));
729
+ break;
730
+ case "tool_call":
731
+ await (onToolCallPart == null ? void 0 : onToolCallPart(value2));
732
+ break;
733
+ case "tool_result":
734
+ await (onToolResultPart == null ? void 0 : onToolResultPart(value2));
735
+ break;
736
+ case "finish_message":
737
+ await (onFinishMessagePart == null ? void 0 : onFinishMessagePart(value2));
738
+ break;
739
+ case "finish_step":
740
+ await (onFinishStepPart == null ? void 0 : onFinishStepPart(value2));
741
+ break;
742
+ case "start_step":
743
+ await (onStartStepPart == null ? void 0 : onStartStepPart(value2));
744
+ break;
745
+ default: {
746
+ const exhaustiveCheck = type;
747
+ throw new Error(`Unknown stream part type: ${exhaustiveCheck}`);
748
+ }
749
+ }
750
+ }
751
+ }
752
+ }
753
+
754
+ // core/util/process-chat-response.ts
755
+ async function processChatResponse({
756
+ stream,
757
+ update,
758
+ onToolCall,
759
+ onFinish,
760
+ generateId: generateId3 = generateIdFunction,
761
+ getCurrentDate = () => /* @__PURE__ */ new Date(),
762
+ lastMessage
763
+ }) {
764
+ var _a17, _b;
765
+ const replaceLastMessage = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
766
+ let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
767
+ ((_b = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.reduce((max, toolInvocation) => {
768
+ var _a18;
769
+ return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);
770
+ }, 0)) != null ? _b : 0) : 0;
771
+ const message = replaceLastMessage ? structuredClone(lastMessage) : {
772
+ id: generateId3(),
773
+ createdAt: getCurrentDate(),
774
+ role: "assistant",
775
+ content: "",
776
+ parts: []
777
+ };
778
+ let currentTextPart = void 0;
779
+ let currentReasoningPart = void 0;
780
+ let currentReasoningTextDetail = void 0;
781
+ function updateToolInvocationPart(toolCallId, invocation) {
782
+ const part = message.parts.find(
783
+ (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
784
+ );
785
+ if (part != null) {
786
+ part.toolInvocation = invocation;
787
+ } else {
788
+ message.parts.push({
789
+ type: "tool-invocation",
790
+ toolInvocation: invocation
791
+ });
792
+ }
793
+ }
794
+ const data = [];
795
+ let messageAnnotations = replaceLastMessage ? lastMessage == null ? void 0 : lastMessage.annotations : void 0;
796
+ const partialToolCalls = {};
797
+ let usage = {
798
+ completionTokens: NaN,
799
+ promptTokens: NaN,
800
+ totalTokens: NaN
801
+ };
802
+ let finishReason = "unknown";
803
+ function execUpdate() {
804
+ const copiedData = [...data];
805
+ if (messageAnnotations == null ? void 0 : messageAnnotations.length) {
806
+ message.annotations = messageAnnotations;
807
+ }
808
+ const copiedMessage = {
809
+ // deep copy the message to ensure that deep changes (msg attachments) are updated
810
+ // with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
811
+ ...structuredClone(message),
812
+ // add a revision id to ensure that the message is updated with SWR. SWR uses a
813
+ // hashing approach by default to detect changes, but it only works for shallow
814
+ // changes. This is why we need to add a revision id to ensure that the message
815
+ // is updated with SWR (without it, the changes get stuck in SWR and are not
816
+ // forwarded to rendering):
817
+ revisionId: generateId3()
818
+ };
819
+ update({
820
+ message: copiedMessage,
821
+ data: copiedData,
822
+ replaceLastMessage
823
+ });
824
+ }
825
+ await processDataStream({
826
+ stream,
827
+ onTextPart(value) {
828
+ if (currentTextPart == null) {
829
+ currentTextPart = {
830
+ type: "text",
831
+ text: value
832
+ };
833
+ message.parts.push(currentTextPart);
834
+ } else {
835
+ currentTextPart.text += value;
836
+ }
837
+ message.content += value;
838
+ execUpdate();
839
+ },
840
+ onReasoningPart(value) {
841
+ var _a18;
842
+ if (currentReasoningTextDetail == null) {
843
+ currentReasoningTextDetail = { type: "text", text: value };
844
+ if (currentReasoningPart != null) {
845
+ currentReasoningPart.details.push(currentReasoningTextDetail);
846
+ }
847
+ } else {
848
+ currentReasoningTextDetail.text += value;
849
+ }
850
+ if (currentReasoningPart == null) {
851
+ currentReasoningPart = {
852
+ type: "reasoning",
853
+ reasoning: value,
854
+ details: [currentReasoningTextDetail]
855
+ };
856
+ message.parts.push(currentReasoningPart);
857
+ } else {
858
+ currentReasoningPart.reasoning += value;
859
+ }
860
+ message.reasoning = ((_a18 = message.reasoning) != null ? _a18 : "") + value;
861
+ execUpdate();
862
+ },
863
+ onReasoningSignaturePart(value) {
864
+ if (currentReasoningTextDetail != null) {
865
+ currentReasoningTextDetail.signature = value.signature;
866
+ }
867
+ },
868
+ onRedactedReasoningPart(value) {
869
+ if (currentReasoningPart == null) {
870
+ currentReasoningPart = {
871
+ type: "reasoning",
872
+ reasoning: "",
873
+ details: []
874
+ };
875
+ message.parts.push(currentReasoningPart);
876
+ }
877
+ currentReasoningPart.details.push({
878
+ type: "redacted",
879
+ data: value.data
880
+ });
881
+ currentReasoningTextDetail = void 0;
882
+ execUpdate();
883
+ },
884
+ onFilePart(value) {
885
+ message.parts.push({
886
+ type: "file",
887
+ mediaType: value.mimeType,
888
+ data: value.data
889
+ });
890
+ execUpdate();
891
+ },
892
+ onSourcePart(value) {
893
+ message.parts.push({
894
+ type: "source",
895
+ source: value
896
+ });
897
+ execUpdate();
898
+ },
899
+ onToolCallStreamingStartPart(value) {
900
+ if (message.toolInvocations == null) {
901
+ message.toolInvocations = [];
902
+ }
903
+ partialToolCalls[value.toolCallId] = {
904
+ text: "",
905
+ step,
906
+ toolName: value.toolName,
907
+ index: message.toolInvocations.length
908
+ };
909
+ const invocation = {
910
+ state: "partial-call",
911
+ step,
912
+ toolCallId: value.toolCallId,
913
+ toolName: value.toolName,
914
+ args: void 0
915
+ };
916
+ message.toolInvocations.push(invocation);
917
+ updateToolInvocationPart(value.toolCallId, invocation);
918
+ execUpdate();
919
+ },
920
+ onToolCallDeltaPart(value) {
921
+ const partialToolCall = partialToolCalls[value.toolCallId];
922
+ partialToolCall.text += value.argsTextDelta;
923
+ const { value: partialArgs } = parsePartialJson(partialToolCall.text);
924
+ const invocation = {
925
+ state: "partial-call",
926
+ step: partialToolCall.step,
927
+ toolCallId: value.toolCallId,
928
+ toolName: partialToolCall.toolName,
929
+ args: partialArgs
930
+ };
931
+ message.toolInvocations[partialToolCall.index] = invocation;
932
+ updateToolInvocationPart(value.toolCallId, invocation);
933
+ execUpdate();
934
+ },
935
+ async onToolCallPart(value) {
936
+ const invocation = {
937
+ state: "call",
938
+ step,
939
+ ...value
940
+ };
941
+ if (partialToolCalls[value.toolCallId] != null) {
942
+ message.toolInvocations[partialToolCalls[value.toolCallId].index] = invocation;
943
+ } else {
944
+ if (message.toolInvocations == null) {
945
+ message.toolInvocations = [];
946
+ }
947
+ message.toolInvocations.push(invocation);
948
+ }
949
+ updateToolInvocationPart(value.toolCallId, invocation);
950
+ execUpdate();
951
+ if (onToolCall) {
952
+ const result = await onToolCall({ toolCall: value });
953
+ if (result != null) {
954
+ const invocation2 = {
955
+ state: "result",
956
+ step,
957
+ ...value,
958
+ result
959
+ };
960
+ message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
961
+ updateToolInvocationPart(value.toolCallId, invocation2);
962
+ execUpdate();
963
+ }
964
+ }
965
+ },
966
+ onToolResultPart(value) {
967
+ const toolInvocations = message.toolInvocations;
968
+ if (toolInvocations == null) {
969
+ throw new Error("tool_result must be preceded by a tool_call");
970
+ }
971
+ const toolInvocationIndex = toolInvocations.findIndex(
972
+ (invocation2) => invocation2.toolCallId === value.toolCallId
973
+ );
974
+ if (toolInvocationIndex === -1) {
975
+ throw new Error(
976
+ "tool_result must be preceded by a tool_call with the same toolCallId"
977
+ );
978
+ }
979
+ const invocation = {
980
+ ...toolInvocations[toolInvocationIndex],
981
+ state: "result",
982
+ ...value
983
+ };
984
+ toolInvocations[toolInvocationIndex] = invocation;
985
+ updateToolInvocationPart(value.toolCallId, invocation);
986
+ execUpdate();
987
+ },
988
+ onDataPart(value) {
989
+ data.push(...value);
990
+ execUpdate();
991
+ },
992
+ onMessageAnnotationsPart(value) {
993
+ if (messageAnnotations == null) {
994
+ messageAnnotations = [...value];
995
+ } else {
996
+ messageAnnotations.push(...value);
997
+ }
998
+ execUpdate();
999
+ },
1000
+ onFinishStepPart(value) {
1001
+ step += 1;
1002
+ currentTextPart = value.isContinued ? currentTextPart : void 0;
1003
+ currentReasoningPart = void 0;
1004
+ currentReasoningTextDetail = void 0;
1005
+ },
1006
+ onStartStepPart(value) {
1007
+ if (!replaceLastMessage) {
1008
+ message.id = value.messageId;
1009
+ }
1010
+ message.parts.push({ type: "step-start" });
1011
+ execUpdate();
1012
+ },
1013
+ onFinishMessagePart(value) {
1014
+ finishReason = value.finishReason;
1015
+ if (value.usage != null) {
1016
+ usage = calculateLanguageModelUsage(value.usage);
1017
+ }
1018
+ },
1019
+ onErrorPart(error) {
1020
+ throw new Error(error);
1021
+ }
1022
+ });
1023
+ onFinish == null ? void 0 : onFinish({ message, finishReason, usage });
1024
+ }
1025
+
1026
+ // core/util/process-chat-text-response.ts
1027
+ import { generateId as generateIdFunction2 } from "@ai-sdk/provider-utils";
1028
+
1029
+ // core/util/process-text-stream.ts
1030
+ async function processTextStream({
1031
+ stream,
1032
+ onTextPart
1033
+ }) {
1034
+ const reader = stream.pipeThrough(new TextDecoderStream()).getReader();
1035
+ while (true) {
1036
+ const { done, value } = await reader.read();
1037
+ if (done) {
1038
+ break;
1039
+ }
1040
+ await onTextPart(value);
1041
+ }
1042
+ }
1043
+
1044
+ // core/util/process-chat-text-response.ts
1045
+ async function processChatTextResponse({
1046
+ stream,
1047
+ update,
1048
+ onFinish,
1049
+ getCurrentDate = () => /* @__PURE__ */ new Date(),
1050
+ generateId: generateId3 = generateIdFunction2
1051
+ }) {
1052
+ const textPart = { type: "text", text: "" };
1053
+ const resultMessage = {
1054
+ id: generateId3(),
1055
+ createdAt: getCurrentDate(),
1056
+ role: "assistant",
1057
+ content: "",
1058
+ parts: [textPart]
1059
+ };
1060
+ await processTextStream({
1061
+ stream,
1062
+ onTextPart: (chunk) => {
1063
+ resultMessage.content += chunk;
1064
+ textPart.text += chunk;
1065
+ update({
1066
+ message: { ...resultMessage },
1067
+ data: [],
1068
+ replaceLastMessage: false
1069
+ });
1070
+ }
1071
+ });
1072
+ onFinish == null ? void 0 : onFinish(resultMessage, {
1073
+ usage: { completionTokens: NaN, promptTokens: NaN, totalTokens: NaN },
1074
+ finishReason: "unknown"
1075
+ });
1076
+ }
1077
+
1078
+ // core/util/call-chat-api.ts
1079
+ var getOriginalFetch = () => fetch;
1080
+ async function callChatApi({
1081
+ api,
1082
+ body,
1083
+ streamProtocol = "data",
1084
+ credentials,
1085
+ headers,
1086
+ abortController,
1087
+ restoreMessagesOnFailure,
1088
+ onResponse,
1089
+ onUpdate,
1090
+ onFinish,
1091
+ onToolCall,
1092
+ generateId: generateId3,
1093
+ fetch: fetch2 = getOriginalFetch(),
1094
+ lastMessage
1095
+ }) {
1096
+ var _a17, _b;
1097
+ const response = await fetch2(api, {
1098
+ method: "POST",
1099
+ body: JSON.stringify(body),
1100
+ headers: {
1101
+ "Content-Type": "application/json",
1102
+ ...headers
1103
+ },
1104
+ signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
1105
+ credentials
1106
+ }).catch((err) => {
1107
+ restoreMessagesOnFailure();
1108
+ throw err;
1109
+ });
1110
+ if (onResponse) {
1111
+ try {
1112
+ await onResponse(response);
1113
+ } catch (err) {
1114
+ throw err;
1115
+ }
1116
+ }
1117
+ if (!response.ok) {
1118
+ restoreMessagesOnFailure();
1119
+ throw new Error(
1120
+ (_b = await response.text()) != null ? _b : "Failed to fetch the chat response."
1121
+ );
1122
+ }
1123
+ if (!response.body) {
1124
+ throw new Error("The response body is empty.");
1125
+ }
1126
+ switch (streamProtocol) {
1127
+ case "text": {
1128
+ await processChatTextResponse({
1129
+ stream: response.body,
1130
+ update: onUpdate,
1131
+ onFinish,
1132
+ generateId: generateId3
1133
+ });
1134
+ return;
1135
+ }
1136
+ case "data": {
1137
+ await processChatResponse({
1138
+ stream: response.body,
1139
+ update: onUpdate,
1140
+ lastMessage,
1141
+ onToolCall,
1142
+ onFinish({ message, finishReason, usage }) {
1143
+ if (onFinish && message != null) {
1144
+ onFinish(message, { usage, finishReason });
1145
+ }
1146
+ },
1147
+ generateId: generateId3
1148
+ });
1149
+ return;
1150
+ }
1151
+ default: {
1152
+ const exhaustiveCheck = streamProtocol;
1153
+ throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
1154
+ }
1155
+ }
1156
+ }
1157
+
1158
+ // core/util/call-completion-api.ts
1159
+ var getOriginalFetch2 = () => fetch;
1160
+ async function callCompletionApi({
1161
+ api,
1162
+ prompt,
1163
+ credentials,
1164
+ headers,
1165
+ body,
1166
+ streamProtocol = "data",
1167
+ setCompletion,
1168
+ setLoading,
1169
+ setError,
1170
+ setAbortController,
1171
+ onResponse,
1172
+ onFinish,
1173
+ onError,
1174
+ onData,
1175
+ fetch: fetch2 = getOriginalFetch2()
1176
+ }) {
1177
+ var _a17;
1178
+ try {
1179
+ setLoading(true);
1180
+ setError(void 0);
1181
+ const abortController = new AbortController();
1182
+ setAbortController(abortController);
1183
+ setCompletion("");
1184
+ const response = await fetch2(api, {
1185
+ method: "POST",
1186
+ body: JSON.stringify({
1187
+ prompt,
1188
+ ...body
1189
+ }),
1190
+ credentials,
1191
+ headers: {
1192
+ "Content-Type": "application/json",
1193
+ ...headers
1194
+ },
1195
+ signal: abortController.signal
1196
+ }).catch((err) => {
1197
+ throw err;
1198
+ });
1199
+ if (onResponse) {
1200
+ try {
1201
+ await onResponse(response);
1202
+ } catch (err) {
1203
+ throw err;
1204
+ }
1205
+ }
1206
+ if (!response.ok) {
1207
+ throw new Error(
1208
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
1209
+ );
1210
+ }
1211
+ if (!response.body) {
1212
+ throw new Error("The response body is empty.");
1213
+ }
1214
+ let result = "";
1215
+ switch (streamProtocol) {
1216
+ case "text": {
1217
+ await processTextStream({
1218
+ stream: response.body,
1219
+ onTextPart: (chunk) => {
1220
+ result += chunk;
1221
+ setCompletion(result);
1222
+ }
1223
+ });
1224
+ break;
1225
+ }
1226
+ case "data": {
1227
+ await processDataStream({
1228
+ stream: response.body,
1229
+ onTextPart(value) {
1230
+ result += value;
1231
+ setCompletion(result);
1232
+ },
1233
+ onDataPart(value) {
1234
+ onData == null ? void 0 : onData(value);
1235
+ },
1236
+ onErrorPart(value) {
1237
+ throw new Error(value);
1238
+ }
1239
+ });
1240
+ break;
1241
+ }
1242
+ default: {
1243
+ const exhaustiveCheck = streamProtocol;
1244
+ throw new Error(`Unknown stream protocol: ${exhaustiveCheck}`);
1245
+ }
1246
+ }
1247
+ if (onFinish) {
1248
+ onFinish(prompt, result);
1249
+ }
1250
+ setAbortController(null);
1251
+ return result;
1252
+ } catch (err) {
1253
+ if (err.name === "AbortError") {
1254
+ setAbortController(null);
1255
+ return null;
1256
+ }
1257
+ if (err instanceof Error) {
1258
+ if (onError) {
1259
+ onError(err);
1260
+ }
1261
+ }
1262
+ setError(err);
1263
+ } finally {
1264
+ setLoading(false);
1265
+ }
1266
+ }
1267
+
1268
+ // core/util/data-url.ts
1269
+ function getTextFromDataUrl(dataUrl) {
1270
+ const [header, base64Content] = dataUrl.split(",");
1271
+ const mediaType = header.split(";")[0].split(":")[1];
1272
+ if (mediaType == null || base64Content == null) {
1273
+ throw new Error("Invalid data URL format");
1274
+ }
1275
+ try {
1276
+ return window.atob(base64Content);
1277
+ } catch (error) {
1278
+ throw new Error(`Error decoding data URL`);
1279
+ }
1280
+ }
1281
+
1282
+ // core/util/extract-max-tool-invocation-step.ts
1283
+ function extractMaxToolInvocationStep(toolInvocations) {
1284
+ return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
1285
+ var _a17;
1286
+ return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
1287
+ }, 0);
1288
+ }
1289
+
1290
+ // core/util/get-message-parts.ts
1291
+ function getMessageParts(message) {
1292
+ var _a17;
1293
+ return (_a17 = message.parts) != null ? _a17 : [
1294
+ ...message.toolInvocations ? message.toolInvocations.map((toolInvocation) => ({
1295
+ type: "tool-invocation",
1296
+ toolInvocation
1297
+ })) : [],
1298
+ ...message.reasoning ? [
1299
+ {
1300
+ type: "reasoning",
1301
+ reasoning: message.reasoning,
1302
+ details: [{ type: "text", text: message.reasoning }]
1303
+ }
1304
+ ] : [],
1305
+ ...message.content ? [{ type: "text", text: message.content }] : []
1306
+ ];
1307
+ }
1308
+
1309
+ // core/util/fill-message-parts.ts
1310
+ function fillMessageParts(messages) {
1311
+ return messages.map((message) => ({
1312
+ ...message,
1313
+ parts: getMessageParts(message)
1314
+ }));
1315
+ }
1316
+
1317
+ // core/util/is-deep-equal-data.ts
1318
+ function isDeepEqualData(obj1, obj2) {
1319
+ if (obj1 === obj2)
1320
+ return true;
1321
+ if (obj1 == null || obj2 == null)
1322
+ return false;
1323
+ if (typeof obj1 !== "object" && typeof obj2 !== "object")
1324
+ return obj1 === obj2;
1325
+ if (obj1.constructor !== obj2.constructor)
1326
+ return false;
1327
+ if (obj1 instanceof Date && obj2 instanceof Date) {
1328
+ return obj1.getTime() === obj2.getTime();
1329
+ }
1330
+ if (Array.isArray(obj1)) {
1331
+ if (obj1.length !== obj2.length)
1332
+ return false;
1333
+ for (let i = 0; i < obj1.length; i++) {
1334
+ if (!isDeepEqualData(obj1[i], obj2[i]))
1335
+ return false;
1336
+ }
1337
+ return true;
1338
+ }
1339
+ const keys1 = Object.keys(obj1);
1340
+ const keys2 = Object.keys(obj2);
1341
+ if (keys1.length !== keys2.length)
1342
+ return false;
1343
+ for (const key of keys1) {
1344
+ if (!keys2.includes(key))
1345
+ return false;
1346
+ if (!isDeepEqualData(obj1[key], obj2[key]))
1347
+ return false;
1348
+ }
1349
+ return true;
1350
+ }
1351
+
1352
+ // core/util/prepare-attachments-for-request.ts
1353
+ async function prepareAttachmentsForRequest(attachmentsFromOptions) {
1354
+ if (!attachmentsFromOptions) {
1355
+ return [];
1356
+ }
1357
+ if (attachmentsFromOptions instanceof FileList) {
1358
+ return Promise.all(
1359
+ Array.from(attachmentsFromOptions).map(async (attachment) => {
1360
+ const { name: name17, type } = attachment;
1361
+ const dataUrl = await new Promise((resolve, reject) => {
1362
+ const reader = new FileReader();
1363
+ reader.onload = (readerEvent) => {
1364
+ var _a17;
1365
+ resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
1366
+ };
1367
+ reader.onerror = (error) => reject(error);
1368
+ reader.readAsDataURL(attachment);
1369
+ });
1370
+ return {
1371
+ name: name17,
1372
+ contentType: type,
1373
+ url: dataUrl
1374
+ };
1375
+ })
1376
+ );
1377
+ }
1378
+ if (Array.isArray(attachmentsFromOptions)) {
1379
+ return attachmentsFromOptions;
1380
+ }
1381
+ throw new Error("Invalid attachments type");
1382
+ }
1383
+
1384
+ // core/util/schema.ts
1385
+ import { validatorSymbol } from "@ai-sdk/provider-utils";
1386
+
1387
+ // core/util/zod-schema.ts
1388
+ import zodToJsonSchema from "zod-to-json-schema";
1389
+ function zodSchema(zodSchema2, options) {
1390
+ var _a17;
1391
+ const useReferences = (_a17 = options == null ? void 0 : options.useReferences) != null ? _a17 : false;
1392
+ return jsonSchema(
1393
+ zodToJsonSchema(zodSchema2, {
1394
+ $refStrategy: useReferences ? "root" : "none",
1395
+ target: "jsonSchema7"
1396
+ // note: openai mode breaks various gemini conversions
1397
+ }),
1398
+ {
1399
+ validate: (value) => {
1400
+ const result = zodSchema2.safeParse(value);
1401
+ return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
1402
+ }
1403
+ }
1404
+ );
1405
+ }
1406
+
1407
+ // core/util/schema.ts
1408
+ var schemaSymbol = Symbol.for("vercel.ai.schema");
1409
+ function jsonSchema(jsonSchema2, {
1410
+ validate
1411
+ } = {}) {
1412
+ return {
1413
+ [schemaSymbol]: true,
1414
+ _type: void 0,
1415
+ // should never be used directly
1416
+ [validatorSymbol]: true,
1417
+ jsonSchema: jsonSchema2,
1418
+ validate
1419
+ };
1420
+ }
1421
+ function isSchema(value) {
1422
+ return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
1423
+ }
1424
+ function asSchema(schema) {
1425
+ return isSchema(schema) ? schema : zodSchema(schema);
1426
+ }
1427
+
1428
+ // core/util/should-resubmit-messages.ts
1429
+ function shouldResubmitMessages({
1430
+ originalMaxToolInvocationStep,
1431
+ originalMessageCount,
1432
+ maxSteps,
1433
+ messages
1434
+ }) {
1435
+ var _a17;
1436
+ const lastMessage = messages[messages.length - 1];
1437
+ return (
1438
+ // check if the feature is enabled:
1439
+ maxSteps > 1 && // ensure there is a last message:
1440
+ lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
1441
+ (messages.length > originalMessageCount || extractMaxToolInvocationStep(lastMessage.toolInvocations) !== originalMaxToolInvocationStep) && // check that next step is possible:
1442
+ isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
1443
+ ((_a17 = extractMaxToolInvocationStep(lastMessage.toolInvocations)) != null ? _a17 : 0) < maxSteps
1444
+ );
1445
+ }
1446
+ function isAssistantMessageWithCompletedToolCalls(message) {
1447
+ if (message.role !== "assistant") {
1448
+ return false;
1449
+ }
1450
+ const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
1451
+ return part.type === "step-start" ? index : lastIndex;
1452
+ }, -1);
1453
+ const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
1454
+ return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
1455
+ }
1456
+
1457
+ // core/util/update-tool-call-result.ts
1458
+ function updateToolCallResult({
1459
+ messages,
1460
+ toolCallId,
1461
+ toolResult: result
1462
+ }) {
1463
+ var _a17;
1464
+ const lastMessage = messages[messages.length - 1];
1465
+ const invocationPart = lastMessage.parts.find(
1466
+ (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
1467
+ );
1468
+ if (invocationPart == null) {
1469
+ return;
1470
+ }
1471
+ const toolResult = {
1472
+ ...invocationPart.toolInvocation,
1473
+ state: "result",
1474
+ result
1475
+ };
1476
+ invocationPart.toolInvocation = toolResult;
1477
+ lastMessage.toolInvocations = (_a17 = lastMessage.toolInvocations) == null ? void 0 : _a17.map(
1478
+ (toolInvocation) => toolInvocation.toolCallId === toolCallId ? toolResult : toolInvocation
1479
+ );
1480
+ }
 
 // core/data-stream/create-data-stream.ts
- import { formatDataStreamPart } from "@ai-sdk/ui-utils";
 function createDataStream({
 execute,
 onError = () => "An error occurred."
@@ -830,12 +2293,12 @@ import {
 var DefaultGeneratedFile = class {
 constructor({
 data,
- mimeType
+ mediaType
 }) {
 const isUint8Array = data instanceof Uint8Array;
 this.base64Data = isUint8Array ? void 0 : data;
 this.uint8ArrayData = isUint8Array ? data : void 0;
- this.mimeType = mimeType;
+ this.mediaType = mediaType;
 }
 // lazy conversion with caching to avoid unnecessary conversion overhead:
 get base64() {
@@ -859,45 +2322,45 @@ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
 }
 };
 
- // core/util/detect-image-mimetype.ts
- var mimeTypeSignatures = [
+ // core/util/detect-media-type.ts
+ var imageMediaTypeSignatures = [
 {
- mimeType: "image/gif",
+ mediaType: "image/gif",
 bytesPrefix: [71, 73, 70],
 base64Prefix: "R0lG"
 },
 {
- mimeType: "image/png",
+ mediaType: "image/png",
 bytesPrefix: [137, 80, 78, 71],
 base64Prefix: "iVBORw"
 },
 {
- mimeType: "image/jpeg",
+ mediaType: "image/jpeg",
 bytesPrefix: [255, 216],
 base64Prefix: "/9j/"
 },
 {
- mimeType: "image/webp",
+ mediaType: "image/webp",
 bytesPrefix: [82, 73, 70, 70],
 base64Prefix: "UklGRg"
 },
 {
- mimeType: "image/bmp",
+ mediaType: "image/bmp",
 bytesPrefix: [66, 77],
 base64Prefix: "Qk"
 },
 {
- mimeType: "image/tiff",
+ mediaType: "image/tiff",
 bytesPrefix: [73, 73, 42, 0],
 base64Prefix: "SUkqAA"
 },
 {
- mimeType: "image/tiff",
+ mediaType: "image/tiff",
 bytesPrefix: [77, 77, 0, 42],
 base64Prefix: "TU0AKg"
 },
 {
- mimeType: "image/avif",
+ mediaType: "image/avif",
 bytesPrefix: [
 0,
 0,
@@ -915,7 +2378,7 @@ var mimeTypeSignatures = [
 base64Prefix: "AAAAIGZ0eXBhdmlm"
 },
 {
- mimeType: "image/heic",
+ mediaType: "image/heic",
 bytesPrefix: [
 0,
 0,
@@ -933,10 +2396,45 @@ var mimeTypeSignatures = [
 base64Prefix: "AAAAIGZ0eXBoZWlj"
 }
 ];
- function detectImageMimeType(image) {
- for (const signature of mimeTypeSignatures) {
- if (typeof image === "string" ? image.startsWith(signature.base64Prefix) : image.length >= signature.bytesPrefix.length && signature.bytesPrefix.every((byte, index) => image[index] === byte)) {
- return signature.mimeType;
+ var audioMediaTypeSignatures = [
+ {
+ mediaType: "audio/mpeg",
+ bytesPrefix: [255, 251],
+ base64Prefix: "//s="
+ },
+ {
+ mediaType: "audio/wav",
+ bytesPrefix: [82, 73, 70, 70],
+ base64Prefix: "UklGR"
+ },
+ {
+ mediaType: "audio/ogg",
+ bytesPrefix: [79, 103, 103, 83],
+ base64Prefix: "T2dnUw"
+ },
+ {
+ mediaType: "audio/flac",
+ bytesPrefix: [102, 76, 97, 67],
+ base64Prefix: "ZkxhQw"
+ },
+ {
+ mediaType: "audio/aac",
+ bytesPrefix: [64, 21, 0, 0],
+ base64Prefix: "QBUA"
+ },
+ {
+ mediaType: "audio/mp4",
+ bytesPrefix: [102, 116, 121, 112],
+ base64Prefix: "ZnR5cA"
+ }
+ ];
+ function detectMediaType({
+ data,
+ signatures
+ }) {
+ for (const signature of signatures) {
+ if (typeof data === "string" ? data.startsWith(signature.base64Prefix) : data.length >= signature.bytesPrefix.length && signature.bytesPrefix.every((byte, index) => data[index] === byte)) {
+ return signature.mediaType;
 }
 }
 return void 0;
@@ -992,7 +2490,10 @@ async function generateImage({
 var _a18;
 return new DefaultGeneratedFile({
 data: image,
- mimeType: (_a18 = detectImageMimeType(image)) != null ? _a18 : "image/png"
+ mediaType: (_a18 = detectMediaType({
+ data: image,
+ signatures: imageMediaTypeSignatures
+ })) != null ? _a18 : "image/png"
 });
 }
 )
@@ -1021,7 +2522,7 @@ import {
 JSONParseError,
 TypeValidationError as TypeValidationError2
 } from "@ai-sdk/provider";
- import { createIdGenerator, safeParseJSON } from "@ai-sdk/provider-utils";
+ import { createIdGenerator, safeParseJSON as safeParseJSON2 } from "@ai-sdk/provider-utils";
 
 // errors/no-object-generated-error.ts
 import { AISDKError as AISDKError4 } from "@ai-sdk/provider";
@@ -1051,6 +2552,9 @@ var NoObjectGeneratedError = class extends AISDKError4 {
 };
 _a4 = symbol4;
 
+ // core/prompt/convert-to-language-model-prompt.ts
+ import { convertUint8ArrayToBase64 as convertUint8ArrayToBase643 } from "@ai-sdk/provider-utils";
+
 // util/download-error.ts
 import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
 var name5 = "AI_DownloadError";
@@ -1092,7 +2596,7 @@ async function download({ url }) {
 }
 return {
 data: new Uint8Array(await response.arrayBuffer()),
- mimeType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
+ mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
 };
 } catch (error) {
 if (DownloadError.isInstance(error)) {
@@ -1208,12 +2712,12 @@ function splitDataUrl(dataUrl) {
 try {
 const [header, base64Content] = dataUrl.split(",");
 return {
- mimeType: header.split(";")[0].split(":")[1],
+ mediaType: header.split(";")[0].split(":")[1],
 base64Content
 };
 } catch (error) {
 return {
- mimeType: void 0,
+ mediaType: void 0,
 base64Content: void 0
 };
 }
@@ -1278,7 +2782,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
 // remove empty text parts:
 (part) => part.type !== "text" || part.text !== ""
 ).map((part) => {
- var _a18;
+ var _a18, _b2;
 const providerOptions = (_a18 = part.providerOptions) != null ? _a18 : part.experimental_providerMetadata;
 switch (part.type) {
 case "file": {
@@ -1286,7 +2790,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
 type: "file",
 data: part.data instanceof URL ? part.data : convertDataContentToBase64String(part.data),
 filename: part.filename,
- mimeType: part.mimeType,
+ mediaType: (_b2 = part.mediaType) != null ? _b2 : part.mimeType,
 providerOptions
 };
 }
@@ -1374,7 +2878,7 @@ async function downloadAssets(messages, downloadImplementation, modelSupportsIma
 );
 }
 function convertPartToLanguageModelPart(part, downloadedAssets) {
- var _a17, _b, _c, _d;
+ var _a17, _b, _c, _d, _e;
 if (part.type === "text") {
 return {
 type: "text",
@@ -1382,7 +2886,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
 providerOptions: (_a17 = part.providerOptions) != null ? _a17 : part.experimental_providerMetadata
 };
 }
- let mimeType = part.mimeType;
+ let mediaType = (_b = part.mediaType) != null ? _b : part.mimeType;
 let data;
 let content;
 let normalizedData;
@@ -1404,19 +2908,19 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
 }
 if (content instanceof URL) {
 if (content.protocol === "data:") {
- const { mimeType: dataUrlMimeType, base64Content } = splitDataUrl(
+ const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
 content.toString()
 );
- if (dataUrlMimeType == null || base64Content == null) {
+ if (dataUrlMediaType == null || base64Content == null) {
 throw new Error(`Invalid data URL format in part ${type}`);
 }
- mimeType = dataUrlMimeType;
+ mediaType = dataUrlMediaType;
 normalizedData = convertDataContentToUint8Array(base64Content);
 } else {
 const downloadedFile = downloadedAssets[content.toString()];
 if (downloadedFile) {
 normalizedData = downloadedFile.data;
- mimeType != null ? mimeType : mimeType = downloadedFile.mimeType;
+ mediaType != null ? mediaType : mediaType = downloadedFile.mediaType;
 } else {
 normalizedData = content;
 }
@@ -1427,25 +2931,30 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
 switch (type) {
 case "image": {
 if (normalizedData instanceof Uint8Array) {
- mimeType = (_b = detectImageMimeType(normalizedData)) != null ? _b : mimeType;
+ mediaType = (_c = detectMediaType({
+ data: normalizedData,
+ signatures: imageMediaTypeSignatures
+ })) != null ? _c : mediaType;
 }
 return {
- type: "image",
- image: normalizedData,
- mimeType,
- providerOptions: (_c = part.providerOptions) != null ? _c : part.experimental_providerMetadata
+ type: "file",
+ mediaType: mediaType != null ? mediaType : "image/*",
+ // any image
+ filename: void 0,
+ data: normalizedData instanceof Uint8Array ? convertUint8ArrayToBase643(normalizedData) : normalizedData,
+ providerOptions: (_d = part.providerOptions) != null ? _d : part.experimental_providerMetadata
 };
 }
 case "file": {
- if (mimeType == null) {
- throw new Error(`Mime type is missing for file part`);
+ if (mediaType == null) {
+ throw new Error(`Media type is missing for file part`);
 }
 return {
 type: "file",
- data: normalizedData instanceof Uint8Array ? convertDataContentToBase64String(normalizedData) : normalizedData,
+ mediaType,
 filename: part.filename,
- mimeType,
- providerOptions: (_d = part.providerOptions) != null ? _d : part.experimental_providerMetadata
+ data: normalizedData instanceof Uint8Array ? convertDataContentToBase64String(normalizedData) : normalizedData,
+ providerOptions: (_e = part.providerOptions) != null ? _e : part.experimental_providerMetadata
 };
 }
 }
@@ -1575,7 +3084,7 @@ function attachmentsToParts(attachments) {
 parts.push({
 type: "file",
 data: url,
- mimeType: attachment.contentType
+ mediaType: attachment.contentType
 });
 }
 break;
@@ -1583,14 +3092,14 @@ function attachmentsToParts(attachments) {
 case "data:": {
 let header;
 let base64Content;
- let mimeType;
+ let mediaType;
 try {
 [header, base64Content] = attachment.url.split(",");
- mimeType = header.split(";")[0].split(":")[1];
+ mediaType = header.split(";")[0].split(":")[1];
 } catch (error) {
 throw new Error(`Error processing data URL: ${attachment.url}`);
 }
- if (mimeType == null || base64Content == null) {
+ if (mediaType == null || base64Content == null) {
 throw new Error(`Invalid data URL format: ${attachment.url}`);
 }
 if ((_b = attachment.contentType) == null ? void 0 : _b.startsWith("image/")) {
@@ -1614,7 +3123,7 @@ function attachmentsToParts(attachments) {
 parts.push({
 type: "file",
 data: base64Content,
- mimeType: attachment.contentType
+ mediaType: attachment.contentType
 });
 }
 break;
@@ -1689,14 +3198,23 @@ function convertToCoreMessages(messages, options) {
1689
3198
  case "assistant": {
1690
3199
  if (message.parts != null) {
1691
3200
  let processBlock2 = function() {
3201
+ var _a18;
1692
3202
  const content2 = [];
1693
3203
  for (const part of block) {
1694
3204
  switch (part.type) {
1695
- case "file":
1696
3205
  case "text": {
1697
3206
  content2.push(part);
1698
3207
  break;
1699
3208
  }
3209
+ case "file": {
3210
+ content2.push({
3211
+ type: "file",
3212
+ data: part.data,
3213
+ mediaType: (_a18 = part.mediaType) != null ? _a18 : part.mimeType
3214
+ // TODO migration, remove
3215
+ });
3216
+ break;
3217
+ }
1700
3218
  case "reasoning": {
1701
3219
  for (const detail of part.details) {
1702
3220
  switch (detail.type) {
@@ -1953,7 +3471,7 @@ var toolResultContentSchema = z4.array(
1953
3471
  z4.object({
1954
3472
  type: z4.literal("image"),
1955
3473
  data: z4.string(),
1956
- mimeType: z4.string().optional()
3474
+ mediaType: z4.string().optional()
1957
3475
  })
1958
3476
  ])
1959
3477
  );
@@ -1968,6 +3486,7 @@ var textPartSchema = z5.object({
1968
3486
  var imagePartSchema = z5.object({
1969
3487
  type: z5.literal("image"),
1970
3488
  image: z5.union([dataContentSchema, z5.instanceof(URL)]),
3489
+ mediaType: z5.string().optional(),
1971
3490
  mimeType: z5.string().optional(),
1972
3491
  providerOptions: providerMetadataSchema.optional(),
1973
3492
  experimental_providerMetadata: providerMetadataSchema.optional()
@@ -1976,7 +3495,8 @@ var filePartSchema = z5.object({
1976
3495
  type: z5.literal("file"),
1977
3496
  data: z5.union([dataContentSchema, z5.instanceof(URL)]),
1978
3497
  filename: z5.string().optional(),
1979
- mimeType: z5.string(),
3498
+ mediaType: z5.string(),
3499
+ mimeType: z5.string().optional(),
1980
3500
  providerOptions: providerMetadataSchema.optional(),
1981
3501
  experimental_providerMetadata: providerMetadataSchema.optional()
1982
3502
  });
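Under the updated schemas above, mediaType becomes the required field on file parts and is newly accepted on image parts, while mimeType remains as an optional field during the migration. A hedged sketch of a user message using the new field (the file contents and filename here are placeholders):

    // Illustrative message shape; only the mediaType/mimeType fields are the point.
    const pdfBytes = new Uint8Array([0x25, 0x50, 0x44, 0x46]); // "%PDF" placeholder bytes
    const message = {
      role: "user",
      content: [
        { type: "text", text: "Summarize the attached report." },
        { type: "file", data: pdfBytes, filename: "report.pdf", mediaType: "application/pdf" },
        // mimeType: "application/pdf" would still validate, but is now optional.
      ],
    };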
@@ -2136,7 +3656,7 @@ function standardizePrompt({
2136
3656
  }
2137
3657
 
2138
3658
  // core/types/usage.ts
2139
- function calculateLanguageModelUsage({
3659
+ function calculateLanguageModelUsage2({
2140
3660
  promptTokens,
2141
3661
  completionTokens
2142
3662
  }) {
@@ -2182,7 +3702,6 @@ import {
2182
3702
  UnsupportedFunctionalityError
2183
3703
  } from "@ai-sdk/provider";
2184
3704
  import { safeValidateTypes as safeValidateTypes2 } from "@ai-sdk/provider-utils";
2185
- import { asSchema } from "@ai-sdk/ui-utils";
2186
3705
 
2187
3706
  // core/util/async-iterable-stream.ts
2188
3707
  function createAsyncIterableStream(source) {
@@ -2632,7 +4151,6 @@ async function generateObject({
2632
4151
  let finishReason;
2633
4152
  let usage;
2634
4153
  let warnings;
2635
- let rawResponse;
2636
4154
  let response;
2637
4155
  let request;
2638
4156
  let logprobs;
@@ -2687,7 +4205,7 @@ async function generateObject({
2687
4205
  }),
2688
4206
  tracer,
2689
4207
  fn: async (span2) => {
2690
- var _a18, _b2, _c2, _d2, _e, _f;
4208
+ var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
2691
4209
  const result2 = await model.doGenerate({
2692
4210
  responseFormat: {
2693
4211
  type: "json",
@@ -2705,13 +4223,15 @@ async function generateObject({
2705
4223
  const responseData = {
2706
4224
  id: (_b2 = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),
2707
4225
  timestamp: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
2708
- modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
4226
+ modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
4227
+ headers: (_g = result2.response) == null ? void 0 : _g.headers,
4228
+ body: (_h = result2.response) == null ? void 0 : _h.body
2709
4229
  };
2710
4230
  if (result2.text === void 0) {
2711
4231
  throw new NoObjectGeneratedError({
2712
4232
  message: "No object generated: the model did not return a response.",
2713
4233
  response: responseData,
2714
- usage: calculateLanguageModelUsage(result2.usage),
4234
+ usage: calculateLanguageModelUsage2(result2.usage),
2715
4235
  finishReason: result2.finishReason
2716
4236
  });
2717
4237
  }
@@ -2743,7 +4263,6 @@ async function generateObject({
2743
4263
  finishReason = generateResult.finishReason;
2744
4264
  usage = generateResult.usage;
2745
4265
  warnings = generateResult.warnings;
2746
- rawResponse = generateResult.rawResponse;
2747
4266
  logprobs = generateResult.logprobs;
2748
4267
  resultProviderMetadata = generateResult.providerMetadata;
2749
4268
  request = (_b = generateResult.request) != null ? _b : {};
@@ -2793,7 +4312,7 @@ async function generateObject({
2793
4312
  }),
2794
4313
  tracer,
2795
4314
  fn: async (span2) => {
2796
- var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
4315
+ var _a18, _b2, _c2, _d2, _e, _f, _g, _h, _i, _j;
2797
4316
  const result2 = await model.doGenerate({
2798
4317
  tools: [
2799
4318
  {
@@ -2815,13 +4334,15 @@ async function generateObject({
2815
4334
  const responseData = {
2816
4335
  id: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.id) != null ? _d2 : generateId3(),
2817
4336
  timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
2818
- modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId
4337
+ modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId,
4338
+ headers: (_i = result2.response) == null ? void 0 : _i.headers,
4339
+ body: (_j = result2.response) == null ? void 0 : _j.body
2819
4340
  };
2820
4341
  if (objectText === void 0) {
2821
4342
  throw new NoObjectGeneratedError({
2822
4343
  message: "No object generated: the tool was not called.",
2823
4344
  response: responseData,
2824
- usage: calculateLanguageModelUsage(result2.usage),
4345
+ usage: calculateLanguageModelUsage2(result2.usage),
2825
4346
  finishReason: result2.finishReason
2826
4347
  });
2827
4348
  }
@@ -2853,7 +4374,6 @@ async function generateObject({
2853
4374
  finishReason = generateResult.finishReason;
2854
4375
  usage = generateResult.usage;
2855
4376
  warnings = generateResult.warnings;
2856
- rawResponse = generateResult.rawResponse;
2857
4377
  logprobs = generateResult.logprobs;
2858
4378
  resultProviderMetadata = generateResult.providerMetadata;
2859
4379
  request = (_d = generateResult.request) != null ? _d : {};
@@ -2871,14 +4391,14 @@ async function generateObject({
2871
4391
  }
2872
4392
  }
2873
4393
  function processResult(result2) {
2874
- const parseResult = safeParseJSON({ text: result2 });
4394
+ const parseResult = safeParseJSON2({ text: result2 });
2875
4395
  if (!parseResult.success) {
2876
4396
  throw new NoObjectGeneratedError({
2877
4397
  message: "No object generated: could not parse the response.",
2878
4398
  cause: parseResult.error,
2879
4399
  text: result2,
2880
4400
  response,
2881
- usage: calculateLanguageModelUsage(usage),
4401
+ usage: calculateLanguageModelUsage2(usage),
2882
4402
  finishReason
2883
4403
  });
2884
4404
  }
@@ -2887,7 +4407,7 @@ async function generateObject({
2887
4407
  {
2888
4408
  text: result2,
2889
4409
  response,
2890
- usage: calculateLanguageModelUsage(usage)
4410
+ usage: calculateLanguageModelUsage2(usage)
2891
4411
  }
2892
4412
  );
2893
4413
  if (!validationResult.success) {
@@ -2896,7 +4416,7 @@ async function generateObject({
2896
4416
  cause: validationResult.error,
2897
4417
  text: result2,
2898
4418
  response,
2899
- usage: calculateLanguageModelUsage(usage),
4419
+ usage: calculateLanguageModelUsage2(usage),
2900
4420
  finishReason
2901
4421
  });
2902
4422
  }
@@ -2935,14 +4455,10 @@ async function generateObject({
2935
4455
  return new DefaultGenerateObjectResult({
2936
4456
  object: object2,
2937
4457
  finishReason,
2938
- usage: calculateLanguageModelUsage(usage),
4458
+ usage: calculateLanguageModelUsage2(usage),
2939
4459
  warnings,
2940
4460
  request,
2941
- response: {
2942
- ...response,
2943
- headers: rawResponse == null ? void 0 : rawResponse.headers,
2944
- body: rawResponse == null ? void 0 : rawResponse.body
2945
- },
4461
+ response,
2946
4462
  logprobs,
2947
4463
  providerMetadata: resultProviderMetadata
2948
4464
  });
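With the changes above, generateObject builds its response metadata (id, timestamp, modelId, plus headers and body when the provider supplies them) directly from the provider result and returns it as response, rather than merging a separate rawResponse afterwards. A hedged sketch of reading it; model, schema, and the prompt are placeholders, not taken from this diff:

    // Sketch only: `model` is any language model instance, `schema` a Zod or JSON schema.
    const result = await generateObject({ model, schema, prompt: "..." });
    console.log(result.response.id, result.response.modelId);
    console.log(result.response.headers); // now populated straight from the provider response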
@@ -2974,10 +4490,6 @@ var DefaultGenerateObjectResult = class {
2974
4490
 
2975
4491
  // core/generate-object/stream-object.ts
2976
4492
  import { createIdGenerator as createIdGenerator2 } from "@ai-sdk/provider-utils";
2977
- import {
2978
- isDeepEqualData,
2979
- parsePartialJson
2980
- } from "@ai-sdk/ui-utils";
2981
4493
 
2982
4494
  // util/delayed-promise.ts
2983
4495
  var DelayedPromise = class {
@@ -3361,7 +4873,7 @@ var DefaultStreamObjectResult = class {
3361
4873
  }
3362
4874
  }
3363
4875
  const {
3364
- result: { stream, warnings, rawResponse, request },
4876
+ result: { stream, warnings, response, request },
3365
4877
  doStreamSpan,
3366
4878
  startTimestampMs
3367
4879
  } = await retry(
@@ -3410,7 +4922,7 @@ var DefaultStreamObjectResult = class {
3410
4922
  let error;
3411
4923
  let accumulatedText = "";
3412
4924
  let textDelta = "";
3413
- let response = {
4925
+ let fullResponse = {
3414
4926
  id: generateId3(),
3415
4927
  timestamp: currentDate(),
3416
4928
  modelId: model.modelId
@@ -3467,10 +4979,10 @@ var DefaultStreamObjectResult = class {
3467
4979
  }
3468
4980
  switch (chunk.type) {
3469
4981
  case "response-metadata": {
3470
- response = {
3471
- id: (_a18 = chunk.id) != null ? _a18 : response.id,
3472
- timestamp: (_b2 = chunk.timestamp) != null ? _b2 : response.timestamp,
3473
- modelId: (_c = chunk.modelId) != null ? _c : response.modelId
4982
+ fullResponse = {
4983
+ id: (_a18 = chunk.id) != null ? _a18 : fullResponse.id,
4984
+ timestamp: (_b2 = chunk.timestamp) != null ? _b2 : fullResponse.timestamp,
4985
+ modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
3474
4986
  };
3475
4987
  break;
3476
4988
  }
@@ -3479,20 +4991,24 @@ var DefaultStreamObjectResult = class {
3479
4991
  controller.enqueue({ type: "text-delta", textDelta });
3480
4992
  }
3481
4993
  finishReason = chunk.finishReason;
3482
- usage = calculateLanguageModelUsage(chunk.usage);
4994
+ usage = calculateLanguageModelUsage2(chunk.usage);
3483
4995
  providerMetadata = chunk.providerMetadata;
3484
- controller.enqueue({ ...chunk, usage, response });
4996
+ controller.enqueue({
4997
+ ...chunk,
4998
+ usage,
4999
+ response: fullResponse
5000
+ });
3485
5001
  self.usagePromise.resolve(usage);
3486
5002
  self.providerMetadataPromise.resolve(providerMetadata);
3487
5003
  self.responsePromise.resolve({
3488
- ...response,
3489
- headers: rawResponse == null ? void 0 : rawResponse.headers
5004
+ ...fullResponse,
5005
+ headers: response == null ? void 0 : response.headers
3490
5006
  });
3491
5007
  const validationResult = outputStrategy.validateFinalResult(
3492
5008
  latestObjectJson,
3493
5009
  {
3494
5010
  text: accumulatedText,
3495
- response,
5011
+ response: fullResponse,
3496
5012
  usage
3497
5013
  }
3498
5014
  );
@@ -3504,7 +5020,7 @@ var DefaultStreamObjectResult = class {
3504
5020
  message: "No object generated: response did not match schema.",
3505
5021
  cause: validationResult.error,
3506
5022
  text: accumulatedText,
3507
- response,
5023
+ response: fullResponse,
3508
5024
  usage,
3509
5025
  finishReason
3510
5026
  });
@@ -3534,15 +5050,15 @@ var DefaultStreamObjectResult = class {
3534
5050
  "ai.response.object": {
3535
5051
  output: () => JSON.stringify(object2)
3536
5052
  },
3537
- "ai.response.id": response.id,
3538
- "ai.response.model": response.modelId,
3539
- "ai.response.timestamp": response.timestamp.toISOString(),
5053
+ "ai.response.id": fullResponse.id,
5054
+ "ai.response.model": fullResponse.modelId,
5055
+ "ai.response.timestamp": fullResponse.timestamp.toISOString(),
3540
5056
  "ai.usage.promptTokens": finalUsage.promptTokens,
3541
5057
  "ai.usage.completionTokens": finalUsage.completionTokens,
3542
5058
  // standardized gen-ai llm span attributes:
3543
5059
  "gen_ai.response.finish_reasons": [finishReason],
3544
- "gen_ai.response.id": response.id,
3545
- "gen_ai.response.model": response.modelId,
5060
+ "gen_ai.response.id": fullResponse.id,
5061
+ "gen_ai.response.model": fullResponse.modelId,
3546
5062
  "gen_ai.usage.input_tokens": finalUsage.promptTokens,
3547
5063
  "gen_ai.usage.output_tokens": finalUsage.completionTokens
3548
5064
  }
@@ -3566,8 +5082,8 @@ var DefaultStreamObjectResult = class {
3566
5082
  object: object2,
3567
5083
  error,
3568
5084
  response: {
3569
- ...response,
3570
- headers: rawResponse == null ? void 0 : rawResponse.headers
5085
+ ...fullResponse,
5086
+ headers: response == null ? void 0 : response.headers
3571
5087
  },
3572
5088
  warnings,
3573
5089
  providerMetadata,
@@ -3739,9 +5255,6 @@ var ToolExecutionError = class extends AISDKError10 {
3739
5255
  };
3740
5256
  _a10 = symbol10;
3741
5257
 
3742
- // core/prompt/prepare-tools-and-tool-choice.ts
3743
- import { asSchema as asSchema2 } from "@ai-sdk/ui-utils";
3744
-
3745
5258
  // core/util/is-non-empty-object.ts
3746
5259
  function isNonEmptyObject(object2) {
3747
5260
  return object2 != null && Object.keys(object2).length > 0;
@@ -3772,7 +5285,7 @@ function prepareToolsAndToolChoice({
3772
5285
  type: "function",
3773
5286
  name: name17,
3774
5287
  description: tool2.description,
3775
- parameters: asSchema2(tool2.parameters).jsonSchema
5288
+ parameters: asSchema(tool2.parameters).jsonSchema
3776
5289
  };
3777
5290
  case "provider-defined":
3778
5291
  return {
@@ -3805,8 +5318,7 @@ function removeTextAfterLastWhitespace(text2) {
3805
5318
  }
3806
5319
 
3807
5320
  // core/generate-text/parse-tool-call.ts
3808
- import { safeParseJSON as safeParseJSON2, safeValidateTypes as safeValidateTypes3 } from "@ai-sdk/provider-utils";
3809
- import { asSchema as asSchema3 } from "@ai-sdk/ui-utils";
5321
+ import { safeParseJSON as safeParseJSON3, safeValidateTypes as safeValidateTypes3 } from "@ai-sdk/provider-utils";
3810
5322
 
3811
5323
  // errors/invalid-tool-arguments-error.ts
3812
5324
  import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
@@ -3901,7 +5413,7 @@ async function parseToolCall({
3901
5413
  repairedToolCall = await repairToolCall({
3902
5414
  toolCall,
3903
5415
  tools,
3904
- parameterSchema: ({ toolName }) => asSchema3(tools[toolName].parameters).jsonSchema,
5416
+ parameterSchema: ({ toolName }) => asSchema(tools[toolName].parameters).jsonSchema,
3905
5417
  system,
3906
5418
  messages,
3907
5419
  error
@@ -3930,8 +5442,8 @@ async function doParseToolCall({
3930
5442
  availableTools: Object.keys(tools)
3931
5443
  });
3932
5444
  }
3933
- const schema = asSchema3(tool2.parameters);
3934
- const parseResult = toolCall.args.trim() === "" ? safeValidateTypes3({ value: {}, schema }) : safeParseJSON2({ text: toolCall.args, schema });
5445
+ const schema = asSchema(tool2.parameters);
5446
+ const parseResult = toolCall.args.trim() === "" ? safeValidateTypes3({ value: {}, schema }) : safeParseJSON3({ text: toolCall.args, schema });
3935
5447
  if (parseResult.success === false) {
3936
5448
  throw new InvalidToolArgumentsError({
3937
5449
  toolName,
@@ -3975,7 +5487,7 @@ function toResponseMessages({
3975
5487
  ...files.map((file) => ({
3976
5488
  type: "file",
3977
5489
  data: file.base64,
3978
- mimeType: file.mimeType
5490
+ mediaType: file.mediaType
3979
5491
  })),
3980
5492
  { type: "text", text: text2 },
3981
5493
  ...toolCalls
@@ -4086,7 +5598,7 @@ async function generateText({
4086
5598
  }),
4087
5599
  tracer,
4088
5600
  fn: async (span) => {
4089
- var _a18, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
5601
+ var _a18, _b, _c, _d, _e, _f, _g;
4090
5602
  const toolsAndToolChoice = {
4091
5603
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
4092
5604
  };
@@ -4161,7 +5673,7 @@ async function generateText({
4161
5673
  }),
4162
5674
  tracer,
4163
5675
  fn: async (span2) => {
4164
- var _a19, _b2, _c2, _d2, _e2, _f2;
5676
+ var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h;
4165
5677
  const result = await model.doGenerate({
4166
5678
  ...callSettings,
4167
5679
  ...toolsAndToolChoice,
@@ -4175,7 +5687,9 @@ async function generateText({
4175
5687
  const responseData = {
4176
5688
  id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
4177
5689
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
4178
- modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
5690
+ modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId,
5691
+ headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
5692
+ body: (_h = result.response) == null ? void 0 : _h.body
4179
5693
  };
4180
5694
  span2.setAttributes(
4181
5695
  selectTelemetryAttributes({
@@ -4225,7 +5739,7 @@ async function generateText({
4225
5739
  messages: stepInputMessages,
4226
5740
  abortSignal
4227
5741
  });
4228
- const currentUsage = calculateLanguageModelUsage(
5742
+ const currentUsage = calculateLanguageModelUsage2(
4229
5743
  currentModelResponse.usage
4230
5744
  );
4231
5745
  usage = addLanguageModelUsage(usage, currentUsage);
@@ -4292,8 +5806,6 @@ async function generateText({
4292
5806
  request: (_f = currentModelResponse.request) != null ? _f : {},
4293
5807
  response: {
4294
5808
  ...currentModelResponse.response,
4295
- headers: (_g = currentModelResponse.rawResponse) == null ? void 0 : _g.headers,
4296
- body: (_h = currentModelResponse.rawResponse) == null ? void 0 : _h.body,
4297
5809
  // deep clone msgs to avoid mutating past messages in multi-step:
4298
5810
  messages: structuredClone(responseMessages)
4299
5811
  },
@@ -4345,11 +5857,9 @@ async function generateText({
4345
5857
  finishReason: currentModelResponse.finishReason,
4346
5858
  usage,
4347
5859
  warnings: currentModelResponse.warnings,
4348
- request: (_i = currentModelResponse.request) != null ? _i : {},
5860
+ request: (_g = currentModelResponse.request) != null ? _g : {},
4349
5861
  response: {
4350
5862
  ...currentModelResponse.response,
4351
- headers: (_j = currentModelResponse.rawResponse) == null ? void 0 : _j.headers,
4352
- body: (_k = currentModelResponse.rawResponse) == null ? void 0 : _k.body,
4353
5863
  messages: responseMessages
4354
5864
  },
4355
5865
  logprobs: currentModelResponse.logprobs,
@@ -4478,11 +5988,7 @@ __export(output_exports, {
4478
5988
  object: () => object,
4479
5989
  text: () => text
4480
5990
  });
4481
- import { safeParseJSON as safeParseJSON3, safeValidateTypes as safeValidateTypes4 } from "@ai-sdk/provider-utils";
4482
- import {
4483
- asSchema as asSchema4,
4484
- parsePartialJson as parsePartialJson2
4485
- } from "@ai-sdk/ui-utils";
5991
+ import { safeParseJSON as safeParseJSON4, safeValidateTypes as safeValidateTypes4 } from "@ai-sdk/provider-utils";
4486
5992
 
4487
5993
  // errors/index.ts
4488
5994
  import {
@@ -4558,7 +6064,7 @@ var text = () => ({
4558
6064
  var object = ({
4559
6065
  schema: inputSchema
4560
6066
  }) => {
4561
- const schema = asSchema4(inputSchema);
6067
+ const schema = asSchema(inputSchema);
4562
6068
  return {
4563
6069
  type: "object",
4564
6070
  responseFormat: ({ model }) => ({
@@ -4572,7 +6078,7 @@ var object = ({
4572
6078
  });
4573
6079
  },
4574
6080
  parsePartial({ text: text2 }) {
4575
- const result = parsePartialJson2(text2);
6081
+ const result = parsePartialJson(text2);
4576
6082
  switch (result.state) {
4577
6083
  case "failed-parse":
4578
6084
  case "undefined-input":
@@ -4590,7 +6096,7 @@ var object = ({
4590
6096
  }
4591
6097
  },
4592
6098
  parseOutput({ text: text2 }, context) {
4593
- const parseResult = safeParseJSON3({ text: text2 });
6099
+ const parseResult = safeParseJSON4({ text: text2 });
4594
6100
  if (!parseResult.success) {
4595
6101
  throw new NoObjectGeneratedError({
4596
6102
  message: "No object generated: could not parse the response.",
@@ -4692,7 +6198,6 @@ function smoothStream({
4692
6198
  // core/generate-text/stream-text.ts
4693
6199
  import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
4694
6200
  import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
4695
- import { formatDataStreamPart as formatDataStreamPart2 } from "@ai-sdk/ui-utils";
4696
6201
 
4697
6202
  // util/as-array.ts
4698
6203
  function asArray(value) {
@@ -4807,7 +6312,6 @@ function mergeStreams(stream1, stream2) {
4807
6312
  }
4808
6313
 
4809
6314
  // core/generate-text/run-tools-transformation.ts
4810
- import { generateId } from "@ai-sdk/ui-utils";
4811
6315
  function runToolsTransformation({
4812
6316
  tools,
4813
6317
  generatorStream,
@@ -4855,7 +6359,7 @@ function runToolsTransformation({
4855
6359
  controller.enqueue(
4856
6360
  new DefaultGeneratedFileWithType({
4857
6361
  data: chunk.data,
4858
- mimeType: chunk.mimeType
6362
+ mediaType: chunk.mediaType
4859
6363
  })
4860
6364
  );
4861
6365
  break;
@@ -4966,7 +6470,7 @@ function runToolsTransformation({
4966
6470
  type: "finish",
4967
6471
  finishReason: chunk.finishReason,
4968
6472
  logprobs: chunk.logprobs,
4969
- usage: calculateLanguageModelUsage(chunk.usage),
6473
+ usage: calculateLanguageModelUsage2(chunk.usage),
4970
6474
  experimental_providerMetadata: chunk.providerMetadata
4971
6475
  };
4972
6476
  break;
@@ -5477,7 +6981,7 @@ var DefaultStreamTextResult = class {
5477
6981
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
5478
6982
  };
5479
6983
  const {
5480
- result: { stream: stream2, warnings, rawResponse, request },
6984
+ result: { stream: stream2, warnings, response, request },
5481
6985
  doStreamSpan,
5482
6986
  startTimestampMs
5483
6987
  } = await retry(
@@ -5788,7 +7292,7 @@ var DefaultStreamTextResult = class {
5788
7292
  request: stepRequest,
5789
7293
  response: {
5790
7294
  ...stepResponse,
5791
- headers: rawResponse == null ? void 0 : rawResponse.headers
7295
+ headers: response == null ? void 0 : response.headers
5792
7296
  },
5793
7297
  warnings,
5794
7298
  isContinued: nextStepType === "continue",
@@ -5805,7 +7309,7 @@ var DefaultStreamTextResult = class {
5805
7309
  logprobs: stepLogProbs,
5806
7310
  response: {
5807
7311
  ...stepResponse,
5808
- headers: rawResponse == null ? void 0 : rawResponse.headers
7312
+ headers: response == null ? void 0 : response.headers
5809
7313
  }
5810
7314
  });
5811
7315
  self.closeStream();
@@ -6001,13 +7505,13 @@ var DefaultStreamTextResult = class {
6001
7505
  const chunkType = chunk.type;
6002
7506
  switch (chunkType) {
6003
7507
  case "text-delta": {
6004
- controller.enqueue(formatDataStreamPart2("text", chunk.textDelta));
7508
+ controller.enqueue(formatDataStreamPart("text", chunk.textDelta));
6005
7509
  break;
6006
7510
  }
6007
7511
  case "reasoning": {
6008
7512
  if (sendReasoning) {
6009
7513
  controller.enqueue(
6010
- formatDataStreamPart2("reasoning", chunk.textDelta)
7514
+ formatDataStreamPart("reasoning", chunk.textDelta)
6011
7515
  );
6012
7516
  }
6013
7517
  break;
@@ -6015,7 +7519,7 @@ var DefaultStreamTextResult = class {
6015
7519
  case "redacted-reasoning": {
6016
7520
  if (sendReasoning) {
6017
7521
  controller.enqueue(
6018
- formatDataStreamPart2("redacted_reasoning", {
7522
+ formatDataStreamPart("redacted_reasoning", {
6019
7523
  data: chunk.data
6020
7524
  })
6021
7525
  );
@@ -6025,7 +7529,7 @@ var DefaultStreamTextResult = class {
6025
7529
  case "reasoning-signature": {
6026
7530
  if (sendReasoning) {
6027
7531
  controller.enqueue(
6028
- formatDataStreamPart2("reasoning_signature", {
7532
+ formatDataStreamPart("reasoning_signature", {
6029
7533
  signature: chunk.signature
6030
7534
  })
6031
7535
  );
@@ -6034,8 +7538,8 @@ var DefaultStreamTextResult = class {
6034
7538
  }
6035
7539
  case "file": {
6036
7540
  controller.enqueue(
6037
- formatDataStreamPart2("file", {
6038
- mimeType: chunk.mimeType,
7541
+ formatDataStreamPart("file", {
7542
+ mimeType: chunk.mediaType,
6039
7543
  data: chunk.base64
6040
7544
  })
6041
7545
  );
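Note that in the data-stream branch above, the internal "file" chunk now carries mediaType, while the part written to the data stream still uses the mimeType key, so the wire format itself is unchanged in this hunk. Roughly (values illustrative):

    // Illustrative: internal chunk vs. emitted data stream part.
    const chunk = { type: "file", mediaType: "image/png", base64: "iVBORw0KGgo..." };
    // formatDataStreamPart("file", { mimeType: chunk.mediaType, data: chunk.base64 })
    // -> the emitted part keeps the "mimeType" field name.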
@@ -6044,14 +7548,14 @@ var DefaultStreamTextResult = class {
6044
7548
  case "source": {
6045
7549
  if (sendSources) {
6046
7550
  controller.enqueue(
6047
- formatDataStreamPart2("source", chunk.source)
7551
+ formatDataStreamPart("source", chunk.source)
6048
7552
  );
6049
7553
  }
6050
7554
  break;
6051
7555
  }
6052
7556
  case "tool-call-streaming-start": {
6053
7557
  controller.enqueue(
6054
- formatDataStreamPart2("tool_call_streaming_start", {
7558
+ formatDataStreamPart("tool_call_streaming_start", {
6055
7559
  toolCallId: chunk.toolCallId,
6056
7560
  toolName: chunk.toolName
6057
7561
  })
@@ -6060,7 +7564,7 @@ var DefaultStreamTextResult = class {
6060
7564
  }
6061
7565
  case "tool-call-delta": {
6062
7566
  controller.enqueue(
6063
- formatDataStreamPart2("tool_call_delta", {
7567
+ formatDataStreamPart("tool_call_delta", {
6064
7568
  toolCallId: chunk.toolCallId,
6065
7569
  argsTextDelta: chunk.argsTextDelta
6066
7570
  })
@@ -6069,7 +7573,7 @@ var DefaultStreamTextResult = class {
6069
7573
  }
6070
7574
  case "tool-call": {
6071
7575
  controller.enqueue(
6072
- formatDataStreamPart2("tool_call", {
7576
+ formatDataStreamPart("tool_call", {
6073
7577
  toolCallId: chunk.toolCallId,
6074
7578
  toolName: chunk.toolName,
6075
7579
  args: chunk.args
@@ -6079,7 +7583,7 @@ var DefaultStreamTextResult = class {
6079
7583
  }
6080
7584
  case "tool-result": {
6081
7585
  controller.enqueue(
6082
- formatDataStreamPart2("tool_result", {
7586
+ formatDataStreamPart("tool_result", {
6083
7587
  toolCallId: chunk.toolCallId,
6084
7588
  result: chunk.result
6085
7589
  })
@@ -6088,13 +7592,13 @@ var DefaultStreamTextResult = class {
6088
7592
  }
6089
7593
  case "error": {
6090
7594
  controller.enqueue(
6091
- formatDataStreamPart2("error", getErrorMessage5(chunk.error))
7595
+ formatDataStreamPart("error", getErrorMessage5(chunk.error))
6092
7596
  );
6093
7597
  break;
6094
7598
  }
6095
7599
  case "step-start": {
6096
7600
  controller.enqueue(
6097
- formatDataStreamPart2("start_step", {
7601
+ formatDataStreamPart("start_step", {
6098
7602
  messageId: chunk.messageId
6099
7603
  })
6100
7604
  );
@@ -6102,7 +7606,7 @@ var DefaultStreamTextResult = class {
6102
7606
  }
6103
7607
  case "step-finish": {
6104
7608
  controller.enqueue(
6105
- formatDataStreamPart2("finish_step", {
7609
+ formatDataStreamPart("finish_step", {
6106
7610
  finishReason: chunk.finishReason,
6107
7611
  usage: sendUsage ? {
6108
7612
  promptTokens: chunk.usage.promptTokens,
@@ -6116,7 +7620,7 @@ var DefaultStreamTextResult = class {
6116
7620
  case "finish": {
6117
7621
  if (experimental_sendFinish) {
6118
7622
  controller.enqueue(
6119
- formatDataStreamPart2("finish_message", {
7623
+ formatDataStreamPart("finish_message", {
6120
7624
  finishReason: chunk.finishReason,
6121
7625
  usage: sendUsage ? {
6122
7626
  promptTokens: chunk.usage.promptTokens,
@@ -6239,6 +7743,70 @@ var DefaultStreamTextResult = class {
6239
7743
  }
6240
7744
  };
6241
7745
 
7746
+ // errors/no-transcript-generated-error.ts
7747
+ import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
7748
+ var NoTranscriptGeneratedError = class extends AISDKError18 {
7749
+ constructor(options) {
7750
+ super({
7751
+ name: "AI_NoTranscriptGeneratedError",
7752
+ message: "No transcript generated."
7753
+ });
7754
+ this.responses = options.responses;
7755
+ }
7756
+ };
7757
+
7758
+ // core/transcribe/transcribe.ts
7759
+ async function transcribe({
7760
+ model,
7761
+ audio,
7762
+ providerOptions = {},
7763
+ maxRetries: maxRetriesArg,
7764
+ abortSignal,
7765
+ headers
7766
+ }) {
7767
+ const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
7768
+ const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
7769
+ const result = await retry(
7770
+ () => {
7771
+ var _a17;
7772
+ return model.doGenerate({
7773
+ audio: audioData,
7774
+ abortSignal,
7775
+ headers,
7776
+ providerOptions,
7777
+ mediaType: (_a17 = detectMediaType({
7778
+ data: audioData,
7779
+ signatures: audioMediaTypeSignatures
7780
+ })) != null ? _a17 : "audio/wav"
7781
+ });
7782
+ }
7783
+ );
7784
+ if (!result.text) {
7785
+ throw new NoTranscriptGeneratedError({ responses: [result.response] });
7786
+ }
7787
+ return new DefaultTranscriptionResult({
7788
+ text: result.text,
7789
+ segments: result.segments,
7790
+ language: result.language,
7791
+ durationInSeconds: result.durationInSeconds,
7792
+ warnings: result.warnings,
7793
+ responses: [result.response],
7794
+ providerMetadata: result.providerMetadata
7795
+ });
7796
+ }
7797
+ var DefaultTranscriptionResult = class {
7798
+ constructor(options) {
7799
+ var _a17;
7800
+ this.text = options.text;
7801
+ this.segments = options.segments;
7802
+ this.language = options.language;
7803
+ this.durationInSeconds = options.durationInSeconds;
7804
+ this.warnings = options.warnings;
7805
+ this.responses = options.responses;
7806
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
7807
+ }
7808
+ };
7809
+
6242
7810
  // core/util/merge-objects.ts
6243
7811
  function mergeObjects(target, source) {
6244
7812
  if (target === void 0 && source === void 0) {
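The new transcribe function added above (exported further down as experimental_transcribe) downloads URL audio or converts raw data content, detects the audio media type with audioMediaTypeSignatures (defaulting to "audio/wav"), retries via prepareRetries, and throws NoTranscriptGeneratedError when the model returns no text. A hedged usage sketch; the model argument is a placeholder for a provider transcription model and is not taken from this diff:

    // Sketch only, assuming a transcription model instance from a provider package.
    import { experimental_transcribe as transcribe } from "ai";

    async function transcribeSample(model, audioUrl) {
      const result = await transcribe({
        model,                      // placeholder transcription model
        audio: new URL(audioUrl),   // a Uint8Array or base64 data content also works
        maxRetries: 2,
      });
      // Fields per DefaultTranscriptionResult in the added code above:
      return { text: result.text, language: result.language, seconds: result.durationInSeconds };
    }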
@@ -6481,7 +8049,7 @@ function simulateStreamingMiddleware() {
6481
8049
  return {
6482
8050
  stream: simulatedStream,
6483
8051
  rawCall: result.rawCall,
6484
- rawResponse: result.rawResponse,
8052
+ rawResponse: result.response,
6485
8053
  warnings: result.warnings
6486
8054
  };
6487
8055
  }
@@ -6553,16 +8121,13 @@ function appendClientMessage({
6553
8121
  }
6554
8122
 
6555
8123
  // core/prompt/append-response-messages.ts
6556
- import {
6557
- extractMaxToolInvocationStep
6558
- } from "@ai-sdk/ui-utils";
6559
- import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
8124
+ import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
6560
8125
  function appendResponseMessages({
6561
8126
  messages,
6562
8127
  responseMessages,
6563
8128
  _internal: { currentDate = () => /* @__PURE__ */ new Date() } = {}
6564
8129
  }) {
6565
- var _a17, _b, _c, _d;
8130
+ var _a17, _b, _c, _d, _e;
6566
8131
  const clonedMessages = structuredClone(messages);
6567
8132
  for (const message of responseMessages) {
6568
8133
  const role = message.role;
@@ -6639,14 +8204,14 @@ function appendResponseMessages({
6639
8204
  break;
6640
8205
  case "file":
6641
8206
  if (part.data instanceof URL) {
6642
- throw new AISDKError18({
8207
+ throw new AISDKError19({
6643
8208
  name: "InvalidAssistantFileData",
6644
8209
  message: "File data cannot be a URL"
6645
8210
  });
6646
8211
  }
6647
8212
  parts.push({
6648
8213
  type: "file",
6649
- mimeType: part.mimeType,
8214
+ mediaType: (_a17 = part.mediaType) != null ? _a17 : part.mimeType,
6650
8215
  data: convertDataContentToBase64String(part.data)
6651
8216
  });
6652
8217
  break;
@@ -6657,12 +8222,12 @@ function appendResponseMessages({
6657
8222
  const maxStep = extractMaxToolInvocationStep(
6658
8223
  lastMessage.toolInvocations
6659
8224
  );
6660
- (_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];
8225
+ (_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
6661
8226
  lastMessage.content = textContent;
6662
8227
  lastMessage.reasoning = reasoningTextContent;
6663
8228
  lastMessage.parts.push(...parts);
6664
8229
  lastMessage.toolInvocations = [
6665
- ...(_b = lastMessage.toolInvocations) != null ? _b : [],
8230
+ ...(_c = lastMessage.toolInvocations) != null ? _c : [],
6666
8231
  ...getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1)
6667
8232
  ];
6668
8233
  getToolInvocations2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({
@@ -6692,13 +8257,13 @@ function appendResponseMessages({
6692
8257
  break;
6693
8258
  }
6694
8259
  case "tool": {
6695
- (_c = lastMessage.toolInvocations) != null ? _c : lastMessage.toolInvocations = [];
8260
+ (_d = lastMessage.toolInvocations) != null ? _d : lastMessage.toolInvocations = [];
6696
8261
  if (lastMessage.role !== "assistant") {
6697
8262
  throw new Error(
6698
8263
  `Tool result must follow an assistant message: ${lastMessage.role}`
6699
8264
  );
6700
8265
  }
6701
- (_d = lastMessage.parts) != null ? _d : lastMessage.parts = [];
8266
+ (_e = lastMessage.parts) != null ? _e : lastMessage.parts = [];
6702
8267
  for (const contentPart of message.content) {
6703
8268
  const toolCall = lastMessage.toolInvocations.find(
6704
8269
  (call) => call.toolCallId === contentPart.toolCallId
@@ -6773,7 +8338,7 @@ function customProvider({
6773
8338
  var experimental_customProvider = customProvider;
6774
8339
 
6775
8340
  // core/registry/no-such-provider-error.ts
6776
- import { AISDKError as AISDKError19, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
8341
+ import { AISDKError as AISDKError20, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
6777
8342
  var name16 = "AI_NoSuchProviderError";
6778
8343
  var marker16 = `vercel.ai.error.${name16}`;
6779
8344
  var symbol16 = Symbol.for(marker16);
@@ -6792,7 +8357,7 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
6792
8357
  this.availableProviders = availableProviders;
6793
8358
  }
6794
8359
  static isInstance(error) {
6795
- return AISDKError19.hasMarker(error, marker16);
8360
+ return AISDKError20.hasMarker(error, marker16);
6796
8361
  }
6797
8362
  };
6798
8363
  _a16 = symbol16;
@@ -6879,9 +8444,6 @@ var DefaultProviderRegistry = class {
6879
8444
  }
6880
8445
  };
6881
8446
 
6882
- // core/tool/mcp/mcp-client.ts
6883
- import { jsonSchema } from "@ai-sdk/ui-utils";
6884
-
6885
8447
  // core/tool/tool.ts
6886
8448
  function tool(tool2) {
6887
8449
  return tool2;
@@ -7495,7 +9057,6 @@ __export(langchain_adapter_exports, {
7495
9057
  toDataStream: () => toDataStream,
7496
9058
  toDataStreamResponse: () => toDataStreamResponse
7497
9059
  });
7498
- import { formatDataStreamPart as formatDataStreamPart4 } from "@ai-sdk/ui-utils";
7499
9060
 
7500
9061
  // streams/stream-callbacks.ts
7501
9062
  function createCallbacksTransformer(callbacks = {}) {
@@ -7551,7 +9112,7 @@ function toDataStreamInternal(stream, callbacks) {
7551
9112
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
7552
9113
  new TransformStream({
7553
9114
  transform: async (chunk, controller) => {
7554
- controller.enqueue(formatDataStreamPart4("text", chunk));
9115
+ controller.enqueue(formatDataStreamPart("text", chunk));
7555
9116
  }
7556
9117
  })
7557
9118
  );
@@ -7603,7 +9164,6 @@ __export(llamaindex_adapter_exports, {
7603
9164
  toDataStreamResponse: () => toDataStreamResponse2
7604
9165
  });
7605
9166
  import { convertAsyncIteratorToReadableStream } from "@ai-sdk/provider-utils";
7606
- import { formatDataStreamPart as formatDataStreamPart5 } from "@ai-sdk/ui-utils";
7607
9167
  function toDataStreamInternal2(stream, callbacks) {
7608
9168
  const trimStart = trimStartOfStream();
7609
9169
  return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]()).pipeThrough(
@@ -7615,7 +9175,7 @@ function toDataStreamInternal2(stream, callbacks) {
7615
9175
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
7616
9176
  new TransformStream({
7617
9177
  transform: async (chunk, controller) => {
7618
- controller.enqueue(formatDataStreamPart5("text", chunk));
9178
+ controller.enqueue(formatDataStreamPart("text", chunk));
7619
9179
  }
7620
9180
  })
7621
9181
  );
@@ -7656,9 +9216,6 @@ function trimStartOfStream() {
7656
9216
  };
7657
9217
  }
7658
9218
 
7659
- // streams/stream-data.ts
7660
- import { formatDataStreamPart as formatDataStreamPart6 } from "@ai-sdk/ui-utils";
7661
-
7662
9219
  // util/constants.ts
7663
9220
  var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
7664
9221
 
@@ -7709,7 +9266,7 @@ var StreamData = class {
7709
9266
  throw new Error("Stream controller is not initialized.");
7710
9267
  }
7711
9268
  this.controller.enqueue(
7712
- this.encoder.encode(formatDataStreamPart6("data", [value]))
9269
+ this.encoder.encode(formatDataStreamPart("data", [value]))
7713
9270
  );
7714
9271
  }
7715
9272
  appendMessageAnnotation(value) {
@@ -7720,7 +9277,7 @@ var StreamData = class {
7720
9277
  throw new Error("Stream controller is not initialized.");
7721
9278
  }
7722
9279
  this.controller.enqueue(
7723
- this.encoder.encode(formatDataStreamPart6("message_annotations", [value]))
9280
+ this.encoder.encode(formatDataStreamPart("message_annotations", [value]))
7724
9281
  );
7725
9282
  }
7726
9283
  };
@@ -7758,6 +9315,9 @@ export {
7758
9315
  UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,
7759
9316
  appendClientMessage,
7760
9317
  appendResponseMessages,
9318
+ asSchema,
9319
+ callChatApi,
9320
+ callCompletionApi,
7761
9321
  convertToCoreMessages,
7762
9322
  coreAssistantMessageSchema,
7763
9323
  coreMessageSchema,
@@ -7777,23 +9337,34 @@ export {
7777
9337
  experimental_createProviderRegistry,
7778
9338
  experimental_customProvider,
7779
9339
  generateImage as experimental_generateImage,
9340
+ transcribe as experimental_transcribe,
7780
9341
  experimental_wrapLanguageModel,
9342
+ extractMaxToolInvocationStep,
7781
9343
  extractReasoningMiddleware,
7782
- formatDataStreamPart3 as formatDataStreamPart,
9344
+ fillMessageParts,
9345
+ formatDataStreamPart,
7783
9346
  generateId2 as generateId,
7784
9347
  generateObject,
7785
9348
  generateText,
7786
- jsonSchema2 as jsonSchema,
9349
+ getMessageParts,
9350
+ getTextFromDataUrl,
9351
+ isAssistantMessageWithCompletedToolCalls,
9352
+ isDeepEqualData,
9353
+ jsonSchema,
7787
9354
  parseDataStreamPart,
9355
+ parsePartialJson,
7788
9356
  pipeDataStreamToResponse,
9357
+ prepareAttachmentsForRequest,
7789
9358
  processDataStream,
7790
9359
  processTextStream,
9360
+ shouldResubmitMessages,
7791
9361
  simulateReadableStream,
7792
9362
  simulateStreamingMiddleware,
7793
9363
  smoothStream,
7794
9364
  streamObject,
7795
9365
  streamText,
7796
9366
  tool,
9367
+ updateToolCallResult,
7797
9368
  wrapLanguageModel,
7798
9369
  zodSchema
7799
9370
  };
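Taken together with the import removals throughout this file, the expanded export list above means helpers that previously came from @ai-sdk/ui-utils (for example asSchema, formatDataStreamPart, parsePartialJson, processDataStream) are now bundled into and re-exported from the ai package itself. A hedged import sketch for downstream code:

    // Illustrative: importing these helpers directly from "ai" after this change.
    import {
      asSchema,
      formatDataStreamPart,
      parsePartialJson,
      processDataStream,
      experimental_transcribe,
    } from "ai";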