ai 5.0.0-beta.2 → 5.0.0-beta.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/ai.js ADDED
@@ -0,0 +1,4664 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
7
+ var __export = (target, all) => {
8
+ for (var name9 in all)
9
+ __defProp(target, name9, { get: all[name9], enumerable: true });
10
+ };
11
+ var __copyProps = (to, from, except, desc) => {
12
+ if (from && typeof from === "object" || typeof from === "function") {
13
+ for (let key of __getOwnPropNames(from))
14
+ if (!__hasOwnProp.call(to, key) && key !== except)
15
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
+ }
17
+ return to;
18
+ };
19
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
20
+
21
+ // src/bin/ai.ts
22
+ var ai_exports = {};
23
+ __export(ai_exports, {
24
+ formatAttachedFiles: () => formatAttachedFiles,
25
+ getMediaType: () => getMediaType,
26
+ isStdinAvailable: () => isStdinAvailable,
27
+ parseArgs: () => parseArgs,
28
+ readFileContent: () => readFileContent,
29
+ resolveModel: () => resolveModel,
30
+ showHelp: () => showHelp,
31
+ showVersion: () => showVersion
32
+ });
33
+ module.exports = __toCommonJS(ai_exports);
34
+
35
+ // src/generate-text/stream-text.ts
36
+ var import_provider16 = require("@ai-sdk/provider");
37
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
38
+
39
+ // src/error/no-output-specified-error.ts
40
+ var import_provider = require("@ai-sdk/provider");
41
+ var name = "AI_NoOutputSpecifiedError";
42
+ var marker = `vercel.ai.error.${name}`;
43
+ var symbol = Symbol.for(marker);
44
+ var _a;
45
+ var NoOutputSpecifiedError = class extends import_provider.AISDKError {
46
+ // used in isInstance
47
+ constructor({ message = "No output specified." } = {}) {
48
+ super({ name, message });
49
+ this[_a] = true;
50
+ }
51
+ static isInstance(error) {
52
+ return import_provider.AISDKError.hasMarker(error, marker);
53
+ }
54
+ };
55
+ _a = symbol;
56
+
57
+ // src/util/prepare-headers.ts
58
+ function prepareHeaders(headers, defaultHeaders) {
59
+ const responseHeaders = new Headers(headers != null ? headers : {});
60
+ for (const [key, value] of Object.entries(defaultHeaders)) {
61
+ if (!responseHeaders.has(key)) {
62
+ responseHeaders.set(key, value);
63
+ }
64
+ }
65
+ return responseHeaders;
66
+ }
67
+
68
+ // src/text-stream/create-text-stream-response.ts
69
+ function createTextStreamResponse({
70
+ status,
71
+ statusText,
72
+ headers,
73
+ textStream
74
+ }) {
75
+ return new Response(textStream.pipeThrough(new TextEncoderStream()), {
76
+ status: status != null ? status : 200,
77
+ statusText,
78
+ headers: prepareHeaders(headers, {
79
+ "content-type": "text/plain; charset=utf-8"
80
+ })
81
+ });
82
+ }
83
+
84
+ // src/util/write-to-server-response.ts
85
+ function writeToServerResponse({
86
+ response,
87
+ status,
88
+ statusText,
89
+ headers,
90
+ stream
91
+ }) {
92
+ response.writeHead(status != null ? status : 200, statusText, headers);
93
+ const reader = stream.getReader();
94
+ const read = async () => {
95
+ try {
96
+ while (true) {
97
+ const { done, value } = await reader.read();
98
+ if (done)
99
+ break;
100
+ response.write(value);
101
+ }
102
+ } catch (error) {
103
+ throw error;
104
+ } finally {
105
+ response.end();
106
+ }
107
+ };
108
+ read();
109
+ }
110
+
111
+ // src/text-stream/pipe-text-stream-to-response.ts
112
+ function pipeTextStreamToResponse({
113
+ response,
114
+ status,
115
+ statusText,
116
+ headers,
117
+ textStream
118
+ }) {
119
+ writeToServerResponse({
120
+ response,
121
+ status,
122
+ statusText,
123
+ headers: Object.fromEntries(
124
+ prepareHeaders(headers, {
125
+ "content-type": "text/plain; charset=utf-8"
126
+ }).entries()
127
+ ),
128
+ stream: textStream.pipeThrough(new TextEncoderStream())
129
+ });
130
+ }
131
+
132
+ // src/ui-message-stream/json-to-sse-transform-stream.ts
133
+ var JsonToSseTransformStream = class extends TransformStream {
134
+ constructor() {
135
+ super({
136
+ transform(part, controller) {
137
+ controller.enqueue(`data: ${JSON.stringify(part)}
138
+
139
+ `);
140
+ },
141
+ flush(controller) {
142
+ controller.enqueue("data: [DONE]\n\n");
143
+ }
144
+ });
145
+ }
146
+ };
147
+
148
+ // src/ui-message-stream/ui-message-stream-headers.ts
149
+ var UI_MESSAGE_STREAM_HEADERS = {
150
+ "content-type": "text/event-stream",
151
+ "cache-control": "no-cache",
152
+ connection: "keep-alive",
153
+ "x-vercel-ai-ui-message-stream": "v1",
154
+ "x-accel-buffering": "no"
155
+ // disable nginx buffering
156
+ };
157
+
158
+ // src/ui-message-stream/create-ui-message-stream-response.ts
159
+ function createUIMessageStreamResponse({
160
+ status,
161
+ statusText,
162
+ headers,
163
+ stream,
164
+ consumeSseStream
165
+ }) {
166
+ let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
167
+ if (consumeSseStream) {
168
+ const [stream1, stream2] = sseStream.tee();
169
+ sseStream = stream1;
170
+ consumeSseStream({ stream: stream2 });
171
+ }
172
+ return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
173
+ status,
174
+ statusText,
175
+ headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
176
+ });
177
+ }
178
+
179
+ // src/ui-message-stream/get-response-ui-message-id.ts
180
+ function getResponseUIMessageId({
181
+ originalMessages,
182
+ responseMessageId
183
+ }) {
184
+ if (originalMessages == null) {
185
+ return void 0;
186
+ }
187
+ const lastMessage = originalMessages[originalMessages.length - 1];
188
+ return (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
189
+ }
190
+
191
+ // src/ui/process-ui-message-stream.ts
192
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
193
+
194
+ // src/ui-message-stream/ui-message-chunks.ts
195
+ var import_v4 = require("zod/v4");
196
+ var uiMessageChunkSchema = import_v4.z.union([
197
+ import_v4.z.strictObject({
198
+ type: import_v4.z.literal("text-start"),
199
+ id: import_v4.z.string()
200
+ }),
201
+ import_v4.z.strictObject({
202
+ type: import_v4.z.literal("text-delta"),
203
+ id: import_v4.z.string(),
204
+ delta: import_v4.z.string()
205
+ }),
206
+ import_v4.z.strictObject({
207
+ type: import_v4.z.literal("text-end"),
208
+ id: import_v4.z.string()
209
+ }),
210
+ import_v4.z.strictObject({
211
+ type: import_v4.z.literal("error"),
212
+ errorText: import_v4.z.string()
213
+ }),
214
+ import_v4.z.strictObject({
215
+ type: import_v4.z.literal("tool-input-start"),
216
+ toolCallId: import_v4.z.string(),
217
+ toolName: import_v4.z.string(),
218
+ providerExecuted: import_v4.z.boolean().optional()
219
+ }),
220
+ import_v4.z.strictObject({
221
+ type: import_v4.z.literal("tool-input-delta"),
222
+ toolCallId: import_v4.z.string(),
223
+ inputTextDelta: import_v4.z.string()
224
+ }),
225
+ import_v4.z.strictObject({
226
+ type: import_v4.z.literal("tool-input-available"),
227
+ toolCallId: import_v4.z.string(),
228
+ toolName: import_v4.z.string(),
229
+ input: import_v4.z.unknown(),
230
+ providerExecuted: import_v4.z.boolean().optional()
231
+ }),
232
+ import_v4.z.strictObject({
233
+ type: import_v4.z.literal("tool-output-available"),
234
+ toolCallId: import_v4.z.string(),
235
+ output: import_v4.z.unknown(),
236
+ providerExecuted: import_v4.z.boolean().optional()
237
+ }),
238
+ import_v4.z.strictObject({
239
+ type: import_v4.z.literal("tool-output-error"),
240
+ toolCallId: import_v4.z.string(),
241
+ errorText: import_v4.z.string(),
242
+ providerExecuted: import_v4.z.boolean().optional()
243
+ }),
244
+ import_v4.z.strictObject({
245
+ type: import_v4.z.literal("reasoning"),
246
+ text: import_v4.z.string(),
247
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
248
+ }),
249
+ import_v4.z.strictObject({
250
+ type: import_v4.z.literal("reasoning-start"),
251
+ id: import_v4.z.string(),
252
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
253
+ }),
254
+ import_v4.z.strictObject({
255
+ type: import_v4.z.literal("reasoning-delta"),
256
+ id: import_v4.z.string(),
257
+ delta: import_v4.z.string(),
258
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
259
+ }),
260
+ import_v4.z.strictObject({
261
+ type: import_v4.z.literal("reasoning-end"),
262
+ id: import_v4.z.string(),
263
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
264
+ }),
265
+ import_v4.z.strictObject({
266
+ type: import_v4.z.literal("reasoning-part-finish")
267
+ }),
268
+ import_v4.z.strictObject({
269
+ type: import_v4.z.literal("source-url"),
270
+ sourceId: import_v4.z.string(),
271
+ url: import_v4.z.string(),
272
+ title: import_v4.z.string().optional(),
273
+ providerMetadata: import_v4.z.any().optional()
274
+ // Use z.any() for generic metadata
275
+ }),
276
+ import_v4.z.strictObject({
277
+ type: import_v4.z.literal("source-document"),
278
+ sourceId: import_v4.z.string(),
279
+ mediaType: import_v4.z.string(),
280
+ title: import_v4.z.string(),
281
+ filename: import_v4.z.string().optional(),
282
+ providerMetadata: import_v4.z.any().optional()
283
+ // Use z.any() for generic metadata
284
+ }),
285
+ import_v4.z.strictObject({
286
+ type: import_v4.z.literal("file"),
287
+ url: import_v4.z.string(),
288
+ mediaType: import_v4.z.string()
289
+ }),
290
+ import_v4.z.strictObject({
291
+ type: import_v4.z.string().startsWith("data-"),
292
+ id: import_v4.z.string().optional(),
293
+ data: import_v4.z.unknown(),
294
+ transient: import_v4.z.boolean().optional()
295
+ }),
296
+ import_v4.z.strictObject({
297
+ type: import_v4.z.literal("start-step")
298
+ }),
299
+ import_v4.z.strictObject({
300
+ type: import_v4.z.literal("finish-step")
301
+ }),
302
+ import_v4.z.strictObject({
303
+ type: import_v4.z.literal("start"),
304
+ messageId: import_v4.z.string().optional(),
305
+ messageMetadata: import_v4.z.unknown().optional()
306
+ }),
307
+ import_v4.z.strictObject({
308
+ type: import_v4.z.literal("finish"),
309
+ messageMetadata: import_v4.z.unknown().optional()
310
+ }),
311
+ import_v4.z.strictObject({
312
+ type: import_v4.z.literal("message-metadata"),
313
+ messageMetadata: import_v4.z.unknown()
314
+ })
315
+ ]);
316
+ function isDataUIMessageChunk(chunk) {
317
+ return chunk.type.startsWith("data-");
318
+ }
319
+
320
+ // src/util/merge-objects.ts
321
+ function mergeObjects(base, overrides) {
322
+ if (base === void 0 && overrides === void 0) {
323
+ return void 0;
324
+ }
325
+ if (base === void 0) {
326
+ return overrides;
327
+ }
328
+ if (overrides === void 0) {
329
+ return base;
330
+ }
331
+ const result = { ...base };
332
+ for (const key in overrides) {
333
+ if (Object.prototype.hasOwnProperty.call(overrides, key)) {
334
+ const overridesValue = overrides[key];
335
+ if (overridesValue === void 0)
336
+ continue;
337
+ const baseValue = key in base ? base[key] : void 0;
338
+ const isSourceObject = overridesValue !== null && typeof overridesValue === "object" && !Array.isArray(overridesValue) && !(overridesValue instanceof Date) && !(overridesValue instanceof RegExp);
339
+ const isTargetObject = baseValue !== null && baseValue !== void 0 && typeof baseValue === "object" && !Array.isArray(baseValue) && !(baseValue instanceof Date) && !(baseValue instanceof RegExp);
340
+ if (isSourceObject && isTargetObject) {
341
+ result[key] = mergeObjects(
342
+ baseValue,
343
+ overridesValue
344
+ );
345
+ } else {
346
+ result[key] = overridesValue;
347
+ }
348
+ }
349
+ }
350
+ return result;
351
+ }
352
+
353
+ // src/util/parse-partial-json.ts
354
+ var import_provider_utils = require("@ai-sdk/provider-utils");
355
+
356
+ // src/util/fix-json.ts
357
+ function fixJson(input) {
358
+ const stack = ["ROOT"];
359
+ let lastValidIndex = -1;
360
+ let literalStart = null;
361
+ function processValueStart(char, i, swapState) {
362
+ {
363
+ switch (char) {
364
+ case '"': {
365
+ lastValidIndex = i;
366
+ stack.pop();
367
+ stack.push(swapState);
368
+ stack.push("INSIDE_STRING");
369
+ break;
370
+ }
371
+ case "f":
372
+ case "t":
373
+ case "n": {
374
+ lastValidIndex = i;
375
+ literalStart = i;
376
+ stack.pop();
377
+ stack.push(swapState);
378
+ stack.push("INSIDE_LITERAL");
379
+ break;
380
+ }
381
+ case "-": {
382
+ stack.pop();
383
+ stack.push(swapState);
384
+ stack.push("INSIDE_NUMBER");
385
+ break;
386
+ }
387
+ case "0":
388
+ case "1":
389
+ case "2":
390
+ case "3":
391
+ case "4":
392
+ case "5":
393
+ case "6":
394
+ case "7":
395
+ case "8":
396
+ case "9": {
397
+ lastValidIndex = i;
398
+ stack.pop();
399
+ stack.push(swapState);
400
+ stack.push("INSIDE_NUMBER");
401
+ break;
402
+ }
403
+ case "{": {
404
+ lastValidIndex = i;
405
+ stack.pop();
406
+ stack.push(swapState);
407
+ stack.push("INSIDE_OBJECT_START");
408
+ break;
409
+ }
410
+ case "[": {
411
+ lastValidIndex = i;
412
+ stack.pop();
413
+ stack.push(swapState);
414
+ stack.push("INSIDE_ARRAY_START");
415
+ break;
416
+ }
417
+ }
418
+ }
419
+ }
420
+ function processAfterObjectValue(char, i) {
421
+ switch (char) {
422
+ case ",": {
423
+ stack.pop();
424
+ stack.push("INSIDE_OBJECT_AFTER_COMMA");
425
+ break;
426
+ }
427
+ case "}": {
428
+ lastValidIndex = i;
429
+ stack.pop();
430
+ break;
431
+ }
432
+ }
433
+ }
434
+ function processAfterArrayValue(char, i) {
435
+ switch (char) {
436
+ case ",": {
437
+ stack.pop();
438
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
439
+ break;
440
+ }
441
+ case "]": {
442
+ lastValidIndex = i;
443
+ stack.pop();
444
+ break;
445
+ }
446
+ }
447
+ }
448
+ for (let i = 0; i < input.length; i++) {
449
+ const char = input[i];
450
+ const currentState = stack[stack.length - 1];
451
+ switch (currentState) {
452
+ case "ROOT":
453
+ processValueStart(char, i, "FINISH");
454
+ break;
455
+ case "INSIDE_OBJECT_START": {
456
+ switch (char) {
457
+ case '"': {
458
+ stack.pop();
459
+ stack.push("INSIDE_OBJECT_KEY");
460
+ break;
461
+ }
462
+ case "}": {
463
+ lastValidIndex = i;
464
+ stack.pop();
465
+ break;
466
+ }
467
+ }
468
+ break;
469
+ }
470
+ case "INSIDE_OBJECT_AFTER_COMMA": {
471
+ switch (char) {
472
+ case '"': {
473
+ stack.pop();
474
+ stack.push("INSIDE_OBJECT_KEY");
475
+ break;
476
+ }
477
+ }
478
+ break;
479
+ }
480
+ case "INSIDE_OBJECT_KEY": {
481
+ switch (char) {
482
+ case '"': {
483
+ stack.pop();
484
+ stack.push("INSIDE_OBJECT_AFTER_KEY");
485
+ break;
486
+ }
487
+ }
488
+ break;
489
+ }
490
+ case "INSIDE_OBJECT_AFTER_KEY": {
491
+ switch (char) {
492
+ case ":": {
493
+ stack.pop();
494
+ stack.push("INSIDE_OBJECT_BEFORE_VALUE");
495
+ break;
496
+ }
497
+ }
498
+ break;
499
+ }
500
+ case "INSIDE_OBJECT_BEFORE_VALUE": {
501
+ processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
502
+ break;
503
+ }
504
+ case "INSIDE_OBJECT_AFTER_VALUE": {
505
+ processAfterObjectValue(char, i);
506
+ break;
507
+ }
508
+ case "INSIDE_STRING": {
509
+ switch (char) {
510
+ case '"': {
511
+ stack.pop();
512
+ lastValidIndex = i;
513
+ break;
514
+ }
515
+ case "\\": {
516
+ stack.push("INSIDE_STRING_ESCAPE");
517
+ break;
518
+ }
519
+ default: {
520
+ lastValidIndex = i;
521
+ }
522
+ }
523
+ break;
524
+ }
525
+ case "INSIDE_ARRAY_START": {
526
+ switch (char) {
527
+ case "]": {
528
+ lastValidIndex = i;
529
+ stack.pop();
530
+ break;
531
+ }
532
+ default: {
533
+ lastValidIndex = i;
534
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
535
+ break;
536
+ }
537
+ }
538
+ break;
539
+ }
540
+ case "INSIDE_ARRAY_AFTER_VALUE": {
541
+ switch (char) {
542
+ case ",": {
543
+ stack.pop();
544
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
545
+ break;
546
+ }
547
+ case "]": {
548
+ lastValidIndex = i;
549
+ stack.pop();
550
+ break;
551
+ }
552
+ default: {
553
+ lastValidIndex = i;
554
+ break;
555
+ }
556
+ }
557
+ break;
558
+ }
559
+ case "INSIDE_ARRAY_AFTER_COMMA": {
560
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
561
+ break;
562
+ }
563
+ case "INSIDE_STRING_ESCAPE": {
564
+ stack.pop();
565
+ lastValidIndex = i;
566
+ break;
567
+ }
568
+ case "INSIDE_NUMBER": {
569
+ switch (char) {
570
+ case "0":
571
+ case "1":
572
+ case "2":
573
+ case "3":
574
+ case "4":
575
+ case "5":
576
+ case "6":
577
+ case "7":
578
+ case "8":
579
+ case "9": {
580
+ lastValidIndex = i;
581
+ break;
582
+ }
583
+ case "e":
584
+ case "E":
585
+ case "-":
586
+ case ".": {
587
+ break;
588
+ }
589
+ case ",": {
590
+ stack.pop();
591
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
592
+ processAfterArrayValue(char, i);
593
+ }
594
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
595
+ processAfterObjectValue(char, i);
596
+ }
597
+ break;
598
+ }
599
+ case "}": {
600
+ stack.pop();
601
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
602
+ processAfterObjectValue(char, i);
603
+ }
604
+ break;
605
+ }
606
+ case "]": {
607
+ stack.pop();
608
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
609
+ processAfterArrayValue(char, i);
610
+ }
611
+ break;
612
+ }
613
+ default: {
614
+ stack.pop();
615
+ break;
616
+ }
617
+ }
618
+ break;
619
+ }
620
+ case "INSIDE_LITERAL": {
621
+ const partialLiteral = input.substring(literalStart, i + 1);
622
+ if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
623
+ stack.pop();
624
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
625
+ processAfterObjectValue(char, i);
626
+ } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
627
+ processAfterArrayValue(char, i);
628
+ }
629
+ } else {
630
+ lastValidIndex = i;
631
+ }
632
+ break;
633
+ }
634
+ }
635
+ }
636
+ let result = input.slice(0, lastValidIndex + 1);
637
+ for (let i = stack.length - 1; i >= 0; i--) {
638
+ const state = stack[i];
639
+ switch (state) {
640
+ case "INSIDE_STRING": {
641
+ result += '"';
642
+ break;
643
+ }
644
+ case "INSIDE_OBJECT_KEY":
645
+ case "INSIDE_OBJECT_AFTER_KEY":
646
+ case "INSIDE_OBJECT_AFTER_COMMA":
647
+ case "INSIDE_OBJECT_START":
648
+ case "INSIDE_OBJECT_BEFORE_VALUE":
649
+ case "INSIDE_OBJECT_AFTER_VALUE": {
650
+ result += "}";
651
+ break;
652
+ }
653
+ case "INSIDE_ARRAY_START":
654
+ case "INSIDE_ARRAY_AFTER_COMMA":
655
+ case "INSIDE_ARRAY_AFTER_VALUE": {
656
+ result += "]";
657
+ break;
658
+ }
659
+ case "INSIDE_LITERAL": {
660
+ const partialLiteral = input.substring(literalStart, input.length);
661
+ if ("true".startsWith(partialLiteral)) {
662
+ result += "true".slice(partialLiteral.length);
663
+ } else if ("false".startsWith(partialLiteral)) {
664
+ result += "false".slice(partialLiteral.length);
665
+ } else if ("null".startsWith(partialLiteral)) {
666
+ result += "null".slice(partialLiteral.length);
667
+ }
668
+ }
669
+ }
670
+ }
671
+ return result;
672
+ }
673
+
674
+ // src/util/parse-partial-json.ts
675
+ async function parsePartialJson(jsonText) {
676
+ if (jsonText === void 0) {
677
+ return { value: void 0, state: "undefined-input" };
678
+ }
679
+ let result = await (0, import_provider_utils.safeParseJSON)({ text: jsonText });
680
+ if (result.success) {
681
+ return { value: result.value, state: "successful-parse" };
682
+ }
683
+ result = await (0, import_provider_utils.safeParseJSON)({ text: fixJson(jsonText) });
684
+ if (result.success) {
685
+ return { value: result.value, state: "repaired-parse" };
686
+ }
687
+ return { value: void 0, state: "failed-parse" };
688
+ }
689
+
690
+ // src/ui/ui-messages.ts
691
+ function isToolUIPart(part) {
692
+ return part.type.startsWith("tool-");
693
+ }
694
+ function getToolName(part) {
695
+ return part.type.split("-")[1];
696
+ }
697
+
698
+ // src/ui/process-ui-message-stream.ts
699
+ function createStreamingUIMessageState({
700
+ lastMessage,
701
+ messageId
702
+ }) {
703
+ return {
704
+ message: (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage : {
705
+ id: messageId,
706
+ metadata: void 0,
707
+ role: "assistant",
708
+ parts: []
709
+ },
710
+ activeTextParts: {},
711
+ activeReasoningParts: {},
712
+ partialToolCalls: {}
713
+ };
714
+ }
715
+ function processUIMessageStream({
716
+ stream,
717
+ onToolCall,
718
+ messageMetadataSchema,
719
+ dataPartSchemas,
720
+ runUpdateMessageJob,
721
+ onError,
722
+ onData
723
+ }) {
724
+ return stream.pipeThrough(
725
+ new TransformStream({
726
+ async transform(part, controller) {
727
+ await runUpdateMessageJob(async ({ state, write }) => {
728
+ var _a9, _b;
729
+ function updateToolInvocationPart(options) {
730
+ var _a10;
731
+ const part2 = state.message.parts.find(
732
+ (part3) => isToolUIPart(part3) && part3.toolCallId === options.toolCallId
733
+ );
734
+ const anyOptions = options;
735
+ const anyPart = part2;
736
+ if (part2 != null) {
737
+ part2.state = options.state;
738
+ anyPart.input = anyOptions.input;
739
+ anyPart.output = anyOptions.output;
740
+ anyPart.errorText = anyOptions.errorText;
741
+ anyPart.providerExecuted = (_a10 = anyOptions.providerExecuted) != null ? _a10 : part2.providerExecuted;
742
+ } else {
743
+ state.message.parts.push({
744
+ type: `tool-${options.toolName}`,
745
+ toolCallId: options.toolCallId,
746
+ state: options.state,
747
+ input: anyOptions.input,
748
+ output: anyOptions.output,
749
+ errorText: anyOptions.errorText,
750
+ providerExecuted: anyOptions.providerExecuted
751
+ });
752
+ }
753
+ }
754
+ async function updateMessageMetadata(metadata) {
755
+ if (metadata != null) {
756
+ const mergedMetadata = state.message.metadata != null ? mergeObjects(state.message.metadata, metadata) : metadata;
757
+ if (messageMetadataSchema != null) {
758
+ await (0, import_provider_utils2.validateTypes)({
759
+ value: mergedMetadata,
760
+ schema: messageMetadataSchema
761
+ });
762
+ }
763
+ state.message.metadata = mergedMetadata;
764
+ }
765
+ }
766
+ switch (part.type) {
767
+ case "text-start": {
768
+ const textPart = {
769
+ type: "text",
770
+ text: "",
771
+ state: "streaming"
772
+ };
773
+ state.activeTextParts[part.id] = textPart;
774
+ state.message.parts.push(textPart);
775
+ write();
776
+ break;
777
+ }
778
+ case "text-delta": {
779
+ state.activeTextParts[part.id].text += part.delta;
780
+ write();
781
+ break;
782
+ }
783
+ case "text-end": {
784
+ const textPart = state.activeTextParts[part.id];
785
+ textPart.state = "done";
786
+ delete state.activeTextParts[part.id];
787
+ write();
788
+ break;
789
+ }
790
+ case "reasoning-start": {
791
+ const reasoningPart = {
792
+ type: "reasoning",
793
+ text: "",
794
+ providerMetadata: part.providerMetadata,
795
+ state: "streaming"
796
+ };
797
+ state.activeReasoningParts[part.id] = reasoningPart;
798
+ state.message.parts.push(reasoningPart);
799
+ write();
800
+ break;
801
+ }
802
+ case "reasoning-delta": {
803
+ const reasoningPart = state.activeReasoningParts[part.id];
804
+ reasoningPart.text += part.delta;
805
+ reasoningPart.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : reasoningPart.providerMetadata;
806
+ write();
807
+ break;
808
+ }
809
+ case "reasoning-end": {
810
+ const reasoningPart = state.activeReasoningParts[part.id];
811
+ reasoningPart.providerMetadata = (_b = part.providerMetadata) != null ? _b : reasoningPart.providerMetadata;
812
+ reasoningPart.state = "done";
813
+ delete state.activeReasoningParts[part.id];
814
+ write();
815
+ break;
816
+ }
817
+ case "file": {
818
+ state.message.parts.push({
819
+ type: "file",
820
+ mediaType: part.mediaType,
821
+ url: part.url
822
+ });
823
+ write();
824
+ break;
825
+ }
826
+ case "source-url": {
827
+ state.message.parts.push({
828
+ type: "source-url",
829
+ sourceId: part.sourceId,
830
+ url: part.url,
831
+ title: part.title,
832
+ providerMetadata: part.providerMetadata
833
+ });
834
+ write();
835
+ break;
836
+ }
837
+ case "source-document": {
838
+ state.message.parts.push({
839
+ type: "source-document",
840
+ sourceId: part.sourceId,
841
+ mediaType: part.mediaType,
842
+ title: part.title,
843
+ filename: part.filename,
844
+ providerMetadata: part.providerMetadata
845
+ });
846
+ write();
847
+ break;
848
+ }
849
+ case "tool-input-start": {
850
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
851
+ state.partialToolCalls[part.toolCallId] = {
852
+ text: "",
853
+ toolName: part.toolName,
854
+ index: toolInvocations.length
855
+ };
856
+ updateToolInvocationPart({
857
+ toolCallId: part.toolCallId,
858
+ toolName: part.toolName,
859
+ state: "input-streaming",
860
+ input: void 0,
861
+ providerExecuted: part.providerExecuted
862
+ });
863
+ write();
864
+ break;
865
+ }
866
+ case "tool-input-delta": {
867
+ const partialToolCall = state.partialToolCalls[part.toolCallId];
868
+ partialToolCall.text += part.inputTextDelta;
869
+ const { value: partialArgs } = await parsePartialJson(
870
+ partialToolCall.text
871
+ );
872
+ updateToolInvocationPart({
873
+ toolCallId: part.toolCallId,
874
+ toolName: partialToolCall.toolName,
875
+ state: "input-streaming",
876
+ input: partialArgs
877
+ });
878
+ write();
879
+ break;
880
+ }
881
+ case "tool-input-available": {
882
+ updateToolInvocationPart({
883
+ toolCallId: part.toolCallId,
884
+ toolName: part.toolName,
885
+ state: "input-available",
886
+ input: part.input,
887
+ providerExecuted: part.providerExecuted
888
+ });
889
+ write();
890
+ if (onToolCall && !part.providerExecuted) {
891
+ const result = await onToolCall({
892
+ toolCall: part
893
+ });
894
+ if (result != null) {
895
+ updateToolInvocationPart({
896
+ toolCallId: part.toolCallId,
897
+ toolName: part.toolName,
898
+ state: "output-available",
899
+ input: part.input,
900
+ output: result
901
+ });
902
+ write();
903
+ }
904
+ }
905
+ break;
906
+ }
907
+ case "tool-output-available": {
908
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
909
+ if (toolInvocations == null) {
910
+ throw new Error("tool_result must be preceded by a tool_call");
911
+ }
912
+ const toolInvocationIndex = toolInvocations.findIndex(
913
+ (invocation) => invocation.toolCallId === part.toolCallId
914
+ );
915
+ if (toolInvocationIndex === -1) {
916
+ throw new Error(
917
+ "tool_result must be preceded by a tool_call with the same toolCallId"
918
+ );
919
+ }
920
+ const toolName = getToolName(
921
+ toolInvocations[toolInvocationIndex]
922
+ );
923
+ updateToolInvocationPart({
924
+ toolCallId: part.toolCallId,
925
+ toolName,
926
+ state: "output-available",
927
+ input: toolInvocations[toolInvocationIndex].input,
928
+ output: part.output,
929
+ providerExecuted: part.providerExecuted
930
+ });
931
+ write();
932
+ break;
933
+ }
934
+ case "tool-output-error": {
935
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
936
+ if (toolInvocations == null) {
937
+ throw new Error("tool_result must be preceded by a tool_call");
938
+ }
939
+ const toolInvocationIndex = toolInvocations.findIndex(
940
+ (invocation) => invocation.toolCallId === part.toolCallId
941
+ );
942
+ if (toolInvocationIndex === -1) {
943
+ throw new Error(
944
+ "tool_result must be preceded by a tool_call with the same toolCallId"
945
+ );
946
+ }
947
+ const toolName = getToolName(
948
+ toolInvocations[toolInvocationIndex]
949
+ );
950
+ updateToolInvocationPart({
951
+ toolCallId: part.toolCallId,
952
+ toolName,
953
+ state: "output-error",
954
+ input: toolInvocations[toolInvocationIndex].input,
955
+ errorText: part.errorText,
956
+ providerExecuted: part.providerExecuted
957
+ });
958
+ write();
959
+ break;
960
+ }
961
+ case "start-step": {
962
+ state.message.parts.push({ type: "step-start" });
963
+ break;
964
+ }
965
+ case "finish-step": {
966
+ state.activeTextParts = {};
967
+ state.activeReasoningParts = {};
968
+ break;
969
+ }
970
+ case "start": {
971
+ if (part.messageId != null) {
972
+ state.message.id = part.messageId;
973
+ }
974
+ await updateMessageMetadata(part.messageMetadata);
975
+ if (part.messageId != null || part.messageMetadata != null) {
976
+ write();
977
+ }
978
+ break;
979
+ }
980
+ case "finish": {
981
+ await updateMessageMetadata(part.messageMetadata);
982
+ if (part.messageMetadata != null) {
983
+ write();
984
+ }
985
+ break;
986
+ }
987
+ case "message-metadata": {
988
+ await updateMessageMetadata(part.messageMetadata);
989
+ if (part.messageMetadata != null) {
990
+ write();
991
+ }
992
+ break;
993
+ }
994
+ case "error": {
995
+ onError == null ? void 0 : onError(new Error(part.errorText));
996
+ break;
997
+ }
998
+ default: {
999
+ if (isDataUIMessageChunk(part)) {
1000
+ const dataPart = part;
1001
+ if (dataPart.transient) {
1002
+ onData == null ? void 0 : onData(dataPart);
1003
+ break;
1004
+ }
1005
+ const existingPart = dataPart.id != null ? state.message.parts.find(
1006
+ (partArg) => dataPart.type === partArg.type && dataPart.id === partArg.id
1007
+ ) : void 0;
1008
+ if (existingPart != null) {
1009
+ existingPart.data = isObject(existingPart.data) && isObject(dataPart.data) ? mergeObjects(existingPart.data, dataPart.data) : dataPart.data;
1010
+ } else {
1011
+ state.message.parts.push(dataPart);
1012
+ }
1013
+ onData == null ? void 0 : onData(dataPart);
1014
+ write();
1015
+ }
1016
+ }
1017
+ }
1018
+ controller.enqueue(part);
1019
+ });
1020
+ }
1021
+ })
1022
+ );
1023
+ }
1024
+ function isObject(value) {
1025
+ return typeof value === "object" && value !== null;
1026
+ }
1027
+
1028
+ // src/ui-message-stream/handle-ui-message-stream-finish.ts
1029
+ function handleUIMessageStreamFinish({
1030
+ messageId,
1031
+ originalMessages = [],
1032
+ onFinish,
1033
+ onError,
1034
+ stream
1035
+ }) {
1036
+ let lastMessage = originalMessages == null ? void 0 : originalMessages[originalMessages.length - 1];
1037
+ if ((lastMessage == null ? void 0 : lastMessage.role) !== "assistant") {
1038
+ lastMessage = void 0;
1039
+ } else {
1040
+ messageId = lastMessage.id;
1041
+ }
1042
+ const idInjectedStream = stream.pipeThrough(
1043
+ new TransformStream({
1044
+ transform(chunk, controller) {
1045
+ if (chunk.type === "start") {
1046
+ const startChunk = chunk;
1047
+ if (startChunk.messageId == null && messageId != null) {
1048
+ startChunk.messageId = messageId;
1049
+ }
1050
+ }
1051
+ controller.enqueue(chunk);
1052
+ }
1053
+ })
1054
+ );
1055
+ if (onFinish == null) {
1056
+ return idInjectedStream;
1057
+ }
1058
+ const state = createStreamingUIMessageState({
1059
+ lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
1060
+ messageId: messageId != null ? messageId : ""
1061
+ // will be overridden by the stream
1062
+ });
1063
+ const runUpdateMessageJob = async (job) => {
1064
+ await job({ state, write: () => {
1065
+ } });
1066
+ };
1067
+ return processUIMessageStream({
1068
+ stream: idInjectedStream,
1069
+ runUpdateMessageJob,
1070
+ onError
1071
+ }).pipeThrough(
1072
+ new TransformStream({
1073
+ transform(chunk, controller) {
1074
+ controller.enqueue(chunk);
1075
+ },
1076
+ flush() {
1077
+ const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
1078
+ onFinish({
1079
+ isContinuation,
1080
+ responseMessage: state.message,
1081
+ messages: [
1082
+ ...isContinuation ? originalMessages.slice(0, -1) : originalMessages,
1083
+ state.message
1084
+ ]
1085
+ });
1086
+ }
1087
+ })
1088
+ );
1089
+ }
1090
+
1091
+ // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
1092
+ function pipeUIMessageStreamToResponse({
1093
+ response,
1094
+ status,
1095
+ statusText,
1096
+ headers,
1097
+ stream,
1098
+ consumeSseStream
1099
+ }) {
1100
+ let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
1101
+ if (consumeSseStream) {
1102
+ const [stream1, stream2] = sseStream.tee();
1103
+ sseStream = stream1;
1104
+ consumeSseStream({ stream: stream2 });
1105
+ }
1106
+ writeToServerResponse({
1107
+ response,
1108
+ status,
1109
+ statusText,
1110
+ headers: Object.fromEntries(
1111
+ prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
1112
+ ),
1113
+ stream: sseStream.pipeThrough(new TextEncoderStream())
1114
+ });
1115
+ }
1116
+
1117
+ // src/util/as-array.ts
1118
+ function asArray(value) {
1119
+ return value === void 0 ? [] : Array.isArray(value) ? value : [value];
1120
+ }
1121
+
1122
+ // src/util/async-iterable-stream.ts
1123
+ function createAsyncIterableStream(source) {
1124
+ const stream = source.pipeThrough(new TransformStream());
1125
+ stream[Symbol.asyncIterator] = () => {
1126
+ const reader = stream.getReader();
1127
+ return {
1128
+ async next() {
1129
+ const { done, value } = await reader.read();
1130
+ return done ? { done: true, value: void 0 } : { done: false, value };
1131
+ }
1132
+ };
1133
+ };
1134
+ return stream;
1135
+ }
1136
+
1137
+ // src/util/consume-stream.ts
1138
+ async function consumeStream({
1139
+ stream,
1140
+ onError
1141
+ }) {
1142
+ const reader = stream.getReader();
1143
+ try {
1144
+ while (true) {
1145
+ const { done } = await reader.read();
1146
+ if (done)
1147
+ break;
1148
+ }
1149
+ } catch (error) {
1150
+ onError == null ? void 0 : onError(error);
1151
+ } finally {
1152
+ reader.releaseLock();
1153
+ }
1154
+ }
1155
+
1156
+ // src/util/create-resolvable-promise.ts
1157
+ function createResolvablePromise() {
1158
+ let resolve2;
1159
+ let reject;
1160
+ const promise = new Promise((res, rej) => {
1161
+ resolve2 = res;
1162
+ reject = rej;
1163
+ });
1164
+ return {
1165
+ promise,
1166
+ resolve: resolve2,
1167
+ reject
1168
+ };
1169
+ }
1170
+
1171
+ // src/util/create-stitchable-stream.ts
1172
+ function createStitchableStream() {
1173
+ let innerStreamReaders = [];
1174
+ let controller = null;
1175
+ let isClosed = false;
1176
+ let waitForNewStream = createResolvablePromise();
1177
+ const processPull = async () => {
1178
+ if (isClosed && innerStreamReaders.length === 0) {
1179
+ controller == null ? void 0 : controller.close();
1180
+ return;
1181
+ }
1182
+ if (innerStreamReaders.length === 0) {
1183
+ waitForNewStream = createResolvablePromise();
1184
+ await waitForNewStream.promise;
1185
+ return processPull();
1186
+ }
1187
+ try {
1188
+ const { value, done } = await innerStreamReaders[0].read();
1189
+ if (done) {
1190
+ innerStreamReaders.shift();
1191
+ if (innerStreamReaders.length > 0) {
1192
+ await processPull();
1193
+ } else if (isClosed) {
1194
+ controller == null ? void 0 : controller.close();
1195
+ }
1196
+ } else {
1197
+ controller == null ? void 0 : controller.enqueue(value);
1198
+ }
1199
+ } catch (error) {
1200
+ controller == null ? void 0 : controller.error(error);
1201
+ innerStreamReaders.shift();
1202
+ if (isClosed && innerStreamReaders.length === 0) {
1203
+ controller == null ? void 0 : controller.close();
1204
+ }
1205
+ }
1206
+ };
1207
+ return {
1208
+ stream: new ReadableStream({
1209
+ start(controllerParam) {
1210
+ controller = controllerParam;
1211
+ },
1212
+ pull: processPull,
1213
+ async cancel() {
1214
+ for (const reader of innerStreamReaders) {
1215
+ await reader.cancel();
1216
+ }
1217
+ innerStreamReaders = [];
1218
+ isClosed = true;
1219
+ }
1220
+ }),
1221
+ addStream: (innerStream) => {
1222
+ if (isClosed) {
1223
+ throw new Error("Cannot add inner stream: outer stream is closed");
1224
+ }
1225
+ innerStreamReaders.push(innerStream.getReader());
1226
+ waitForNewStream.resolve();
1227
+ },
1228
+ /**
1229
+ * Gracefully close the outer stream. This will let the inner streams
1230
+ * finish processing and then close the outer stream.
1231
+ */
1232
+ close: () => {
1233
+ isClosed = true;
1234
+ waitForNewStream.resolve();
1235
+ if (innerStreamReaders.length === 0) {
1236
+ controller == null ? void 0 : controller.close();
1237
+ }
1238
+ },
1239
+ /**
1240
+ * Immediately close the outer stream. This will cancel all inner streams
1241
+ * and close the outer stream.
1242
+ */
1243
+ terminate: () => {
1244
+ isClosed = true;
1245
+ waitForNewStream.resolve();
1246
+ innerStreamReaders.forEach((reader) => reader.cancel());
1247
+ innerStreamReaders = [];
1248
+ controller == null ? void 0 : controller.close();
1249
+ }
1250
+ };
1251
+ }
1252
+
1253
+ // src/util/delayed-promise.ts
1254
+ var DelayedPromise = class {
1255
+ constructor() {
1256
+ this.status = { type: "pending" };
1257
+ this._resolve = void 0;
1258
+ this._reject = void 0;
1259
+ }
1260
+ get promise() {
1261
+ if (this._promise) {
1262
+ return this._promise;
1263
+ }
1264
+ this._promise = new Promise((resolve2, reject) => {
1265
+ if (this.status.type === "resolved") {
1266
+ resolve2(this.status.value);
1267
+ } else if (this.status.type === "rejected") {
1268
+ reject(this.status.error);
1269
+ }
1270
+ this._resolve = resolve2;
1271
+ this._reject = reject;
1272
+ });
1273
+ return this._promise;
1274
+ }
1275
+ resolve(value) {
1276
+ var _a9;
1277
+ this.status = { type: "resolved", value };
1278
+ if (this._promise) {
1279
+ (_a9 = this._resolve) == null ? void 0 : _a9.call(this, value);
1280
+ }
1281
+ }
1282
+ reject(error) {
1283
+ var _a9;
1284
+ this.status = { type: "rejected", error };
1285
+ if (this._promise) {
1286
+ (_a9 = this._reject) == null ? void 0 : _a9.call(this, error);
1287
+ }
1288
+ }
1289
+ };
1290
+
1291
+ // src/util/now.ts
1292
+ function now() {
1293
+ var _a9, _b;
1294
+ return (_b = (_a9 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a9.now()) != null ? _b : Date.now();
1295
+ }
1296
+
1297
+ // src/error/invalid-argument-error.ts
1298
+ var import_provider2 = require("@ai-sdk/provider");
1299
+ var name2 = "AI_InvalidArgumentError";
1300
+ var marker2 = `vercel.ai.error.${name2}`;
1301
+ var symbol2 = Symbol.for(marker2);
1302
+ var _a2;
1303
+ var InvalidArgumentError = class extends import_provider2.AISDKError {
1304
+ constructor({
1305
+ parameter,
1306
+ value,
1307
+ message
1308
+ }) {
1309
+ super({
1310
+ name: name2,
1311
+ message: `Invalid argument for parameter ${parameter}: ${message}`
1312
+ });
1313
+ this[_a2] = true;
1314
+ this.parameter = parameter;
1315
+ this.value = value;
1316
+ }
1317
+ static isInstance(error) {
1318
+ return import_provider2.AISDKError.hasMarker(error, marker2);
1319
+ }
1320
+ };
1321
+ _a2 = symbol2;
1322
+
1323
+ // src/util/retry-with-exponential-backoff.ts
1324
+ var import_provider4 = require("@ai-sdk/provider");
1325
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
1326
+
1327
+ // src/util/retry-error.ts
1328
+ var import_provider3 = require("@ai-sdk/provider");
1329
+ var name3 = "AI_RetryError";
1330
+ var marker3 = `vercel.ai.error.${name3}`;
1331
+ var symbol3 = Symbol.for(marker3);
1332
+ var _a3;
1333
+ var RetryError = class extends import_provider3.AISDKError {
1334
+ constructor({
1335
+ message,
1336
+ reason,
1337
+ errors
1338
+ }) {
1339
+ super({ name: name3, message });
1340
+ this[_a3] = true;
1341
+ this.reason = reason;
1342
+ this.errors = errors;
1343
+ this.lastError = errors[errors.length - 1];
1344
+ }
1345
+ static isInstance(error) {
1346
+ return import_provider3.AISDKError.hasMarker(error, marker3);
1347
+ }
1348
+ };
1349
+ _a3 = symbol3;
1350
+
1351
+ // src/util/retry-with-exponential-backoff.ts
1352
+ function getRetryDelay(error, exponentialBackoffDelay) {
1353
+ const headers = error.responseHeaders;
1354
+ if (!headers)
1355
+ return exponentialBackoffDelay;
1356
+ let timeoutMillis;
1357
+ const retryAfterMs = headers["retry-after-ms"];
1358
+ if (retryAfterMs) {
1359
+ const timeoutMs = parseFloat(retryAfterMs);
1360
+ if (!Number.isNaN(timeoutMs)) {
1361
+ timeoutMillis = timeoutMs;
1362
+ }
1363
+ }
1364
+ const retryAfter = headers["retry-after"];
1365
+ if (retryAfter && timeoutMillis === void 0) {
1366
+ const timeoutSeconds = parseFloat(retryAfter);
1367
+ if (!Number.isNaN(timeoutSeconds)) {
1368
+ timeoutMillis = timeoutSeconds * 1e3;
1369
+ } else {
1370
+ timeoutMillis = Date.parse(retryAfter) - Date.now();
1371
+ }
1372
+ }
1373
+ if (timeoutMillis !== void 0 && 0 <= timeoutMillis && timeoutMillis < 60 * 1e3) {
1374
+ return timeoutMillis;
1375
+ }
1376
+ return exponentialBackoffDelay;
1377
+ }
1378
+ var retryWithExponentialBackoffRespectingRetryHeaders = ({
1379
+ maxRetries = 2,
1380
+ initialDelayInMs = 2e3,
1381
+ backoffFactor = 2
1382
+ } = {}) => async (f) => _retryWithExponentialBackoff(f, {
1383
+ maxRetries,
1384
+ delayInMs: initialDelayInMs,
1385
+ backoffFactor
1386
+ });
1387
+ async function _retryWithExponentialBackoff(f, {
1388
+ maxRetries,
1389
+ delayInMs,
1390
+ backoffFactor
1391
+ }, errors = []) {
1392
+ try {
1393
+ return await f();
1394
+ } catch (error) {
1395
+ if ((0, import_provider_utils3.isAbortError)(error)) {
1396
+ throw error;
1397
+ }
1398
+ if (maxRetries === 0) {
1399
+ throw error;
1400
+ }
1401
+ const errorMessage = (0, import_provider_utils3.getErrorMessage)(error);
1402
+ const newErrors = [...errors, error];
1403
+ const tryNumber = newErrors.length;
1404
+ if (tryNumber > maxRetries) {
1405
+ throw new RetryError({
1406
+ message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
1407
+ reason: "maxRetriesExceeded",
1408
+ errors: newErrors
1409
+ });
1410
+ }
1411
+ if (error instanceof Error && import_provider4.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
1412
+ const actualDelay = getRetryDelay(error, delayInMs);
1413
+ await (0, import_provider_utils3.delay)(actualDelay);
1414
+ return _retryWithExponentialBackoff(
1415
+ f,
1416
+ { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
1417
+ newErrors
1418
+ );
1419
+ }
1420
+ if (tryNumber === 1) {
1421
+ throw error;
1422
+ }
1423
+ throw new RetryError({
1424
+ message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
1425
+ reason: "errorNotRetryable",
1426
+ errors: newErrors
1427
+ });
1428
+ }
1429
+ }
1430
+
1431
+ // src/util/prepare-retries.ts
1432
+ function prepareRetries({
1433
+ maxRetries
1434
+ }) {
1435
+ if (maxRetries != null) {
1436
+ if (!Number.isInteger(maxRetries)) {
1437
+ throw new InvalidArgumentError({
1438
+ parameter: "maxRetries",
1439
+ value: maxRetries,
1440
+ message: "maxRetries must be an integer"
1441
+ });
1442
+ }
1443
+ if (maxRetries < 0) {
1444
+ throw new InvalidArgumentError({
1445
+ parameter: "maxRetries",
1446
+ value: maxRetries,
1447
+ message: "maxRetries must be >= 0"
1448
+ });
1449
+ }
1450
+ }
1451
+ const maxRetriesResult = maxRetries != null ? maxRetries : 2;
1452
+ return {
1453
+ maxRetries: maxRetriesResult,
1454
+ retry: retryWithExponentialBackoffRespectingRetryHeaders({
1455
+ maxRetries: maxRetriesResult
1456
+ })
1457
+ };
1458
+ }
1459
+
1460
+ // src/prompt/convert-to-language-model-prompt.ts
1461
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
1462
+
1463
+ // src/util/detect-media-type.ts
1464
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
1465
+ var imageMediaTypeSignatures = [
1466
+ {
1467
+ mediaType: "image/gif",
1468
+ bytesPrefix: [71, 73, 70],
1469
+ base64Prefix: "R0lG"
1470
+ },
1471
+ {
1472
+ mediaType: "image/png",
1473
+ bytesPrefix: [137, 80, 78, 71],
1474
+ base64Prefix: "iVBORw"
1475
+ },
1476
+ {
1477
+ mediaType: "image/jpeg",
1478
+ bytesPrefix: [255, 216],
1479
+ base64Prefix: "/9j/"
1480
+ },
1481
+ {
1482
+ mediaType: "image/webp",
1483
+ bytesPrefix: [82, 73, 70, 70],
1484
+ base64Prefix: "UklGRg"
1485
+ },
1486
+ {
1487
+ mediaType: "image/bmp",
1488
+ bytesPrefix: [66, 77],
1489
+ base64Prefix: "Qk"
1490
+ },
1491
+ {
1492
+ mediaType: "image/tiff",
1493
+ bytesPrefix: [73, 73, 42, 0],
1494
+ base64Prefix: "SUkqAA"
1495
+ },
1496
+ {
1497
+ mediaType: "image/tiff",
1498
+ bytesPrefix: [77, 77, 0, 42],
1499
+ base64Prefix: "TU0AKg"
1500
+ },
1501
+ {
1502
+ mediaType: "image/avif",
1503
+ bytesPrefix: [
1504
+ 0,
1505
+ 0,
1506
+ 0,
1507
+ 32,
1508
+ 102,
1509
+ 116,
1510
+ 121,
1511
+ 112,
1512
+ 97,
1513
+ 118,
1514
+ 105,
1515
+ 102
1516
+ ],
1517
+ base64Prefix: "AAAAIGZ0eXBhdmlm"
1518
+ },
1519
+ {
1520
+ mediaType: "image/heic",
1521
+ bytesPrefix: [
1522
+ 0,
1523
+ 0,
1524
+ 0,
1525
+ 32,
1526
+ 102,
1527
+ 116,
1528
+ 121,
1529
+ 112,
1530
+ 104,
1531
+ 101,
1532
+ 105,
1533
+ 99
1534
+ ],
1535
+ base64Prefix: "AAAAIGZ0eXBoZWlj"
1536
+ }
1537
+ ];
1538
+ var stripID3 = (data) => {
1539
+ const bytes = typeof data === "string" ? (0, import_provider_utils4.convertBase64ToUint8Array)(data) : data;
1540
+ const id3Size = (bytes[6] & 127) << 21 | (bytes[7] & 127) << 14 | (bytes[8] & 127) << 7 | bytes[9] & 127;
1541
+ return bytes.slice(id3Size + 10);
1542
+ };
1543
+ function stripID3TagsIfPresent(data) {
1544
+ const hasId3 = typeof data === "string" && data.startsWith("SUQz") || typeof data !== "string" && data.length > 10 && data[0] === 73 && // 'I'
1545
+ data[1] === 68 && // 'D'
1546
+ data[2] === 51;
1547
+ return hasId3 ? stripID3(data) : data;
1548
+ }
1549
+ function detectMediaType({
1550
+ data,
1551
+ signatures
1552
+ }) {
1553
+ const processedData = stripID3TagsIfPresent(data);
1554
+ for (const signature of signatures) {
1555
+ if (typeof processedData === "string" ? processedData.startsWith(signature.base64Prefix) : processedData.length >= signature.bytesPrefix.length && signature.bytesPrefix.every(
1556
+ (byte, index) => processedData[index] === byte
1557
+ )) {
1558
+ return signature.mediaType;
1559
+ }
1560
+ }
1561
+ return void 0;
1562
+ }
1563
+
1564
+ // src/util/download-error.ts
1565
+ var import_provider5 = require("@ai-sdk/provider");
1566
+ var name4 = "AI_DownloadError";
1567
+ var marker4 = `vercel.ai.error.${name4}`;
1568
+ var symbol4 = Symbol.for(marker4);
1569
+ var _a4;
1570
+ var DownloadError = class extends import_provider5.AISDKError {
1571
+ constructor({
1572
+ url,
1573
+ statusCode,
1574
+ statusText,
1575
+ cause,
1576
+ message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
1577
+ }) {
1578
+ super({ name: name4, message, cause });
1579
+ this[_a4] = true;
1580
+ this.url = url;
1581
+ this.statusCode = statusCode;
1582
+ this.statusText = statusText;
1583
+ }
1584
+ static isInstance(error) {
1585
+ return import_provider5.AISDKError.hasMarker(error, marker4);
1586
+ }
1587
+ };
1588
+ _a4 = symbol4;
1589
+
1590
+ // src/util/download.ts
1591
+ async function download({ url }) {
1592
+ var _a9;
1593
+ const urlText = url.toString();
1594
+ try {
1595
+ const response = await fetch(urlText);
1596
+ if (!response.ok) {
1597
+ throw new DownloadError({
1598
+ url: urlText,
1599
+ statusCode: response.status,
1600
+ statusText: response.statusText
1601
+ });
1602
+ }
1603
+ return {
1604
+ data: new Uint8Array(await response.arrayBuffer()),
1605
+ mediaType: (_a9 = response.headers.get("content-type")) != null ? _a9 : void 0
1606
+ };
1607
+ } catch (error) {
1608
+ if (DownloadError.isInstance(error)) {
1609
+ throw error;
1610
+ }
1611
+ throw new DownloadError({ url: urlText, cause: error });
1612
+ }
1613
+ }
1614
+
1615
+ // src/prompt/data-content.ts
1616
+ var import_provider6 = require("@ai-sdk/provider");
1617
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
1618
+ var import_v42 = require("zod/v4");
1619
+
1620
+ // src/prompt/split-data-url.ts
1621
+ function splitDataUrl(dataUrl) {
1622
+ try {
1623
+ const [header, base64Content] = dataUrl.split(",");
1624
+ return {
1625
+ mediaType: header.split(";")[0].split(":")[1],
1626
+ base64Content
1627
+ };
1628
+ } catch (error) {
1629
+ return {
1630
+ mediaType: void 0,
1631
+ base64Content: void 0
1632
+ };
1633
+ }
1634
+ }
1635
+
1636
+ // src/prompt/data-content.ts
1637
+ var dataContentSchema = import_v42.z.union([
1638
+ import_v42.z.string(),
1639
+ import_v42.z.instanceof(Uint8Array),
1640
+ import_v42.z.instanceof(ArrayBuffer),
1641
+ import_v42.z.custom(
1642
+ // Buffer might not be available in some environments such as CloudFlare:
1643
+ (value) => {
1644
+ var _a9, _b;
1645
+ return (_b = (_a9 = globalThis.Buffer) == null ? void 0 : _a9.isBuffer(value)) != null ? _b : false;
1646
+ },
1647
+ { message: "Must be a Buffer" }
1648
+ )
1649
+ ]);
1650
+ function convertToLanguageModelV2DataContent(content) {
1651
+ if (content instanceof Uint8Array) {
1652
+ return { data: content, mediaType: void 0 };
1653
+ }
1654
+ if (content instanceof ArrayBuffer) {
1655
+ return { data: new Uint8Array(content), mediaType: void 0 };
1656
+ }
1657
+ if (typeof content === "string") {
1658
+ try {
1659
+ content = new URL(content);
1660
+ } catch (error) {
1661
+ }
1662
+ }
1663
+ if (content instanceof URL && content.protocol === "data:") {
1664
+ const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
1665
+ content.toString()
1666
+ );
1667
+ if (dataUrlMediaType == null || base64Content == null) {
1668
+ throw new import_provider6.AISDKError({
1669
+ name: "InvalidDataContentError",
1670
+ message: `Invalid data URL format in content ${content.toString()}`
1671
+ });
1672
+ }
1673
+ return { data: base64Content, mediaType: dataUrlMediaType };
1674
+ }
1675
+ return { data: content, mediaType: void 0 };
1676
+ }
1677
+ function convertDataContentToBase64String(content) {
1678
+ if (typeof content === "string") {
1679
+ return content;
1680
+ }
1681
+ if (content instanceof ArrayBuffer) {
1682
+ return (0, import_provider_utils5.convertUint8ArrayToBase64)(new Uint8Array(content));
1683
+ }
1684
+ return (0, import_provider_utils5.convertUint8ArrayToBase64)(content);
1685
+ }
1686
+
1687
+ // src/prompt/invalid-message-role-error.ts
1688
+ var import_provider7 = require("@ai-sdk/provider");
1689
+ var name5 = "AI_InvalidMessageRoleError";
1690
+ var marker5 = `vercel.ai.error.${name5}`;
1691
+ var symbol5 = Symbol.for(marker5);
1692
+ var _a5;
1693
+ var InvalidMessageRoleError = class extends import_provider7.AISDKError {
1694
+ constructor({
1695
+ role,
1696
+ message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
1697
+ }) {
1698
+ super({ name: name5, message });
1699
+ this[_a5] = true;
1700
+ this.role = role;
1701
+ }
1702
+ static isInstance(error) {
1703
+ return import_provider7.AISDKError.hasMarker(error, marker5);
1704
+ }
1705
+ };
1706
+ _a5 = symbol5;
1707
+
1708
+ // src/prompt/convert-to-language-model-prompt.ts
1709
+ async function convertToLanguageModelPrompt({
1710
+ prompt,
1711
+ supportedUrls,
1712
+ downloadImplementation = download
1713
+ }) {
1714
+ const downloadedAssets = await downloadAssets(
1715
+ prompt.messages,
1716
+ downloadImplementation,
1717
+ supportedUrls
1718
+ );
1719
+ return [
1720
+ ...prompt.system != null ? [{ role: "system", content: prompt.system }] : [],
1721
+ ...prompt.messages.map(
1722
+ (message) => convertToLanguageModelMessage({ message, downloadedAssets })
1723
+ )
1724
+ ];
1725
+ }
1726
+ function convertToLanguageModelMessage({
1727
+ message,
1728
+ downloadedAssets
1729
+ }) {
1730
+ const role = message.role;
1731
+ switch (role) {
1732
+ case "system": {
1733
+ return {
1734
+ role: "system",
1735
+ content: message.content,
1736
+ providerOptions: message.providerOptions
1737
+ };
1738
+ }
1739
+ case "user": {
1740
+ if (typeof message.content === "string") {
1741
+ return {
1742
+ role: "user",
1743
+ content: [{ type: "text", text: message.content }],
1744
+ providerOptions: message.providerOptions
1745
+ };
1746
+ }
1747
+ return {
1748
+ role: "user",
1749
+ content: message.content.map((part) => convertPartToLanguageModelPart(part, downloadedAssets)).filter((part) => part.type !== "text" || part.text !== ""),
1750
+ providerOptions: message.providerOptions
1751
+ };
1752
+ }
1753
+ case "assistant": {
1754
+ if (typeof message.content === "string") {
1755
+ return {
1756
+ role: "assistant",
1757
+ content: [{ type: "text", text: message.content }],
1758
+ providerOptions: message.providerOptions
1759
+ };
1760
+ }
1761
+ return {
1762
+ role: "assistant",
1763
+ content: message.content.filter(
1764
+ // remove empty text parts:
1765
+ (part) => part.type !== "text" || part.text !== ""
1766
+ ).map((part) => {
1767
+ const providerOptions = part.providerOptions;
1768
+ switch (part.type) {
1769
+ case "file": {
1770
+ const { data, mediaType } = convertToLanguageModelV2DataContent(
1771
+ part.data
1772
+ );
1773
+ return {
1774
+ type: "file",
1775
+ data,
1776
+ filename: part.filename,
1777
+ mediaType: mediaType != null ? mediaType : part.mediaType,
1778
+ providerOptions
1779
+ };
1780
+ }
1781
+ case "reasoning": {
1782
+ return {
1783
+ type: "reasoning",
1784
+ text: part.text,
1785
+ providerOptions
1786
+ };
1787
+ }
1788
+ case "text": {
1789
+ return {
1790
+ type: "text",
1791
+ text: part.text,
1792
+ providerOptions
1793
+ };
1794
+ }
1795
+ case "tool-call": {
1796
+ return {
1797
+ type: "tool-call",
1798
+ toolCallId: part.toolCallId,
1799
+ toolName: part.toolName,
1800
+ input: part.input,
1801
+ providerExecuted: part.providerExecuted,
1802
+ providerOptions
1803
+ };
1804
+ }
1805
+ case "tool-result": {
1806
+ return {
1807
+ type: "tool-result",
1808
+ toolCallId: part.toolCallId,
1809
+ toolName: part.toolName,
1810
+ output: part.output,
1811
+ providerOptions
1812
+ };
1813
+ }
1814
+ }
1815
+ }),
1816
+ providerOptions: message.providerOptions
1817
+ };
1818
+ }
1819
+ case "tool": {
1820
+ return {
1821
+ role: "tool",
1822
+ content: message.content.map((part) => ({
1823
+ type: "tool-result",
1824
+ toolCallId: part.toolCallId,
1825
+ toolName: part.toolName,
1826
+ output: part.output,
1827
+ providerOptions: part.providerOptions
1828
+ })),
1829
+ providerOptions: message.providerOptions
1830
+ };
1831
+ }
1832
+ default: {
1833
+ const _exhaustiveCheck = role;
1834
+ throw new InvalidMessageRoleError({ role: _exhaustiveCheck });
1835
+ }
1836
+ }
1837
+ }
1838
+ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
1839
+ const urls = messages.filter((message) => message.role === "user").map((message) => message.content).filter(
1840
+ (content) => Array.isArray(content)
1841
+ ).flat().filter(
1842
+ (part) => part.type === "image" || part.type === "file"
1843
+ ).map((part) => {
1844
+ var _a9;
1845
+ const mediaType = (_a9 = part.mediaType) != null ? _a9 : part.type === "image" ? "image/*" : void 0;
1846
+ let data = part.type === "image" ? part.image : part.data;
1847
+ if (typeof data === "string") {
1848
+ try {
1849
+ data = new URL(data);
1850
+ } catch (ignored) {
1851
+ }
1852
+ }
1853
+ return { mediaType, data };
1854
+ }).filter(
1855
+ (part) => part.data instanceof URL && part.mediaType != null && !(0, import_provider_utils6.isUrlSupported)({
1856
+ url: part.data.toString(),
1857
+ mediaType: part.mediaType,
1858
+ supportedUrls
1859
+ })
1860
+ ).map((part) => part.data);
1861
+ const downloadedImages = await Promise.all(
1862
+ urls.map(async (url) => ({
1863
+ url,
1864
+ data: await downloadImplementation({ url })
1865
+ }))
1866
+ );
1867
+ return Object.fromEntries(
1868
+ downloadedImages.map(({ url, data }) => [url.toString(), data])
1869
+ );
1870
+ }
1871
+ function convertPartToLanguageModelPart(part, downloadedAssets) {
1872
+ var _a9;
1873
+ if (part.type === "text") {
1874
+ return {
1875
+ type: "text",
1876
+ text: part.text,
1877
+ providerOptions: part.providerOptions
1878
+ };
1879
+ }
1880
+ let originalData;
1881
+ const type = part.type;
1882
+ switch (type) {
1883
+ case "image":
1884
+ originalData = part.image;
1885
+ break;
1886
+ case "file":
1887
+ originalData = part.data;
1888
+ break;
1889
+ default:
1890
+ throw new Error(`Unsupported part type: ${type}`);
1891
+ }
1892
+ const { data: convertedData, mediaType: convertedMediaType } = convertToLanguageModelV2DataContent(originalData);
1893
+ let mediaType = convertedMediaType != null ? convertedMediaType : part.mediaType;
1894
+ let data = convertedData;
1895
+ if (data instanceof URL) {
1896
+ const downloadedFile = downloadedAssets[data.toString()];
1897
+ if (downloadedFile) {
1898
+ data = downloadedFile.data;
1899
+ mediaType != null ? mediaType : mediaType = downloadedFile.mediaType;
1900
+ }
1901
+ }
1902
+ switch (type) {
1903
+ case "image": {
1904
+ if (data instanceof Uint8Array || typeof data === "string") {
1905
+ mediaType = (_a9 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a9 : mediaType;
1906
+ }
1907
+ return {
1908
+ type: "file",
1909
+ mediaType: mediaType != null ? mediaType : "image/*",
1910
+ // any image
1911
+ filename: void 0,
1912
+ data,
1913
+ providerOptions: part.providerOptions
1914
+ };
1915
+ }
1916
+ case "file": {
1917
+ if (mediaType == null) {
1918
+ throw new Error(`Media type is missing for file part`);
1919
+ }
1920
+ return {
1921
+ type: "file",
1922
+ mediaType,
1923
+ filename: part.filename,
1924
+ data,
1925
+ providerOptions: part.providerOptions
1926
+ };
1927
+ }
1928
+ }
1929
+ }
1930
+
1931
+ // src/prompt/prepare-call-settings.ts
1932
+ function prepareCallSettings({
1933
+ maxOutputTokens,
1934
+ temperature,
1935
+ topP,
1936
+ topK,
1937
+ presencePenalty,
1938
+ frequencyPenalty,
1939
+ seed,
1940
+ stopSequences
1941
+ }) {
1942
+ if (maxOutputTokens != null) {
1943
+ if (!Number.isInteger(maxOutputTokens)) {
1944
+ throw new InvalidArgumentError({
1945
+ parameter: "maxOutputTokens",
1946
+ value: maxOutputTokens,
1947
+ message: "maxOutputTokens must be an integer"
1948
+ });
1949
+ }
1950
+ if (maxOutputTokens < 1) {
1951
+ throw new InvalidArgumentError({
1952
+ parameter: "maxOutputTokens",
1953
+ value: maxOutputTokens,
1954
+ message: "maxOutputTokens must be >= 1"
1955
+ });
1956
+ }
1957
+ }
1958
+ if (temperature != null) {
1959
+ if (typeof temperature !== "number") {
1960
+ throw new InvalidArgumentError({
1961
+ parameter: "temperature",
1962
+ value: temperature,
1963
+ message: "temperature must be a number"
1964
+ });
1965
+ }
1966
+ }
1967
+ if (topP != null) {
1968
+ if (typeof topP !== "number") {
1969
+ throw new InvalidArgumentError({
1970
+ parameter: "topP",
1971
+ value: topP,
1972
+ message: "topP must be a number"
1973
+ });
1974
+ }
1975
+ }
1976
+ if (topK != null) {
1977
+ if (typeof topK !== "number") {
1978
+ throw new InvalidArgumentError({
1979
+ parameter: "topK",
1980
+ value: topK,
1981
+ message: "topK must be a number"
1982
+ });
1983
+ }
1984
+ }
1985
+ if (presencePenalty != null) {
1986
+ if (typeof presencePenalty !== "number") {
1987
+ throw new InvalidArgumentError({
1988
+ parameter: "presencePenalty",
1989
+ value: presencePenalty,
1990
+ message: "presencePenalty must be a number"
1991
+ });
1992
+ }
1993
+ }
1994
+ if (frequencyPenalty != null) {
1995
+ if (typeof frequencyPenalty !== "number") {
1996
+ throw new InvalidArgumentError({
1997
+ parameter: "frequencyPenalty",
1998
+ value: frequencyPenalty,
1999
+ message: "frequencyPenalty must be a number"
2000
+ });
2001
+ }
2002
+ }
2003
+ if (seed != null) {
2004
+ if (!Number.isInteger(seed)) {
2005
+ throw new InvalidArgumentError({
2006
+ parameter: "seed",
2007
+ value: seed,
2008
+ message: "seed must be an integer"
2009
+ });
2010
+ }
2011
+ }
2012
+ return {
2013
+ maxOutputTokens,
2014
+ temperature,
2015
+ topP,
2016
+ topK,
2017
+ presencePenalty,
2018
+ frequencyPenalty,
2019
+ stopSequences,
2020
+ seed
2021
+ };
2022
+ }
2023
+
2024
+ // src/prompt/prepare-tools-and-tool-choice.ts
2025
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
2026
+
2027
+ // src/util/is-non-empty-object.ts
2028
+ function isNonEmptyObject(object) {
2029
+ return object != null && Object.keys(object).length > 0;
2030
+ }
2031
+
2032
+ // src/prompt/prepare-tools-and-tool-choice.ts
2033
+ function prepareToolsAndToolChoice({
2034
+ tools,
2035
+ toolChoice,
2036
+ activeTools
2037
+ }) {
2038
+ if (!isNonEmptyObject(tools)) {
2039
+ return {
2040
+ tools: void 0,
2041
+ toolChoice: void 0
2042
+ };
2043
+ }
2044
+ const filteredTools = activeTools != null ? Object.entries(tools).filter(
2045
+ ([name9]) => activeTools.includes(name9)
2046
+ ) : Object.entries(tools);
2047
+ return {
2048
+ tools: filteredTools.map(([name9, tool]) => {
2049
+ const toolType = tool.type;
2050
+ switch (toolType) {
2051
+ case void 0:
2052
+ case "function":
2053
+ return {
2054
+ type: "function",
2055
+ name: name9,
2056
+ description: tool.description,
2057
+ inputSchema: (0, import_provider_utils7.asSchema)(tool.inputSchema).jsonSchema
2058
+ };
2059
+ case "provider-defined":
2060
+ return {
2061
+ type: "provider-defined",
2062
+ name: name9,
2063
+ id: tool.id,
2064
+ args: tool.args
2065
+ };
2066
+ default: {
2067
+ const exhaustiveCheck = toolType;
2068
+ throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);
2069
+ }
2070
+ }
2071
+ }),
2072
+ toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
2073
+ };
2074
+ }
2075
+
2076
+ // src/prompt/resolve-language-model.ts
2077
+ var import_gateway = require("@ai-sdk/gateway");
2078
+
2079
+ // src/error/index.ts
2080
+ var import_provider12 = require("@ai-sdk/provider");
2081
+
2082
+ // src/error/invalid-tool-input-error.ts
2083
+ var import_provider8 = require("@ai-sdk/provider");
2084
+ var name6 = "AI_InvalidToolInputError";
2085
+ var marker6 = `vercel.ai.error.${name6}`;
2086
+ var symbol6 = Symbol.for(marker6);
2087
+ var _a6;
2088
+ var InvalidToolInputError = class extends import_provider8.AISDKError {
2089
+ constructor({
2090
+ toolInput,
2091
+ toolName,
2092
+ cause,
2093
+ message = `Invalid input for tool ${toolName}: ${(0, import_provider8.getErrorMessage)(cause)}`
2094
+ }) {
2095
+ super({ name: name6, message, cause });
2096
+ this[_a6] = true;
2097
+ this.toolInput = toolInput;
2098
+ this.toolName = toolName;
2099
+ }
2100
+ static isInstance(error) {
2101
+ return import_provider8.AISDKError.hasMarker(error, marker6);
2102
+ }
2103
+ };
2104
+ _a6 = symbol6;
2105
+
2106
+ // src/error/no-such-tool-error.ts
2107
+ var import_provider9 = require("@ai-sdk/provider");
2108
+ var name7 = "AI_NoSuchToolError";
2109
+ var marker7 = `vercel.ai.error.${name7}`;
2110
+ var symbol7 = Symbol.for(marker7);
2111
+ var _a7;
2112
+ var NoSuchToolError = class extends import_provider9.AISDKError {
2113
+ constructor({
2114
+ toolName,
2115
+ availableTools = void 0,
2116
+ message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
2117
+ }) {
2118
+ super({ name: name7, message });
2119
+ this[_a7] = true;
2120
+ this.toolName = toolName;
2121
+ this.availableTools = availableTools;
2122
+ }
2123
+ static isInstance(error) {
2124
+ return import_provider9.AISDKError.hasMarker(error, marker7);
2125
+ }
2126
+ };
2127
+ _a7 = symbol7;
2128
+
2129
+ // src/error/tool-call-repair-error.ts
2130
+ var import_provider10 = require("@ai-sdk/provider");
2131
+ var name8 = "AI_ToolCallRepairError";
2132
+ var marker8 = `vercel.ai.error.${name8}`;
2133
+ var symbol8 = Symbol.for(marker8);
2134
+ var _a8;
2135
+ var ToolCallRepairError = class extends import_provider10.AISDKError {
2136
+ constructor({
2137
+ cause,
2138
+ originalError,
2139
+ message = `Error repairing tool call: ${(0, import_provider10.getErrorMessage)(cause)}`
2140
+ }) {
2141
+ super({ name: name8, message, cause });
2142
+ this[_a8] = true;
2143
+ this.originalError = originalError;
2144
+ }
2145
+ static isInstance(error) {
2146
+ return import_provider10.AISDKError.hasMarker(error, marker8);
2147
+ }
2148
+ };
2149
+ _a8 = symbol8;
2150
+
2151
+ // src/error/unsupported-model-version-error.ts
2152
+ var import_provider11 = require("@ai-sdk/provider");
2153
+ var UnsupportedModelVersionError = class extends import_provider11.AISDKError {
2154
+ constructor(options) {
2155
+ super({
2156
+ name: "AI_UnsupportedModelVersionError",
2157
+ message: `Unsupported model version ${options.version} for provider "${options.provider}" and model "${options.modelId}". AI SDK 5 only supports models that implement specification version "v2".`
2158
+ });
2159
+ this.version = options.version;
2160
+ this.provider = options.provider;
2161
+ this.modelId = options.modelId;
2162
+ }
2163
+ };
2164
+
2165
+ // src/prompt/resolve-language-model.ts
2166
+ function resolveLanguageModel(model) {
2167
+ if (typeof model !== "string") {
2168
+ if (model.specificationVersion !== "v2") {
2169
+ throw new UnsupportedModelVersionError({
2170
+ version: model.specificationVersion,
2171
+ provider: model.provider,
2172
+ modelId: model.modelId
2173
+ });
2174
+ }
2175
+ return model;
2176
+ }
2177
+ const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
2178
+ return (globalProvider != null ? globalProvider : import_gateway.gateway).languageModel(model);
2179
+ }
2180
+
2181
+ // src/prompt/standardize-prompt.ts
2182
+ var import_provider13 = require("@ai-sdk/provider");
2183
+ var import_provider_utils8 = require("@ai-sdk/provider-utils");
2184
+ var import_v47 = require("zod/v4");
2185
+
2186
+ // src/prompt/message.ts
2187
+ var import_v46 = require("zod/v4");
2188
+
2189
+ // src/types/provider-metadata.ts
2190
+ var import_v44 = require("zod/v4");
2191
+
2192
+ // src/types/json-value.ts
2193
+ var import_v43 = require("zod/v4");
2194
+ var jsonValueSchema = import_v43.z.lazy(
2195
+ () => import_v43.z.union([
2196
+ import_v43.z.null(),
2197
+ import_v43.z.string(),
2198
+ import_v43.z.number(),
2199
+ import_v43.z.boolean(),
2200
+ import_v43.z.record(import_v43.z.string(), jsonValueSchema),
2201
+ import_v43.z.array(jsonValueSchema)
2202
+ ])
2203
+ );
2204
+
2205
+ // src/types/provider-metadata.ts
2206
+ var providerMetadataSchema = import_v44.z.record(
2207
+ import_v44.z.string(),
2208
+ import_v44.z.record(import_v44.z.string(), jsonValueSchema)
2209
+ );
2210
+
2211
+ // src/prompt/content-part.ts
2212
+ var import_v45 = require("zod/v4");
2213
+ var textPartSchema = import_v45.z.object({
2214
+ type: import_v45.z.literal("text"),
2215
+ text: import_v45.z.string(),
2216
+ providerOptions: providerMetadataSchema.optional()
2217
+ });
2218
+ var imagePartSchema = import_v45.z.object({
2219
+ type: import_v45.z.literal("image"),
2220
+ image: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2221
+ mediaType: import_v45.z.string().optional(),
2222
+ providerOptions: providerMetadataSchema.optional()
2223
+ });
2224
+ var filePartSchema = import_v45.z.object({
2225
+ type: import_v45.z.literal("file"),
2226
+ data: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2227
+ filename: import_v45.z.string().optional(),
2228
+ mediaType: import_v45.z.string(),
2229
+ providerOptions: providerMetadataSchema.optional()
2230
+ });
2231
+ var reasoningPartSchema = import_v45.z.object({
2232
+ type: import_v45.z.literal("reasoning"),
2233
+ text: import_v45.z.string(),
2234
+ providerOptions: providerMetadataSchema.optional()
2235
+ });
2236
+ var toolCallPartSchema = import_v45.z.object({
2237
+ type: import_v45.z.literal("tool-call"),
2238
+ toolCallId: import_v45.z.string(),
2239
+ toolName: import_v45.z.string(),
2240
+ input: import_v45.z.unknown(),
2241
+ providerOptions: providerMetadataSchema.optional(),
2242
+ providerExecuted: import_v45.z.boolean().optional()
2243
+ });
2244
+ var outputSchema = import_v45.z.discriminatedUnion("type", [
2245
+ import_v45.z.object({
2246
+ type: import_v45.z.literal("text"),
2247
+ value: import_v45.z.string()
2248
+ }),
2249
+ import_v45.z.object({
2250
+ type: import_v45.z.literal("json"),
2251
+ value: jsonValueSchema
2252
+ }),
2253
+ import_v45.z.object({
2254
+ type: import_v45.z.literal("error-text"),
2255
+ value: import_v45.z.string()
2256
+ }),
2257
+ import_v45.z.object({
2258
+ type: import_v45.z.literal("error-json"),
2259
+ value: jsonValueSchema
2260
+ }),
2261
+ import_v45.z.object({
2262
+ type: import_v45.z.literal("content"),
2263
+ value: import_v45.z.array(
2264
+ import_v45.z.union([
2265
+ import_v45.z.object({
2266
+ type: import_v45.z.literal("text"),
2267
+ text: import_v45.z.string()
2268
+ }),
2269
+ import_v45.z.object({
2270
+ type: import_v45.z.literal("media"),
2271
+ data: import_v45.z.string(),
2272
+ mediaType: import_v45.z.string()
2273
+ })
2274
+ ])
2275
+ )
2276
+ })
2277
+ ]);
2278
+ var toolResultPartSchema = import_v45.z.object({
2279
+ type: import_v45.z.literal("tool-result"),
2280
+ toolCallId: import_v45.z.string(),
2281
+ toolName: import_v45.z.string(),
2282
+ output: outputSchema,
2283
+ providerOptions: providerMetadataSchema.optional()
2284
+ });
2285
+
2286
+ // src/prompt/message.ts
2287
+ var systemModelMessageSchema = import_v46.z.object(
2288
+ {
2289
+ role: import_v46.z.literal("system"),
2290
+ content: import_v46.z.string(),
2291
+ providerOptions: providerMetadataSchema.optional()
2292
+ }
2293
+ );
2294
+ var userModelMessageSchema = import_v46.z.object({
2295
+ role: import_v46.z.literal("user"),
2296
+ content: import_v46.z.union([
2297
+ import_v46.z.string(),
2298
+ import_v46.z.array(import_v46.z.union([textPartSchema, imagePartSchema, filePartSchema]))
2299
+ ]),
2300
+ providerOptions: providerMetadataSchema.optional()
2301
+ });
2302
+ var assistantModelMessageSchema = import_v46.z.object({
2303
+ role: import_v46.z.literal("assistant"),
2304
+ content: import_v46.z.union([
2305
+ import_v46.z.string(),
2306
+ import_v46.z.array(
2307
+ import_v46.z.union([
2308
+ textPartSchema,
2309
+ filePartSchema,
2310
+ reasoningPartSchema,
2311
+ toolCallPartSchema,
2312
+ toolResultPartSchema
2313
+ ])
2314
+ )
2315
+ ]),
2316
+ providerOptions: providerMetadataSchema.optional()
2317
+ });
2318
+ var toolModelMessageSchema = import_v46.z.object({
2319
+ role: import_v46.z.literal("tool"),
2320
+ content: import_v46.z.array(toolResultPartSchema),
2321
+ providerOptions: providerMetadataSchema.optional()
2322
+ });
2323
+ var modelMessageSchema = import_v46.z.union([
2324
+ systemModelMessageSchema,
2325
+ userModelMessageSchema,
2326
+ assistantModelMessageSchema,
2327
+ toolModelMessageSchema
2328
+ ]);
2329
+
2330
+ // src/prompt/standardize-prompt.ts
2331
+ async function standardizePrompt(prompt) {
2332
+ if (prompt.prompt == null && prompt.messages == null) {
2333
+ throw new import_provider13.InvalidPromptError({
2334
+ prompt,
2335
+ message: "prompt or messages must be defined"
2336
+ });
2337
+ }
2338
+ if (prompt.prompt != null && prompt.messages != null) {
2339
+ throw new import_provider13.InvalidPromptError({
2340
+ prompt,
2341
+ message: "prompt and messages cannot be defined at the same time"
2342
+ });
2343
+ }
2344
+ if (prompt.system != null && typeof prompt.system !== "string") {
2345
+ throw new import_provider13.InvalidPromptError({
2346
+ prompt,
2347
+ message: "system must be a string"
2348
+ });
2349
+ }
2350
+ let messages;
2351
+ if (prompt.prompt != null && typeof prompt.prompt === "string") {
2352
+ messages = [{ role: "user", content: prompt.prompt }];
2353
+ } else if (prompt.prompt != null && Array.isArray(prompt.prompt)) {
2354
+ messages = prompt.prompt;
2355
+ } else if (prompt.messages != null) {
2356
+ messages = prompt.messages;
2357
+ } else {
2358
+ throw new import_provider13.InvalidPromptError({
2359
+ prompt,
2360
+ message: "prompt or messages must be defined"
2361
+ });
2362
+ }
2363
+ if (messages.length === 0) {
2364
+ throw new import_provider13.InvalidPromptError({
2365
+ prompt,
2366
+ message: "messages must not be empty"
2367
+ });
2368
+ }
2369
+ const validationResult = await (0, import_provider_utils8.safeValidateTypes)({
2370
+ value: messages,
2371
+ schema: import_v47.z.array(modelMessageSchema)
2372
+ });
2373
+ if (!validationResult.success) {
2374
+ throw new import_provider13.InvalidPromptError({
2375
+ prompt,
2376
+ message: "The messages must be a ModelMessage[]. If you have passed a UIMessage[], you can use convertToModelMessages to convert them.",
2377
+ cause: validationResult.error
2378
+ });
2379
+ }
2380
+ return {
2381
+ messages,
2382
+ system: prompt.system
2383
+ };
2384
+ }
2385
+
2386
+ // src/prompt/wrap-gateway-error.ts
2387
+ var import_gateway2 = require("@ai-sdk/gateway");
2388
+ var import_provider14 = require("@ai-sdk/provider");
2389
+ function wrapGatewayError(error) {
2390
+ if (import_gateway2.GatewayAuthenticationError.isInstance(error) || import_gateway2.GatewayModelNotFoundError.isInstance(error)) {
2391
+ return new import_provider14.AISDKError({
2392
+ name: "GatewayError",
2393
+ message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
2394
+ cause: error
2395
+ });
2396
+ }
2397
+ return error;
2398
+ }
2399
+
2400
+ // src/telemetry/assemble-operation-name.ts
2401
+ function assembleOperationName({
2402
+ operationId,
2403
+ telemetry
2404
+ }) {
2405
+ return {
2406
+ // standardized operation and resource name:
2407
+ "operation.name": `${operationId}${(telemetry == null ? void 0 : telemetry.functionId) != null ? ` ${telemetry.functionId}` : ""}`,
2408
+ "resource.name": telemetry == null ? void 0 : telemetry.functionId,
2409
+ // detailed, AI SDK specific data:
2410
+ "ai.operationId": operationId,
2411
+ "ai.telemetry.functionId": telemetry == null ? void 0 : telemetry.functionId
2412
+ };
2413
+ }
2414
+
2415
+ // src/telemetry/get-base-telemetry-attributes.ts
2416
+ function getBaseTelemetryAttributes({
2417
+ model,
2418
+ settings,
2419
+ telemetry,
2420
+ headers
2421
+ }) {
2422
+ var _a9;
2423
+ return {
2424
+ "ai.model.provider": model.provider,
2425
+ "ai.model.id": model.modelId,
2426
+ // settings:
2427
+ ...Object.entries(settings).reduce((attributes, [key, value]) => {
2428
+ attributes[`ai.settings.${key}`] = value;
2429
+ return attributes;
2430
+ }, {}),
2431
+ // add metadata as attributes:
2432
+ ...Object.entries((_a9 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a9 : {}).reduce(
2433
+ (attributes, [key, value]) => {
2434
+ attributes[`ai.telemetry.metadata.${key}`] = value;
2435
+ return attributes;
2436
+ },
2437
+ {}
2438
+ ),
2439
+ // request headers
2440
+ ...Object.entries(headers != null ? headers : {}).reduce((attributes, [key, value]) => {
2441
+ if (value !== void 0) {
2442
+ attributes[`ai.request.headers.${key}`] = value;
2443
+ }
2444
+ return attributes;
2445
+ }, {})
2446
+ };
2447
+ }
2448
+
2449
+ // src/telemetry/get-tracer.ts
2450
+ var import_api = require("@opentelemetry/api");
2451
+
2452
+ // src/telemetry/noop-tracer.ts
2453
+ var noopTracer = {
2454
+ startSpan() {
2455
+ return noopSpan;
2456
+ },
2457
+ startActiveSpan(name9, arg1, arg2, arg3) {
2458
+ if (typeof arg1 === "function") {
2459
+ return arg1(noopSpan);
2460
+ }
2461
+ if (typeof arg2 === "function") {
2462
+ return arg2(noopSpan);
2463
+ }
2464
+ if (typeof arg3 === "function") {
2465
+ return arg3(noopSpan);
2466
+ }
2467
+ }
2468
+ };
2469
+ var noopSpan = {
2470
+ spanContext() {
2471
+ return noopSpanContext;
2472
+ },
2473
+ setAttribute() {
2474
+ return this;
2475
+ },
2476
+ setAttributes() {
2477
+ return this;
2478
+ },
2479
+ addEvent() {
2480
+ return this;
2481
+ },
2482
+ addLink() {
2483
+ return this;
2484
+ },
2485
+ addLinks() {
2486
+ return this;
2487
+ },
2488
+ setStatus() {
2489
+ return this;
2490
+ },
2491
+ updateName() {
2492
+ return this;
2493
+ },
2494
+ end() {
2495
+ return this;
2496
+ },
2497
+ isRecording() {
2498
+ return false;
2499
+ },
2500
+ recordException() {
2501
+ return this;
2502
+ }
2503
+ };
2504
+ var noopSpanContext = {
2505
+ traceId: "",
2506
+ spanId: "",
2507
+ traceFlags: 0
2508
+ };
2509
+
2510
+ // src/telemetry/get-tracer.ts
2511
+ function getTracer({
2512
+ isEnabled = false,
2513
+ tracer
2514
+ } = {}) {
2515
+ if (!isEnabled) {
2516
+ return noopTracer;
2517
+ }
2518
+ if (tracer) {
2519
+ return tracer;
2520
+ }
2521
+ return import_api.trace.getTracer("ai");
2522
+ }
2523
+
2524
+ // src/telemetry/record-span.ts
2525
+ var import_api2 = require("@opentelemetry/api");
2526
+ function recordSpan({
2527
+ name: name9,
2528
+ tracer,
2529
+ attributes,
2530
+ fn,
2531
+ endWhenDone = true
2532
+ }) {
2533
+ return tracer.startActiveSpan(name9, { attributes }, async (span) => {
2534
+ try {
2535
+ const result = await fn(span);
2536
+ if (endWhenDone) {
2537
+ span.end();
2538
+ }
2539
+ return result;
2540
+ } catch (error) {
2541
+ try {
2542
+ recordErrorOnSpan(span, error);
2543
+ } finally {
2544
+ span.end();
2545
+ }
2546
+ throw error;
2547
+ }
2548
+ });
2549
+ }
2550
+ function recordErrorOnSpan(span, error) {
2551
+ if (error instanceof Error) {
2552
+ span.recordException({
2553
+ name: error.name,
2554
+ message: error.message,
2555
+ stack: error.stack
2556
+ });
2557
+ span.setStatus({
2558
+ code: import_api2.SpanStatusCode.ERROR,
2559
+ message: error.message
2560
+ });
2561
+ } else {
2562
+ span.setStatus({ code: import_api2.SpanStatusCode.ERROR });
2563
+ }
2564
+ }
2565
+
2566
+ // src/telemetry/select-telemetry-attributes.ts
2567
+ function selectTelemetryAttributes({
2568
+ telemetry,
2569
+ attributes
2570
+ }) {
2571
+ if ((telemetry == null ? void 0 : telemetry.isEnabled) !== true) {
2572
+ return {};
2573
+ }
2574
+ return Object.entries(attributes).reduce((attributes2, [key, value]) => {
2575
+ if (value == null) {
2576
+ return attributes2;
2577
+ }
2578
+ if (typeof value === "object" && "input" in value && typeof value.input === "function") {
2579
+ if ((telemetry == null ? void 0 : telemetry.recordInputs) === false) {
2580
+ return attributes2;
2581
+ }
2582
+ const result = value.input();
2583
+ return result == null ? attributes2 : { ...attributes2, [key]: result };
2584
+ }
2585
+ if (typeof value === "object" && "output" in value && typeof value.output === "function") {
2586
+ if ((telemetry == null ? void 0 : telemetry.recordOutputs) === false) {
2587
+ return attributes2;
2588
+ }
2589
+ const result = value.output();
2590
+ return result == null ? attributes2 : { ...attributes2, [key]: result };
2591
+ }
2592
+ return { ...attributes2, [key]: value };
2593
+ }, {});
2594
+ }
2595
+
2596
+ // src/telemetry/stringify-for-telemetry.ts
2597
+ function stringifyForTelemetry(prompt) {
2598
+ return JSON.stringify(
2599
+ prompt.map((message) => ({
2600
+ ...message,
2601
+ content: typeof message.content === "string" ? message.content : message.content.map(
2602
+ (part) => part.type === "file" ? {
2603
+ ...part,
2604
+ data: part.data instanceof Uint8Array ? convertDataContentToBase64String(part.data) : part.data
2605
+ } : part
2606
+ )
2607
+ }))
2608
+ );
2609
+ }
2610
+
2611
+ // src/types/usage.ts
2612
+ function addLanguageModelUsage(usage1, usage2) {
2613
+ return {
2614
+ inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
2615
+ outputTokens: addTokenCounts(usage1.outputTokens, usage2.outputTokens),
2616
+ totalTokens: addTokenCounts(usage1.totalTokens, usage2.totalTokens),
2617
+ reasoningTokens: addTokenCounts(
2618
+ usage1.reasoningTokens,
2619
+ usage2.reasoningTokens
2620
+ ),
2621
+ cachedInputTokens: addTokenCounts(
2622
+ usage1.cachedInputTokens,
2623
+ usage2.cachedInputTokens
2624
+ )
2625
+ };
2626
+ }
2627
+ function addTokenCounts(tokenCount1, tokenCount2) {
2628
+ return tokenCount1 == null && tokenCount2 == null ? void 0 : (tokenCount1 != null ? tokenCount1 : 0) + (tokenCount2 != null ? tokenCount2 : 0);
2629
+ }
2630
+
2631
+ // src/generate-text/run-tools-transformation.ts
2632
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");
2633
+
2634
+ // src/generate-text/generated-file.ts
2635
+ var import_provider_utils9 = require("@ai-sdk/provider-utils");
2636
+ var DefaultGeneratedFile = class {
2637
+ constructor({
2638
+ data,
2639
+ mediaType
2640
+ }) {
2641
+ const isUint8Array = data instanceof Uint8Array;
2642
+ this.base64Data = isUint8Array ? void 0 : data;
2643
+ this.uint8ArrayData = isUint8Array ? data : void 0;
2644
+ this.mediaType = mediaType;
2645
+ }
2646
+ // lazy conversion with caching to avoid unnecessary conversion overhead:
2647
+ get base64() {
2648
+ if (this.base64Data == null) {
2649
+ this.base64Data = (0, import_provider_utils9.convertUint8ArrayToBase64)(this.uint8ArrayData);
2650
+ }
2651
+ return this.base64Data;
2652
+ }
2653
+ // lazy conversion with caching to avoid unnecessary conversion overhead:
2654
+ get uint8Array() {
2655
+ if (this.uint8ArrayData == null) {
2656
+ this.uint8ArrayData = (0, import_provider_utils9.convertBase64ToUint8Array)(this.base64Data);
2657
+ }
2658
+ return this.uint8ArrayData;
2659
+ }
2660
+ };
2661
+ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
2662
+ constructor(options) {
2663
+ super(options);
2664
+ this.type = "file";
2665
+ }
2666
+ };
2667
+
2668
+ // src/generate-text/parse-tool-call.ts
2669
+ var import_provider_utils10 = require("@ai-sdk/provider-utils");
2670
+ async function parseToolCall({
2671
+ toolCall,
2672
+ tools,
2673
+ repairToolCall,
2674
+ system,
2675
+ messages
2676
+ }) {
2677
+ if (tools == null) {
2678
+ throw new NoSuchToolError({ toolName: toolCall.toolName });
2679
+ }
2680
+ try {
2681
+ return await doParseToolCall({ toolCall, tools });
2682
+ } catch (error) {
2683
+ if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolInputError.isInstance(error))) {
2684
+ throw error;
2685
+ }
2686
+ let repairedToolCall = null;
2687
+ try {
2688
+ repairedToolCall = await repairToolCall({
2689
+ toolCall,
2690
+ tools,
2691
+ inputSchema: ({ toolName }) => {
2692
+ const { inputSchema } = tools[toolName];
2693
+ return (0, import_provider_utils10.asSchema)(inputSchema).jsonSchema;
2694
+ },
2695
+ system,
2696
+ messages,
2697
+ error
2698
+ });
2699
+ } catch (repairError) {
2700
+ throw new ToolCallRepairError({
2701
+ cause: repairError,
2702
+ originalError: error
2703
+ });
2704
+ }
2705
+ if (repairedToolCall == null) {
2706
+ throw error;
2707
+ }
2708
+ return await doParseToolCall({ toolCall: repairedToolCall, tools });
2709
+ }
2710
+ }
2711
+ async function doParseToolCall({
2712
+ toolCall,
2713
+ tools
2714
+ }) {
2715
+ const toolName = toolCall.toolName;
2716
+ const tool = tools[toolName];
2717
+ if (tool == null) {
2718
+ throw new NoSuchToolError({
2719
+ toolName: toolCall.toolName,
2720
+ availableTools: Object.keys(tools)
2721
+ });
2722
+ }
2723
+ const schema = (0, import_provider_utils10.asSchema)(tool.inputSchema);
2724
+ const parseResult = toolCall.input.trim() === "" ? await (0, import_provider_utils10.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils10.safeParseJSON)({ text: toolCall.input, schema });
2725
+ if (parseResult.success === false) {
2726
+ throw new InvalidToolInputError({
2727
+ toolName,
2728
+ toolInput: toolCall.input,
2729
+ cause: parseResult.error
2730
+ });
2731
+ }
2732
+ return {
2733
+ type: "tool-call",
2734
+ toolCallId: toolCall.toolCallId,
2735
+ toolName,
2736
+ input: parseResult.value,
2737
+ providerExecuted: toolCall.providerExecuted,
2738
+ providerMetadata: toolCall.providerMetadata
2739
+ };
2740
+ }
2741
+
2742
+ // src/generate-text/run-tools-transformation.ts
2743
+ function runToolsTransformation({
2744
+ tools,
2745
+ generatorStream,
2746
+ tracer,
2747
+ telemetry,
2748
+ system,
2749
+ messages,
2750
+ abortSignal,
2751
+ repairToolCall
2752
+ }) {
2753
+ let toolResultsStreamController = null;
2754
+ const toolResultsStream = new ReadableStream({
2755
+ start(controller) {
2756
+ toolResultsStreamController = controller;
2757
+ }
2758
+ });
2759
+ const outstandingToolResults = /* @__PURE__ */ new Set();
2760
+ const toolInputs = /* @__PURE__ */ new Map();
2761
+ let canClose = false;
2762
+ let finishChunk = void 0;
2763
+ function attemptClose() {
2764
+ if (canClose && outstandingToolResults.size === 0) {
2765
+ if (finishChunk != null) {
2766
+ toolResultsStreamController.enqueue(finishChunk);
2767
+ }
2768
+ toolResultsStreamController.close();
2769
+ }
2770
+ }
2771
+ const forwardStream = new TransformStream({
2772
+ async transform(chunk, controller) {
2773
+ const chunkType = chunk.type;
2774
+ switch (chunkType) {
2775
+ case "stream-start":
2776
+ case "text-start":
2777
+ case "text-delta":
2778
+ case "text-end":
2779
+ case "reasoning-start":
2780
+ case "reasoning-delta":
2781
+ case "reasoning-end":
2782
+ case "tool-input-start":
2783
+ case "tool-input-delta":
2784
+ case "tool-input-end":
2785
+ case "source":
2786
+ case "response-metadata":
2787
+ case "error":
2788
+ case "raw": {
2789
+ controller.enqueue(chunk);
2790
+ break;
2791
+ }
2792
+ case "file": {
2793
+ controller.enqueue({
2794
+ type: "file",
2795
+ file: new DefaultGeneratedFileWithType({
2796
+ data: chunk.data,
2797
+ mediaType: chunk.mediaType
2798
+ })
2799
+ });
2800
+ break;
2801
+ }
2802
+ case "finish": {
2803
+ finishChunk = {
2804
+ type: "finish",
2805
+ finishReason: chunk.finishReason,
2806
+ usage: chunk.usage,
2807
+ providerMetadata: chunk.providerMetadata
2808
+ };
2809
+ break;
2810
+ }
2811
+ case "tool-call": {
2812
+ try {
2813
+ const toolCall = await parseToolCall({
2814
+ toolCall: chunk,
2815
+ tools,
2816
+ repairToolCall,
2817
+ system,
2818
+ messages
2819
+ });
2820
+ controller.enqueue(toolCall);
2821
+ const tool = tools[toolCall.toolName];
2822
+ toolInputs.set(toolCall.toolCallId, toolCall.input);
2823
+ if (tool.onInputAvailable != null) {
2824
+ await tool.onInputAvailable({
2825
+ input: toolCall.input,
2826
+ toolCallId: toolCall.toolCallId,
2827
+ messages,
2828
+ abortSignal
2829
+ });
2830
+ }
2831
+ if (tool.execute != null && toolCall.providerExecuted !== true) {
2832
+ const toolExecutionId = (0, import_provider_utils11.generateId)();
2833
+ outstandingToolResults.add(toolExecutionId);
2834
+ recordSpan({
2835
+ name: "ai.toolCall",
2836
+ attributes: selectTelemetryAttributes({
2837
+ telemetry,
2838
+ attributes: {
2839
+ ...assembleOperationName({
2840
+ operationId: "ai.toolCall",
2841
+ telemetry
2842
+ }),
2843
+ "ai.toolCall.name": toolCall.toolName,
2844
+ "ai.toolCall.id": toolCall.toolCallId,
2845
+ "ai.toolCall.input": {
2846
+ output: () => JSON.stringify(toolCall.input)
2847
+ }
2848
+ }
2849
+ }),
2850
+ tracer,
2851
+ fn: async (span) => {
2852
+ let output;
2853
+ try {
2854
+ output = await tool.execute(toolCall.input, {
2855
+ toolCallId: toolCall.toolCallId,
2856
+ messages,
2857
+ abortSignal
2858
+ });
2859
+ } catch (error) {
2860
+ recordErrorOnSpan(span, error);
2861
+ toolResultsStreamController.enqueue({
2862
+ ...toolCall,
2863
+ type: "tool-error",
2864
+ error
2865
+ });
2866
+ outstandingToolResults.delete(toolExecutionId);
2867
+ attemptClose();
2868
+ return;
2869
+ }
2870
+ toolResultsStreamController.enqueue({
2871
+ ...toolCall,
2872
+ type: "tool-result",
2873
+ output
2874
+ });
2875
+ outstandingToolResults.delete(toolExecutionId);
2876
+ attemptClose();
2877
+ try {
2878
+ span.setAttributes(
2879
+ selectTelemetryAttributes({
2880
+ telemetry,
2881
+ attributes: {
2882
+ "ai.toolCall.output": {
2883
+ output: () => JSON.stringify(output)
2884
+ }
2885
+ }
2886
+ })
2887
+ );
2888
+ } catch (ignored) {
2889
+ }
2890
+ }
2891
+ });
2892
+ }
2893
+ } catch (error) {
2894
+ toolResultsStreamController.enqueue({ type: "error", error });
2895
+ }
2896
+ break;
2897
+ }
2898
+ case "tool-result": {
2899
+ const toolName = chunk.toolName;
2900
+ if (chunk.isError) {
2901
+ toolResultsStreamController.enqueue({
2902
+ type: "tool-error",
2903
+ toolCallId: chunk.toolCallId,
2904
+ toolName,
2905
+ input: toolInputs.get(chunk.toolCallId),
2906
+ providerExecuted: chunk.providerExecuted,
2907
+ error: chunk.result
2908
+ });
2909
+ } else {
2910
+ controller.enqueue({
2911
+ type: "tool-result",
2912
+ toolCallId: chunk.toolCallId,
2913
+ toolName,
2914
+ input: toolInputs.get(chunk.toolCallId),
2915
+ output: chunk.result,
2916
+ providerExecuted: chunk.providerExecuted
2917
+ });
2918
+ }
2919
+ break;
2920
+ }
2921
+ default: {
2922
+ const _exhaustiveCheck = chunkType;
2923
+ throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
2924
+ }
2925
+ }
2926
+ },
2927
+ flush() {
2928
+ canClose = true;
2929
+ attemptClose();
2930
+ }
2931
+ });
2932
+ return new ReadableStream({
2933
+ async start(controller) {
2934
+ return Promise.all([
2935
+ generatorStream.pipeThrough(forwardStream).pipeTo(
2936
+ new WritableStream({
2937
+ write(chunk) {
2938
+ controller.enqueue(chunk);
2939
+ },
2940
+ close() {
2941
+ }
2942
+ })
2943
+ ),
2944
+ toolResultsStream.pipeTo(
2945
+ new WritableStream({
2946
+ write(chunk) {
2947
+ controller.enqueue(chunk);
2948
+ },
2949
+ close() {
2950
+ controller.close();
2951
+ }
2952
+ })
2953
+ )
2954
+ ]);
2955
+ }
2956
+ });
2957
+ }
2958
+
2959
+ // src/generate-text/step-result.ts
2960
+ var DefaultStepResult = class {
2961
+ constructor({
2962
+ content,
2963
+ finishReason,
2964
+ usage,
2965
+ warnings,
2966
+ request,
2967
+ response,
2968
+ providerMetadata
2969
+ }) {
2970
+ this.content = content;
2971
+ this.finishReason = finishReason;
2972
+ this.usage = usage;
2973
+ this.warnings = warnings;
2974
+ this.request = request;
2975
+ this.response = response;
2976
+ this.providerMetadata = providerMetadata;
2977
+ }
2978
+ get text() {
2979
+ return this.content.filter((part) => part.type === "text").map((part) => part.text).join("");
2980
+ }
2981
+ get reasoning() {
2982
+ return this.content.filter((part) => part.type === "reasoning");
2983
+ }
2984
+ get reasoningText() {
2985
+ return this.reasoning.length === 0 ? void 0 : this.reasoning.map((part) => part.text).join("");
2986
+ }
2987
+ get files() {
2988
+ return this.content.filter((part) => part.type === "file").map((part) => part.file);
2989
+ }
2990
+ get sources() {
2991
+ return this.content.filter((part) => part.type === "source");
2992
+ }
2993
+ get toolCalls() {
2994
+ return this.content.filter((part) => part.type === "tool-call");
2995
+ }
2996
+ get toolResults() {
2997
+ return this.content.filter((part) => part.type === "tool-result");
2998
+ }
2999
+ };
3000
+
3001
+ // src/generate-text/stop-condition.ts
3002
+ function stepCountIs(stepCount) {
3003
+ return ({ steps }) => steps.length === stepCount;
3004
+ }
3005
+ async function isStopConditionMet({
3006
+ stopConditions,
3007
+ steps
3008
+ }) {
3009
+ return (await Promise.all(stopConditions.map((condition) => condition({ steps })))).some((result) => result);
3010
+ }
3011
+
3012
+ // src/prompt/create-tool-model-output.ts
3013
+ var import_provider15 = require("@ai-sdk/provider");
3014
+ function createToolModelOutput({
3015
+ output,
3016
+ tool,
3017
+ errorMode
3018
+ }) {
3019
+ if (errorMode === "text") {
3020
+ return { type: "error-text", value: (0, import_provider15.getErrorMessage)(output) };
3021
+ } else if (errorMode === "json") {
3022
+ return { type: "error-json", value: output };
3023
+ }
3024
+ if (tool == null ? void 0 : tool.toModelOutput) {
3025
+ return tool.toModelOutput(output);
3026
+ }
3027
+ return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: output };
3028
+ }
3029
+
3030
+ // src/generate-text/to-response-messages.ts
3031
+ function toResponseMessages({
3032
+ content: inputContent,
3033
+ tools
3034
+ }) {
3035
+ const responseMessages = [];
3036
+ const content = inputContent.filter((part) => part.type !== "source").filter(
3037
+ (part) => (part.type !== "tool-result" || part.providerExecuted) && (part.type !== "tool-error" || part.providerExecuted)
3038
+ ).filter((part) => part.type !== "text" || part.text.length > 0).map((part) => {
3039
+ switch (part.type) {
3040
+ case "text":
3041
+ return part;
3042
+ case "reasoning":
3043
+ return {
3044
+ type: "reasoning",
3045
+ text: part.text,
3046
+ providerOptions: part.providerMetadata
3047
+ };
3048
+ case "file":
3049
+ return {
3050
+ type: "file",
3051
+ data: part.file.base64,
3052
+ mediaType: part.file.mediaType,
3053
+ providerOptions: part.providerMetadata
3054
+ };
3055
+ case "tool-call":
3056
+ return {
3057
+ type: "tool-call",
3058
+ toolCallId: part.toolCallId,
3059
+ toolName: part.toolName,
3060
+ input: part.input,
3061
+ providerExecuted: part.providerExecuted,
3062
+ providerOptions: part.providerMetadata
3063
+ };
3064
+ case "tool-result":
3065
+ return {
3066
+ type: "tool-result",
3067
+ toolCallId: part.toolCallId,
3068
+ toolName: part.toolName,
3069
+ output: createToolModelOutput({
3070
+ tool: tools == null ? void 0 : tools[part.toolName],
3071
+ output: part.output,
3072
+ errorMode: "none"
3073
+ }),
3074
+ providerExecuted: true,
3075
+ providerOptions: part.providerMetadata
3076
+ };
3077
+ case "tool-error":
3078
+ return {
3079
+ type: "tool-result",
3080
+ toolCallId: part.toolCallId,
3081
+ toolName: part.toolName,
3082
+ output: createToolModelOutput({
3083
+ tool: tools == null ? void 0 : tools[part.toolName],
3084
+ output: part.error,
3085
+ errorMode: "json"
3086
+ }),
3087
+ providerOptions: part.providerMetadata
3088
+ };
3089
+ }
3090
+ });
3091
+ if (content.length > 0) {
3092
+ responseMessages.push({
3093
+ role: "assistant",
3094
+ content
3095
+ });
3096
+ }
3097
+ const toolResultContent = inputContent.filter((part) => part.type === "tool-result" || part.type === "tool-error").filter((part) => !part.providerExecuted).map((toolResult) => ({
3098
+ type: "tool-result",
3099
+ toolCallId: toolResult.toolCallId,
3100
+ toolName: toolResult.toolName,
3101
+ output: createToolModelOutput({
3102
+ tool: tools == null ? void 0 : tools[toolResult.toolName],
3103
+ output: toolResult.type === "tool-result" ? toolResult.output : toolResult.error,
3104
+ errorMode: toolResult.type === "tool-error" ? "text" : "none"
3105
+ })
3106
+ }));
3107
+ if (toolResultContent.length > 0) {
3108
+ responseMessages.push({
3109
+ role: "tool",
3110
+ content: toolResultContent
3111
+ });
3112
+ }
3113
+ return responseMessages;
3114
+ }
3115
+
3116
+ // src/generate-text/stream-text.ts
3117
+ var originalGenerateId = (0, import_provider_utils12.createIdGenerator)({
3118
+ prefix: "aitxt",
3119
+ size: 24
3120
+ });
3121
/**
 * Streams text (and tool calls / reasoning / files / sources) from a language model.
 *
 * This is a thin entry point: it normalizes the public options object —
 * resolving experimental aliases, applying defaults, and collecting the
 * remaining call settings via rest spread — and hands everything to
 * `DefaultStreamTextResult`, which owns the actual streaming state machine.
 *
 * The returned object exposes the result streams (textStream, fullStream, …)
 * and promise-based accessors (text, usage, steps, …).
 */
function streamText({
  model,
  tools,
  toolChoice,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  headers,
  // by default a single step is executed (no automatic multi-step tool loop)
  stopWhen = stepCountIs(1),
  experimental_output: experimentalOutput,
  experimental_telemetry: telemetrySettings,
  prepareStep,
  providerOptions,
  experimental_activeTools,
  // stable option name wins; experimental alias is the fallback
  activeTools = experimental_activeTools,
  experimental_repairToolCall: toolCallRepairFunction,
  experimental_transform: streamTransforms,
  includeRawChunks = false,
  onChunk,
  // default error handler logs instead of silently dropping stream errors
  onError = ({ error }) => {
    console.error(error);
  },
  onFinish,
  onStepFinish,
  // test seams: clock, id generator, and date factory are injectable
  _internal: {
    now: clock = now,
    generateId: idGenerator = originalGenerateId,
    currentDate = () => /* @__PURE__ */ new Date()
  } = {},
  ...settings
}) {
  return new DefaultStreamTextResult({
    model: resolveLanguageModel(model),
    telemetry: telemetrySettings,
    headers,
    settings,
    maxRetries,
    abortSignal,
    system,
    prompt,
    messages,
    tools,
    toolChoice,
    // single transform or stop condition may be passed bare; normalize to arrays
    transforms: asArray(streamTransforms),
    activeTools,
    repairToolCall: toolCallRepairFunction,
    stopConditions: asArray(stopWhen),
    output: experimentalOutput,
    providerOptions,
    prepareStep,
    includeRawChunks,
    onChunk,
    onError,
    onFinish,
    onStepFinish,
    now: clock,
    currentDate,
    generateId: idGenerator
  });
}
3183
+ function createOutputTransformStream(output) {
3184
+ if (!output) {
3185
+ return new TransformStream({
3186
+ transform(chunk, controller) {
3187
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3188
+ }
3189
+ });
3190
+ }
3191
+ let firstTextChunkId = void 0;
3192
+ let text = "";
3193
+ let textChunk = "";
3194
+ let lastPublishedJson = "";
3195
+ function publishTextChunk({
3196
+ controller,
3197
+ partialOutput = void 0
3198
+ }) {
3199
+ controller.enqueue({
3200
+ part: {
3201
+ type: "text",
3202
+ id: firstTextChunkId,
3203
+ text: textChunk
3204
+ },
3205
+ partialOutput
3206
+ });
3207
+ textChunk = "";
3208
+ }
3209
+ return new TransformStream({
3210
+ async transform(chunk, controller) {
3211
+ if (chunk.type === "finish-step" && textChunk.length > 0) {
3212
+ publishTextChunk({ controller });
3213
+ }
3214
+ if (chunk.type !== "text" && chunk.type !== "text-start" && chunk.type !== "text-end") {
3215
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3216
+ return;
3217
+ }
3218
+ if (firstTextChunkId == null) {
3219
+ firstTextChunkId = chunk.id;
3220
+ } else if (chunk.id !== firstTextChunkId) {
3221
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3222
+ return;
3223
+ }
3224
+ if (chunk.type === "text-start") {
3225
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3226
+ return;
3227
+ }
3228
+ if (chunk.type === "text-end") {
3229
+ if (textChunk.length > 0) {
3230
+ publishTextChunk({ controller });
3231
+ }
3232
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3233
+ return;
3234
+ }
3235
+ text += chunk.text;
3236
+ textChunk += chunk.text;
3237
+ const result = await output.parsePartial({ text });
3238
+ if (result != null) {
3239
+ const currentJson = JSON.stringify(result.partial);
3240
+ if (currentJson !== lastPublishedJson) {
3241
+ publishTextChunk({ controller, partialOutput: result.partial });
3242
+ lastPublishedJson = currentJson;
3243
+ }
3244
+ }
3245
+ }
3246
+ });
3247
+ }
3248
+ var DefaultStreamTextResult = class {
3249
+ constructor({
3250
+ model,
3251
+ telemetry,
3252
+ headers,
3253
+ settings,
3254
+ maxRetries: maxRetriesArg,
3255
+ abortSignal,
3256
+ system,
3257
+ prompt,
3258
+ messages,
3259
+ tools,
3260
+ toolChoice,
3261
+ transforms,
3262
+ activeTools,
3263
+ repairToolCall,
3264
+ stopConditions,
3265
+ output,
3266
+ providerOptions,
3267
+ prepareStep,
3268
+ includeRawChunks,
3269
+ now: now2,
3270
+ currentDate,
3271
+ generateId: generateId2,
3272
+ onChunk,
3273
+ onError,
3274
+ onFinish,
3275
+ onStepFinish
3276
+ }) {
3277
+ this._totalUsage = new DelayedPromise();
3278
+ this._finishReason = new DelayedPromise();
3279
+ this._steps = new DelayedPromise();
3280
+ this.output = output;
3281
+ this.includeRawChunks = includeRawChunks;
3282
+ this.generateId = generateId2;
3283
+ let stepFinish;
3284
+ let recordedContent = [];
3285
+ const recordedResponseMessages = [];
3286
+ let recordedFinishReason = void 0;
3287
+ let recordedTotalUsage = void 0;
3288
+ let recordedRequest = {};
3289
+ let recordedWarnings = [];
3290
+ const recordedSteps = [];
3291
+ let rootSpan;
3292
+ let activeTextContent = {};
3293
+ let activeReasoningContent = {};
3294
+ const eventProcessor = new TransformStream({
3295
+ async transform(chunk, controller) {
3296
+ var _a9, _b;
3297
+ controller.enqueue(chunk);
3298
+ const { part } = chunk;
3299
+ if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
3300
+ await (onChunk == null ? void 0 : onChunk({ chunk: part }));
3301
+ }
3302
+ if (part.type === "error") {
3303
+ await onError({ error: wrapGatewayError(part.error) });
3304
+ }
3305
+ if (part.type === "text-start") {
3306
+ activeTextContent[part.id] = {
3307
+ type: "text",
3308
+ text: "",
3309
+ providerMetadata: part.providerMetadata
3310
+ };
3311
+ recordedContent.push(activeTextContent[part.id]);
3312
+ }
3313
+ if (part.type === "text") {
3314
+ const activeText = activeTextContent[part.id];
3315
+ if (activeText == null) {
3316
+ controller.enqueue({
3317
+ part: {
3318
+ type: "error",
3319
+ error: `text part ${part.id} not found`
3320
+ },
3321
+ partialOutput: void 0
3322
+ });
3323
+ return;
3324
+ }
3325
+ activeText.text += part.text;
3326
+ activeText.providerMetadata = part.providerMetadata;
3327
+ }
3328
+ if (part.type === "text-end") {
3329
+ delete activeTextContent[part.id];
3330
+ }
3331
+ if (part.type === "reasoning-start") {
3332
+ activeReasoningContent[part.id] = {
3333
+ type: "reasoning",
3334
+ text: "",
3335
+ providerMetadata: part.providerMetadata
3336
+ };
3337
+ recordedContent.push(activeReasoningContent[part.id]);
3338
+ }
3339
+ if (part.type === "reasoning") {
3340
+ const activeReasoning = activeReasoningContent[part.id];
3341
+ if (activeReasoning == null) {
3342
+ controller.enqueue({
3343
+ part: {
3344
+ type: "error",
3345
+ error: `reasoning part ${part.id} not found`
3346
+ },
3347
+ partialOutput: void 0
3348
+ });
3349
+ return;
3350
+ }
3351
+ activeReasoning.text += part.text;
3352
+ activeReasoning.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : activeReasoning.providerMetadata;
3353
+ }
3354
+ if (part.type === "reasoning-end") {
3355
+ const activeReasoning = activeReasoningContent[part.id];
3356
+ if (activeReasoning == null) {
3357
+ controller.enqueue({
3358
+ part: {
3359
+ type: "error",
3360
+ error: `reasoning part ${part.id} not found`
3361
+ },
3362
+ partialOutput: void 0
3363
+ });
3364
+ return;
3365
+ }
3366
+ activeReasoning.providerMetadata = (_b = part.providerMetadata) != null ? _b : activeReasoning.providerMetadata;
3367
+ delete activeReasoningContent[part.id];
3368
+ }
3369
+ if (part.type === "file") {
3370
+ recordedContent.push({ type: "file", file: part.file });
3371
+ }
3372
+ if (part.type === "source") {
3373
+ recordedContent.push(part);
3374
+ }
3375
+ if (part.type === "tool-call") {
3376
+ recordedContent.push(part);
3377
+ }
3378
+ if (part.type === "tool-result") {
3379
+ recordedContent.push(part);
3380
+ }
3381
+ if (part.type === "tool-error") {
3382
+ recordedContent.push(part);
3383
+ }
3384
+ if (part.type === "start-step") {
3385
+ recordedRequest = part.request;
3386
+ recordedWarnings = part.warnings;
3387
+ }
3388
+ if (part.type === "finish-step") {
3389
+ const stepMessages = toResponseMessages({
3390
+ content: recordedContent,
3391
+ tools
3392
+ });
3393
+ const currentStepResult = new DefaultStepResult({
3394
+ content: recordedContent,
3395
+ finishReason: part.finishReason,
3396
+ usage: part.usage,
3397
+ warnings: recordedWarnings,
3398
+ request: recordedRequest,
3399
+ response: {
3400
+ ...part.response,
3401
+ messages: [...recordedResponseMessages, ...stepMessages]
3402
+ },
3403
+ providerMetadata: part.providerMetadata
3404
+ });
3405
+ await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
3406
+ recordedSteps.push(currentStepResult);
3407
+ recordedContent = [];
3408
+ activeReasoningContent = {};
3409
+ activeTextContent = {};
3410
+ recordedResponseMessages.push(...stepMessages);
3411
+ stepFinish.resolve();
3412
+ }
3413
+ if (part.type === "finish") {
3414
+ recordedTotalUsage = part.totalUsage;
3415
+ recordedFinishReason = part.finishReason;
3416
+ }
3417
+ },
3418
+ async flush(controller) {
3419
+ try {
3420
+ if (recordedSteps.length === 0) {
3421
+ return;
3422
+ }
3423
+ const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
3424
+ const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : {
3425
+ inputTokens: void 0,
3426
+ outputTokens: void 0,
3427
+ totalTokens: void 0
3428
+ };
3429
+ self._finishReason.resolve(finishReason);
3430
+ self._totalUsage.resolve(totalUsage);
3431
+ self._steps.resolve(recordedSteps);
3432
+ const finalStep = recordedSteps[recordedSteps.length - 1];
3433
+ await (onFinish == null ? void 0 : onFinish({
3434
+ finishReason,
3435
+ totalUsage,
3436
+ usage: finalStep.usage,
3437
+ content: finalStep.content,
3438
+ text: finalStep.text,
3439
+ reasoningText: finalStep.reasoningText,
3440
+ reasoning: finalStep.reasoning,
3441
+ files: finalStep.files,
3442
+ sources: finalStep.sources,
3443
+ toolCalls: finalStep.toolCalls,
3444
+ toolResults: finalStep.toolResults,
3445
+ request: finalStep.request,
3446
+ response: finalStep.response,
3447
+ warnings: finalStep.warnings,
3448
+ providerMetadata: finalStep.providerMetadata,
3449
+ steps: recordedSteps
3450
+ }));
3451
+ rootSpan.setAttributes(
3452
+ selectTelemetryAttributes({
3453
+ telemetry,
3454
+ attributes: {
3455
+ "ai.response.finishReason": finishReason,
3456
+ "ai.response.text": { output: () => finalStep.text },
3457
+ "ai.response.toolCalls": {
3458
+ output: () => {
3459
+ var _a9;
3460
+ return ((_a9 = finalStep.toolCalls) == null ? void 0 : _a9.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
3461
+ }
3462
+ },
3463
+ "ai.response.providerMetadata": JSON.stringify(
3464
+ finalStep.providerMetadata
3465
+ ),
3466
+ "ai.usage.inputTokens": totalUsage.inputTokens,
3467
+ "ai.usage.outputTokens": totalUsage.outputTokens,
3468
+ "ai.usage.totalTokens": totalUsage.totalTokens,
3469
+ "ai.usage.reasoningTokens": totalUsage.reasoningTokens,
3470
+ "ai.usage.cachedInputTokens": totalUsage.cachedInputTokens
3471
+ }
3472
+ })
3473
+ );
3474
+ } catch (error) {
3475
+ controller.error(error);
3476
+ } finally {
3477
+ rootSpan.end();
3478
+ }
3479
+ }
3480
+ });
3481
+ const stitchableStream = createStitchableStream();
3482
+ this.addStream = stitchableStream.addStream;
3483
+ this.closeStream = stitchableStream.close;
3484
+ let stream = stitchableStream.stream;
3485
+ stream = stream.pipeThrough(
3486
+ new TransformStream({
3487
+ start(controller) {
3488
+ controller.enqueue({ type: "start" });
3489
+ }
3490
+ })
3491
+ );
3492
+ for (const transform of transforms) {
3493
+ stream = stream.pipeThrough(
3494
+ transform({
3495
+ tools,
3496
+ stopStream() {
3497
+ stitchableStream.terminate();
3498
+ }
3499
+ })
3500
+ );
3501
+ }
3502
+ this.baseStream = stream.pipeThrough(createOutputTransformStream(output)).pipeThrough(eventProcessor);
3503
+ const { maxRetries, retry } = prepareRetries({
3504
+ maxRetries: maxRetriesArg
3505
+ });
3506
+ const tracer = getTracer(telemetry);
3507
+ const callSettings = prepareCallSettings(settings);
3508
+ const baseTelemetryAttributes = getBaseTelemetryAttributes({
3509
+ model,
3510
+ telemetry,
3511
+ headers,
3512
+ settings: { ...callSettings, maxRetries }
3513
+ });
3514
+ const self = this;
3515
+ recordSpan({
3516
+ name: "ai.streamText",
3517
+ attributes: selectTelemetryAttributes({
3518
+ telemetry,
3519
+ attributes: {
3520
+ ...assembleOperationName({ operationId: "ai.streamText", telemetry }),
3521
+ ...baseTelemetryAttributes,
3522
+ // specific settings that only make sense on the outer level:
3523
+ "ai.prompt": {
3524
+ input: () => JSON.stringify({ system, prompt, messages })
3525
+ }
3526
+ }
3527
+ }),
3528
+ tracer,
3529
+ endWhenDone: false,
3530
+ fn: async (rootSpanArg) => {
3531
+ rootSpan = rootSpanArg;
3532
+ async function streamStep({
3533
+ currentStep,
3534
+ responseMessages,
3535
+ usage
3536
+ }) {
3537
+ var _a9, _b, _c, _d, _e;
3538
+ const includeRawChunks2 = self.includeRawChunks;
3539
+ stepFinish = new DelayedPromise();
3540
+ const initialPrompt = await standardizePrompt({
3541
+ system,
3542
+ prompt,
3543
+ messages
3544
+ });
3545
+ const stepInputMessages = [
3546
+ ...initialPrompt.messages,
3547
+ ...responseMessages
3548
+ ];
3549
+ const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
3550
+ model,
3551
+ steps: recordedSteps,
3552
+ stepNumber: recordedSteps.length,
3553
+ messages: stepInputMessages
3554
+ }));
3555
+ const promptMessages = await convertToLanguageModelPrompt({
3556
+ prompt: {
3557
+ system: (_a9 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a9 : initialPrompt.system,
3558
+ messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
3559
+ },
3560
+ supportedUrls: await model.supportedUrls
3561
+ });
3562
+ const stepModel = resolveLanguageModel(
3563
+ (_c = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _c : model
3564
+ );
3565
+ const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
3566
+ tools,
3567
+ toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
3568
+ activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
3569
+ });
3570
+ const {
3571
+ result: { stream: stream2, response, request },
3572
+ doStreamSpan,
3573
+ startTimestampMs
3574
+ } = await retry(
3575
+ () => recordSpan({
3576
+ name: "ai.streamText.doStream",
3577
+ attributes: selectTelemetryAttributes({
3578
+ telemetry,
3579
+ attributes: {
3580
+ ...assembleOperationName({
3581
+ operationId: "ai.streamText.doStream",
3582
+ telemetry
3583
+ }),
3584
+ ...baseTelemetryAttributes,
3585
+ // model:
3586
+ "ai.model.provider": stepModel.provider,
3587
+ "ai.model.id": stepModel.modelId,
3588
+ // prompt:
3589
+ "ai.prompt.messages": {
3590
+ input: () => stringifyForTelemetry(promptMessages)
3591
+ },
3592
+ "ai.prompt.tools": {
3593
+ // convert the language model level tools:
3594
+ input: () => stepTools == null ? void 0 : stepTools.map((tool) => JSON.stringify(tool))
3595
+ },
3596
+ "ai.prompt.toolChoice": {
3597
+ input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
3598
+ },
3599
+ // standardized gen-ai llm span attributes:
3600
+ "gen_ai.system": stepModel.provider,
3601
+ "gen_ai.request.model": stepModel.modelId,
3602
+ "gen_ai.request.frequency_penalty": callSettings.frequencyPenalty,
3603
+ "gen_ai.request.max_tokens": callSettings.maxOutputTokens,
3604
+ "gen_ai.request.presence_penalty": callSettings.presencePenalty,
3605
+ "gen_ai.request.stop_sequences": callSettings.stopSequences,
3606
+ "gen_ai.request.temperature": callSettings.temperature,
3607
+ "gen_ai.request.top_k": callSettings.topK,
3608
+ "gen_ai.request.top_p": callSettings.topP
3609
+ }
3610
+ }),
3611
+ tracer,
3612
+ endWhenDone: false,
3613
+ fn: async (doStreamSpan2) => {
3614
+ return {
3615
+ startTimestampMs: now2(),
3616
+ // get before the call
3617
+ doStreamSpan: doStreamSpan2,
3618
+ result: await stepModel.doStream({
3619
+ ...callSettings,
3620
+ tools: stepTools,
3621
+ toolChoice: stepToolChoice,
3622
+ responseFormat: output == null ? void 0 : output.responseFormat,
3623
+ prompt: promptMessages,
3624
+ providerOptions,
3625
+ abortSignal,
3626
+ headers,
3627
+ includeRawChunks: includeRawChunks2
3628
+ })
3629
+ };
3630
+ }
3631
+ })
3632
+ );
3633
+ const streamWithToolResults = runToolsTransformation({
3634
+ tools,
3635
+ generatorStream: stream2,
3636
+ tracer,
3637
+ telemetry,
3638
+ system,
3639
+ messages: stepInputMessages,
3640
+ repairToolCall,
3641
+ abortSignal
3642
+ });
3643
+ const stepRequest = request != null ? request : {};
3644
+ const stepToolCalls = [];
3645
+ const stepToolOutputs = [];
3646
+ let warnings;
3647
+ const activeToolCallToolNames = {};
3648
+ let stepFinishReason = "unknown";
3649
+ let stepUsage = {
3650
+ inputTokens: void 0,
3651
+ outputTokens: void 0,
3652
+ totalTokens: void 0
3653
+ };
3654
+ let stepProviderMetadata;
3655
+ let stepFirstChunk = true;
3656
+ let stepResponse = {
3657
+ id: generateId2(),
3658
+ timestamp: currentDate(),
3659
+ modelId: model.modelId
3660
+ };
3661
+ let activeText = "";
3662
+ self.addStream(
3663
+ streamWithToolResults.pipeThrough(
3664
+ new TransformStream({
3665
+ async transform(chunk, controller) {
3666
+ var _a10, _b2, _c2, _d2;
3667
+ if (chunk.type === "stream-start") {
3668
+ warnings = chunk.warnings;
3669
+ return;
3670
+ }
3671
+ if (stepFirstChunk) {
3672
+ const msToFirstChunk = now2() - startTimestampMs;
3673
+ stepFirstChunk = false;
3674
+ doStreamSpan.addEvent("ai.stream.firstChunk", {
3675
+ "ai.response.msToFirstChunk": msToFirstChunk
3676
+ });
3677
+ doStreamSpan.setAttributes({
3678
+ "ai.response.msToFirstChunk": msToFirstChunk
3679
+ });
3680
+ controller.enqueue({
3681
+ type: "start-step",
3682
+ request: stepRequest,
3683
+ warnings: warnings != null ? warnings : []
3684
+ });
3685
+ }
3686
+ const chunkType = chunk.type;
3687
+ switch (chunkType) {
3688
+ case "text-start":
3689
+ case "text-end": {
3690
+ controller.enqueue(chunk);
3691
+ break;
3692
+ }
3693
+ case "text-delta": {
3694
+ if (chunk.delta.length > 0) {
3695
+ controller.enqueue({
3696
+ type: "text",
3697
+ id: chunk.id,
3698
+ text: chunk.delta,
3699
+ providerMetadata: chunk.providerMetadata
3700
+ });
3701
+ activeText += chunk.delta;
3702
+ }
3703
+ break;
3704
+ }
3705
+ case "reasoning-start":
3706
+ case "reasoning-end": {
3707
+ controller.enqueue(chunk);
3708
+ break;
3709
+ }
3710
+ case "reasoning-delta": {
3711
+ controller.enqueue({
3712
+ type: "reasoning",
3713
+ id: chunk.id,
3714
+ text: chunk.delta,
3715
+ providerMetadata: chunk.providerMetadata
3716
+ });
3717
+ break;
3718
+ }
3719
+ case "tool-call": {
3720
+ controller.enqueue(chunk);
3721
+ stepToolCalls.push(chunk);
3722
+ break;
3723
+ }
3724
+ case "tool-result": {
3725
+ controller.enqueue(chunk);
3726
+ stepToolOutputs.push(chunk);
3727
+ break;
3728
+ }
3729
+ case "tool-error": {
3730
+ controller.enqueue(chunk);
3731
+ stepToolOutputs.push(chunk);
3732
+ break;
3733
+ }
3734
+ case "response-metadata": {
3735
+ stepResponse = {
3736
+ id: (_a10 = chunk.id) != null ? _a10 : stepResponse.id,
3737
+ timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
3738
+ modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
3739
+ };
3740
+ break;
3741
+ }
3742
+ case "finish": {
3743
+ stepUsage = chunk.usage;
3744
+ stepFinishReason = chunk.finishReason;
3745
+ stepProviderMetadata = chunk.providerMetadata;
3746
+ const msToFinish = now2() - startTimestampMs;
3747
+ doStreamSpan.addEvent("ai.stream.finish");
3748
+ doStreamSpan.setAttributes({
3749
+ "ai.response.msToFinish": msToFinish,
3750
+ "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
3751
+ });
3752
+ break;
3753
+ }
3754
+ case "file": {
3755
+ controller.enqueue(chunk);
3756
+ break;
3757
+ }
3758
+ case "source": {
3759
+ controller.enqueue(chunk);
3760
+ break;
3761
+ }
3762
+ case "tool-input-start": {
3763
+ activeToolCallToolNames[chunk.id] = chunk.toolName;
3764
+ const tool = tools == null ? void 0 : tools[chunk.toolName];
3765
+ if ((tool == null ? void 0 : tool.onInputStart) != null) {
3766
+ await tool.onInputStart({
3767
+ toolCallId: chunk.id,
3768
+ messages: stepInputMessages,
3769
+ abortSignal
3770
+ });
3771
+ }
3772
+ controller.enqueue(chunk);
3773
+ break;
3774
+ }
3775
+ case "tool-input-end": {
3776
+ delete activeToolCallToolNames[chunk.id];
3777
+ controller.enqueue(chunk);
3778
+ break;
3779
+ }
3780
+ case "tool-input-delta": {
3781
+ const toolName = activeToolCallToolNames[chunk.id];
3782
+ const tool = tools == null ? void 0 : tools[toolName];
3783
+ if ((tool == null ? void 0 : tool.onInputDelta) != null) {
3784
+ await tool.onInputDelta({
3785
+ inputTextDelta: chunk.delta,
3786
+ toolCallId: chunk.id,
3787
+ messages: stepInputMessages,
3788
+ abortSignal
3789
+ });
3790
+ }
3791
+ controller.enqueue(chunk);
3792
+ break;
3793
+ }
3794
+ case "error": {
3795
+ controller.enqueue(chunk);
3796
+ stepFinishReason = "error";
3797
+ break;
3798
+ }
3799
+ case "raw": {
3800
+ if (includeRawChunks2) {
3801
+ controller.enqueue(chunk);
3802
+ }
3803
+ break;
3804
+ }
3805
+ default: {
3806
+ const exhaustiveCheck = chunkType;
3807
+ throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
3808
+ }
3809
+ }
3810
+ },
3811
+ // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
3812
+ async flush(controller) {
3813
+ const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
3814
+ try {
3815
+ doStreamSpan.setAttributes(
3816
+ selectTelemetryAttributes({
3817
+ telemetry,
3818
+ attributes: {
3819
+ "ai.response.finishReason": stepFinishReason,
3820
+ "ai.response.text": {
3821
+ output: () => activeText
3822
+ },
3823
+ "ai.response.toolCalls": {
3824
+ output: () => stepToolCallsJson
3825
+ },
3826
+ "ai.response.id": stepResponse.id,
3827
+ "ai.response.model": stepResponse.modelId,
3828
+ "ai.response.timestamp": stepResponse.timestamp.toISOString(),
3829
+ "ai.response.providerMetadata": JSON.stringify(stepProviderMetadata),
3830
+ "ai.usage.inputTokens": stepUsage.inputTokens,
3831
+ "ai.usage.outputTokens": stepUsage.outputTokens,
3832
+ "ai.usage.totalTokens": stepUsage.totalTokens,
3833
+ "ai.usage.reasoningTokens": stepUsage.reasoningTokens,
3834
+ "ai.usage.cachedInputTokens": stepUsage.cachedInputTokens,
3835
+ // standardized gen-ai llm span attributes:
3836
+ "gen_ai.response.finish_reasons": [stepFinishReason],
3837
+ "gen_ai.response.id": stepResponse.id,
3838
+ "gen_ai.response.model": stepResponse.modelId,
3839
+ "gen_ai.usage.input_tokens": stepUsage.inputTokens,
3840
+ "gen_ai.usage.output_tokens": stepUsage.outputTokens
3841
+ }
3842
+ })
3843
+ );
3844
+ } catch (error) {
3845
+ } finally {
3846
+ doStreamSpan.end();
3847
+ }
3848
+ controller.enqueue({
3849
+ type: "finish-step",
3850
+ finishReason: stepFinishReason,
3851
+ usage: stepUsage,
3852
+ providerMetadata: stepProviderMetadata,
3853
+ response: {
3854
+ ...stepResponse,
3855
+ headers: response == null ? void 0 : response.headers
3856
+ }
3857
+ });
3858
+ const combinedUsage = addLanguageModelUsage(usage, stepUsage);
3859
+ await stepFinish.promise;
3860
+ const clientToolCalls = stepToolCalls.filter(
3861
+ (toolCall) => toolCall.providerExecuted !== true
3862
+ );
3863
+ const clientToolOutputs = stepToolOutputs.filter(
3864
+ (toolOutput) => toolOutput.providerExecuted !== true
3865
+ );
3866
+ if (clientToolCalls.length > 0 && // all current tool calls have outputs (incl. execution errors):
3867
+ clientToolOutputs.length === clientToolCalls.length && // continue until a stop condition is met:
3868
+ !await isStopConditionMet({
3869
+ stopConditions,
3870
+ steps: recordedSteps
3871
+ })) {
3872
+ responseMessages.push(
3873
+ ...toResponseMessages({
3874
+ content: (
3875
+ // use transformed content to create the messages for the next step:
3876
+ recordedSteps[recordedSteps.length - 1].content
3877
+ ),
3878
+ tools
3879
+ })
3880
+ );
3881
+ try {
3882
+ await streamStep({
3883
+ currentStep: currentStep + 1,
3884
+ responseMessages,
3885
+ usage: combinedUsage
3886
+ });
3887
+ } catch (error) {
3888
+ controller.enqueue({
3889
+ type: "error",
3890
+ error
3891
+ });
3892
+ self.closeStream();
3893
+ }
3894
+ } else {
3895
+ controller.enqueue({
3896
+ type: "finish",
3897
+ finishReason: stepFinishReason,
3898
+ totalUsage: combinedUsage
3899
+ });
3900
+ self.closeStream();
3901
+ }
3902
+ }
3903
+ })
3904
+ )
3905
+ );
3906
+ }
3907
+ await streamStep({
3908
+ currentStep: 0,
3909
+ responseMessages: [],
3910
+ usage: {
3911
+ inputTokens: void 0,
3912
+ outputTokens: void 0,
3913
+ totalTokens: void 0
3914
+ }
3915
+ });
3916
+ }
3917
+ }).catch((error) => {
3918
+ self.addStream(
3919
+ new ReadableStream({
3920
+ start(controller) {
3921
+ controller.enqueue({ type: "error", error });
3922
+ controller.close();
3923
+ }
3924
+ })
3925
+ );
3926
+ self.closeStream();
3927
+ });
3928
+ }
3929
+ get steps() {
3930
+ return this._steps.promise;
3931
+ }
3932
+ get finalStep() {
3933
+ return this.steps.then((steps) => steps[steps.length - 1]);
3934
+ }
3935
+ get content() {
3936
+ return this.finalStep.then((step) => step.content);
3937
+ }
3938
+ get warnings() {
3939
+ return this.finalStep.then((step) => step.warnings);
3940
+ }
3941
+ get providerMetadata() {
3942
+ return this.finalStep.then((step) => step.providerMetadata);
3943
+ }
3944
+ get text() {
3945
+ return this.finalStep.then((step) => step.text);
3946
+ }
3947
+ get reasoningText() {
3948
+ return this.finalStep.then((step) => step.reasoningText);
3949
+ }
3950
+ get reasoning() {
3951
+ return this.finalStep.then((step) => step.reasoning);
3952
+ }
3953
+ get sources() {
3954
+ return this.finalStep.then((step) => step.sources);
3955
+ }
3956
+ get files() {
3957
+ return this.finalStep.then((step) => step.files);
3958
+ }
3959
+ get toolCalls() {
3960
+ return this.finalStep.then((step) => step.toolCalls);
3961
+ }
3962
+ get toolResults() {
3963
+ return this.finalStep.then((step) => step.toolResults);
3964
+ }
3965
+ get usage() {
3966
+ return this.finalStep.then((step) => step.usage);
3967
+ }
3968
+ get request() {
3969
+ return this.finalStep.then((step) => step.request);
3970
+ }
3971
+ get response() {
3972
+ return this.finalStep.then((step) => step.response);
3973
+ }
3974
+ get totalUsage() {
3975
+ return this._totalUsage.promise;
3976
+ }
3977
+ get finishReason() {
3978
+ return this._finishReason.promise;
3979
+ }
3980
+ /**
3981
+ Split out a new stream from the original stream.
3982
+ The original stream is replaced to allow for further splitting,
3983
+ since we do not know how many times the stream will be split.
3984
+
3985
+ Note: this leads to buffering the stream content on the server.
3986
+ However, the LLM results are expected to be small enough to not cause issues.
3987
+ */
3988
+ teeStream() {
3989
+ const [stream1, stream2] = this.baseStream.tee();
3990
+ this.baseStream = stream2;
3991
+ return stream1;
3992
+ }
3993
+ get textStream() {
3994
+ return createAsyncIterableStream(
3995
+ this.teeStream().pipeThrough(
3996
+ new TransformStream({
3997
+ transform({ part }, controller) {
3998
+ if (part.type === "text") {
3999
+ controller.enqueue(part.text);
4000
+ }
4001
+ }
4002
+ })
4003
+ )
4004
+ );
4005
+ }
4006
+ get fullStream() {
4007
+ return createAsyncIterableStream(
4008
+ this.teeStream().pipeThrough(
4009
+ new TransformStream({
4010
+ transform({ part }, controller) {
4011
+ controller.enqueue(part);
4012
+ }
4013
+ })
4014
+ )
4015
+ );
4016
+ }
4017
+ async consumeStream(options) {
4018
+ var _a9;
4019
+ try {
4020
+ await consumeStream({
4021
+ stream: this.fullStream,
4022
+ onError: options == null ? void 0 : options.onError
4023
+ });
4024
+ } catch (error) {
4025
+ (_a9 = options == null ? void 0 : options.onError) == null ? void 0 : _a9.call(options, error);
4026
+ }
4027
+ }
4028
+ get experimental_partialOutputStream() {
4029
+ if (this.output == null) {
4030
+ throw new NoOutputSpecifiedError();
4031
+ }
4032
+ return createAsyncIterableStream(
4033
+ this.teeStream().pipeThrough(
4034
+ new TransformStream({
4035
+ transform({ partialOutput }, controller) {
4036
+ if (partialOutput != null) {
4037
+ controller.enqueue(partialOutput);
4038
+ }
4039
+ }
4040
+ })
4041
+ )
4042
+ );
4043
+ }
4044
+ toUIMessageStream({
4045
+ originalMessages,
4046
+ generateMessageId,
4047
+ onFinish,
4048
+ messageMetadata,
4049
+ sendReasoning = true,
4050
+ sendSources = false,
4051
+ sendStart = true,
4052
+ sendFinish = true,
4053
+ onError = import_provider16.getErrorMessage
4054
+ } = {}) {
4055
+ const responseMessageId = getResponseUIMessageId({
4056
+ originalMessages,
4057
+ responseMessageId: this.generateId
4058
+ });
4059
+ const baseStream = this.fullStream.pipeThrough(
4060
+ new TransformStream({
4061
+ transform: async (part, controller) => {
4062
+ const messageMetadataValue = messageMetadata == null ? void 0 : messageMetadata({ part });
4063
+ const partType = part.type;
4064
+ switch (partType) {
4065
+ case "text-start": {
4066
+ controller.enqueue({ type: "text-start", id: part.id });
4067
+ break;
4068
+ }
4069
+ case "text": {
4070
+ controller.enqueue({
4071
+ type: "text-delta",
4072
+ id: part.id,
4073
+ delta: part.text
4074
+ });
4075
+ break;
4076
+ }
4077
+ case "text-end": {
4078
+ controller.enqueue({ type: "text-end", id: part.id });
4079
+ break;
4080
+ }
4081
+ case "reasoning-start": {
4082
+ controller.enqueue({
4083
+ type: "reasoning-start",
4084
+ id: part.id,
4085
+ providerMetadata: part.providerMetadata
4086
+ });
4087
+ break;
4088
+ }
4089
+ case "reasoning": {
4090
+ if (sendReasoning) {
4091
+ controller.enqueue({
4092
+ type: "reasoning-delta",
4093
+ id: part.id,
4094
+ delta: part.text,
4095
+ providerMetadata: part.providerMetadata
4096
+ });
4097
+ }
4098
+ break;
4099
+ }
4100
+ case "reasoning-end": {
4101
+ controller.enqueue({
4102
+ type: "reasoning-end",
4103
+ id: part.id,
4104
+ providerMetadata: part.providerMetadata
4105
+ });
4106
+ break;
4107
+ }
4108
+ case "file": {
4109
+ controller.enqueue({
4110
+ type: "file",
4111
+ mediaType: part.file.mediaType,
4112
+ url: `data:${part.file.mediaType};base64,${part.file.base64}`
4113
+ });
4114
+ break;
4115
+ }
4116
+ case "source": {
4117
+ if (sendSources && part.sourceType === "url") {
4118
+ controller.enqueue({
4119
+ type: "source-url",
4120
+ sourceId: part.id,
4121
+ url: part.url,
4122
+ title: part.title,
4123
+ providerMetadata: part.providerMetadata
4124
+ });
4125
+ }
4126
+ if (sendSources && part.sourceType === "document") {
4127
+ controller.enqueue({
4128
+ type: "source-document",
4129
+ sourceId: part.id,
4130
+ mediaType: part.mediaType,
4131
+ title: part.title,
4132
+ filename: part.filename,
4133
+ providerMetadata: part.providerMetadata
4134
+ });
4135
+ }
4136
+ break;
4137
+ }
4138
+ case "tool-input-start": {
4139
+ controller.enqueue({
4140
+ type: "tool-input-start",
4141
+ toolCallId: part.id,
4142
+ toolName: part.toolName,
4143
+ providerExecuted: part.providerExecuted
4144
+ });
4145
+ break;
4146
+ }
4147
+ case "tool-input-delta": {
4148
+ controller.enqueue({
4149
+ type: "tool-input-delta",
4150
+ toolCallId: part.id,
4151
+ inputTextDelta: part.delta
4152
+ });
4153
+ break;
4154
+ }
4155
+ case "tool-call": {
4156
+ controller.enqueue({
4157
+ type: "tool-input-available",
4158
+ toolCallId: part.toolCallId,
4159
+ toolName: part.toolName,
4160
+ input: part.input,
4161
+ providerExecuted: part.providerExecuted
4162
+ });
4163
+ break;
4164
+ }
4165
+ case "tool-result": {
4166
+ controller.enqueue({
4167
+ type: "tool-output-available",
4168
+ toolCallId: part.toolCallId,
4169
+ output: part.output,
4170
+ providerExecuted: part.providerExecuted
4171
+ });
4172
+ break;
4173
+ }
4174
+ case "tool-error": {
4175
+ controller.enqueue({
4176
+ type: "tool-output-error",
4177
+ toolCallId: part.toolCallId,
4178
+ errorText: onError(part.error),
4179
+ providerExecuted: part.providerExecuted
4180
+ });
4181
+ break;
4182
+ }
4183
+ case "error": {
4184
+ controller.enqueue({
4185
+ type: "error",
4186
+ errorText: onError(part.error)
4187
+ });
4188
+ break;
4189
+ }
4190
+ case "start-step": {
4191
+ controller.enqueue({ type: "start-step" });
4192
+ break;
4193
+ }
4194
+ case "finish-step": {
4195
+ controller.enqueue({ type: "finish-step" });
4196
+ break;
4197
+ }
4198
+ case "start": {
4199
+ if (sendStart) {
4200
+ controller.enqueue({
4201
+ type: "start",
4202
+ messageId: responseMessageId,
4203
+ messageMetadata: messageMetadataValue
4204
+ });
4205
+ }
4206
+ break;
4207
+ }
4208
+ case "finish": {
4209
+ if (sendFinish) {
4210
+ controller.enqueue({
4211
+ type: "finish",
4212
+ messageMetadata: messageMetadataValue
4213
+ });
4214
+ }
4215
+ break;
4216
+ }
4217
+ case "tool-input-end": {
4218
+ break;
4219
+ }
4220
+ case "raw": {
4221
+ break;
4222
+ }
4223
+ default: {
4224
+ const exhaustiveCheck = partType;
4225
+ throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
4226
+ }
4227
+ }
4228
+ if (messageMetadataValue != null && partType !== "start" && partType !== "finish") {
4229
+ controller.enqueue({
4230
+ type: "message-metadata",
4231
+ messageMetadata: messageMetadataValue
4232
+ });
4233
+ }
4234
+ }
4235
+ })
4236
+ );
4237
+ return handleUIMessageStreamFinish({
4238
+ stream: baseStream,
4239
+ messageId: responseMessageId != null ? responseMessageId : generateMessageId == null ? void 0 : generateMessageId(),
4240
+ originalMessages,
4241
+ onFinish,
4242
+ onError
4243
+ });
4244
+ }
4245
+ pipeUIMessageStreamToResponse(response, {
4246
+ originalMessages,
4247
+ generateMessageId,
4248
+ onFinish,
4249
+ messageMetadata,
4250
+ sendReasoning,
4251
+ sendSources,
4252
+ sendFinish,
4253
+ sendStart,
4254
+ onError,
4255
+ ...init
4256
+ } = {}) {
4257
+ pipeUIMessageStreamToResponse({
4258
+ response,
4259
+ stream: this.toUIMessageStream({
4260
+ originalMessages,
4261
+ generateMessageId,
4262
+ onFinish,
4263
+ messageMetadata,
4264
+ sendReasoning,
4265
+ sendSources,
4266
+ sendFinish,
4267
+ sendStart,
4268
+ onError
4269
+ }),
4270
+ ...init
4271
+ });
4272
+ }
4273
+ pipeTextStreamToResponse(response, init) {
4274
+ pipeTextStreamToResponse({
4275
+ response,
4276
+ textStream: this.textStream,
4277
+ ...init
4278
+ });
4279
+ }
4280
+ toUIMessageStreamResponse({
4281
+ originalMessages,
4282
+ generateMessageId,
4283
+ onFinish,
4284
+ messageMetadata,
4285
+ sendReasoning,
4286
+ sendSources,
4287
+ sendFinish,
4288
+ sendStart,
4289
+ onError,
4290
+ ...init
4291
+ } = {}) {
4292
+ return createUIMessageStreamResponse({
4293
+ stream: this.toUIMessageStream({
4294
+ originalMessages,
4295
+ generateMessageId,
4296
+ onFinish,
4297
+ messageMetadata,
4298
+ sendReasoning,
4299
+ sendSources,
4300
+ sendFinish,
4301
+ sendStart,
4302
+ onError
4303
+ }),
4304
+ ...init
4305
+ });
4306
+ }
4307
+ toTextStreamResponse(init) {
4308
+ return createTextStreamResponse({
4309
+ textStream: this.textStream,
4310
+ ...init
4311
+ });
4312
+ }
4313
+ };
4314
+
4315
+ // src/bin/ai.ts
4316
+ var import_fs = require("fs");
4317
+ var import_path = require("path");
4318
+ var import_gateway3 = require("@ai-sdk/gateway");
4319
/**
 * Reports whether piped input is available on stdin.
 * stdin is considered available exactly when it is not attached to a TTY
 * (i.e. the process is receiving data via a pipe or redirection).
 */
function isStdinAvailable() {
  return process.stdin.isTTY ? false : true;
}
4322
/**
 * Reads all of stdin as UTF-8 and resolves with the trimmed text once the
 * stream ends. Never rejects; an empty pipe resolves to "".
 */
async function readStdin() {
  return new Promise((resolve2) => {
    const chunks = [];
    process.stdin.setEncoding("utf8");
    process.stdin.on("data", (chunk) => {
      chunks.push(chunk);
    });
    process.stdin.on("end", () => {
      resolve2(chunks.join("").trim());
    });
  });
}
4334
/**
 * Maps a file path's extension to a MIME type string.
 * Falls back to "text/plain" for unknown or missing extensions.
 *
 * Bug fix: the previous `mimeTypes[ext] || "text/plain"` lookup walked the
 * prototype chain, so an extension colliding with an Object.prototype member
 * (e.g. a file named "x.constructor") returned an inherited value (a
 * function) instead of a MIME string. An own-property check prevents that.
 */
function getMediaType(filePath) {
  const parts = filePath.split(".");
  // split() always yields at least one element, so this is the extension
  // (or the whole name when there is no dot — which then misses the table
  // and falls back to text/plain, matching the previous behavior).
  const ext = (parts[parts.length - 1] || "").toLowerCase();
  const mimeTypes = {
    js: "application/javascript",
    ts: "application/typescript",
    jsx: "text/jsx",
    tsx: "text/tsx",
    json: "application/json",
    md: "text/markdown",
    txt: "text/plain",
    py: "text/x-python",
    html: "text/html",
    css: "text/css",
    xml: "application/xml",
    yaml: "application/yaml",
    yml: "application/yaml",
    jpg: "image/jpeg",
    jpeg: "image/jpeg",
    png: "image/png",
    gif: "image/gif",
    webp: "image/webp",
    svg: "image/svg+xml",
    bmp: "image/bmp",
    tiff: "image/tiff",
    tif: "image/tiff"
  };
  return Object.prototype.hasOwnProperty.call(mimeTypes, ext) ? mimeTypes[ext] : "text/plain";
}
4363
/**
 * Reads an attached file from disk and returns { name, content, mediaType }.
 * Images are returned as base64 data URLs; everything else as UTF-8 text.
 * Throws when the resolved path does not exist.
 */
function readFileContent(filePath) {
  const absolutePath = (0, import_path.resolve)(filePath);
  if (!(0, import_fs.existsSync)(absolutePath)) {
    throw new Error(`File not found: ${filePath}`);
  }
  const mediaType = getMediaType(filePath);
  let content;
  if (mediaType.startsWith("image/")) {
    // Binary read + base64 data URL so the image can be sent inline.
    const base64 = (0, import_fs.readFileSync)(absolutePath).toString("base64");
    content = `data:${mediaType};base64,${base64}`;
  } else {
    content = (0, import_fs.readFileSync)(absolutePath, "utf8");
  }
  return { name: filePath, content, mediaType };
}
4383
/**
 * Parses CLI arguments from process.argv into an options object.
 * Defaults come from the AI_MODEL / AI_VERBOSE / AI_SYSTEM environment
 * variables; bare (non-flag) arguments are joined into `options.prompt`.
 * Throws on unknown flags and on value-taking flags without a value.
 */
function parseArgs() {
  const args = process.argv.slice(2);
  const options = {
    model: process.env.AI_MODEL || "openai/gpt-4",
    files: [],
    help: false,
    version: false,
    verbose: process.env.AI_VERBOSE === "true",
    system: process.env.AI_SYSTEM
  };
  const promptArgs = [];
  let i = 0;
  // Consumes and returns the value following the current flag, or throws
  // with the flag's label when no value is present.
  const takeValue = (label) => {
    if (i + 1 >= args.length) {
      throw new Error(`${label} option requires a value`);
    }
    i += 1;
    return args[i];
  };
  while (i < args.length) {
    const arg = args[i];
    if (arg === "-h" || arg === "--help") {
      options.help = true;
    } else if (arg === "-V" || arg === "--version") {
      options.version = true;
    } else if (arg === "-v" || arg === "--verbose") {
      options.verbose = true;
    } else if (arg === "-m" || arg === "--model") {
      options.model = takeValue("Model");
    } else if (arg === "-f" || arg === "--file") {
      options.files.push(takeValue("File"));
    } else if (arg === "-s" || arg === "--system") {
      options.system = takeValue("System");
    } else if (arg.startsWith("-")) {
      throw new Error(`Unknown option: ${arg}`);
    } else {
      promptArgs.push(arg);
    }
    i += 1;
  }
  if (promptArgs.length > 0) {
    options.prompt = promptArgs.join(" ");
  }
  return options;
}
4451
/**
 * Prints the CLI usage/help screen to stdout.
 * The body is a single template literal (runtime output) — do not reformat:
 * every character of it is user-visible help text.
 */
function showHelp() {
  console.log(`Usage: ai [options] [prompt]

AI CLI - Stream text generation from various AI models

Arguments:
prompt The prompt to send to the AI model (optional if using stdin)

Options:
-m, --model <model> Model to use. Format: provider/model or just model name.
Examples: openai/gpt-4o, anthropic/claude-3-5-sonnet-20241022
(default: "openai/gpt-4")
-f, --file <file> Attach a file to the prompt (can be used multiple times)
-s, --system <message> System message to set context
-v, --verbose Show detailed information (model, usage, etc.)
-h, --help Display help for command
-V, --version Output the version number

Environment Variables:
- AI_MODEL: Default model to use
- AI_SYSTEM: Default system message
- AI_VERBOSE: Set to 'true' to enable verbose output

Authentication (choose one):
- VERCEL_OIDC_TOKEN: Vercel OIDC token (for Vercel projects)
- AI_GATEWAY_API_KEY: AI Gateway API key

Setting Environment Variables:
# Option 1: Export in current session
export AI_GATEWAY_API_KEY="your-key-here"
export AI_MODEL="anthropic/claude-3-5-sonnet-20241022"

# Option 2: Inline for single command
AI_GATEWAY_API_KEY="your-key" ai "Hello world"

# Option 3: Add to shell profile (~/.bashrc, ~/.zshrc)
echo 'export AI_GATEWAY_API_KEY="your-key"' >> ~/.bashrc

Examples:
npx ai "Hello, world!"
npx ai "Write a poem" -m anthropic/claude-3-5-sonnet-20241022
npx ai "Explain quantum physics" -m groq/llama-3.1-8b-instant
npx ai "Explain this code" -f script.js -f README.md
echo "What is life?" | npx ai
cat file.txt | npx ai "Summarize this content"
npx ai -f package.json "What dependencies does this project have?"

Unix-style piping:
echo "Hello world" | npx ai "Translate to French"
cat README.md | npx ai "Summarize this"
curl -s https://api.github.com/repos/vercel/ai | npx ai "What is this repository about?"

Authentication Setup:
This CLI uses the Vercel AI Gateway. You need ONE of these for authentication:

OIDC Token (for Vercel projects):
- Automatically available in Vercel deployments
- For local development: run 'vercel env pull' or use 'vercel dev'

API Key (for any environment):
- Get your key from the AI Gateway dashboard
- Set: export AI_GATEWAY_API_KEY="your-key-here"

The gateway supports OpenAI, Anthropic, Google, Groq, and more providers.`);
}
4516
/** Prints the CLI version string to stdout. */
function showVersion() {
  const version = "1.0.0";
  console.log(version);
}
4519
/**
 * Resolves a model identifier string (e.g. "openai/gpt-4o") to a language
 * model instance via the Vercel AI Gateway.
 */
function resolveModel(modelString) {
  const { gateway } = import_gateway3;
  return gateway.languageModel(modelString);
}
4522
/**
 * Renders non-image attachments as a text section to append to the prompt.
 * Image files (mediaType "image/*") are skipped — they are sent separately
 * as message content. Returns "" when there is nothing to render.
 */
function formatAttachedFiles(files) {
  const textFiles = files.filter(
    (file) => !(file.mediaType && file.mediaType.startsWith("image/"))
  );
  if (textFiles.length === 0) {
    return "";
  }
  const sections = textFiles.map(
    (file) => `
--- ${file.name} ---
${file.content}
`
  );
  return "\n\nAttached files:\n" + sections.join("");
}
4541
/**
 * CLI entry point: parses arguments, assembles the prompt (argv + stdin +
 * attached files), resolves the model, streams the completion to stdout,
 * and optionally prints usage stats to stderr. Exits with code 1 on any
 * error (missing prompt, unreadable file, stream failure).
 */
async function main() {
  try {
    const options = parseArgs();
    if (options.help) {
      showHelp();
      return;
    }
    if (options.version) {
      showVersion();
      return;
    }
    // Piped stdin content is prepended to the argv prompt (or used alone).
    let prompt = options.prompt || "";
    if (isStdinAvailable()) {
      const stdinContent = await readStdin();
      if (stdinContent) {
        prompt = prompt ? `${stdinContent}

${prompt}` : stdinContent;
      }
    }
    if (!prompt.trim()) {
      console.error(
        "Error: No prompt provided. Use --help for usage information."
      );
      process.exit(1);
    }
    // Read every -f/--file attachment up front; abort on the first failure.
    const attachedFiles = [];
    for (const filePath of options.files) {
      try {
        const file = readFileContent(filePath);
        attachedFiles.push(file);
      } catch (error) {
        console.error(
          `Error reading file ${filePath}: ${error instanceof Error ? error.message : "Unknown error"}`
        );
        process.exit(1);
      }
    }
    // Text attachments are inlined into the prompt; images are split out
    // below and sent as multimodal message content instead.
    const textPrompt = prompt + formatAttachedFiles(attachedFiles);
    const imageFiles = attachedFiles.filter(
      (f) => {
        var _a9;
        return (_a9 = f.mediaType) == null ? void 0 : _a9.startsWith("image/");
      }
    );
    // Auto-upgrade the default model to a vision-capable one when images
    // are attached and the user did not pick a model explicitly.
    if (imageFiles.length > 0 && options.model === "openai/gpt-4") {
      options.model = "openai/gpt-4o";
    }
    // Verbose diagnostics go to stderr so stdout stays clean for piping.
    if (options.verbose) {
      console.error(`Using model: ${options.model}`);
      if (attachedFiles.length > 0) {
        console.error(
          `Attached files: ${attachedFiles.map((f) => f.name).join(", ")}`
        );
      }
      console.error("");
    }
    const model = resolveModel(options.model);
    // With images, build an explicit multimodal user message; otherwise the
    // plain `prompt` form of streamText is used.
    let messages;
    if (imageFiles.length > 0) {
      const content = [{ type: "text", text: textPrompt }];
      for (const img of imageFiles) {
        content.push({
          type: "image",
          image: img.content
        });
      }
      messages = [{ role: "user", content }];
    }
    const result = await streamText(
      messages ? {
        model,
        messages,
        system: options.system
      } : {
        model,
        prompt: textPrompt,
        system: options.system
      }
    );
    // Stream tokens to stdout as they arrive.
    for await (const chunk of result.textStream) {
      process.stdout.write(chunk);
    }
    process.stdout.write("\n");
    if (options.verbose) {
      const usage = await result.usage;
      if (usage) {
        console.error(
          `
Usage: ${usage.inputTokens} prompt + ${usage.outputTokens} completion = ${usage.totalTokens} total tokens`
        );
      }
    }
  } catch (error) {
    console.error(
      `Error: ${error instanceof Error ? error.message : "Unknown error"}`
    );
    process.exit(1);
  }
}
4641
// Exit cleanly on interrupt/terminate signals, then kick off the CLI.
for (const signal of ["SIGINT", "SIGTERM"]) {
  process.on(signal, () => {
    process.exit(0);
  });
}
main().catch((error) => {
  console.error(
    `Fatal error: ${error instanceof Error ? error.message : "Unknown error"}`
  );
  process.exit(1);
});
4653
// Annotate the CommonJS export names for ESM import in node:
// (dead `0 && (...)` expression — never executed at runtime; it exists only
// so Node's CJS named-export static analysis can discover these names)
0 && (module.exports = {
  formatAttachedFiles,
  getMediaType,
  isStdinAvailable,
  parseArgs,
  readFileContent,
  resolveModel,
  showHelp,
  showVersion
});
4664
+ //# sourceMappingURL=ai.js.map