ai 5.0.0-beta.18 → 5.0.0-beta.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/ai.js ADDED
@@ -0,0 +1,4635 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
7
+ var __export = (target, all) => {
8
+ for (var name9 in all)
9
+ __defProp(target, name9, { get: all[name9], enumerable: true });
10
+ };
11
+ var __copyProps = (to, from, except, desc) => {
12
+ if (from && typeof from === "object" || typeof from === "function") {
13
+ for (let key of __getOwnPropNames(from))
14
+ if (!__hasOwnProp.call(to, key) && key !== except)
15
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
+ }
17
+ return to;
18
+ };
19
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
20
+
21
+ // src/bin/ai.ts
22
+ var ai_exports = {};
23
+ __export(ai_exports, {
24
+ formatAttachedFiles: () => formatAttachedFiles,
25
+ getMediaType: () => getMediaType,
26
+ isStdinAvailable: () => isStdinAvailable,
27
+ parseArgs: () => parseArgs,
28
+ readFileContent: () => readFileContent,
29
+ resolveModel: () => resolveModel,
30
+ showHelp: () => showHelp,
31
+ showVersion: () => showVersion
32
+ });
33
+ module.exports = __toCommonJS(ai_exports);
34
+
35
+ // src/generate-text/stream-text.ts
36
+ var import_provider16 = require("@ai-sdk/provider");
37
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
38
+
39
+ // src/error/no-output-specified-error.ts
40
+ var import_provider = require("@ai-sdk/provider");
41
+ var name = "AI_NoOutputSpecifiedError";
42
+ var marker = `vercel.ai.error.${name}`;
43
+ var symbol = Symbol.for(marker);
44
+ var _a;
45
+ var NoOutputSpecifiedError = class extends import_provider.AISDKError {
46
+ // used in isInstance
47
+ constructor({ message = "No output specified." } = {}) {
48
+ super({ name, message });
49
+ this[_a] = true;
50
+ }
51
+ static isInstance(error) {
52
+ return import_provider.AISDKError.hasMarker(error, marker);
53
+ }
54
+ };
55
+ _a = symbol;
56
+
57
+ // src/util/prepare-headers.ts
58
+ function prepareHeaders(headers, defaultHeaders) {
59
+ const responseHeaders = new Headers(headers != null ? headers : {});
60
+ for (const [key, value] of Object.entries(defaultHeaders)) {
61
+ if (!responseHeaders.has(key)) {
62
+ responseHeaders.set(key, value);
63
+ }
64
+ }
65
+ return responseHeaders;
66
+ }
67
+
68
+ // src/text-stream/create-text-stream-response.ts
69
+ function createTextStreamResponse({
70
+ status,
71
+ statusText,
72
+ headers,
73
+ textStream
74
+ }) {
75
+ return new Response(textStream.pipeThrough(new TextEncoderStream()), {
76
+ status: status != null ? status : 200,
77
+ statusText,
78
+ headers: prepareHeaders(headers, {
79
+ "content-type": "text/plain; charset=utf-8"
80
+ })
81
+ });
82
+ }
83
+
84
+ // src/util/write-to-server-response.ts
85
+ function writeToServerResponse({
86
+ response,
87
+ status,
88
+ statusText,
89
+ headers,
90
+ stream
91
+ }) {
92
+ response.writeHead(status != null ? status : 200, statusText, headers);
93
+ const reader = stream.getReader();
94
+ const read = async () => {
95
+ try {
96
+ while (true) {
97
+ const { done, value } = await reader.read();
98
+ if (done)
99
+ break;
100
+ response.write(value);
101
+ }
102
+ } catch (error) {
103
+ throw error;
104
+ } finally {
105
+ response.end();
106
+ }
107
+ };
108
+ read();
109
+ }
110
+
111
+ // src/text-stream/pipe-text-stream-to-response.ts
112
+ function pipeTextStreamToResponse({
113
+ response,
114
+ status,
115
+ statusText,
116
+ headers,
117
+ textStream
118
+ }) {
119
+ writeToServerResponse({
120
+ response,
121
+ status,
122
+ statusText,
123
+ headers: Object.fromEntries(
124
+ prepareHeaders(headers, {
125
+ "content-type": "text/plain; charset=utf-8"
126
+ }).entries()
127
+ ),
128
+ stream: textStream.pipeThrough(new TextEncoderStream())
129
+ });
130
+ }
131
+
132
+ // src/ui-message-stream/json-to-sse-transform-stream.ts
133
+ var JsonToSseTransformStream = class extends TransformStream {
134
+ constructor() {
135
+ super({
136
+ transform(part, controller) {
137
+ controller.enqueue(`data: ${JSON.stringify(part)}
138
+
139
+ `);
140
+ },
141
+ flush(controller) {
142
+ controller.enqueue("data: [DONE]\n\n");
143
+ }
144
+ });
145
+ }
146
+ };
147
+
148
+ // src/ui-message-stream/ui-message-stream-headers.ts
149
+ var UI_MESSAGE_STREAM_HEADERS = {
150
+ "content-type": "text/event-stream",
151
+ "cache-control": "no-cache",
152
+ connection: "keep-alive",
153
+ "x-vercel-ai-ui-message-stream": "v1",
154
+ "x-accel-buffering": "no"
155
+ // disable nginx buffering
156
+ };
157
+
158
+ // src/ui-message-stream/create-ui-message-stream-response.ts
159
+ function createUIMessageStreamResponse({
160
+ status,
161
+ statusText,
162
+ headers,
163
+ stream,
164
+ consumeSseStream
165
+ }) {
166
+ let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
167
+ if (consumeSseStream) {
168
+ const [stream1, stream2] = sseStream.tee();
169
+ sseStream = stream1;
170
+ consumeSseStream({ stream: stream2 });
171
+ }
172
+ return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
173
+ status,
174
+ statusText,
175
+ headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
176
+ });
177
+ }
178
+
179
+ // src/ui-message-stream/get-response-ui-message-id.ts
180
+ function getResponseUIMessageId({
181
+ originalMessages,
182
+ responseMessageId
183
+ }) {
184
+ if (originalMessages == null) {
185
+ return void 0;
186
+ }
187
+ const lastMessage = originalMessages[originalMessages.length - 1];
188
+ return (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
189
+ }
190
+
191
+ // src/ui/process-ui-message-stream.ts
192
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
193
+
194
+ // src/ui-message-stream/ui-message-chunks.ts
195
+ var import_v4 = require("zod/v4");
196
+ var uiMessageChunkSchema = import_v4.z.union([
197
+ import_v4.z.strictObject({
198
+ type: import_v4.z.literal("text-start"),
199
+ id: import_v4.z.string()
200
+ }),
201
+ import_v4.z.strictObject({
202
+ type: import_v4.z.literal("text-delta"),
203
+ id: import_v4.z.string(),
204
+ delta: import_v4.z.string()
205
+ }),
206
+ import_v4.z.strictObject({
207
+ type: import_v4.z.literal("text-end"),
208
+ id: import_v4.z.string()
209
+ }),
210
+ import_v4.z.strictObject({
211
+ type: import_v4.z.literal("error"),
212
+ errorText: import_v4.z.string()
213
+ }),
214
+ import_v4.z.strictObject({
215
+ type: import_v4.z.literal("tool-input-start"),
216
+ toolCallId: import_v4.z.string(),
217
+ toolName: import_v4.z.string(),
218
+ providerExecuted: import_v4.z.boolean().optional()
219
+ }),
220
+ import_v4.z.strictObject({
221
+ type: import_v4.z.literal("tool-input-delta"),
222
+ toolCallId: import_v4.z.string(),
223
+ inputTextDelta: import_v4.z.string()
224
+ }),
225
+ import_v4.z.strictObject({
226
+ type: import_v4.z.literal("tool-input-available"),
227
+ toolCallId: import_v4.z.string(),
228
+ toolName: import_v4.z.string(),
229
+ input: import_v4.z.unknown(),
230
+ providerExecuted: import_v4.z.boolean().optional()
231
+ }),
232
+ import_v4.z.strictObject({
233
+ type: import_v4.z.literal("tool-output-available"),
234
+ toolCallId: import_v4.z.string(),
235
+ output: import_v4.z.unknown(),
236
+ providerExecuted: import_v4.z.boolean().optional()
237
+ }),
238
+ import_v4.z.strictObject({
239
+ type: import_v4.z.literal("tool-output-error"),
240
+ toolCallId: import_v4.z.string(),
241
+ errorText: import_v4.z.string(),
242
+ providerExecuted: import_v4.z.boolean().optional()
243
+ }),
244
+ import_v4.z.strictObject({
245
+ type: import_v4.z.literal("reasoning"),
246
+ text: import_v4.z.string(),
247
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
248
+ }),
249
+ import_v4.z.strictObject({
250
+ type: import_v4.z.literal("reasoning-start"),
251
+ id: import_v4.z.string(),
252
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
253
+ }),
254
+ import_v4.z.strictObject({
255
+ type: import_v4.z.literal("reasoning-delta"),
256
+ id: import_v4.z.string(),
257
+ delta: import_v4.z.string(),
258
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
259
+ }),
260
+ import_v4.z.strictObject({
261
+ type: import_v4.z.literal("reasoning-end"),
262
+ id: import_v4.z.string(),
263
+ providerMetadata: import_v4.z.record(import_v4.z.string(), import_v4.z.any()).optional()
264
+ }),
265
+ import_v4.z.strictObject({
266
+ type: import_v4.z.literal("reasoning-part-finish")
267
+ }),
268
+ import_v4.z.strictObject({
269
+ type: import_v4.z.literal("source-url"),
270
+ sourceId: import_v4.z.string(),
271
+ url: import_v4.z.string(),
272
+ title: import_v4.z.string().optional(),
273
+ providerMetadata: import_v4.z.any().optional()
274
+ // Use z.any() for generic metadata
275
+ }),
276
+ import_v4.z.strictObject({
277
+ type: import_v4.z.literal("source-document"),
278
+ sourceId: import_v4.z.string(),
279
+ mediaType: import_v4.z.string(),
280
+ title: import_v4.z.string(),
281
+ filename: import_v4.z.string().optional(),
282
+ providerMetadata: import_v4.z.any().optional()
283
+ // Use z.any() for generic metadata
284
+ }),
285
+ import_v4.z.strictObject({
286
+ type: import_v4.z.literal("file"),
287
+ url: import_v4.z.string(),
288
+ mediaType: import_v4.z.string()
289
+ }),
290
+ import_v4.z.strictObject({
291
+ type: import_v4.z.string().startsWith("data-"),
292
+ id: import_v4.z.string().optional(),
293
+ data: import_v4.z.unknown(),
294
+ transient: import_v4.z.boolean().optional()
295
+ }),
296
+ import_v4.z.strictObject({
297
+ type: import_v4.z.literal("start-step")
298
+ }),
299
+ import_v4.z.strictObject({
300
+ type: import_v4.z.literal("finish-step")
301
+ }),
302
+ import_v4.z.strictObject({
303
+ type: import_v4.z.literal("start"),
304
+ messageId: import_v4.z.string().optional(),
305
+ messageMetadata: import_v4.z.unknown().optional()
306
+ }),
307
+ import_v4.z.strictObject({
308
+ type: import_v4.z.literal("finish"),
309
+ messageMetadata: import_v4.z.unknown().optional()
310
+ }),
311
+ import_v4.z.strictObject({
312
+ type: import_v4.z.literal("message-metadata"),
313
+ messageMetadata: import_v4.z.unknown()
314
+ })
315
+ ]);
316
+ function isDataUIMessageChunk(chunk) {
317
+ return chunk.type.startsWith("data-");
318
+ }
319
+
320
+ // src/util/merge-objects.ts
321
+ function mergeObjects(base, overrides) {
322
+ if (base === void 0 && overrides === void 0) {
323
+ return void 0;
324
+ }
325
+ if (base === void 0) {
326
+ return overrides;
327
+ }
328
+ if (overrides === void 0) {
329
+ return base;
330
+ }
331
+ const result = { ...base };
332
+ for (const key in overrides) {
333
+ if (Object.prototype.hasOwnProperty.call(overrides, key)) {
334
+ const overridesValue = overrides[key];
335
+ if (overridesValue === void 0)
336
+ continue;
337
+ const baseValue = key in base ? base[key] : void 0;
338
+ const isSourceObject = overridesValue !== null && typeof overridesValue === "object" && !Array.isArray(overridesValue) && !(overridesValue instanceof Date) && !(overridesValue instanceof RegExp);
339
+ const isTargetObject = baseValue !== null && baseValue !== void 0 && typeof baseValue === "object" && !Array.isArray(baseValue) && !(baseValue instanceof Date) && !(baseValue instanceof RegExp);
340
+ if (isSourceObject && isTargetObject) {
341
+ result[key] = mergeObjects(
342
+ baseValue,
343
+ overridesValue
344
+ );
345
+ } else {
346
+ result[key] = overridesValue;
347
+ }
348
+ }
349
+ }
350
+ return result;
351
+ }
352
+
353
+ // src/util/parse-partial-json.ts
354
+ var import_provider_utils = require("@ai-sdk/provider-utils");
355
+
356
+ // src/util/fix-json.ts
357
+ function fixJson(input) {
358
+ const stack = ["ROOT"];
359
+ let lastValidIndex = -1;
360
+ let literalStart = null;
361
+ function processValueStart(char, i, swapState) {
362
+ {
363
+ switch (char) {
364
+ case '"': {
365
+ lastValidIndex = i;
366
+ stack.pop();
367
+ stack.push(swapState);
368
+ stack.push("INSIDE_STRING");
369
+ break;
370
+ }
371
+ case "f":
372
+ case "t":
373
+ case "n": {
374
+ lastValidIndex = i;
375
+ literalStart = i;
376
+ stack.pop();
377
+ stack.push(swapState);
378
+ stack.push("INSIDE_LITERAL");
379
+ break;
380
+ }
381
+ case "-": {
382
+ stack.pop();
383
+ stack.push(swapState);
384
+ stack.push("INSIDE_NUMBER");
385
+ break;
386
+ }
387
+ case "0":
388
+ case "1":
389
+ case "2":
390
+ case "3":
391
+ case "4":
392
+ case "5":
393
+ case "6":
394
+ case "7":
395
+ case "8":
396
+ case "9": {
397
+ lastValidIndex = i;
398
+ stack.pop();
399
+ stack.push(swapState);
400
+ stack.push("INSIDE_NUMBER");
401
+ break;
402
+ }
403
+ case "{": {
404
+ lastValidIndex = i;
405
+ stack.pop();
406
+ stack.push(swapState);
407
+ stack.push("INSIDE_OBJECT_START");
408
+ break;
409
+ }
410
+ case "[": {
411
+ lastValidIndex = i;
412
+ stack.pop();
413
+ stack.push(swapState);
414
+ stack.push("INSIDE_ARRAY_START");
415
+ break;
416
+ }
417
+ }
418
+ }
419
+ }
420
+ function processAfterObjectValue(char, i) {
421
+ switch (char) {
422
+ case ",": {
423
+ stack.pop();
424
+ stack.push("INSIDE_OBJECT_AFTER_COMMA");
425
+ break;
426
+ }
427
+ case "}": {
428
+ lastValidIndex = i;
429
+ stack.pop();
430
+ break;
431
+ }
432
+ }
433
+ }
434
+ function processAfterArrayValue(char, i) {
435
+ switch (char) {
436
+ case ",": {
437
+ stack.pop();
438
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
439
+ break;
440
+ }
441
+ case "]": {
442
+ lastValidIndex = i;
443
+ stack.pop();
444
+ break;
445
+ }
446
+ }
447
+ }
448
+ for (let i = 0; i < input.length; i++) {
449
+ const char = input[i];
450
+ const currentState = stack[stack.length - 1];
451
+ switch (currentState) {
452
+ case "ROOT":
453
+ processValueStart(char, i, "FINISH");
454
+ break;
455
+ case "INSIDE_OBJECT_START": {
456
+ switch (char) {
457
+ case '"': {
458
+ stack.pop();
459
+ stack.push("INSIDE_OBJECT_KEY");
460
+ break;
461
+ }
462
+ case "}": {
463
+ lastValidIndex = i;
464
+ stack.pop();
465
+ break;
466
+ }
467
+ }
468
+ break;
469
+ }
470
+ case "INSIDE_OBJECT_AFTER_COMMA": {
471
+ switch (char) {
472
+ case '"': {
473
+ stack.pop();
474
+ stack.push("INSIDE_OBJECT_KEY");
475
+ break;
476
+ }
477
+ }
478
+ break;
479
+ }
480
+ case "INSIDE_OBJECT_KEY": {
481
+ switch (char) {
482
+ case '"': {
483
+ stack.pop();
484
+ stack.push("INSIDE_OBJECT_AFTER_KEY");
485
+ break;
486
+ }
487
+ }
488
+ break;
489
+ }
490
+ case "INSIDE_OBJECT_AFTER_KEY": {
491
+ switch (char) {
492
+ case ":": {
493
+ stack.pop();
494
+ stack.push("INSIDE_OBJECT_BEFORE_VALUE");
495
+ break;
496
+ }
497
+ }
498
+ break;
499
+ }
500
+ case "INSIDE_OBJECT_BEFORE_VALUE": {
501
+ processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
502
+ break;
503
+ }
504
+ case "INSIDE_OBJECT_AFTER_VALUE": {
505
+ processAfterObjectValue(char, i);
506
+ break;
507
+ }
508
+ case "INSIDE_STRING": {
509
+ switch (char) {
510
+ case '"': {
511
+ stack.pop();
512
+ lastValidIndex = i;
513
+ break;
514
+ }
515
+ case "\\": {
516
+ stack.push("INSIDE_STRING_ESCAPE");
517
+ break;
518
+ }
519
+ default: {
520
+ lastValidIndex = i;
521
+ }
522
+ }
523
+ break;
524
+ }
525
+ case "INSIDE_ARRAY_START": {
526
+ switch (char) {
527
+ case "]": {
528
+ lastValidIndex = i;
529
+ stack.pop();
530
+ break;
531
+ }
532
+ default: {
533
+ lastValidIndex = i;
534
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
535
+ break;
536
+ }
537
+ }
538
+ break;
539
+ }
540
+ case "INSIDE_ARRAY_AFTER_VALUE": {
541
+ switch (char) {
542
+ case ",": {
543
+ stack.pop();
544
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
545
+ break;
546
+ }
547
+ case "]": {
548
+ lastValidIndex = i;
549
+ stack.pop();
550
+ break;
551
+ }
552
+ default: {
553
+ lastValidIndex = i;
554
+ break;
555
+ }
556
+ }
557
+ break;
558
+ }
559
+ case "INSIDE_ARRAY_AFTER_COMMA": {
560
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
561
+ break;
562
+ }
563
+ case "INSIDE_STRING_ESCAPE": {
564
+ stack.pop();
565
+ lastValidIndex = i;
566
+ break;
567
+ }
568
+ case "INSIDE_NUMBER": {
569
+ switch (char) {
570
+ case "0":
571
+ case "1":
572
+ case "2":
573
+ case "3":
574
+ case "4":
575
+ case "5":
576
+ case "6":
577
+ case "7":
578
+ case "8":
579
+ case "9": {
580
+ lastValidIndex = i;
581
+ break;
582
+ }
583
+ case "e":
584
+ case "E":
585
+ case "-":
586
+ case ".": {
587
+ break;
588
+ }
589
+ case ",": {
590
+ stack.pop();
591
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
592
+ processAfterArrayValue(char, i);
593
+ }
594
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
595
+ processAfterObjectValue(char, i);
596
+ }
597
+ break;
598
+ }
599
+ case "}": {
600
+ stack.pop();
601
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
602
+ processAfterObjectValue(char, i);
603
+ }
604
+ break;
605
+ }
606
+ case "]": {
607
+ stack.pop();
608
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
609
+ processAfterArrayValue(char, i);
610
+ }
611
+ break;
612
+ }
613
+ default: {
614
+ stack.pop();
615
+ break;
616
+ }
617
+ }
618
+ break;
619
+ }
620
+ case "INSIDE_LITERAL": {
621
+ const partialLiteral = input.substring(literalStart, i + 1);
622
+ if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
623
+ stack.pop();
624
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
625
+ processAfterObjectValue(char, i);
626
+ } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
627
+ processAfterArrayValue(char, i);
628
+ }
629
+ } else {
630
+ lastValidIndex = i;
631
+ }
632
+ break;
633
+ }
634
+ }
635
+ }
636
+ let result = input.slice(0, lastValidIndex + 1);
637
+ for (let i = stack.length - 1; i >= 0; i--) {
638
+ const state = stack[i];
639
+ switch (state) {
640
+ case "INSIDE_STRING": {
641
+ result += '"';
642
+ break;
643
+ }
644
+ case "INSIDE_OBJECT_KEY":
645
+ case "INSIDE_OBJECT_AFTER_KEY":
646
+ case "INSIDE_OBJECT_AFTER_COMMA":
647
+ case "INSIDE_OBJECT_START":
648
+ case "INSIDE_OBJECT_BEFORE_VALUE":
649
+ case "INSIDE_OBJECT_AFTER_VALUE": {
650
+ result += "}";
651
+ break;
652
+ }
653
+ case "INSIDE_ARRAY_START":
654
+ case "INSIDE_ARRAY_AFTER_COMMA":
655
+ case "INSIDE_ARRAY_AFTER_VALUE": {
656
+ result += "]";
657
+ break;
658
+ }
659
+ case "INSIDE_LITERAL": {
660
+ const partialLiteral = input.substring(literalStart, input.length);
661
+ if ("true".startsWith(partialLiteral)) {
662
+ result += "true".slice(partialLiteral.length);
663
+ } else if ("false".startsWith(partialLiteral)) {
664
+ result += "false".slice(partialLiteral.length);
665
+ } else if ("null".startsWith(partialLiteral)) {
666
+ result += "null".slice(partialLiteral.length);
667
+ }
668
+ }
669
+ }
670
+ }
671
+ return result;
672
+ }
673
+
674
+ // src/util/parse-partial-json.ts
675
+ async function parsePartialJson(jsonText) {
676
+ if (jsonText === void 0) {
677
+ return { value: void 0, state: "undefined-input" };
678
+ }
679
+ let result = await (0, import_provider_utils.safeParseJSON)({ text: jsonText });
680
+ if (result.success) {
681
+ return { value: result.value, state: "successful-parse" };
682
+ }
683
+ result = await (0, import_provider_utils.safeParseJSON)({ text: fixJson(jsonText) });
684
+ if (result.success) {
685
+ return { value: result.value, state: "repaired-parse" };
686
+ }
687
+ return { value: void 0, state: "failed-parse" };
688
+ }
689
+
690
+ // src/ui/ui-messages.ts
691
+ function isToolUIPart(part) {
692
+ return part.type.startsWith("tool-");
693
+ }
694
+ function getToolName(part) {
695
+ return part.type.split("-")[1];
696
+ }
697
+
698
+ // src/ui/process-ui-message-stream.ts
699
+ function createStreamingUIMessageState({
700
+ lastMessage,
701
+ messageId
702
+ }) {
703
+ return {
704
+ message: (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage : {
705
+ id: messageId,
706
+ metadata: void 0,
707
+ role: "assistant",
708
+ parts: []
709
+ },
710
+ activeTextParts: {},
711
+ activeReasoningParts: {},
712
+ partialToolCalls: {}
713
+ };
714
+ }
715
+ function processUIMessageStream({
716
+ stream,
717
+ onToolCall,
718
+ messageMetadataSchema,
719
+ dataPartSchemas,
720
+ runUpdateMessageJob,
721
+ onError,
722
+ onData
723
+ }) {
724
+ return stream.pipeThrough(
725
+ new TransformStream({
726
+ async transform(part, controller) {
727
+ await runUpdateMessageJob(async ({ state, write }) => {
728
+ var _a9, _b;
729
+ function updateToolInvocationPart(options) {
730
+ var _a10;
731
+ const part2 = state.message.parts.find(
732
+ (part3) => isToolUIPart(part3) && part3.toolCallId === options.toolCallId
733
+ );
734
+ const anyOptions = options;
735
+ const anyPart = part2;
736
+ if (part2 != null) {
737
+ part2.state = options.state;
738
+ anyPart.input = anyOptions.input;
739
+ anyPart.output = anyOptions.output;
740
+ anyPart.errorText = anyOptions.errorText;
741
+ anyPart.providerExecuted = (_a10 = anyOptions.providerExecuted) != null ? _a10 : part2.providerExecuted;
742
+ } else {
743
+ state.message.parts.push({
744
+ type: `tool-${options.toolName}`,
745
+ toolCallId: options.toolCallId,
746
+ state: options.state,
747
+ input: anyOptions.input,
748
+ output: anyOptions.output,
749
+ errorText: anyOptions.errorText,
750
+ providerExecuted: anyOptions.providerExecuted
751
+ });
752
+ }
753
+ }
754
+ async function updateMessageMetadata(metadata) {
755
+ if (metadata != null) {
756
+ const mergedMetadata = state.message.metadata != null ? mergeObjects(state.message.metadata, metadata) : metadata;
757
+ if (messageMetadataSchema != null) {
758
+ await (0, import_provider_utils2.validateTypes)({
759
+ value: mergedMetadata,
760
+ schema: messageMetadataSchema
761
+ });
762
+ }
763
+ state.message.metadata = mergedMetadata;
764
+ }
765
+ }
766
+ switch (part.type) {
767
+ case "text-start": {
768
+ const textPart = {
769
+ type: "text",
770
+ text: "",
771
+ state: "streaming"
772
+ };
773
+ state.activeTextParts[part.id] = textPart;
774
+ state.message.parts.push(textPart);
775
+ write();
776
+ break;
777
+ }
778
+ case "text-delta": {
779
+ state.activeTextParts[part.id].text += part.delta;
780
+ write();
781
+ break;
782
+ }
783
+ case "text-end": {
784
+ const textPart = state.activeTextParts[part.id];
785
+ textPart.state = "done";
786
+ delete state.activeTextParts[part.id];
787
+ write();
788
+ break;
789
+ }
790
+ case "reasoning-start": {
791
+ const reasoningPart = {
792
+ type: "reasoning",
793
+ text: "",
794
+ providerMetadata: part.providerMetadata,
795
+ state: "streaming"
796
+ };
797
+ state.activeReasoningParts[part.id] = reasoningPart;
798
+ state.message.parts.push(reasoningPart);
799
+ write();
800
+ break;
801
+ }
802
+ case "reasoning-delta": {
803
+ const reasoningPart = state.activeReasoningParts[part.id];
804
+ reasoningPart.text += part.delta;
805
+ reasoningPart.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : reasoningPart.providerMetadata;
806
+ write();
807
+ break;
808
+ }
809
+ case "reasoning-end": {
810
+ const reasoningPart = state.activeReasoningParts[part.id];
811
+ reasoningPart.providerMetadata = (_b = part.providerMetadata) != null ? _b : reasoningPart.providerMetadata;
812
+ reasoningPart.state = "done";
813
+ delete state.activeReasoningParts[part.id];
814
+ write();
815
+ break;
816
+ }
817
+ case "file": {
818
+ state.message.parts.push({
819
+ type: "file",
820
+ mediaType: part.mediaType,
821
+ url: part.url
822
+ });
823
+ write();
824
+ break;
825
+ }
826
+ case "source-url": {
827
+ state.message.parts.push({
828
+ type: "source-url",
829
+ sourceId: part.sourceId,
830
+ url: part.url,
831
+ title: part.title,
832
+ providerMetadata: part.providerMetadata
833
+ });
834
+ write();
835
+ break;
836
+ }
837
+ case "source-document": {
838
+ state.message.parts.push({
839
+ type: "source-document",
840
+ sourceId: part.sourceId,
841
+ mediaType: part.mediaType,
842
+ title: part.title,
843
+ filename: part.filename,
844
+ providerMetadata: part.providerMetadata
845
+ });
846
+ write();
847
+ break;
848
+ }
849
+ case "tool-input-start": {
850
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
851
+ state.partialToolCalls[part.toolCallId] = {
852
+ text: "",
853
+ toolName: part.toolName,
854
+ index: toolInvocations.length
855
+ };
856
+ updateToolInvocationPart({
857
+ toolCallId: part.toolCallId,
858
+ toolName: part.toolName,
859
+ state: "input-streaming",
860
+ input: void 0,
861
+ providerExecuted: part.providerExecuted
862
+ });
863
+ write();
864
+ break;
865
+ }
866
+ case "tool-input-delta": {
867
+ const partialToolCall = state.partialToolCalls[part.toolCallId];
868
+ partialToolCall.text += part.inputTextDelta;
869
+ const { value: partialArgs } = await parsePartialJson(
870
+ partialToolCall.text
871
+ );
872
+ updateToolInvocationPart({
873
+ toolCallId: part.toolCallId,
874
+ toolName: partialToolCall.toolName,
875
+ state: "input-streaming",
876
+ input: partialArgs
877
+ });
878
+ write();
879
+ break;
880
+ }
881
+ case "tool-input-available": {
882
+ updateToolInvocationPart({
883
+ toolCallId: part.toolCallId,
884
+ toolName: part.toolName,
885
+ state: "input-available",
886
+ input: part.input,
887
+ providerExecuted: part.providerExecuted
888
+ });
889
+ write();
890
+ if (onToolCall && !part.providerExecuted) {
891
+ const result = await onToolCall({
892
+ toolCall: part
893
+ });
894
+ if (result != null) {
895
+ updateToolInvocationPart({
896
+ toolCallId: part.toolCallId,
897
+ toolName: part.toolName,
898
+ state: "output-available",
899
+ input: part.input,
900
+ output: result
901
+ });
902
+ write();
903
+ }
904
+ }
905
+ break;
906
+ }
907
+ case "tool-output-available": {
908
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
909
+ if (toolInvocations == null) {
910
+ throw new Error("tool_result must be preceded by a tool_call");
911
+ }
912
+ const toolInvocationIndex = toolInvocations.findIndex(
913
+ (invocation) => invocation.toolCallId === part.toolCallId
914
+ );
915
+ if (toolInvocationIndex === -1) {
916
+ throw new Error(
917
+ "tool_result must be preceded by a tool_call with the same toolCallId"
918
+ );
919
+ }
920
+ const toolName = getToolName(
921
+ toolInvocations[toolInvocationIndex]
922
+ );
923
+ updateToolInvocationPart({
924
+ toolCallId: part.toolCallId,
925
+ toolName,
926
+ state: "output-available",
927
+ input: toolInvocations[toolInvocationIndex].input,
928
+ output: part.output,
929
+ providerExecuted: part.providerExecuted
930
+ });
931
+ write();
932
+ break;
933
+ }
934
+ case "tool-output-error": {
935
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
936
+ if (toolInvocations == null) {
937
+ throw new Error("tool_result must be preceded by a tool_call");
938
+ }
939
+ const toolInvocationIndex = toolInvocations.findIndex(
940
+ (invocation) => invocation.toolCallId === part.toolCallId
941
+ );
942
+ if (toolInvocationIndex === -1) {
943
+ throw new Error(
944
+ "tool_result must be preceded by a tool_call with the same toolCallId"
945
+ );
946
+ }
947
+ const toolName = getToolName(
948
+ toolInvocations[toolInvocationIndex]
949
+ );
950
+ updateToolInvocationPart({
951
+ toolCallId: part.toolCallId,
952
+ toolName,
953
+ state: "output-error",
954
+ input: toolInvocations[toolInvocationIndex].input,
955
+ errorText: part.errorText,
956
+ providerExecuted: part.providerExecuted
957
+ });
958
+ write();
959
+ break;
960
+ }
961
+ case "start-step": {
962
+ state.message.parts.push({ type: "step-start" });
963
+ break;
964
+ }
965
+ case "finish-step": {
966
+ state.activeTextParts = {};
967
+ state.activeReasoningParts = {};
968
+ break;
969
+ }
970
+ case "start": {
971
+ if (part.messageId != null) {
972
+ state.message.id = part.messageId;
973
+ }
974
+ await updateMessageMetadata(part.messageMetadata);
975
+ if (part.messageId != null || part.messageMetadata != null) {
976
+ write();
977
+ }
978
+ break;
979
+ }
980
+ case "finish": {
981
+ await updateMessageMetadata(part.messageMetadata);
982
+ if (part.messageMetadata != null) {
983
+ write();
984
+ }
985
+ break;
986
+ }
987
+ case "message-metadata": {
988
+ await updateMessageMetadata(part.messageMetadata);
989
+ if (part.messageMetadata != null) {
990
+ write();
991
+ }
992
+ break;
993
+ }
994
+ case "error": {
995
+ onError == null ? void 0 : onError(new Error(part.errorText));
996
+ break;
997
+ }
998
+ default: {
999
+ if (isDataUIMessageChunk(part)) {
1000
+ const dataPart = part;
1001
+ if (dataPart.transient) {
1002
+ onData == null ? void 0 : onData(dataPart);
1003
+ break;
1004
+ }
1005
+ const existingPart = dataPart.id != null ? state.message.parts.find(
1006
+ (partArg) => dataPart.type === partArg.type && dataPart.id === partArg.id
1007
+ ) : void 0;
1008
+ if (existingPart != null) {
1009
+ existingPart.data = isObject(existingPart.data) && isObject(dataPart.data) ? mergeObjects(existingPart.data, dataPart.data) : dataPart.data;
1010
+ } else {
1011
+ state.message.parts.push(dataPart);
1012
+ }
1013
+ onData == null ? void 0 : onData(dataPart);
1014
+ write();
1015
+ }
1016
+ }
1017
+ }
1018
+ controller.enqueue(part);
1019
+ });
1020
+ }
1021
+ })
1022
+ );
1023
+ }
1024
+ function isObject(value) {
1025
+ return typeof value === "object" && value !== null;
1026
+ }
1027
+
1028
+ // src/ui-message-stream/handle-ui-message-stream-finish.ts
1029
+ function handleUIMessageStreamFinish({
1030
+ messageId,
1031
+ originalMessages = [],
1032
+ onFinish,
1033
+ onError,
1034
+ stream
1035
+ }) {
1036
+ let lastMessage = originalMessages == null ? void 0 : originalMessages[originalMessages.length - 1];
1037
+ if ((lastMessage == null ? void 0 : lastMessage.role) !== "assistant") {
1038
+ lastMessage = void 0;
1039
+ } else {
1040
+ messageId = lastMessage.id;
1041
+ }
1042
+ const idInjectedStream = stream.pipeThrough(
1043
+ new TransformStream({
1044
+ transform(chunk, controller) {
1045
+ if (chunk.type === "start") {
1046
+ const startChunk = chunk;
1047
+ if (startChunk.messageId == null && messageId != null) {
1048
+ startChunk.messageId = messageId;
1049
+ }
1050
+ }
1051
+ controller.enqueue(chunk);
1052
+ }
1053
+ })
1054
+ );
1055
+ if (onFinish == null) {
1056
+ return idInjectedStream;
1057
+ }
1058
+ const state = createStreamingUIMessageState({
1059
+ lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
1060
+ messageId: messageId != null ? messageId : ""
1061
+ // will be overridden by the stream
1062
+ });
1063
+ const runUpdateMessageJob = async (job) => {
1064
+ await job({ state, write: () => {
1065
+ } });
1066
+ };
1067
+ return processUIMessageStream({
1068
+ stream: idInjectedStream,
1069
+ runUpdateMessageJob,
1070
+ onError
1071
+ }).pipeThrough(
1072
+ new TransformStream({
1073
+ transform(chunk, controller) {
1074
+ controller.enqueue(chunk);
1075
+ },
1076
+ flush() {
1077
+ const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
1078
+ onFinish({
1079
+ isContinuation,
1080
+ responseMessage: state.message,
1081
+ messages: [
1082
+ ...isContinuation ? originalMessages.slice(0, -1) : originalMessages,
1083
+ state.message
1084
+ ]
1085
+ });
1086
+ }
1087
+ })
1088
+ );
1089
+ }
1090
+
1091
+ // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
1092
+ function pipeUIMessageStreamToResponse({
1093
+ response,
1094
+ status,
1095
+ statusText,
1096
+ headers,
1097
+ stream,
1098
+ consumeSseStream
1099
+ }) {
1100
+ let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
1101
+ if (consumeSseStream) {
1102
+ const [stream1, stream2] = sseStream.tee();
1103
+ sseStream = stream1;
1104
+ consumeSseStream({ stream: stream2 });
1105
+ }
1106
+ writeToServerResponse({
1107
+ response,
1108
+ status,
1109
+ statusText,
1110
+ headers: Object.fromEntries(
1111
+ prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
1112
+ ),
1113
+ stream: sseStream.pipeThrough(new TextEncoderStream())
1114
+ });
1115
+ }
1116
+
1117
+ // src/util/as-array.ts
1118
+ function asArray(value) {
1119
+ return value === void 0 ? [] : Array.isArray(value) ? value : [value];
1120
+ }
1121
+
1122
+ // src/util/async-iterable-stream.ts
1123
+ function createAsyncIterableStream(source) {
1124
+ const stream = source.pipeThrough(new TransformStream());
1125
+ stream[Symbol.asyncIterator] = () => {
1126
+ const reader = stream.getReader();
1127
+ return {
1128
+ async next() {
1129
+ const { done, value } = await reader.read();
1130
+ return done ? { done: true, value: void 0 } : { done: false, value };
1131
+ }
1132
+ };
1133
+ };
1134
+ return stream;
1135
+ }
1136
+
1137
+ // src/util/consume-stream.ts
1138
+ async function consumeStream({
1139
+ stream,
1140
+ onError
1141
+ }) {
1142
+ const reader = stream.getReader();
1143
+ try {
1144
+ while (true) {
1145
+ const { done } = await reader.read();
1146
+ if (done)
1147
+ break;
1148
+ }
1149
+ } catch (error) {
1150
+ onError == null ? void 0 : onError(error);
1151
+ } finally {
1152
+ reader.releaseLock();
1153
+ }
1154
+ }
1155
+
1156
+ // src/util/create-resolvable-promise.ts
1157
+ function createResolvablePromise() {
1158
+ let resolve2;
1159
+ let reject;
1160
+ const promise = new Promise((res, rej) => {
1161
+ resolve2 = res;
1162
+ reject = rej;
1163
+ });
1164
+ return {
1165
+ promise,
1166
+ resolve: resolve2,
1167
+ reject
1168
+ };
1169
+ }
1170
+
1171
+ // src/util/create-stitchable-stream.ts
1172
+ function createStitchableStream() {
1173
+ let innerStreamReaders = [];
1174
+ let controller = null;
1175
+ let isClosed = false;
1176
+ let waitForNewStream = createResolvablePromise();
1177
+ const processPull = async () => {
1178
+ if (isClosed && innerStreamReaders.length === 0) {
1179
+ controller == null ? void 0 : controller.close();
1180
+ return;
1181
+ }
1182
+ if (innerStreamReaders.length === 0) {
1183
+ waitForNewStream = createResolvablePromise();
1184
+ await waitForNewStream.promise;
1185
+ return processPull();
1186
+ }
1187
+ try {
1188
+ const { value, done } = await innerStreamReaders[0].read();
1189
+ if (done) {
1190
+ innerStreamReaders.shift();
1191
+ if (innerStreamReaders.length > 0) {
1192
+ await processPull();
1193
+ } else if (isClosed) {
1194
+ controller == null ? void 0 : controller.close();
1195
+ }
1196
+ } else {
1197
+ controller == null ? void 0 : controller.enqueue(value);
1198
+ }
1199
+ } catch (error) {
1200
+ controller == null ? void 0 : controller.error(error);
1201
+ innerStreamReaders.shift();
1202
+ if (isClosed && innerStreamReaders.length === 0) {
1203
+ controller == null ? void 0 : controller.close();
1204
+ }
1205
+ }
1206
+ };
1207
+ return {
1208
+ stream: new ReadableStream({
1209
+ start(controllerParam) {
1210
+ controller = controllerParam;
1211
+ },
1212
+ pull: processPull,
1213
+ async cancel() {
1214
+ for (const reader of innerStreamReaders) {
1215
+ await reader.cancel();
1216
+ }
1217
+ innerStreamReaders = [];
1218
+ isClosed = true;
1219
+ }
1220
+ }),
1221
+ addStream: (innerStream) => {
1222
+ if (isClosed) {
1223
+ throw new Error("Cannot add inner stream: outer stream is closed");
1224
+ }
1225
+ innerStreamReaders.push(innerStream.getReader());
1226
+ waitForNewStream.resolve();
1227
+ },
1228
+ /**
1229
+ * Gracefully close the outer stream. This will let the inner streams
1230
+ * finish processing and then close the outer stream.
1231
+ */
1232
+ close: () => {
1233
+ isClosed = true;
1234
+ waitForNewStream.resolve();
1235
+ if (innerStreamReaders.length === 0) {
1236
+ controller == null ? void 0 : controller.close();
1237
+ }
1238
+ },
1239
+ /**
1240
+ * Immediately close the outer stream. This will cancel all inner streams
1241
+ * and close the outer stream.
1242
+ */
1243
+ terminate: () => {
1244
+ isClosed = true;
1245
+ waitForNewStream.resolve();
1246
+ innerStreamReaders.forEach((reader) => reader.cancel());
1247
+ innerStreamReaders = [];
1248
+ controller == null ? void 0 : controller.close();
1249
+ }
1250
+ };
1251
+ }
1252
+
1253
+ // src/util/delayed-promise.ts
1254
+ var DelayedPromise = class {
1255
+ constructor() {
1256
+ this.status = { type: "pending" };
1257
+ this._resolve = void 0;
1258
+ this._reject = void 0;
1259
+ }
1260
+ get promise() {
1261
+ if (this._promise) {
1262
+ return this._promise;
1263
+ }
1264
+ this._promise = new Promise((resolve2, reject) => {
1265
+ if (this.status.type === "resolved") {
1266
+ resolve2(this.status.value);
1267
+ } else if (this.status.type === "rejected") {
1268
+ reject(this.status.error);
1269
+ }
1270
+ this._resolve = resolve2;
1271
+ this._reject = reject;
1272
+ });
1273
+ return this._promise;
1274
+ }
1275
+ resolve(value) {
1276
+ var _a9;
1277
+ this.status = { type: "resolved", value };
1278
+ if (this._promise) {
1279
+ (_a9 = this._resolve) == null ? void 0 : _a9.call(this, value);
1280
+ }
1281
+ }
1282
+ reject(error) {
1283
+ var _a9;
1284
+ this.status = { type: "rejected", error };
1285
+ if (this._promise) {
1286
+ (_a9 = this._reject) == null ? void 0 : _a9.call(this, error);
1287
+ }
1288
+ }
1289
+ };
1290
+
1291
+ // src/util/now.ts
1292
+ function now() {
1293
+ var _a9, _b;
1294
+ return (_b = (_a9 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a9.now()) != null ? _b : Date.now();
1295
+ }
1296
+
1297
+ // src/error/invalid-argument-error.ts
1298
+ var import_provider2 = require("@ai-sdk/provider");
1299
+ var name2 = "AI_InvalidArgumentError";
1300
+ var marker2 = `vercel.ai.error.${name2}`;
1301
+ var symbol2 = Symbol.for(marker2);
1302
+ var _a2;
1303
+ var InvalidArgumentError = class extends import_provider2.AISDKError {
1304
+ constructor({
1305
+ parameter,
1306
+ value,
1307
+ message
1308
+ }) {
1309
+ super({
1310
+ name: name2,
1311
+ message: `Invalid argument for parameter ${parameter}: ${message}`
1312
+ });
1313
+ this[_a2] = true;
1314
+ this.parameter = parameter;
1315
+ this.value = value;
1316
+ }
1317
+ static isInstance(error) {
1318
+ return import_provider2.AISDKError.hasMarker(error, marker2);
1319
+ }
1320
+ };
1321
+ _a2 = symbol2;
1322
+
1323
+ // src/util/retry-with-exponential-backoff.ts
1324
+ var import_provider4 = require("@ai-sdk/provider");
1325
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
1326
+
1327
+ // src/util/retry-error.ts
1328
+ var import_provider3 = require("@ai-sdk/provider");
1329
+ var name3 = "AI_RetryError";
1330
+ var marker3 = `vercel.ai.error.${name3}`;
1331
+ var symbol3 = Symbol.for(marker3);
1332
+ var _a3;
1333
+ var RetryError = class extends import_provider3.AISDKError {
1334
+ constructor({
1335
+ message,
1336
+ reason,
1337
+ errors
1338
+ }) {
1339
+ super({ name: name3, message });
1340
+ this[_a3] = true;
1341
+ this.reason = reason;
1342
+ this.errors = errors;
1343
+ this.lastError = errors[errors.length - 1];
1344
+ }
1345
+ static isInstance(error) {
1346
+ return import_provider3.AISDKError.hasMarker(error, marker3);
1347
+ }
1348
+ };
1349
+ _a3 = symbol3;
1350
+
1351
+ // src/util/retry-with-exponential-backoff.ts
1352
+ var retryWithExponentialBackoff = ({
1353
+ maxRetries = 2,
1354
+ initialDelayInMs = 2e3,
1355
+ backoffFactor = 2
1356
+ } = {}) => async (f) => _retryWithExponentialBackoff(f, {
1357
+ maxRetries,
1358
+ delayInMs: initialDelayInMs,
1359
+ backoffFactor
1360
+ });
1361
+ async function _retryWithExponentialBackoff(f, {
1362
+ maxRetries,
1363
+ delayInMs,
1364
+ backoffFactor
1365
+ }, errors = []) {
1366
+ try {
1367
+ return await f();
1368
+ } catch (error) {
1369
+ if ((0, import_provider_utils3.isAbortError)(error)) {
1370
+ throw error;
1371
+ }
1372
+ if (maxRetries === 0) {
1373
+ throw error;
1374
+ }
1375
+ const errorMessage = (0, import_provider_utils3.getErrorMessage)(error);
1376
+ const newErrors = [...errors, error];
1377
+ const tryNumber = newErrors.length;
1378
+ if (tryNumber > maxRetries) {
1379
+ throw new RetryError({
1380
+ message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
1381
+ reason: "maxRetriesExceeded",
1382
+ errors: newErrors
1383
+ });
1384
+ }
1385
+ if (error instanceof Error && import_provider4.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
1386
+ await (0, import_provider_utils3.delay)(delayInMs);
1387
+ return _retryWithExponentialBackoff(
1388
+ f,
1389
+ { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
1390
+ newErrors
1391
+ );
1392
+ }
1393
+ if (tryNumber === 1) {
1394
+ throw error;
1395
+ }
1396
+ throw new RetryError({
1397
+ message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
1398
+ reason: "errorNotRetryable",
1399
+ errors: newErrors
1400
+ });
1401
+ }
1402
+ }
1403
+
1404
+ // src/util/prepare-retries.ts
1405
+ function prepareRetries({
1406
+ maxRetries
1407
+ }) {
1408
+ if (maxRetries != null) {
1409
+ if (!Number.isInteger(maxRetries)) {
1410
+ throw new InvalidArgumentError({
1411
+ parameter: "maxRetries",
1412
+ value: maxRetries,
1413
+ message: "maxRetries must be an integer"
1414
+ });
1415
+ }
1416
+ if (maxRetries < 0) {
1417
+ throw new InvalidArgumentError({
1418
+ parameter: "maxRetries",
1419
+ value: maxRetries,
1420
+ message: "maxRetries must be >= 0"
1421
+ });
1422
+ }
1423
+ }
1424
+ const maxRetriesResult = maxRetries != null ? maxRetries : 2;
1425
+ return {
1426
+ maxRetries: maxRetriesResult,
1427
+ retry: retryWithExponentialBackoff({ maxRetries: maxRetriesResult })
1428
+ };
1429
+ }
1430
+
1431
+ // src/prompt/convert-to-language-model-prompt.ts
1432
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
1433
+
1434
+ // src/util/detect-media-type.ts
1435
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
1436
+ var imageMediaTypeSignatures = [
1437
+ {
1438
+ mediaType: "image/gif",
1439
+ bytesPrefix: [71, 73, 70],
1440
+ base64Prefix: "R0lG"
1441
+ },
1442
+ {
1443
+ mediaType: "image/png",
1444
+ bytesPrefix: [137, 80, 78, 71],
1445
+ base64Prefix: "iVBORw"
1446
+ },
1447
+ {
1448
+ mediaType: "image/jpeg",
1449
+ bytesPrefix: [255, 216],
1450
+ base64Prefix: "/9j/"
1451
+ },
1452
+ {
1453
+ mediaType: "image/webp",
1454
+ bytesPrefix: [82, 73, 70, 70],
1455
+ base64Prefix: "UklGRg"
1456
+ },
1457
+ {
1458
+ mediaType: "image/bmp",
1459
+ bytesPrefix: [66, 77],
1460
+ base64Prefix: "Qk"
1461
+ },
1462
+ {
1463
+ mediaType: "image/tiff",
1464
+ bytesPrefix: [73, 73, 42, 0],
1465
+ base64Prefix: "SUkqAA"
1466
+ },
1467
+ {
1468
+ mediaType: "image/tiff",
1469
+ bytesPrefix: [77, 77, 0, 42],
1470
+ base64Prefix: "TU0AKg"
1471
+ },
1472
+ {
1473
+ mediaType: "image/avif",
1474
+ bytesPrefix: [
1475
+ 0,
1476
+ 0,
1477
+ 0,
1478
+ 32,
1479
+ 102,
1480
+ 116,
1481
+ 121,
1482
+ 112,
1483
+ 97,
1484
+ 118,
1485
+ 105,
1486
+ 102
1487
+ ],
1488
+ base64Prefix: "AAAAIGZ0eXBhdmlm"
1489
+ },
1490
+ {
1491
+ mediaType: "image/heic",
1492
+ bytesPrefix: [
1493
+ 0,
1494
+ 0,
1495
+ 0,
1496
+ 32,
1497
+ 102,
1498
+ 116,
1499
+ 121,
1500
+ 112,
1501
+ 104,
1502
+ 101,
1503
+ 105,
1504
+ 99
1505
+ ],
1506
+ base64Prefix: "AAAAIGZ0eXBoZWlj"
1507
+ }
1508
+ ];
1509
+ var stripID3 = (data) => {
1510
+ const bytes = typeof data === "string" ? (0, import_provider_utils4.convertBase64ToUint8Array)(data) : data;
1511
+ const id3Size = (bytes[6] & 127) << 21 | (bytes[7] & 127) << 14 | (bytes[8] & 127) << 7 | bytes[9] & 127;
1512
+ return bytes.slice(id3Size + 10);
1513
+ };
1514
+ function stripID3TagsIfPresent(data) {
1515
+ const hasId3 = typeof data === "string" && data.startsWith("SUQz") || typeof data !== "string" && data.length > 10 && data[0] === 73 && // 'I'
1516
+ data[1] === 68 && // 'D'
1517
+ data[2] === 51;
1518
+ return hasId3 ? stripID3(data) : data;
1519
+ }
1520
+ function detectMediaType({
1521
+ data,
1522
+ signatures
1523
+ }) {
1524
+ const processedData = stripID3TagsIfPresent(data);
1525
+ for (const signature of signatures) {
1526
+ if (typeof processedData === "string" ? processedData.startsWith(signature.base64Prefix) : processedData.length >= signature.bytesPrefix.length && signature.bytesPrefix.every(
1527
+ (byte, index) => processedData[index] === byte
1528
+ )) {
1529
+ return signature.mediaType;
1530
+ }
1531
+ }
1532
+ return void 0;
1533
+ }
1534
+
1535
+ // src/util/download-error.ts
1536
+ var import_provider5 = require("@ai-sdk/provider");
1537
+ var name4 = "AI_DownloadError";
1538
+ var marker4 = `vercel.ai.error.${name4}`;
1539
+ var symbol4 = Symbol.for(marker4);
1540
+ var _a4;
1541
+ var DownloadError = class extends import_provider5.AISDKError {
1542
+ constructor({
1543
+ url,
1544
+ statusCode,
1545
+ statusText,
1546
+ cause,
1547
+ message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
1548
+ }) {
1549
+ super({ name: name4, message, cause });
1550
+ this[_a4] = true;
1551
+ this.url = url;
1552
+ this.statusCode = statusCode;
1553
+ this.statusText = statusText;
1554
+ }
1555
+ static isInstance(error) {
1556
+ return import_provider5.AISDKError.hasMarker(error, marker4);
1557
+ }
1558
+ };
1559
+ _a4 = symbol4;
1560
+
1561
+ // src/util/download.ts
1562
+ async function download({ url }) {
1563
+ var _a9;
1564
+ const urlText = url.toString();
1565
+ try {
1566
+ const response = await fetch(urlText);
1567
+ if (!response.ok) {
1568
+ throw new DownloadError({
1569
+ url: urlText,
1570
+ statusCode: response.status,
1571
+ statusText: response.statusText
1572
+ });
1573
+ }
1574
+ return {
1575
+ data: new Uint8Array(await response.arrayBuffer()),
1576
+ mediaType: (_a9 = response.headers.get("content-type")) != null ? _a9 : void 0
1577
+ };
1578
+ } catch (error) {
1579
+ if (DownloadError.isInstance(error)) {
1580
+ throw error;
1581
+ }
1582
+ throw new DownloadError({ url: urlText, cause: error });
1583
+ }
1584
+ }
1585
+
1586
+ // src/prompt/data-content.ts
1587
+ var import_provider6 = require("@ai-sdk/provider");
1588
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
1589
+ var import_v42 = require("zod/v4");
1590
+
1591
+ // src/prompt/split-data-url.ts
1592
+ function splitDataUrl(dataUrl) {
1593
+ try {
1594
+ const [header, base64Content] = dataUrl.split(",");
1595
+ return {
1596
+ mediaType: header.split(";")[0].split(":")[1],
1597
+ base64Content
1598
+ };
1599
+ } catch (error) {
1600
+ return {
1601
+ mediaType: void 0,
1602
+ base64Content: void 0
1603
+ };
1604
+ }
1605
+ }
1606
+
1607
+ // src/prompt/data-content.ts
1608
+ var dataContentSchema = import_v42.z.union([
1609
+ import_v42.z.string(),
1610
+ import_v42.z.instanceof(Uint8Array),
1611
+ import_v42.z.instanceof(ArrayBuffer),
1612
+ import_v42.z.custom(
1613
+ // Buffer might not be available in some environments such as CloudFlare:
1614
+ (value) => {
1615
+ var _a9, _b;
1616
+ return (_b = (_a9 = globalThis.Buffer) == null ? void 0 : _a9.isBuffer(value)) != null ? _b : false;
1617
+ },
1618
+ { message: "Must be a Buffer" }
1619
+ )
1620
+ ]);
1621
+ function convertToLanguageModelV2DataContent(content) {
1622
+ if (content instanceof Uint8Array) {
1623
+ return { data: content, mediaType: void 0 };
1624
+ }
1625
+ if (content instanceof ArrayBuffer) {
1626
+ return { data: new Uint8Array(content), mediaType: void 0 };
1627
+ }
1628
+ if (typeof content === "string") {
1629
+ try {
1630
+ content = new URL(content);
1631
+ } catch (error) {
1632
+ }
1633
+ }
1634
+ if (content instanceof URL && content.protocol === "data:") {
1635
+ const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
1636
+ content.toString()
1637
+ );
1638
+ if (dataUrlMediaType == null || base64Content == null) {
1639
+ throw new import_provider6.AISDKError({
1640
+ name: "InvalidDataContentError",
1641
+ message: `Invalid data URL format in content ${content.toString()}`
1642
+ });
1643
+ }
1644
+ return { data: base64Content, mediaType: dataUrlMediaType };
1645
+ }
1646
+ return { data: content, mediaType: void 0 };
1647
+ }
1648
+ function convertDataContentToBase64String(content) {
1649
+ if (typeof content === "string") {
1650
+ return content;
1651
+ }
1652
+ if (content instanceof ArrayBuffer) {
1653
+ return (0, import_provider_utils5.convertUint8ArrayToBase64)(new Uint8Array(content));
1654
+ }
1655
+ return (0, import_provider_utils5.convertUint8ArrayToBase64)(content);
1656
+ }
1657
+
1658
+ // src/prompt/invalid-message-role-error.ts
1659
+ var import_provider7 = require("@ai-sdk/provider");
1660
+ var name5 = "AI_InvalidMessageRoleError";
1661
+ var marker5 = `vercel.ai.error.${name5}`;
1662
+ var symbol5 = Symbol.for(marker5);
1663
+ var _a5;
1664
+ var InvalidMessageRoleError = class extends import_provider7.AISDKError {
1665
+ constructor({
1666
+ role,
1667
+ message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
1668
+ }) {
1669
+ super({ name: name5, message });
1670
+ this[_a5] = true;
1671
+ this.role = role;
1672
+ }
1673
+ static isInstance(error) {
1674
+ return import_provider7.AISDKError.hasMarker(error, marker5);
1675
+ }
1676
+ };
1677
+ _a5 = symbol5;
1678
+
1679
+ // src/prompt/convert-to-language-model-prompt.ts
1680
+ async function convertToLanguageModelPrompt({
1681
+ prompt,
1682
+ supportedUrls,
1683
+ downloadImplementation = download
1684
+ }) {
1685
+ const downloadedAssets = await downloadAssets(
1686
+ prompt.messages,
1687
+ downloadImplementation,
1688
+ supportedUrls
1689
+ );
1690
+ return [
1691
+ ...prompt.system != null ? [{ role: "system", content: prompt.system }] : [],
1692
+ ...prompt.messages.map(
1693
+ (message) => convertToLanguageModelMessage({ message, downloadedAssets })
1694
+ )
1695
+ ];
1696
+ }
1697
+ function convertToLanguageModelMessage({
1698
+ message,
1699
+ downloadedAssets
1700
+ }) {
1701
+ const role = message.role;
1702
+ switch (role) {
1703
+ case "system": {
1704
+ return {
1705
+ role: "system",
1706
+ content: message.content,
1707
+ providerOptions: message.providerOptions
1708
+ };
1709
+ }
1710
+ case "user": {
1711
+ if (typeof message.content === "string") {
1712
+ return {
1713
+ role: "user",
1714
+ content: [{ type: "text", text: message.content }],
1715
+ providerOptions: message.providerOptions
1716
+ };
1717
+ }
1718
+ return {
1719
+ role: "user",
1720
+ content: message.content.map((part) => convertPartToLanguageModelPart(part, downloadedAssets)).filter((part) => part.type !== "text" || part.text !== ""),
1721
+ providerOptions: message.providerOptions
1722
+ };
1723
+ }
1724
+ case "assistant": {
1725
+ if (typeof message.content === "string") {
1726
+ return {
1727
+ role: "assistant",
1728
+ content: [{ type: "text", text: message.content }],
1729
+ providerOptions: message.providerOptions
1730
+ };
1731
+ }
1732
+ return {
1733
+ role: "assistant",
1734
+ content: message.content.filter(
1735
+ // remove empty text parts:
1736
+ (part) => part.type !== "text" || part.text !== ""
1737
+ ).map((part) => {
1738
+ const providerOptions = part.providerOptions;
1739
+ switch (part.type) {
1740
+ case "file": {
1741
+ const { data, mediaType } = convertToLanguageModelV2DataContent(
1742
+ part.data
1743
+ );
1744
+ return {
1745
+ type: "file",
1746
+ data,
1747
+ filename: part.filename,
1748
+ mediaType: mediaType != null ? mediaType : part.mediaType,
1749
+ providerOptions
1750
+ };
1751
+ }
1752
+ case "reasoning": {
1753
+ return {
1754
+ type: "reasoning",
1755
+ text: part.text,
1756
+ providerOptions
1757
+ };
1758
+ }
1759
+ case "text": {
1760
+ return {
1761
+ type: "text",
1762
+ text: part.text,
1763
+ providerOptions
1764
+ };
1765
+ }
1766
+ case "tool-call": {
1767
+ return {
1768
+ type: "tool-call",
1769
+ toolCallId: part.toolCallId,
1770
+ toolName: part.toolName,
1771
+ input: part.input,
1772
+ providerExecuted: part.providerExecuted,
1773
+ providerOptions
1774
+ };
1775
+ }
1776
+ case "tool-result": {
1777
+ return {
1778
+ type: "tool-result",
1779
+ toolCallId: part.toolCallId,
1780
+ toolName: part.toolName,
1781
+ output: part.output,
1782
+ providerOptions
1783
+ };
1784
+ }
1785
+ }
1786
+ }),
1787
+ providerOptions: message.providerOptions
1788
+ };
1789
+ }
1790
+ case "tool": {
1791
+ return {
1792
+ role: "tool",
1793
+ content: message.content.map((part) => ({
1794
+ type: "tool-result",
1795
+ toolCallId: part.toolCallId,
1796
+ toolName: part.toolName,
1797
+ output: part.output,
1798
+ providerOptions: part.providerOptions
1799
+ })),
1800
+ providerOptions: message.providerOptions
1801
+ };
1802
+ }
1803
+ default: {
1804
+ const _exhaustiveCheck = role;
1805
+ throw new InvalidMessageRoleError({ role: _exhaustiveCheck });
1806
+ }
1807
+ }
1808
+ }
1809
+ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
1810
+ const urls = messages.filter((message) => message.role === "user").map((message) => message.content).filter(
1811
+ (content) => Array.isArray(content)
1812
+ ).flat().filter(
1813
+ (part) => part.type === "image" || part.type === "file"
1814
+ ).map((part) => {
1815
+ var _a9;
1816
+ const mediaType = (_a9 = part.mediaType) != null ? _a9 : part.type === "image" ? "image/*" : void 0;
1817
+ let data = part.type === "image" ? part.image : part.data;
1818
+ if (typeof data === "string") {
1819
+ try {
1820
+ data = new URL(data);
1821
+ } catch (ignored) {
1822
+ }
1823
+ }
1824
+ return { mediaType, data };
1825
+ }).filter(
1826
+ (part) => part.data instanceof URL && part.mediaType != null && !(0, import_provider_utils6.isUrlSupported)({
1827
+ url: part.data.toString(),
1828
+ mediaType: part.mediaType,
1829
+ supportedUrls
1830
+ })
1831
+ ).map((part) => part.data);
1832
+ const downloadedImages = await Promise.all(
1833
+ urls.map(async (url) => ({
1834
+ url,
1835
+ data: await downloadImplementation({ url })
1836
+ }))
1837
+ );
1838
+ return Object.fromEntries(
1839
+ downloadedImages.map(({ url, data }) => [url.toString(), data])
1840
+ );
1841
+ }
1842
+ function convertPartToLanguageModelPart(part, downloadedAssets) {
1843
+ var _a9;
1844
+ if (part.type === "text") {
1845
+ return {
1846
+ type: "text",
1847
+ text: part.text,
1848
+ providerOptions: part.providerOptions
1849
+ };
1850
+ }
1851
+ let originalData;
1852
+ const type = part.type;
1853
+ switch (type) {
1854
+ case "image":
1855
+ originalData = part.image;
1856
+ break;
1857
+ case "file":
1858
+ originalData = part.data;
1859
+ break;
1860
+ default:
1861
+ throw new Error(`Unsupported part type: ${type}`);
1862
+ }
1863
+ const { data: convertedData, mediaType: convertedMediaType } = convertToLanguageModelV2DataContent(originalData);
1864
+ let mediaType = convertedMediaType != null ? convertedMediaType : part.mediaType;
1865
+ let data = convertedData;
1866
+ if (data instanceof URL) {
1867
+ const downloadedFile = downloadedAssets[data.toString()];
1868
+ if (downloadedFile) {
1869
+ data = downloadedFile.data;
1870
+ mediaType != null ? mediaType : mediaType = downloadedFile.mediaType;
1871
+ }
1872
+ }
1873
+ switch (type) {
1874
+ case "image": {
1875
+ if (data instanceof Uint8Array || typeof data === "string") {
1876
+ mediaType = (_a9 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a9 : mediaType;
1877
+ }
1878
+ return {
1879
+ type: "file",
1880
+ mediaType: mediaType != null ? mediaType : "image/*",
1881
+ // any image
1882
+ filename: void 0,
1883
+ data,
1884
+ providerOptions: part.providerOptions
1885
+ };
1886
+ }
1887
+ case "file": {
1888
+ if (mediaType == null) {
1889
+ throw new Error(`Media type is missing for file part`);
1890
+ }
1891
+ return {
1892
+ type: "file",
1893
+ mediaType,
1894
+ filename: part.filename,
1895
+ data,
1896
+ providerOptions: part.providerOptions
1897
+ };
1898
+ }
1899
+ }
1900
+ }
1901
+
1902
+ // src/prompt/prepare-call-settings.ts
1903
+ function prepareCallSettings({
1904
+ maxOutputTokens,
1905
+ temperature,
1906
+ topP,
1907
+ topK,
1908
+ presencePenalty,
1909
+ frequencyPenalty,
1910
+ seed,
1911
+ stopSequences
1912
+ }) {
1913
+ if (maxOutputTokens != null) {
1914
+ if (!Number.isInteger(maxOutputTokens)) {
1915
+ throw new InvalidArgumentError({
1916
+ parameter: "maxOutputTokens",
1917
+ value: maxOutputTokens,
1918
+ message: "maxOutputTokens must be an integer"
1919
+ });
1920
+ }
1921
+ if (maxOutputTokens < 1) {
1922
+ throw new InvalidArgumentError({
1923
+ parameter: "maxOutputTokens",
1924
+ value: maxOutputTokens,
1925
+ message: "maxOutputTokens must be >= 1"
1926
+ });
1927
+ }
1928
+ }
1929
+ if (temperature != null) {
1930
+ if (typeof temperature !== "number") {
1931
+ throw new InvalidArgumentError({
1932
+ parameter: "temperature",
1933
+ value: temperature,
1934
+ message: "temperature must be a number"
1935
+ });
1936
+ }
1937
+ }
1938
+ if (topP != null) {
1939
+ if (typeof topP !== "number") {
1940
+ throw new InvalidArgumentError({
1941
+ parameter: "topP",
1942
+ value: topP,
1943
+ message: "topP must be a number"
1944
+ });
1945
+ }
1946
+ }
1947
+ if (topK != null) {
1948
+ if (typeof topK !== "number") {
1949
+ throw new InvalidArgumentError({
1950
+ parameter: "topK",
1951
+ value: topK,
1952
+ message: "topK must be a number"
1953
+ });
1954
+ }
1955
+ }
1956
+ if (presencePenalty != null) {
1957
+ if (typeof presencePenalty !== "number") {
1958
+ throw new InvalidArgumentError({
1959
+ parameter: "presencePenalty",
1960
+ value: presencePenalty,
1961
+ message: "presencePenalty must be a number"
1962
+ });
1963
+ }
1964
+ }
1965
+ if (frequencyPenalty != null) {
1966
+ if (typeof frequencyPenalty !== "number") {
1967
+ throw new InvalidArgumentError({
1968
+ parameter: "frequencyPenalty",
1969
+ value: frequencyPenalty,
1970
+ message: "frequencyPenalty must be a number"
1971
+ });
1972
+ }
1973
+ }
1974
+ if (seed != null) {
1975
+ if (!Number.isInteger(seed)) {
1976
+ throw new InvalidArgumentError({
1977
+ parameter: "seed",
1978
+ value: seed,
1979
+ message: "seed must be an integer"
1980
+ });
1981
+ }
1982
+ }
1983
+ return {
1984
+ maxOutputTokens,
1985
+ temperature,
1986
+ topP,
1987
+ topK,
1988
+ presencePenalty,
1989
+ frequencyPenalty,
1990
+ stopSequences,
1991
+ seed
1992
+ };
1993
+ }
1994
+
1995
+ // src/prompt/prepare-tools-and-tool-choice.ts
1996
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
1997
+
1998
+ // src/util/is-non-empty-object.ts
1999
+ function isNonEmptyObject(object) {
2000
+ return object != null && Object.keys(object).length > 0;
2001
+ }
2002
+
2003
+ // src/prompt/prepare-tools-and-tool-choice.ts
2004
+ function prepareToolsAndToolChoice({
2005
+ tools,
2006
+ toolChoice,
2007
+ activeTools
2008
+ }) {
2009
+ if (!isNonEmptyObject(tools)) {
2010
+ return {
2011
+ tools: void 0,
2012
+ toolChoice: void 0
2013
+ };
2014
+ }
2015
+ const filteredTools = activeTools != null ? Object.entries(tools).filter(
2016
+ ([name9]) => activeTools.includes(name9)
2017
+ ) : Object.entries(tools);
2018
+ return {
2019
+ tools: filteredTools.map(([name9, tool]) => {
2020
+ const toolType = tool.type;
2021
+ switch (toolType) {
2022
+ case void 0:
2023
+ case "function":
2024
+ return {
2025
+ type: "function",
2026
+ name: name9,
2027
+ description: tool.description,
2028
+ inputSchema: (0, import_provider_utils7.asSchema)(tool.inputSchema).jsonSchema
2029
+ };
2030
+ case "provider-defined":
2031
+ return {
2032
+ type: "provider-defined",
2033
+ name: name9,
2034
+ id: tool.id,
2035
+ args: tool.args
2036
+ };
2037
+ default: {
2038
+ const exhaustiveCheck = toolType;
2039
+ throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);
2040
+ }
2041
+ }
2042
+ }),
2043
+ toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
2044
+ };
2045
+ }
2046
+
2047
+ // src/prompt/resolve-language-model.ts
2048
+ var import_gateway = require("@ai-sdk/gateway");
2049
+
2050
+ // src/error/index.ts
2051
+ var import_provider12 = require("@ai-sdk/provider");
2052
+
2053
+ // src/error/invalid-tool-input-error.ts
2054
+ var import_provider8 = require("@ai-sdk/provider");
2055
+ var name6 = "AI_InvalidToolInputError";
2056
+ var marker6 = `vercel.ai.error.${name6}`;
2057
+ var symbol6 = Symbol.for(marker6);
2058
+ var _a6;
2059
+ var InvalidToolInputError = class extends import_provider8.AISDKError {
2060
+ constructor({
2061
+ toolInput,
2062
+ toolName,
2063
+ cause,
2064
+ message = `Invalid input for tool ${toolName}: ${(0, import_provider8.getErrorMessage)(cause)}`
2065
+ }) {
2066
+ super({ name: name6, message, cause });
2067
+ this[_a6] = true;
2068
+ this.toolInput = toolInput;
2069
+ this.toolName = toolName;
2070
+ }
2071
+ static isInstance(error) {
2072
+ return import_provider8.AISDKError.hasMarker(error, marker6);
2073
+ }
2074
+ };
2075
+ _a6 = symbol6;
2076
+
2077
+ // src/error/no-such-tool-error.ts
2078
+ var import_provider9 = require("@ai-sdk/provider");
2079
+ var name7 = "AI_NoSuchToolError";
2080
+ var marker7 = `vercel.ai.error.${name7}`;
2081
+ var symbol7 = Symbol.for(marker7);
2082
+ var _a7;
2083
+ var NoSuchToolError = class extends import_provider9.AISDKError {
2084
+ constructor({
2085
+ toolName,
2086
+ availableTools = void 0,
2087
+ message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
2088
+ }) {
2089
+ super({ name: name7, message });
2090
+ this[_a7] = true;
2091
+ this.toolName = toolName;
2092
+ this.availableTools = availableTools;
2093
+ }
2094
+ static isInstance(error) {
2095
+ return import_provider9.AISDKError.hasMarker(error, marker7);
2096
+ }
2097
+ };
2098
+ _a7 = symbol7;
2099
+
2100
+ // src/error/tool-call-repair-error.ts
2101
+ var import_provider10 = require("@ai-sdk/provider");
2102
+ var name8 = "AI_ToolCallRepairError";
2103
+ var marker8 = `vercel.ai.error.${name8}`;
2104
+ var symbol8 = Symbol.for(marker8);
2105
+ var _a8;
2106
+ var ToolCallRepairError = class extends import_provider10.AISDKError {
2107
+ constructor({
2108
+ cause,
2109
+ originalError,
2110
+ message = `Error repairing tool call: ${(0, import_provider10.getErrorMessage)(cause)}`
2111
+ }) {
2112
+ super({ name: name8, message, cause });
2113
+ this[_a8] = true;
2114
+ this.originalError = originalError;
2115
+ }
2116
+ static isInstance(error) {
2117
+ return import_provider10.AISDKError.hasMarker(error, marker8);
2118
+ }
2119
+ };
2120
+ _a8 = symbol8;
2121
+
2122
+ // src/error/unsupported-model-version-error.ts
2123
+ var import_provider11 = require("@ai-sdk/provider");
2124
+ var UnsupportedModelVersionError = class extends import_provider11.AISDKError {
2125
+ constructor(options) {
2126
+ super({
2127
+ name: "AI_UnsupportedModelVersionError",
2128
+ message: `Unsupported model version ${options.version} for provider "${options.provider}" and model "${options.modelId}". AI SDK 5 only supports models that implement specification version "v2".`
2129
+ });
2130
+ this.version = options.version;
2131
+ this.provider = options.provider;
2132
+ this.modelId = options.modelId;
2133
+ }
2134
+ };
2135
+
2136
+ // src/prompt/resolve-language-model.ts
2137
+ function resolveLanguageModel(model) {
2138
+ if (typeof model !== "string") {
2139
+ if (model.specificationVersion !== "v2") {
2140
+ throw new UnsupportedModelVersionError({
2141
+ version: model.specificationVersion,
2142
+ provider: model.provider,
2143
+ modelId: model.modelId
2144
+ });
2145
+ }
2146
+ return model;
2147
+ }
2148
+ const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
2149
+ return (globalProvider != null ? globalProvider : import_gateway.gateway).languageModel(model);
2150
+ }
2151
+
2152
+ // src/prompt/standardize-prompt.ts
2153
+ var import_provider13 = require("@ai-sdk/provider");
2154
+ var import_provider_utils8 = require("@ai-sdk/provider-utils");
2155
+ var import_v47 = require("zod/v4");
2156
+
2157
+ // src/prompt/message.ts
2158
+ var import_v46 = require("zod/v4");
2159
+
2160
+ // src/types/provider-metadata.ts
2161
+ var import_v44 = require("zod/v4");
2162
+
2163
+ // src/types/json-value.ts
2164
+ var import_v43 = require("zod/v4");
2165
+ var jsonValueSchema = import_v43.z.lazy(
2166
+ () => import_v43.z.union([
2167
+ import_v43.z.null(),
2168
+ import_v43.z.string(),
2169
+ import_v43.z.number(),
2170
+ import_v43.z.boolean(),
2171
+ import_v43.z.record(import_v43.z.string(), jsonValueSchema),
2172
+ import_v43.z.array(jsonValueSchema)
2173
+ ])
2174
+ );
2175
+
2176
+ // src/types/provider-metadata.ts
2177
+ var providerMetadataSchema = import_v44.z.record(
2178
+ import_v44.z.string(),
2179
+ import_v44.z.record(import_v44.z.string(), jsonValueSchema)
2180
+ );
2181
+
2182
+ // src/prompt/content-part.ts
2183
+ var import_v45 = require("zod/v4");
2184
+ var textPartSchema = import_v45.z.object({
2185
+ type: import_v45.z.literal("text"),
2186
+ text: import_v45.z.string(),
2187
+ providerOptions: providerMetadataSchema.optional()
2188
+ });
2189
+ var imagePartSchema = import_v45.z.object({
2190
+ type: import_v45.z.literal("image"),
2191
+ image: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2192
+ mediaType: import_v45.z.string().optional(),
2193
+ providerOptions: providerMetadataSchema.optional()
2194
+ });
2195
+ var filePartSchema = import_v45.z.object({
2196
+ type: import_v45.z.literal("file"),
2197
+ data: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2198
+ filename: import_v45.z.string().optional(),
2199
+ mediaType: import_v45.z.string(),
2200
+ providerOptions: providerMetadataSchema.optional()
2201
+ });
2202
+ var reasoningPartSchema = import_v45.z.object({
2203
+ type: import_v45.z.literal("reasoning"),
2204
+ text: import_v45.z.string(),
2205
+ providerOptions: providerMetadataSchema.optional()
2206
+ });
2207
+ var toolCallPartSchema = import_v45.z.object({
2208
+ type: import_v45.z.literal("tool-call"),
2209
+ toolCallId: import_v45.z.string(),
2210
+ toolName: import_v45.z.string(),
2211
+ input: import_v45.z.unknown(),
2212
+ providerOptions: providerMetadataSchema.optional(),
2213
+ providerExecuted: import_v45.z.boolean().optional()
2214
+ });
2215
+ var outputSchema = import_v45.z.discriminatedUnion("type", [
2216
+ import_v45.z.object({
2217
+ type: import_v45.z.literal("text"),
2218
+ value: import_v45.z.string()
2219
+ }),
2220
+ import_v45.z.object({
2221
+ type: import_v45.z.literal("json"),
2222
+ value: jsonValueSchema
2223
+ }),
2224
+ import_v45.z.object({
2225
+ type: import_v45.z.literal("error-text"),
2226
+ value: import_v45.z.string()
2227
+ }),
2228
+ import_v45.z.object({
2229
+ type: import_v45.z.literal("error-json"),
2230
+ value: jsonValueSchema
2231
+ }),
2232
+ import_v45.z.object({
2233
+ type: import_v45.z.literal("content"),
2234
+ value: import_v45.z.array(
2235
+ import_v45.z.union([
2236
+ import_v45.z.object({
2237
+ type: import_v45.z.literal("text"),
2238
+ text: import_v45.z.string()
2239
+ }),
2240
+ import_v45.z.object({
2241
+ type: import_v45.z.literal("media"),
2242
+ data: import_v45.z.string(),
2243
+ mediaType: import_v45.z.string()
2244
+ })
2245
+ ])
2246
+ )
2247
+ })
2248
+ ]);
2249
+ var toolResultPartSchema = import_v45.z.object({
2250
+ type: import_v45.z.literal("tool-result"),
2251
+ toolCallId: import_v45.z.string(),
2252
+ toolName: import_v45.z.string(),
2253
+ output: outputSchema,
2254
+ providerOptions: providerMetadataSchema.optional()
2255
+ });
2256
+
2257
+ // src/prompt/message.ts
2258
+ var systemModelMessageSchema = import_v46.z.object(
2259
+ {
2260
+ role: import_v46.z.literal("system"),
2261
+ content: import_v46.z.string(),
2262
+ providerOptions: providerMetadataSchema.optional()
2263
+ }
2264
+ );
2265
+ var userModelMessageSchema = import_v46.z.object({
2266
+ role: import_v46.z.literal("user"),
2267
+ content: import_v46.z.union([
2268
+ import_v46.z.string(),
2269
+ import_v46.z.array(import_v46.z.union([textPartSchema, imagePartSchema, filePartSchema]))
2270
+ ]),
2271
+ providerOptions: providerMetadataSchema.optional()
2272
+ });
2273
+ var assistantModelMessageSchema = import_v46.z.object({
2274
+ role: import_v46.z.literal("assistant"),
2275
+ content: import_v46.z.union([
2276
+ import_v46.z.string(),
2277
+ import_v46.z.array(
2278
+ import_v46.z.union([
2279
+ textPartSchema,
2280
+ filePartSchema,
2281
+ reasoningPartSchema,
2282
+ toolCallPartSchema,
2283
+ toolResultPartSchema
2284
+ ])
2285
+ )
2286
+ ]),
2287
+ providerOptions: providerMetadataSchema.optional()
2288
+ });
2289
+ var toolModelMessageSchema = import_v46.z.object({
2290
+ role: import_v46.z.literal("tool"),
2291
+ content: import_v46.z.array(toolResultPartSchema),
2292
+ providerOptions: providerMetadataSchema.optional()
2293
+ });
2294
+ var modelMessageSchema = import_v46.z.union([
2295
+ systemModelMessageSchema,
2296
+ userModelMessageSchema,
2297
+ assistantModelMessageSchema,
2298
+ toolModelMessageSchema
2299
+ ]);
2300
+
2301
+ // src/prompt/standardize-prompt.ts
2302
+ async function standardizePrompt(prompt) {
2303
+ if (prompt.prompt == null && prompt.messages == null) {
2304
+ throw new import_provider13.InvalidPromptError({
2305
+ prompt,
2306
+ message: "prompt or messages must be defined"
2307
+ });
2308
+ }
2309
+ if (prompt.prompt != null && prompt.messages != null) {
2310
+ throw new import_provider13.InvalidPromptError({
2311
+ prompt,
2312
+ message: "prompt and messages cannot be defined at the same time"
2313
+ });
2314
+ }
2315
+ if (prompt.system != null && typeof prompt.system !== "string") {
2316
+ throw new import_provider13.InvalidPromptError({
2317
+ prompt,
2318
+ message: "system must be a string"
2319
+ });
2320
+ }
2321
+ let messages;
2322
+ if (prompt.prompt != null && typeof prompt.prompt === "string") {
2323
+ messages = [{ role: "user", content: prompt.prompt }];
2324
+ } else if (prompt.prompt != null && Array.isArray(prompt.prompt)) {
2325
+ messages = prompt.prompt;
2326
+ } else if (prompt.messages != null) {
2327
+ messages = prompt.messages;
2328
+ } else {
2329
+ throw new import_provider13.InvalidPromptError({
2330
+ prompt,
2331
+ message: "prompt or messages must be defined"
2332
+ });
2333
+ }
2334
+ if (messages.length === 0) {
2335
+ throw new import_provider13.InvalidPromptError({
2336
+ prompt,
2337
+ message: "messages must not be empty"
2338
+ });
2339
+ }
2340
+ const validationResult = await (0, import_provider_utils8.safeValidateTypes)({
2341
+ value: messages,
2342
+ schema: import_v47.z.array(modelMessageSchema)
2343
+ });
2344
+ if (!validationResult.success) {
2345
+ throw new import_provider13.InvalidPromptError({
2346
+ prompt,
2347
+ message: "The messages must be a ModelMessage[]. If you have passed a UIMessage[], you can use convertToModelMessages to convert them.",
2348
+ cause: validationResult.error
2349
+ });
2350
+ }
2351
+ return {
2352
+ messages,
2353
+ system: prompt.system
2354
+ };
2355
+ }
2356
+
2357
+ // src/prompt/wrap-gateway-error.ts
2358
+ var import_gateway2 = require("@ai-sdk/gateway");
2359
+ var import_provider14 = require("@ai-sdk/provider");
2360
+ function wrapGatewayError(error) {
2361
+ if (import_gateway2.GatewayAuthenticationError.isInstance(error) || import_gateway2.GatewayModelNotFoundError.isInstance(error)) {
2362
+ return new import_provider14.AISDKError({
2363
+ name: "GatewayError",
2364
+ message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
2365
+ cause: error
2366
+ });
2367
+ }
2368
+ return error;
2369
+ }
2370
+
2371
+ // src/telemetry/assemble-operation-name.ts
2372
+ function assembleOperationName({
2373
+ operationId,
2374
+ telemetry
2375
+ }) {
2376
+ return {
2377
+ // standardized operation and resource name:
2378
+ "operation.name": `${operationId}${(telemetry == null ? void 0 : telemetry.functionId) != null ? ` ${telemetry.functionId}` : ""}`,
2379
+ "resource.name": telemetry == null ? void 0 : telemetry.functionId,
2380
+ // detailed, AI SDK specific data:
2381
+ "ai.operationId": operationId,
2382
+ "ai.telemetry.functionId": telemetry == null ? void 0 : telemetry.functionId
2383
+ };
2384
+ }
2385
+
2386
+ // src/telemetry/get-base-telemetry-attributes.ts
2387
+ function getBaseTelemetryAttributes({
2388
+ model,
2389
+ settings,
2390
+ telemetry,
2391
+ headers
2392
+ }) {
2393
+ var _a9;
2394
+ return {
2395
+ "ai.model.provider": model.provider,
2396
+ "ai.model.id": model.modelId,
2397
+ // settings:
2398
+ ...Object.entries(settings).reduce((attributes, [key, value]) => {
2399
+ attributes[`ai.settings.${key}`] = value;
2400
+ return attributes;
2401
+ }, {}),
2402
+ // add metadata as attributes:
2403
+ ...Object.entries((_a9 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a9 : {}).reduce(
2404
+ (attributes, [key, value]) => {
2405
+ attributes[`ai.telemetry.metadata.${key}`] = value;
2406
+ return attributes;
2407
+ },
2408
+ {}
2409
+ ),
2410
+ // request headers
2411
+ ...Object.entries(headers != null ? headers : {}).reduce((attributes, [key, value]) => {
2412
+ if (value !== void 0) {
2413
+ attributes[`ai.request.headers.${key}`] = value;
2414
+ }
2415
+ return attributes;
2416
+ }, {})
2417
+ };
2418
+ }
2419
+
2420
+ // src/telemetry/get-tracer.ts
2421
+ var import_api = require("@opentelemetry/api");
2422
+
2423
+ // src/telemetry/noop-tracer.ts
2424
+ var noopTracer = {
2425
+ startSpan() {
2426
+ return noopSpan;
2427
+ },
2428
+ startActiveSpan(name9, arg1, arg2, arg3) {
2429
+ if (typeof arg1 === "function") {
2430
+ return arg1(noopSpan);
2431
+ }
2432
+ if (typeof arg2 === "function") {
2433
+ return arg2(noopSpan);
2434
+ }
2435
+ if (typeof arg3 === "function") {
2436
+ return arg3(noopSpan);
2437
+ }
2438
+ }
2439
+ };
2440
+ var noopSpan = {
2441
+ spanContext() {
2442
+ return noopSpanContext;
2443
+ },
2444
+ setAttribute() {
2445
+ return this;
2446
+ },
2447
+ setAttributes() {
2448
+ return this;
2449
+ },
2450
+ addEvent() {
2451
+ return this;
2452
+ },
2453
+ addLink() {
2454
+ return this;
2455
+ },
2456
+ addLinks() {
2457
+ return this;
2458
+ },
2459
+ setStatus() {
2460
+ return this;
2461
+ },
2462
+ updateName() {
2463
+ return this;
2464
+ },
2465
+ end() {
2466
+ return this;
2467
+ },
2468
+ isRecording() {
2469
+ return false;
2470
+ },
2471
+ recordException() {
2472
+ return this;
2473
+ }
2474
+ };
2475
+ var noopSpanContext = {
2476
+ traceId: "",
2477
+ spanId: "",
2478
+ traceFlags: 0
2479
+ };
2480
+
2481
+ // src/telemetry/get-tracer.ts
2482
+ function getTracer({
2483
+ isEnabled = false,
2484
+ tracer
2485
+ } = {}) {
2486
+ if (!isEnabled) {
2487
+ return noopTracer;
2488
+ }
2489
+ if (tracer) {
2490
+ return tracer;
2491
+ }
2492
+ return import_api.trace.getTracer("ai");
2493
+ }
2494
+
2495
+ // src/telemetry/record-span.ts
2496
+ var import_api2 = require("@opentelemetry/api");
2497
+ function recordSpan({
2498
+ name: name9,
2499
+ tracer,
2500
+ attributes,
2501
+ fn,
2502
+ endWhenDone = true
2503
+ }) {
2504
+ return tracer.startActiveSpan(name9, { attributes }, async (span) => {
2505
+ try {
2506
+ const result = await fn(span);
2507
+ if (endWhenDone) {
2508
+ span.end();
2509
+ }
2510
+ return result;
2511
+ } catch (error) {
2512
+ try {
2513
+ recordErrorOnSpan(span, error);
2514
+ } finally {
2515
+ span.end();
2516
+ }
2517
+ throw error;
2518
+ }
2519
+ });
2520
+ }
2521
+ function recordErrorOnSpan(span, error) {
2522
+ if (error instanceof Error) {
2523
+ span.recordException({
2524
+ name: error.name,
2525
+ message: error.message,
2526
+ stack: error.stack
2527
+ });
2528
+ span.setStatus({
2529
+ code: import_api2.SpanStatusCode.ERROR,
2530
+ message: error.message
2531
+ });
2532
+ } else {
2533
+ span.setStatus({ code: import_api2.SpanStatusCode.ERROR });
2534
+ }
2535
+ }
2536
+
2537
+ // src/telemetry/select-telemetry-attributes.ts
2538
+ function selectTelemetryAttributes({
2539
+ telemetry,
2540
+ attributes
2541
+ }) {
2542
+ if ((telemetry == null ? void 0 : telemetry.isEnabled) !== true) {
2543
+ return {};
2544
+ }
2545
+ return Object.entries(attributes).reduce((attributes2, [key, value]) => {
2546
+ if (value == null) {
2547
+ return attributes2;
2548
+ }
2549
+ if (typeof value === "object" && "input" in value && typeof value.input === "function") {
2550
+ if ((telemetry == null ? void 0 : telemetry.recordInputs) === false) {
2551
+ return attributes2;
2552
+ }
2553
+ const result = value.input();
2554
+ return result == null ? attributes2 : { ...attributes2, [key]: result };
2555
+ }
2556
+ if (typeof value === "object" && "output" in value && typeof value.output === "function") {
2557
+ if ((telemetry == null ? void 0 : telemetry.recordOutputs) === false) {
2558
+ return attributes2;
2559
+ }
2560
+ const result = value.output();
2561
+ return result == null ? attributes2 : { ...attributes2, [key]: result };
2562
+ }
2563
+ return { ...attributes2, [key]: value };
2564
+ }, {});
2565
+ }
2566
+
2567
+ // src/telemetry/stringify-for-telemetry.ts
2568
+ function stringifyForTelemetry(prompt) {
2569
+ return JSON.stringify(
2570
+ prompt.map((message) => ({
2571
+ ...message,
2572
+ content: typeof message.content === "string" ? message.content : message.content.map(
2573
+ (part) => part.type === "file" ? {
2574
+ ...part,
2575
+ data: part.data instanceof Uint8Array ? convertDataContentToBase64String(part.data) : part.data
2576
+ } : part
2577
+ )
2578
+ }))
2579
+ );
2580
+ }
2581
+
2582
+ // src/types/usage.ts
2583
+ function addLanguageModelUsage(usage1, usage2) {
2584
+ return {
2585
+ inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
2586
+ outputTokens: addTokenCounts(usage1.outputTokens, usage2.outputTokens),
2587
+ totalTokens: addTokenCounts(usage1.totalTokens, usage2.totalTokens),
2588
+ reasoningTokens: addTokenCounts(
2589
+ usage1.reasoningTokens,
2590
+ usage2.reasoningTokens
2591
+ ),
2592
+ cachedInputTokens: addTokenCounts(
2593
+ usage1.cachedInputTokens,
2594
+ usage2.cachedInputTokens
2595
+ )
2596
+ };
2597
+ }
2598
+ function addTokenCounts(tokenCount1, tokenCount2) {
2599
+ return tokenCount1 == null && tokenCount2 == null ? void 0 : (tokenCount1 != null ? tokenCount1 : 0) + (tokenCount2 != null ? tokenCount2 : 0);
2600
+ }
2601
+
2602
+ // src/generate-text/run-tools-transformation.ts
2603
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");
2604
+
2605
+ // src/generate-text/generated-file.ts
2606
+ var import_provider_utils9 = require("@ai-sdk/provider-utils");
2607
+ var DefaultGeneratedFile = class {
2608
+ constructor({
2609
+ data,
2610
+ mediaType
2611
+ }) {
2612
+ const isUint8Array = data instanceof Uint8Array;
2613
+ this.base64Data = isUint8Array ? void 0 : data;
2614
+ this.uint8ArrayData = isUint8Array ? data : void 0;
2615
+ this.mediaType = mediaType;
2616
+ }
2617
+ // lazy conversion with caching to avoid unnecessary conversion overhead:
2618
+ get base64() {
2619
+ if (this.base64Data == null) {
2620
+ this.base64Data = (0, import_provider_utils9.convertUint8ArrayToBase64)(this.uint8ArrayData);
2621
+ }
2622
+ return this.base64Data;
2623
+ }
2624
+ // lazy conversion with caching to avoid unnecessary conversion overhead:
2625
+ get uint8Array() {
2626
+ if (this.uint8ArrayData == null) {
2627
+ this.uint8ArrayData = (0, import_provider_utils9.convertBase64ToUint8Array)(this.base64Data);
2628
+ }
2629
+ return this.uint8ArrayData;
2630
+ }
2631
+ };
2632
+ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
2633
+ constructor(options) {
2634
+ super(options);
2635
+ this.type = "file";
2636
+ }
2637
+ };
2638
+
2639
+ // src/generate-text/parse-tool-call.ts
2640
+ var import_provider_utils10 = require("@ai-sdk/provider-utils");
2641
+ async function parseToolCall({
2642
+ toolCall,
2643
+ tools,
2644
+ repairToolCall,
2645
+ system,
2646
+ messages
2647
+ }) {
2648
+ if (tools == null) {
2649
+ throw new NoSuchToolError({ toolName: toolCall.toolName });
2650
+ }
2651
+ try {
2652
+ return await doParseToolCall({ toolCall, tools });
2653
+ } catch (error) {
2654
+ if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolInputError.isInstance(error))) {
2655
+ throw error;
2656
+ }
2657
+ let repairedToolCall = null;
2658
+ try {
2659
+ repairedToolCall = await repairToolCall({
2660
+ toolCall,
2661
+ tools,
2662
+ inputSchema: ({ toolName }) => {
2663
+ const { inputSchema } = tools[toolName];
2664
+ return (0, import_provider_utils10.asSchema)(inputSchema).jsonSchema;
2665
+ },
2666
+ system,
2667
+ messages,
2668
+ error
2669
+ });
2670
+ } catch (repairError) {
2671
+ throw new ToolCallRepairError({
2672
+ cause: repairError,
2673
+ originalError: error
2674
+ });
2675
+ }
2676
+ if (repairedToolCall == null) {
2677
+ throw error;
2678
+ }
2679
+ return await doParseToolCall({ toolCall: repairedToolCall, tools });
2680
+ }
2681
+ }
2682
+ async function doParseToolCall({
2683
+ toolCall,
2684
+ tools
2685
+ }) {
2686
+ const toolName = toolCall.toolName;
2687
+ const tool = tools[toolName];
2688
+ if (tool == null) {
2689
+ throw new NoSuchToolError({
2690
+ toolName: toolCall.toolName,
2691
+ availableTools: Object.keys(tools)
2692
+ });
2693
+ }
2694
+ const schema = (0, import_provider_utils10.asSchema)(tool.inputSchema);
2695
+ const parseResult = toolCall.input.trim() === "" ? await (0, import_provider_utils10.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils10.safeParseJSON)({ text: toolCall.input, schema });
2696
+ if (parseResult.success === false) {
2697
+ throw new InvalidToolInputError({
2698
+ toolName,
2699
+ toolInput: toolCall.input,
2700
+ cause: parseResult.error
2701
+ });
2702
+ }
2703
+ return {
2704
+ type: "tool-call",
2705
+ toolCallId: toolCall.toolCallId,
2706
+ toolName,
2707
+ input: parseResult.value,
2708
+ providerExecuted: toolCall.providerExecuted,
2709
+ providerMetadata: toolCall.providerMetadata
2710
+ };
2711
+ }
2712
+
2713
+ // src/generate-text/run-tools-transformation.ts
2714
+ function runToolsTransformation({
2715
+ tools,
2716
+ generatorStream,
2717
+ tracer,
2718
+ telemetry,
2719
+ system,
2720
+ messages,
2721
+ abortSignal,
2722
+ repairToolCall
2723
+ }) {
2724
+ let toolResultsStreamController = null;
2725
+ const toolResultsStream = new ReadableStream({
2726
+ start(controller) {
2727
+ toolResultsStreamController = controller;
2728
+ }
2729
+ });
2730
+ const outstandingToolResults = /* @__PURE__ */ new Set();
2731
+ const toolInputs = /* @__PURE__ */ new Map();
2732
+ let canClose = false;
2733
+ let finishChunk = void 0;
2734
+ function attemptClose() {
2735
+ if (canClose && outstandingToolResults.size === 0) {
2736
+ if (finishChunk != null) {
2737
+ toolResultsStreamController.enqueue(finishChunk);
2738
+ }
2739
+ toolResultsStreamController.close();
2740
+ }
2741
+ }
2742
+ const forwardStream = new TransformStream({
2743
+ async transform(chunk, controller) {
2744
+ const chunkType = chunk.type;
2745
+ switch (chunkType) {
2746
+ case "stream-start":
2747
+ case "text-start":
2748
+ case "text-delta":
2749
+ case "text-end":
2750
+ case "reasoning-start":
2751
+ case "reasoning-delta":
2752
+ case "reasoning-end":
2753
+ case "tool-input-start":
2754
+ case "tool-input-delta":
2755
+ case "tool-input-end":
2756
+ case "source":
2757
+ case "response-metadata":
2758
+ case "error":
2759
+ case "raw": {
2760
+ controller.enqueue(chunk);
2761
+ break;
2762
+ }
2763
+ case "file": {
2764
+ controller.enqueue({
2765
+ type: "file",
2766
+ file: new DefaultGeneratedFileWithType({
2767
+ data: chunk.data,
2768
+ mediaType: chunk.mediaType
2769
+ })
2770
+ });
2771
+ break;
2772
+ }
2773
+ case "finish": {
2774
+ finishChunk = {
2775
+ type: "finish",
2776
+ finishReason: chunk.finishReason,
2777
+ usage: chunk.usage,
2778
+ providerMetadata: chunk.providerMetadata
2779
+ };
2780
+ break;
2781
+ }
2782
+ case "tool-call": {
2783
+ try {
2784
+ const toolCall = await parseToolCall({
2785
+ toolCall: chunk,
2786
+ tools,
2787
+ repairToolCall,
2788
+ system,
2789
+ messages
2790
+ });
2791
+ controller.enqueue(toolCall);
2792
+ const tool = tools[toolCall.toolName];
2793
+ toolInputs.set(toolCall.toolCallId, toolCall.input);
2794
+ if (tool.onInputAvailable != null) {
2795
+ await tool.onInputAvailable({
2796
+ input: toolCall.input,
2797
+ toolCallId: toolCall.toolCallId,
2798
+ messages,
2799
+ abortSignal
2800
+ });
2801
+ }
2802
+ if (tool.execute != null && toolCall.providerExecuted !== true) {
2803
+ const toolExecutionId = (0, import_provider_utils11.generateId)();
2804
+ outstandingToolResults.add(toolExecutionId);
2805
+ recordSpan({
2806
+ name: "ai.toolCall",
2807
+ attributes: selectTelemetryAttributes({
2808
+ telemetry,
2809
+ attributes: {
2810
+ ...assembleOperationName({
2811
+ operationId: "ai.toolCall",
2812
+ telemetry
2813
+ }),
2814
+ "ai.toolCall.name": toolCall.toolName,
2815
+ "ai.toolCall.id": toolCall.toolCallId,
2816
+ "ai.toolCall.input": {
2817
+ output: () => JSON.stringify(toolCall.input)
2818
+ }
2819
+ }
2820
+ }),
2821
+ tracer,
2822
+ fn: async (span) => {
2823
+ let output;
2824
+ try {
2825
+ output = await tool.execute(toolCall.input, {
2826
+ toolCallId: toolCall.toolCallId,
2827
+ messages,
2828
+ abortSignal
2829
+ });
2830
+ } catch (error) {
2831
+ recordErrorOnSpan(span, error);
2832
+ toolResultsStreamController.enqueue({
2833
+ ...toolCall,
2834
+ type: "tool-error",
2835
+ error
2836
+ });
2837
+ outstandingToolResults.delete(toolExecutionId);
2838
+ attemptClose();
2839
+ return;
2840
+ }
2841
+ toolResultsStreamController.enqueue({
2842
+ ...toolCall,
2843
+ type: "tool-result",
2844
+ output
2845
+ });
2846
+ outstandingToolResults.delete(toolExecutionId);
2847
+ attemptClose();
2848
+ try {
2849
+ span.setAttributes(
2850
+ selectTelemetryAttributes({
2851
+ telemetry,
2852
+ attributes: {
2853
+ "ai.toolCall.output": {
2854
+ output: () => JSON.stringify(output)
2855
+ }
2856
+ }
2857
+ })
2858
+ );
2859
+ } catch (ignored) {
2860
+ }
2861
+ }
2862
+ });
2863
+ }
2864
+ } catch (error) {
2865
+ toolResultsStreamController.enqueue({ type: "error", error });
2866
+ }
2867
+ break;
2868
+ }
2869
+ case "tool-result": {
2870
+ const toolName = chunk.toolName;
2871
+ if (chunk.isError) {
2872
+ toolResultsStreamController.enqueue({
2873
+ type: "tool-error",
2874
+ toolCallId: chunk.toolCallId,
2875
+ toolName,
2876
+ input: toolInputs.get(chunk.toolCallId),
2877
+ providerExecuted: chunk.providerExecuted,
2878
+ error: chunk.result
2879
+ });
2880
+ } else {
2881
+ controller.enqueue({
2882
+ type: "tool-result",
2883
+ toolCallId: chunk.toolCallId,
2884
+ toolName,
2885
+ input: toolInputs.get(chunk.toolCallId),
2886
+ output: chunk.result,
2887
+ providerExecuted: chunk.providerExecuted
2888
+ });
2889
+ }
2890
+ break;
2891
+ }
2892
+ default: {
2893
+ const _exhaustiveCheck = chunkType;
2894
+ throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
2895
+ }
2896
+ }
2897
+ },
2898
+ flush() {
2899
+ canClose = true;
2900
+ attemptClose();
2901
+ }
2902
+ });
2903
+ return new ReadableStream({
2904
+ async start(controller) {
2905
+ return Promise.all([
2906
+ generatorStream.pipeThrough(forwardStream).pipeTo(
2907
+ new WritableStream({
2908
+ write(chunk) {
2909
+ controller.enqueue(chunk);
2910
+ },
2911
+ close() {
2912
+ }
2913
+ })
2914
+ ),
2915
+ toolResultsStream.pipeTo(
2916
+ new WritableStream({
2917
+ write(chunk) {
2918
+ controller.enqueue(chunk);
2919
+ },
2920
+ close() {
2921
+ controller.close();
2922
+ }
2923
+ })
2924
+ )
2925
+ ]);
2926
+ }
2927
+ });
2928
+ }
2929
+
2930
+ // src/generate-text/step-result.ts
2931
+ var DefaultStepResult = class {
2932
+ constructor({
2933
+ content,
2934
+ finishReason,
2935
+ usage,
2936
+ warnings,
2937
+ request,
2938
+ response,
2939
+ providerMetadata
2940
+ }) {
2941
+ this.content = content;
2942
+ this.finishReason = finishReason;
2943
+ this.usage = usage;
2944
+ this.warnings = warnings;
2945
+ this.request = request;
2946
+ this.response = response;
2947
+ this.providerMetadata = providerMetadata;
2948
+ }
2949
+ get text() {
2950
+ return this.content.filter((part) => part.type === "text").map((part) => part.text).join("");
2951
+ }
2952
+ get reasoning() {
2953
+ return this.content.filter((part) => part.type === "reasoning");
2954
+ }
2955
+ get reasoningText() {
2956
+ return this.reasoning.length === 0 ? void 0 : this.reasoning.map((part) => part.text).join("");
2957
+ }
2958
+ get files() {
2959
+ return this.content.filter((part) => part.type === "file").map((part) => part.file);
2960
+ }
2961
+ get sources() {
2962
+ return this.content.filter((part) => part.type === "source");
2963
+ }
2964
+ get toolCalls() {
2965
+ return this.content.filter((part) => part.type === "tool-call");
2966
+ }
2967
+ get toolResults() {
2968
+ return this.content.filter((part) => part.type === "tool-result");
2969
+ }
2970
+ };
2971
+
2972
+ // src/generate-text/stop-condition.ts
2973
+ function stepCountIs(stepCount) {
2974
+ return ({ steps }) => steps.length === stepCount;
2975
+ }
2976
+ async function isStopConditionMet({
2977
+ stopConditions,
2978
+ steps
2979
+ }) {
2980
+ return (await Promise.all(stopConditions.map((condition) => condition({ steps })))).some((result) => result);
2981
+ }
2982
+
2983
+ // src/prompt/create-tool-model-output.ts
2984
+ var import_provider15 = require("@ai-sdk/provider");
2985
+ function createToolModelOutput({
2986
+ output,
2987
+ tool,
2988
+ errorMode
2989
+ }) {
2990
+ if (errorMode === "text") {
2991
+ return { type: "error-text", value: (0, import_provider15.getErrorMessage)(output) };
2992
+ } else if (errorMode === "json") {
2993
+ return { type: "error-json", value: output };
2994
+ }
2995
+ if (tool == null ? void 0 : tool.toModelOutput) {
2996
+ return tool.toModelOutput(output);
2997
+ }
2998
+ return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: output };
2999
+ }
3000
+
3001
+ // src/generate-text/to-response-messages.ts
3002
+ function toResponseMessages({
3003
+ content: inputContent,
3004
+ tools
3005
+ }) {
3006
+ const responseMessages = [];
3007
+ const content = inputContent.filter((part) => part.type !== "source").filter(
3008
+ (part) => (part.type !== "tool-result" || part.providerExecuted) && (part.type !== "tool-error" || part.providerExecuted)
3009
+ ).filter((part) => part.type !== "text" || part.text.length > 0).map((part) => {
3010
+ switch (part.type) {
3011
+ case "text":
3012
+ return part;
3013
+ case "reasoning":
3014
+ return {
3015
+ type: "reasoning",
3016
+ text: part.text,
3017
+ providerOptions: part.providerMetadata
3018
+ };
3019
+ case "file":
3020
+ return {
3021
+ type: "file",
3022
+ data: part.file.base64,
3023
+ mediaType: part.file.mediaType,
3024
+ providerOptions: part.providerMetadata
3025
+ };
3026
+ case "tool-call":
3027
+ return {
3028
+ type: "tool-call",
3029
+ toolCallId: part.toolCallId,
3030
+ toolName: part.toolName,
3031
+ input: part.input,
3032
+ providerExecuted: part.providerExecuted,
3033
+ providerOptions: part.providerMetadata
3034
+ };
3035
+ case "tool-result":
3036
+ return {
3037
+ type: "tool-result",
3038
+ toolCallId: part.toolCallId,
3039
+ toolName: part.toolName,
3040
+ output: createToolModelOutput({
3041
+ tool: tools == null ? void 0 : tools[part.toolName],
3042
+ output: part.output,
3043
+ errorMode: "none"
3044
+ }),
3045
+ providerExecuted: true,
3046
+ providerOptions: part.providerMetadata
3047
+ };
3048
+ case "tool-error":
3049
+ return {
3050
+ type: "tool-result",
3051
+ toolCallId: part.toolCallId,
3052
+ toolName: part.toolName,
3053
+ output: createToolModelOutput({
3054
+ tool: tools == null ? void 0 : tools[part.toolName],
3055
+ output: part.error,
3056
+ errorMode: "json"
3057
+ }),
3058
+ providerOptions: part.providerMetadata
3059
+ };
3060
+ }
3061
+ });
3062
+ if (content.length > 0) {
3063
+ responseMessages.push({
3064
+ role: "assistant",
3065
+ content
3066
+ });
3067
+ }
3068
+ const toolResultContent = inputContent.filter((part) => part.type === "tool-result" || part.type === "tool-error").filter((part) => !part.providerExecuted).map((toolResult) => ({
3069
+ type: "tool-result",
3070
+ toolCallId: toolResult.toolCallId,
3071
+ toolName: toolResult.toolName,
3072
+ output: createToolModelOutput({
3073
+ tool: tools == null ? void 0 : tools[toolResult.toolName],
3074
+ output: toolResult.type === "tool-result" ? toolResult.output : toolResult.error,
3075
+ errorMode: toolResult.type === "tool-error" ? "text" : "none"
3076
+ })
3077
+ }));
3078
+ if (toolResultContent.length > 0) {
3079
+ responseMessages.push({
3080
+ role: "tool",
3081
+ content: toolResultContent
3082
+ });
3083
+ }
3084
+ return responseMessages;
3085
+ }
3086
+
3087
+ // src/generate-text/stream-text.ts
3088
+ var originalGenerateId = (0, import_provider_utils12.createIdGenerator)({
3089
+ prefix: "aitxt",
3090
+ size: 24
3091
+ });
3092
+ function streamText({
3093
+ model,
3094
+ tools,
3095
+ toolChoice,
3096
+ system,
3097
+ prompt,
3098
+ messages,
3099
+ maxRetries,
3100
+ abortSignal,
3101
+ headers,
3102
+ stopWhen = stepCountIs(1),
3103
+ experimental_output: output,
3104
+ experimental_telemetry: telemetry,
3105
+ prepareStep,
3106
+ providerOptions,
3107
+ experimental_activeTools,
3108
+ activeTools = experimental_activeTools,
3109
+ experimental_repairToolCall: repairToolCall,
3110
+ experimental_transform: transform,
3111
+ includeRawChunks = false,
3112
+ onChunk,
3113
+ onError = ({ error }) => {
3114
+ console.error(error);
3115
+ },
3116
+ onFinish,
3117
+ onStepFinish,
3118
+ _internal: {
3119
+ now: now2 = now,
3120
+ generateId: generateId2 = originalGenerateId,
3121
+ currentDate = () => /* @__PURE__ */ new Date()
3122
+ } = {},
3123
+ ...settings
3124
+ }) {
3125
+ return new DefaultStreamTextResult({
3126
+ model: resolveLanguageModel(model),
3127
+ telemetry,
3128
+ headers,
3129
+ settings,
3130
+ maxRetries,
3131
+ abortSignal,
3132
+ system,
3133
+ prompt,
3134
+ messages,
3135
+ tools,
3136
+ toolChoice,
3137
+ transforms: asArray(transform),
3138
+ activeTools,
3139
+ repairToolCall,
3140
+ stopConditions: asArray(stopWhen),
3141
+ output,
3142
+ providerOptions,
3143
+ prepareStep,
3144
+ includeRawChunks,
3145
+ onChunk,
3146
+ onError,
3147
+ onFinish,
3148
+ onStepFinish,
3149
+ now: now2,
3150
+ currentDate,
3151
+ generateId: generateId2
3152
+ });
3153
+ }
3154
+ function createOutputTransformStream(output) {
3155
+ if (!output) {
3156
+ return new TransformStream({
3157
+ transform(chunk, controller) {
3158
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3159
+ }
3160
+ });
3161
+ }
3162
+ let firstTextChunkId = void 0;
3163
+ let text = "";
3164
+ let textChunk = "";
3165
+ let lastPublishedJson = "";
3166
+ function publishTextChunk({
3167
+ controller,
3168
+ partialOutput = void 0
3169
+ }) {
3170
+ controller.enqueue({
3171
+ part: {
3172
+ type: "text",
3173
+ id: firstTextChunkId,
3174
+ text: textChunk
3175
+ },
3176
+ partialOutput
3177
+ });
3178
+ textChunk = "";
3179
+ }
3180
+ return new TransformStream({
3181
+ async transform(chunk, controller) {
3182
+ if (chunk.type === "finish-step" && textChunk.length > 0) {
3183
+ publishTextChunk({ controller });
3184
+ }
3185
+ if (chunk.type !== "text" && chunk.type !== "text-start" && chunk.type !== "text-end") {
3186
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3187
+ return;
3188
+ }
3189
+ if (firstTextChunkId == null) {
3190
+ firstTextChunkId = chunk.id;
3191
+ } else if (chunk.id !== firstTextChunkId) {
3192
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3193
+ return;
3194
+ }
3195
+ if (chunk.type === "text-start") {
3196
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3197
+ return;
3198
+ }
3199
+ if (chunk.type === "text-end") {
3200
+ if (textChunk.length > 0) {
3201
+ publishTextChunk({ controller });
3202
+ }
3203
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3204
+ return;
3205
+ }
3206
+ text += chunk.text;
3207
+ textChunk += chunk.text;
3208
+ const result = await output.parsePartial({ text });
3209
+ if (result != null) {
3210
+ const currentJson = JSON.stringify(result.partial);
3211
+ if (currentJson !== lastPublishedJson) {
3212
+ publishTextChunk({ controller, partialOutput: result.partial });
3213
+ lastPublishedJson = currentJson;
3214
+ }
3215
+ }
3216
+ }
3217
+ });
3218
+ }
3219
+ var DefaultStreamTextResult = class {
3220
+ constructor({
3221
+ model,
3222
+ telemetry,
3223
+ headers,
3224
+ settings,
3225
+ maxRetries: maxRetriesArg,
3226
+ abortSignal,
3227
+ system,
3228
+ prompt,
3229
+ messages,
3230
+ tools,
3231
+ toolChoice,
3232
+ transforms,
3233
+ activeTools,
3234
+ repairToolCall,
3235
+ stopConditions,
3236
+ output,
3237
+ providerOptions,
3238
+ prepareStep,
3239
+ includeRawChunks,
3240
+ now: now2,
3241
+ currentDate,
3242
+ generateId: generateId2,
3243
+ onChunk,
3244
+ onError,
3245
+ onFinish,
3246
+ onStepFinish
3247
+ }) {
3248
+ this._totalUsage = new DelayedPromise();
3249
+ this._finishReason = new DelayedPromise();
3250
+ this._steps = new DelayedPromise();
3251
+ this.output = output;
3252
+ this.includeRawChunks = includeRawChunks;
3253
+ this.generateId = generateId2;
3254
+ let stepFinish;
3255
+ let recordedContent = [];
3256
+ const recordedResponseMessages = [];
3257
+ let recordedFinishReason = void 0;
3258
+ let recordedTotalUsage = void 0;
3259
+ let recordedRequest = {};
3260
+ let recordedWarnings = [];
3261
+ const recordedSteps = [];
3262
+ let rootSpan;
3263
+ let activeTextContent = {};
3264
+ let activeReasoningContent = {};
3265
+ const eventProcessor = new TransformStream({
3266
+ async transform(chunk, controller) {
3267
+ var _a9, _b;
3268
+ controller.enqueue(chunk);
3269
+ const { part } = chunk;
3270
+ if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
3271
+ await (onChunk == null ? void 0 : onChunk({ chunk: part }));
3272
+ }
3273
+ if (part.type === "error") {
3274
+ await onError({ error: wrapGatewayError(part.error) });
3275
+ }
3276
+ if (part.type === "text-start") {
3277
+ activeTextContent[part.id] = {
3278
+ type: "text",
3279
+ text: "",
3280
+ providerMetadata: part.providerMetadata
3281
+ };
3282
+ recordedContent.push(activeTextContent[part.id]);
3283
+ }
3284
+ if (part.type === "text") {
3285
+ const activeText = activeTextContent[part.id];
3286
+ if (activeText == null) {
3287
+ controller.enqueue({
3288
+ part: {
3289
+ type: "error",
3290
+ error: `text part ${part.id} not found`
3291
+ },
3292
+ partialOutput: void 0
3293
+ });
3294
+ return;
3295
+ }
3296
+ activeText.text += part.text;
3297
+ activeText.providerMetadata = part.providerMetadata;
3298
+ }
3299
+ if (part.type === "text-end") {
3300
+ delete activeTextContent[part.id];
3301
+ }
3302
+ if (part.type === "reasoning-start") {
3303
+ activeReasoningContent[part.id] = {
3304
+ type: "reasoning",
3305
+ text: "",
3306
+ providerMetadata: part.providerMetadata
3307
+ };
3308
+ recordedContent.push(activeReasoningContent[part.id]);
3309
+ }
3310
+ if (part.type === "reasoning") {
3311
+ const activeReasoning = activeReasoningContent[part.id];
3312
+ if (activeReasoning == null) {
3313
+ controller.enqueue({
3314
+ part: {
3315
+ type: "error",
3316
+ error: `reasoning part ${part.id} not found`
3317
+ },
3318
+ partialOutput: void 0
3319
+ });
3320
+ return;
3321
+ }
3322
+ activeReasoning.text += part.text;
3323
+ activeReasoning.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : activeReasoning.providerMetadata;
3324
+ }
3325
+ if (part.type === "reasoning-end") {
3326
+ const activeReasoning = activeReasoningContent[part.id];
3327
+ if (activeReasoning == null) {
3328
+ controller.enqueue({
3329
+ part: {
3330
+ type: "error",
3331
+ error: `reasoning part ${part.id} not found`
3332
+ },
3333
+ partialOutput: void 0
3334
+ });
3335
+ return;
3336
+ }
3337
+ activeReasoning.providerMetadata = (_b = part.providerMetadata) != null ? _b : activeReasoning.providerMetadata;
3338
+ delete activeReasoningContent[part.id];
3339
+ }
3340
+ if (part.type === "file") {
3341
+ recordedContent.push({ type: "file", file: part.file });
3342
+ }
3343
+ if (part.type === "source") {
3344
+ recordedContent.push(part);
3345
+ }
3346
+ if (part.type === "tool-call") {
3347
+ recordedContent.push(part);
3348
+ }
3349
+ if (part.type === "tool-result") {
3350
+ recordedContent.push(part);
3351
+ }
3352
+ if (part.type === "tool-error") {
3353
+ recordedContent.push(part);
3354
+ }
3355
+ if (part.type === "start-step") {
3356
+ recordedRequest = part.request;
3357
+ recordedWarnings = part.warnings;
3358
+ }
3359
+ if (part.type === "finish-step") {
3360
+ const stepMessages = toResponseMessages({
3361
+ content: recordedContent,
3362
+ tools
3363
+ });
3364
+ const currentStepResult = new DefaultStepResult({
3365
+ content: recordedContent,
3366
+ finishReason: part.finishReason,
3367
+ usage: part.usage,
3368
+ warnings: recordedWarnings,
3369
+ request: recordedRequest,
3370
+ response: {
3371
+ ...part.response,
3372
+ messages: [...recordedResponseMessages, ...stepMessages]
3373
+ },
3374
+ providerMetadata: part.providerMetadata
3375
+ });
3376
+ await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
3377
+ recordedSteps.push(currentStepResult);
3378
+ recordedContent = [];
3379
+ activeReasoningContent = {};
3380
+ activeTextContent = {};
3381
+ recordedResponseMessages.push(...stepMessages);
3382
+ stepFinish.resolve();
3383
+ }
3384
+ if (part.type === "finish") {
3385
+ recordedTotalUsage = part.totalUsage;
3386
+ recordedFinishReason = part.finishReason;
3387
+ }
3388
+ },
3389
+ async flush(controller) {
3390
+ try {
3391
+ if (recordedSteps.length === 0) {
3392
+ return;
3393
+ }
3394
+ const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
3395
+ const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : {
3396
+ inputTokens: void 0,
3397
+ outputTokens: void 0,
3398
+ totalTokens: void 0
3399
+ };
3400
+ self._finishReason.resolve(finishReason);
3401
+ self._totalUsage.resolve(totalUsage);
3402
+ self._steps.resolve(recordedSteps);
3403
+ const finalStep = recordedSteps[recordedSteps.length - 1];
3404
+ await (onFinish == null ? void 0 : onFinish({
3405
+ finishReason,
3406
+ totalUsage,
3407
+ usage: finalStep.usage,
3408
+ content: finalStep.content,
3409
+ text: finalStep.text,
3410
+ reasoningText: finalStep.reasoningText,
3411
+ reasoning: finalStep.reasoning,
3412
+ files: finalStep.files,
3413
+ sources: finalStep.sources,
3414
+ toolCalls: finalStep.toolCalls,
3415
+ toolResults: finalStep.toolResults,
3416
+ request: finalStep.request,
3417
+ response: finalStep.response,
3418
+ warnings: finalStep.warnings,
3419
+ providerMetadata: finalStep.providerMetadata,
3420
+ steps: recordedSteps
3421
+ }));
3422
+ rootSpan.setAttributes(
3423
+ selectTelemetryAttributes({
3424
+ telemetry,
3425
+ attributes: {
3426
+ "ai.response.finishReason": finishReason,
3427
+ "ai.response.text": { output: () => finalStep.text },
3428
+ "ai.response.toolCalls": {
3429
+ output: () => {
3430
+ var _a9;
3431
+ return ((_a9 = finalStep.toolCalls) == null ? void 0 : _a9.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
3432
+ }
3433
+ },
3434
+ "ai.response.providerMetadata": JSON.stringify(
3435
+ finalStep.providerMetadata
3436
+ ),
3437
+ "ai.usage.inputTokens": totalUsage.inputTokens,
3438
+ "ai.usage.outputTokens": totalUsage.outputTokens,
3439
+ "ai.usage.totalTokens": totalUsage.totalTokens,
3440
+ "ai.usage.reasoningTokens": totalUsage.reasoningTokens,
3441
+ "ai.usage.cachedInputTokens": totalUsage.cachedInputTokens
3442
+ }
3443
+ })
3444
+ );
3445
+ } catch (error) {
3446
+ controller.error(error);
3447
+ } finally {
3448
+ rootSpan.end();
3449
+ }
3450
+ }
3451
+ });
3452
+ const stitchableStream = createStitchableStream();
3453
+ this.addStream = stitchableStream.addStream;
3454
+ this.closeStream = stitchableStream.close;
3455
+ let stream = stitchableStream.stream;
3456
+ stream = stream.pipeThrough(
3457
+ new TransformStream({
3458
+ start(controller) {
3459
+ controller.enqueue({ type: "start" });
3460
+ }
3461
+ })
3462
+ );
3463
+ for (const transform of transforms) {
3464
+ stream = stream.pipeThrough(
3465
+ transform({
3466
+ tools,
3467
+ stopStream() {
3468
+ stitchableStream.terminate();
3469
+ }
3470
+ })
3471
+ );
3472
+ }
3473
+ this.baseStream = stream.pipeThrough(createOutputTransformStream(output)).pipeThrough(eventProcessor);
3474
+ const { maxRetries, retry } = prepareRetries({
3475
+ maxRetries: maxRetriesArg
3476
+ });
3477
+ const tracer = getTracer(telemetry);
3478
+ const callSettings = prepareCallSettings(settings);
3479
+ const baseTelemetryAttributes = getBaseTelemetryAttributes({
3480
+ model,
3481
+ telemetry,
3482
+ headers,
3483
+ settings: { ...callSettings, maxRetries }
3484
+ });
3485
+ const self = this;
3486
+ recordSpan({
3487
+ name: "ai.streamText",
3488
+ attributes: selectTelemetryAttributes({
3489
+ telemetry,
3490
+ attributes: {
3491
+ ...assembleOperationName({ operationId: "ai.streamText", telemetry }),
3492
+ ...baseTelemetryAttributes,
3493
+ // specific settings that only make sense on the outer level:
3494
+ "ai.prompt": {
3495
+ input: () => JSON.stringify({ system, prompt, messages })
3496
+ }
3497
+ }
3498
+ }),
3499
+ tracer,
3500
+ endWhenDone: false,
3501
+ fn: async (rootSpanArg) => {
3502
+ rootSpan = rootSpanArg;
3503
+ async function streamStep({
3504
+ currentStep,
3505
+ responseMessages,
3506
+ usage
3507
+ }) {
3508
+ var _a9, _b, _c, _d, _e;
3509
+ const includeRawChunks2 = self.includeRawChunks;
3510
+ stepFinish = new DelayedPromise();
3511
+ const initialPrompt = await standardizePrompt({
3512
+ system,
3513
+ prompt,
3514
+ messages
3515
+ });
3516
+ const stepInputMessages = [
3517
+ ...initialPrompt.messages,
3518
+ ...responseMessages
3519
+ ];
3520
+ const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
3521
+ model,
3522
+ steps: recordedSteps,
3523
+ stepNumber: recordedSteps.length,
3524
+ messages: stepInputMessages
3525
+ }));
3526
+ const promptMessages = await convertToLanguageModelPrompt({
3527
+ prompt: {
3528
+ system: (_a9 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a9 : initialPrompt.system,
3529
+ messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
3530
+ },
3531
+ supportedUrls: await model.supportedUrls
3532
+ });
3533
+ const stepModel = resolveLanguageModel(
3534
+ (_c = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _c : model
3535
+ );
3536
+ const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
3537
+ tools,
3538
+ toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
3539
+ activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
3540
+ });
3541
+ const {
3542
+ result: { stream: stream2, response, request },
3543
+ doStreamSpan,
3544
+ startTimestampMs
3545
+ } = await retry(
3546
+ () => recordSpan({
3547
+ name: "ai.streamText.doStream",
3548
+ attributes: selectTelemetryAttributes({
3549
+ telemetry,
3550
+ attributes: {
3551
+ ...assembleOperationName({
3552
+ operationId: "ai.streamText.doStream",
3553
+ telemetry
3554
+ }),
3555
+ ...baseTelemetryAttributes,
3556
+ // model:
3557
+ "ai.model.provider": stepModel.provider,
3558
+ "ai.model.id": stepModel.modelId,
3559
+ // prompt:
3560
+ "ai.prompt.messages": {
3561
+ input: () => stringifyForTelemetry(promptMessages)
3562
+ },
3563
+ "ai.prompt.tools": {
3564
+ // convert the language model level tools:
3565
+ input: () => stepTools == null ? void 0 : stepTools.map((tool) => JSON.stringify(tool))
3566
+ },
3567
+ "ai.prompt.toolChoice": {
3568
+ input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
3569
+ },
3570
+ // standardized gen-ai llm span attributes:
3571
+ "gen_ai.system": stepModel.provider,
3572
+ "gen_ai.request.model": stepModel.modelId,
3573
+ "gen_ai.request.frequency_penalty": callSettings.frequencyPenalty,
3574
+ "gen_ai.request.max_tokens": callSettings.maxOutputTokens,
3575
+ "gen_ai.request.presence_penalty": callSettings.presencePenalty,
3576
+ "gen_ai.request.stop_sequences": callSettings.stopSequences,
3577
+ "gen_ai.request.temperature": callSettings.temperature,
3578
+ "gen_ai.request.top_k": callSettings.topK,
3579
+ "gen_ai.request.top_p": callSettings.topP
3580
+ }
3581
+ }),
3582
+ tracer,
3583
+ endWhenDone: false,
3584
+ fn: async (doStreamSpan2) => {
3585
+ return {
3586
+ startTimestampMs: now2(),
3587
+ // get before the call
3588
+ doStreamSpan: doStreamSpan2,
3589
+ result: await stepModel.doStream({
3590
+ ...callSettings,
3591
+ tools: stepTools,
3592
+ toolChoice: stepToolChoice,
3593
+ responseFormat: output == null ? void 0 : output.responseFormat,
3594
+ prompt: promptMessages,
3595
+ providerOptions,
3596
+ abortSignal,
3597
+ headers,
3598
+ includeRawChunks: includeRawChunks2
3599
+ })
3600
+ };
3601
+ }
3602
+ })
3603
+ );
3604
+ const streamWithToolResults = runToolsTransformation({
3605
+ tools,
3606
+ generatorStream: stream2,
3607
+ tracer,
3608
+ telemetry,
3609
+ system,
3610
+ messages: stepInputMessages,
3611
+ repairToolCall,
3612
+ abortSignal
3613
+ });
3614
+ const stepRequest = request != null ? request : {};
3615
+ const stepToolCalls = [];
3616
+ const stepToolOutputs = [];
3617
+ let warnings;
3618
+ const activeToolCallToolNames = {};
3619
+ let stepFinishReason = "unknown";
3620
+ let stepUsage = {
3621
+ inputTokens: void 0,
3622
+ outputTokens: void 0,
3623
+ totalTokens: void 0
3624
+ };
3625
+ let stepProviderMetadata;
3626
+ let stepFirstChunk = true;
3627
+ let stepResponse = {
3628
+ id: generateId2(),
3629
+ timestamp: currentDate(),
3630
+ modelId: model.modelId
3631
+ };
3632
+ let activeText = "";
3633
+ self.addStream(
3634
+ streamWithToolResults.pipeThrough(
3635
+ new TransformStream({
3636
+ async transform(chunk, controller) {
3637
+ var _a10, _b2, _c2, _d2;
3638
+ if (chunk.type === "stream-start") {
3639
+ warnings = chunk.warnings;
3640
+ return;
3641
+ }
3642
+ if (stepFirstChunk) {
3643
+ const msToFirstChunk = now2() - startTimestampMs;
3644
+ stepFirstChunk = false;
3645
+ doStreamSpan.addEvent("ai.stream.firstChunk", {
3646
+ "ai.response.msToFirstChunk": msToFirstChunk
3647
+ });
3648
+ doStreamSpan.setAttributes({
3649
+ "ai.response.msToFirstChunk": msToFirstChunk
3650
+ });
3651
+ controller.enqueue({
3652
+ type: "start-step",
3653
+ request: stepRequest,
3654
+ warnings: warnings != null ? warnings : []
3655
+ });
3656
+ }
3657
+ const chunkType = chunk.type;
3658
+ switch (chunkType) {
3659
+ case "text-start":
3660
+ case "text-end": {
3661
+ controller.enqueue(chunk);
3662
+ break;
3663
+ }
3664
+ case "text-delta": {
3665
+ if (chunk.delta.length > 0) {
3666
+ controller.enqueue({
3667
+ type: "text",
3668
+ id: chunk.id,
3669
+ text: chunk.delta,
3670
+ providerMetadata: chunk.providerMetadata
3671
+ });
3672
+ activeText += chunk.delta;
3673
+ }
3674
+ break;
3675
+ }
3676
+ case "reasoning-start":
3677
+ case "reasoning-end": {
3678
+ controller.enqueue(chunk);
3679
+ break;
3680
+ }
3681
+ case "reasoning-delta": {
3682
+ controller.enqueue({
3683
+ type: "reasoning",
3684
+ id: chunk.id,
3685
+ text: chunk.delta,
3686
+ providerMetadata: chunk.providerMetadata
3687
+ });
3688
+ break;
3689
+ }
3690
+ case "tool-call": {
3691
+ controller.enqueue(chunk);
3692
+ stepToolCalls.push(chunk);
3693
+ break;
3694
+ }
3695
+ case "tool-result": {
3696
+ controller.enqueue(chunk);
3697
+ stepToolOutputs.push(chunk);
3698
+ break;
3699
+ }
3700
+ case "tool-error": {
3701
+ controller.enqueue(chunk);
3702
+ stepToolOutputs.push(chunk);
3703
+ break;
3704
+ }
3705
+ case "response-metadata": {
3706
+ stepResponse = {
3707
+ id: (_a10 = chunk.id) != null ? _a10 : stepResponse.id,
3708
+ timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
3709
+ modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
3710
+ };
3711
+ break;
3712
+ }
3713
+ case "finish": {
3714
+ stepUsage = chunk.usage;
3715
+ stepFinishReason = chunk.finishReason;
3716
+ stepProviderMetadata = chunk.providerMetadata;
3717
+ const msToFinish = now2() - startTimestampMs;
3718
+ doStreamSpan.addEvent("ai.stream.finish");
3719
+ doStreamSpan.setAttributes({
3720
+ "ai.response.msToFinish": msToFinish,
3721
+ "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
3722
+ });
3723
+ break;
3724
+ }
3725
+ case "file": {
3726
+ controller.enqueue(chunk);
3727
+ break;
3728
+ }
3729
+ case "source": {
3730
+ controller.enqueue(chunk);
3731
+ break;
3732
+ }
3733
+ case "tool-input-start": {
3734
+ activeToolCallToolNames[chunk.id] = chunk.toolName;
3735
+ const tool = tools == null ? void 0 : tools[chunk.toolName];
3736
+ if ((tool == null ? void 0 : tool.onInputStart) != null) {
3737
+ await tool.onInputStart({
3738
+ toolCallId: chunk.id,
3739
+ messages: stepInputMessages,
3740
+ abortSignal
3741
+ });
3742
+ }
3743
+ controller.enqueue(chunk);
3744
+ break;
3745
+ }
3746
+ case "tool-input-end": {
3747
+ delete activeToolCallToolNames[chunk.id];
3748
+ controller.enqueue(chunk);
3749
+ break;
3750
+ }
3751
+ case "tool-input-delta": {
3752
+ const toolName = activeToolCallToolNames[chunk.id];
3753
+ const tool = tools == null ? void 0 : tools[toolName];
3754
+ if ((tool == null ? void 0 : tool.onInputDelta) != null) {
3755
+ await tool.onInputDelta({
3756
+ inputTextDelta: chunk.delta,
3757
+ toolCallId: chunk.id,
3758
+ messages: stepInputMessages,
3759
+ abortSignal
3760
+ });
3761
+ }
3762
+ controller.enqueue(chunk);
3763
+ break;
3764
+ }
3765
+ case "error": {
3766
+ controller.enqueue(chunk);
3767
+ stepFinishReason = "error";
3768
+ break;
3769
+ }
3770
+ case "raw": {
3771
+ if (includeRawChunks2) {
3772
+ controller.enqueue(chunk);
3773
+ }
3774
+ break;
3775
+ }
3776
+ default: {
3777
+ const exhaustiveCheck = chunkType;
3778
+ throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
3779
+ }
3780
+ }
3781
+ },
3782
+ // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
3783
+ async flush(controller) {
3784
+ const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
3785
+ try {
3786
+ doStreamSpan.setAttributes(
3787
+ selectTelemetryAttributes({
3788
+ telemetry,
3789
+ attributes: {
3790
+ "ai.response.finishReason": stepFinishReason,
3791
+ "ai.response.text": {
3792
+ output: () => activeText
3793
+ },
3794
+ "ai.response.toolCalls": {
3795
+ output: () => stepToolCallsJson
3796
+ },
3797
+ "ai.response.id": stepResponse.id,
3798
+ "ai.response.model": stepResponse.modelId,
3799
+ "ai.response.timestamp": stepResponse.timestamp.toISOString(),
3800
+ "ai.response.providerMetadata": JSON.stringify(stepProviderMetadata),
3801
+ "ai.usage.inputTokens": stepUsage.inputTokens,
3802
+ "ai.usage.outputTokens": stepUsage.outputTokens,
3803
+ "ai.usage.totalTokens": stepUsage.totalTokens,
3804
+ "ai.usage.reasoningTokens": stepUsage.reasoningTokens,
3805
+ "ai.usage.cachedInputTokens": stepUsage.cachedInputTokens,
3806
+ // standardized gen-ai llm span attributes:
3807
+ "gen_ai.response.finish_reasons": [stepFinishReason],
3808
+ "gen_ai.response.id": stepResponse.id,
3809
+ "gen_ai.response.model": stepResponse.modelId,
3810
+ "gen_ai.usage.input_tokens": stepUsage.inputTokens,
3811
+ "gen_ai.usage.output_tokens": stepUsage.outputTokens
3812
+ }
3813
+ })
3814
+ );
3815
+ } catch (error) {
3816
+ } finally {
3817
+ doStreamSpan.end();
3818
+ }
3819
+ controller.enqueue({
3820
+ type: "finish-step",
3821
+ finishReason: stepFinishReason,
3822
+ usage: stepUsage,
3823
+ providerMetadata: stepProviderMetadata,
3824
+ response: {
3825
+ ...stepResponse,
3826
+ headers: response == null ? void 0 : response.headers
3827
+ }
3828
+ });
3829
+ const combinedUsage = addLanguageModelUsage(usage, stepUsage);
3830
+ await stepFinish.promise;
3831
+ const clientToolCalls = stepToolCalls.filter(
3832
+ (toolCall) => toolCall.providerExecuted !== true
3833
+ );
3834
+ const clientToolOutputs = stepToolOutputs.filter(
3835
+ (toolOutput) => toolOutput.providerExecuted !== true
3836
+ );
3837
+ if (clientToolCalls.length > 0 && // all current tool calls have outputs (incl. execution errors):
3838
+ clientToolOutputs.length === clientToolCalls.length && // continue until a stop condition is met:
3839
+ !await isStopConditionMet({
3840
+ stopConditions,
3841
+ steps: recordedSteps
3842
+ })) {
3843
+ responseMessages.push(
3844
+ ...toResponseMessages({
3845
+ content: (
3846
+ // use transformed content to create the messages for the next step:
3847
+ recordedSteps[recordedSteps.length - 1].content
3848
+ ),
3849
+ tools
3850
+ })
3851
+ );
3852
+ try {
3853
+ await streamStep({
3854
+ currentStep: currentStep + 1,
3855
+ responseMessages,
3856
+ usage: combinedUsage
3857
+ });
3858
+ } catch (error) {
3859
+ controller.enqueue({
3860
+ type: "error",
3861
+ error
3862
+ });
3863
+ self.closeStream();
3864
+ }
3865
+ } else {
3866
+ controller.enqueue({
3867
+ type: "finish",
3868
+ finishReason: stepFinishReason,
3869
+ totalUsage: combinedUsage
3870
+ });
3871
+ self.closeStream();
3872
+ }
3873
+ }
3874
+ })
3875
+ )
3876
+ );
3877
+ }
3878
+ await streamStep({
3879
+ currentStep: 0,
3880
+ responseMessages: [],
3881
+ usage: {
3882
+ inputTokens: void 0,
3883
+ outputTokens: void 0,
3884
+ totalTokens: void 0
3885
+ }
3886
+ });
3887
+ }
3888
+ }).catch((error) => {
3889
+ self.addStream(
3890
+ new ReadableStream({
3891
+ start(controller) {
3892
+ controller.enqueue({ type: "error", error });
3893
+ controller.close();
3894
+ }
3895
+ })
3896
+ );
3897
+ self.closeStream();
3898
+ });
3899
+ }
3900
+ get steps() {
3901
+ return this._steps.promise;
3902
+ }
3903
+ get finalStep() {
3904
+ return this.steps.then((steps) => steps[steps.length - 1]);
3905
+ }
3906
+ get content() {
3907
+ return this.finalStep.then((step) => step.content);
3908
+ }
3909
+ get warnings() {
3910
+ return this.finalStep.then((step) => step.warnings);
3911
+ }
3912
+ get providerMetadata() {
3913
+ return this.finalStep.then((step) => step.providerMetadata);
3914
+ }
3915
+ get text() {
3916
+ return this.finalStep.then((step) => step.text);
3917
+ }
3918
+ get reasoningText() {
3919
+ return this.finalStep.then((step) => step.reasoningText);
3920
+ }
3921
+ get reasoning() {
3922
+ return this.finalStep.then((step) => step.reasoning);
3923
+ }
3924
+ get sources() {
3925
+ return this.finalStep.then((step) => step.sources);
3926
+ }
3927
+ get files() {
3928
+ return this.finalStep.then((step) => step.files);
3929
+ }
3930
+ get toolCalls() {
3931
+ return this.finalStep.then((step) => step.toolCalls);
3932
+ }
3933
+ get toolResults() {
3934
+ return this.finalStep.then((step) => step.toolResults);
3935
+ }
3936
+ get usage() {
3937
+ return this.finalStep.then((step) => step.usage);
3938
+ }
3939
+ get request() {
3940
+ return this.finalStep.then((step) => step.request);
3941
+ }
3942
+ get response() {
3943
+ return this.finalStep.then((step) => step.response);
3944
+ }
3945
+ get totalUsage() {
3946
+ return this._totalUsage.promise;
3947
+ }
3948
+ get finishReason() {
3949
+ return this._finishReason.promise;
3950
+ }
3951
+ /**
3952
+ Split out a new stream from the original stream.
3953
+ The original stream is replaced to allow for further splitting,
3954
+ since we do not know how many times the stream will be split.
3955
+
3956
+ Note: this leads to buffering the stream content on the server.
3957
+ However, the LLM results are expected to be small enough to not cause issues.
3958
+ */
3959
+ teeStream() {
3960
+ const [stream1, stream2] = this.baseStream.tee();
3961
+ this.baseStream = stream2;
3962
+ return stream1;
3963
+ }
3964
+ get textStream() {
3965
+ return createAsyncIterableStream(
3966
+ this.teeStream().pipeThrough(
3967
+ new TransformStream({
3968
+ transform({ part }, controller) {
3969
+ if (part.type === "text") {
3970
+ controller.enqueue(part.text);
3971
+ }
3972
+ }
3973
+ })
3974
+ )
3975
+ );
3976
+ }
3977
+ get fullStream() {
3978
+ return createAsyncIterableStream(
3979
+ this.teeStream().pipeThrough(
3980
+ new TransformStream({
3981
+ transform({ part }, controller) {
3982
+ controller.enqueue(part);
3983
+ }
3984
+ })
3985
+ )
3986
+ );
3987
+ }
3988
+ async consumeStream(options) {
3989
+ var _a9;
3990
+ try {
3991
+ await consumeStream({
3992
+ stream: this.fullStream,
3993
+ onError: options == null ? void 0 : options.onError
3994
+ });
3995
+ } catch (error) {
3996
+ (_a9 = options == null ? void 0 : options.onError) == null ? void 0 : _a9.call(options, error);
3997
+ }
3998
+ }
3999
+ get experimental_partialOutputStream() {
4000
+ if (this.output == null) {
4001
+ throw new NoOutputSpecifiedError();
4002
+ }
4003
+ return createAsyncIterableStream(
4004
+ this.teeStream().pipeThrough(
4005
+ new TransformStream({
4006
+ transform({ partialOutput }, controller) {
4007
+ if (partialOutput != null) {
4008
+ controller.enqueue(partialOutput);
4009
+ }
4010
+ }
4011
+ })
4012
+ )
4013
+ );
4014
+ }
4015
+ toUIMessageStream({
4016
+ originalMessages,
4017
+ generateMessageId,
4018
+ onFinish,
4019
+ messageMetadata,
4020
+ sendReasoning = true,
4021
+ sendSources = false,
4022
+ sendStart = true,
4023
+ sendFinish = true,
4024
+ onError = import_provider16.getErrorMessage
4025
+ } = {}) {
4026
+ const responseMessageId = getResponseUIMessageId({
4027
+ originalMessages,
4028
+ responseMessageId: this.generateId
4029
+ });
4030
+ const baseStream = this.fullStream.pipeThrough(
4031
+ new TransformStream({
4032
+ transform: async (part, controller) => {
4033
+ const messageMetadataValue = messageMetadata == null ? void 0 : messageMetadata({ part });
4034
+ const partType = part.type;
4035
+ switch (partType) {
4036
+ case "text-start": {
4037
+ controller.enqueue({ type: "text-start", id: part.id });
4038
+ break;
4039
+ }
4040
+ case "text": {
4041
+ controller.enqueue({
4042
+ type: "text-delta",
4043
+ id: part.id,
4044
+ delta: part.text
4045
+ });
4046
+ break;
4047
+ }
4048
+ case "text-end": {
4049
+ controller.enqueue({ type: "text-end", id: part.id });
4050
+ break;
4051
+ }
4052
+ case "reasoning-start": {
4053
+ controller.enqueue({
4054
+ type: "reasoning-start",
4055
+ id: part.id,
4056
+ providerMetadata: part.providerMetadata
4057
+ });
4058
+ break;
4059
+ }
4060
+ case "reasoning": {
4061
+ if (sendReasoning) {
4062
+ controller.enqueue({
4063
+ type: "reasoning-delta",
4064
+ id: part.id,
4065
+ delta: part.text,
4066
+ providerMetadata: part.providerMetadata
4067
+ });
4068
+ }
4069
+ break;
4070
+ }
4071
+ case "reasoning-end": {
4072
+ controller.enqueue({
4073
+ type: "reasoning-end",
4074
+ id: part.id,
4075
+ providerMetadata: part.providerMetadata
4076
+ });
4077
+ break;
4078
+ }
4079
+ case "file": {
4080
+ controller.enqueue({
4081
+ type: "file",
4082
+ mediaType: part.file.mediaType,
4083
+ url: `data:${part.file.mediaType};base64,${part.file.base64}`
4084
+ });
4085
+ break;
4086
+ }
4087
+ case "source": {
4088
+ if (sendSources && part.sourceType === "url") {
4089
+ controller.enqueue({
4090
+ type: "source-url",
4091
+ sourceId: part.id,
4092
+ url: part.url,
4093
+ title: part.title,
4094
+ providerMetadata: part.providerMetadata
4095
+ });
4096
+ }
4097
+ if (sendSources && part.sourceType === "document") {
4098
+ controller.enqueue({
4099
+ type: "source-document",
4100
+ sourceId: part.id,
4101
+ mediaType: part.mediaType,
4102
+ title: part.title,
4103
+ filename: part.filename,
4104
+ providerMetadata: part.providerMetadata
4105
+ });
4106
+ }
4107
+ break;
4108
+ }
4109
+ case "tool-input-start": {
4110
+ controller.enqueue({
4111
+ type: "tool-input-start",
4112
+ toolCallId: part.id,
4113
+ toolName: part.toolName,
4114
+ providerExecuted: part.providerExecuted
4115
+ });
4116
+ break;
4117
+ }
4118
+ case "tool-input-delta": {
4119
+ controller.enqueue({
4120
+ type: "tool-input-delta",
4121
+ toolCallId: part.id,
4122
+ inputTextDelta: part.delta
4123
+ });
4124
+ break;
4125
+ }
4126
+ case "tool-call": {
4127
+ controller.enqueue({
4128
+ type: "tool-input-available",
4129
+ toolCallId: part.toolCallId,
4130
+ toolName: part.toolName,
4131
+ input: part.input,
4132
+ providerExecuted: part.providerExecuted
4133
+ });
4134
+ break;
4135
+ }
4136
+ case "tool-result": {
4137
+ controller.enqueue({
4138
+ type: "tool-output-available",
4139
+ toolCallId: part.toolCallId,
4140
+ output: part.output,
4141
+ providerExecuted: part.providerExecuted
4142
+ });
4143
+ break;
4144
+ }
4145
+ case "tool-error": {
4146
+ controller.enqueue({
4147
+ type: "tool-output-error",
4148
+ toolCallId: part.toolCallId,
4149
+ errorText: onError(part.error),
4150
+ providerExecuted: part.providerExecuted
4151
+ });
4152
+ break;
4153
+ }
4154
+ case "error": {
4155
+ controller.enqueue({
4156
+ type: "error",
4157
+ errorText: onError(part.error)
4158
+ });
4159
+ break;
4160
+ }
4161
+ case "start-step": {
4162
+ controller.enqueue({ type: "start-step" });
4163
+ break;
4164
+ }
4165
+ case "finish-step": {
4166
+ controller.enqueue({ type: "finish-step" });
4167
+ break;
4168
+ }
4169
+ case "start": {
4170
+ if (sendStart) {
4171
+ controller.enqueue({
4172
+ type: "start",
4173
+ messageId: responseMessageId,
4174
+ messageMetadata: messageMetadataValue
4175
+ });
4176
+ }
4177
+ break;
4178
+ }
4179
+ case "finish": {
4180
+ if (sendFinish) {
4181
+ controller.enqueue({
4182
+ type: "finish",
4183
+ messageMetadata: messageMetadataValue
4184
+ });
4185
+ }
4186
+ break;
4187
+ }
4188
+ case "tool-input-end": {
4189
+ break;
4190
+ }
4191
+ case "raw": {
4192
+ break;
4193
+ }
4194
+ default: {
4195
+ const exhaustiveCheck = partType;
4196
+ throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
4197
+ }
4198
+ }
4199
+ if (messageMetadataValue != null && partType !== "start" && partType !== "finish") {
4200
+ controller.enqueue({
4201
+ type: "message-metadata",
4202
+ messageMetadata: messageMetadataValue
4203
+ });
4204
+ }
4205
+ }
4206
+ })
4207
+ );
4208
+ return handleUIMessageStreamFinish({
4209
+ stream: baseStream,
4210
+ messageId: responseMessageId != null ? responseMessageId : generateMessageId == null ? void 0 : generateMessageId(),
4211
+ originalMessages,
4212
+ onFinish,
4213
+ onError
4214
+ });
4215
+ }
4216
+ pipeUIMessageStreamToResponse(response, {
4217
+ originalMessages,
4218
+ generateMessageId,
4219
+ onFinish,
4220
+ messageMetadata,
4221
+ sendReasoning,
4222
+ sendSources,
4223
+ sendFinish,
4224
+ sendStart,
4225
+ onError,
4226
+ ...init
4227
+ } = {}) {
4228
+ pipeUIMessageStreamToResponse({
4229
+ response,
4230
+ stream: this.toUIMessageStream({
4231
+ originalMessages,
4232
+ generateMessageId,
4233
+ onFinish,
4234
+ messageMetadata,
4235
+ sendReasoning,
4236
+ sendSources,
4237
+ sendFinish,
4238
+ sendStart,
4239
+ onError
4240
+ }),
4241
+ ...init
4242
+ });
4243
+ }
4244
+ pipeTextStreamToResponse(response, init) {
4245
+ pipeTextStreamToResponse({
4246
+ response,
4247
+ textStream: this.textStream,
4248
+ ...init
4249
+ });
4250
+ }
4251
+ toUIMessageStreamResponse({
4252
+ originalMessages,
4253
+ generateMessageId,
4254
+ onFinish,
4255
+ messageMetadata,
4256
+ sendReasoning,
4257
+ sendSources,
4258
+ sendFinish,
4259
+ sendStart,
4260
+ onError,
4261
+ ...init
4262
+ } = {}) {
4263
+ return createUIMessageStreamResponse({
4264
+ stream: this.toUIMessageStream({
4265
+ originalMessages,
4266
+ generateMessageId,
4267
+ onFinish,
4268
+ messageMetadata,
4269
+ sendReasoning,
4270
+ sendSources,
4271
+ sendFinish,
4272
+ sendStart,
4273
+ onError
4274
+ }),
4275
+ ...init
4276
+ });
4277
+ }
4278
+ toTextStreamResponse(init) {
4279
+ return createTextStreamResponse({
4280
+ textStream: this.textStream,
4281
+ ...init
4282
+ });
4283
+ }
4284
+ };
4285
+
4286
+ // src/bin/ai.ts
4287
+ var import_fs = require("fs");
4288
+ var import_path = require("path");
4289
+ var import_gateway3 = require("@ai-sdk/gateway");
4290
/**
 * Reports whether data is being piped into this process.
 * stdin attached to a terminal (isTTY) means interactive use, i.e. no pipe.
 * @returns {boolean} true when stdin is not a TTY.
 */
function isStdinAvailable() {
  const { isTTY } = process.stdin;
  return isTTY ? false : true;
}
4293
/**
 * Reads all of stdin as a UTF-8 string.
 * Resolves with the trimmed content once the stream ends.
 * Fix: the original attached no 'error' listener, so a stream error left the
 * promise pending forever (the CLI would hang); errors now reject and are
 * surfaced by main()'s catch block.
 * @returns {Promise<string>} trimmed stdin content.
 */
async function readStdin() {
  return new Promise((resolve2, reject) => {
    let data = "";
    process.stdin.setEncoding("utf8");
    process.stdin.on("data", (chunk) => {
      data += chunk;
    });
    process.stdin.on("end", () => {
      resolve2(data.trim());
    });
    process.stdin.on("error", (error) => {
      reject(error);
    });
  });
}
4305
/**
 * Guesses a MIME type from a file path's extension.
 * Unknown or missing extensions fall back to "text/plain". Note that a path
 * without a dot yields the whole name as the "extension" (split/pop
 * semantics), matching the original behavior.
 * @param {string} filePath - path or filename to classify.
 * @returns {string} MIME type string.
 */
function getMediaType(filePath) {
  const FALLBACK = "text/plain";
  const mediaTypesByExtension = new Map([
    ["js", "application/javascript"],
    ["ts", "application/typescript"],
    ["jsx", "text/jsx"],
    ["tsx", "text/tsx"],
    ["json", "application/json"],
    ["md", "text/markdown"],
    ["txt", "text/plain"],
    ["py", "text/x-python"],
    ["html", "text/html"],
    ["css", "text/css"],
    ["xml", "application/xml"],
    ["yaml", "application/yaml"],
    ["yml", "application/yaml"],
    ["jpg", "image/jpeg"],
    ["jpeg", "image/jpeg"],
    ["png", "image/png"],
    ["gif", "image/gif"],
    ["webp", "image/webp"],
    ["svg", "image/svg+xml"],
    ["bmp", "image/bmp"],
    ["tiff", "image/tiff"],
    ["tif", "image/tiff"]
  ]);
  const rawExtension = filePath.split(".").pop();
  const extension = rawExtension == null ? "" : rawExtension.toLowerCase();
  const mediaType = mediaTypesByExtension.get(extension);
  return mediaType === undefined ? FALLBACK : mediaType;
}
4334
/**
 * Loads a file for prompt attachment.
 * Images are returned as base64 data URLs; everything else is read as UTF-8
 * text.
 * @param {string} filePath - path as given on the command line.
 * @returns {{name: string, content: string, mediaType: string}}
 * @throws {Error} when the file does not exist.
 */
function readFileContent(filePath) {
  const absolutePath = (0, import_path.resolve)(filePath);
  if (!(0, import_fs.existsSync)(absolutePath)) {
    throw new Error(`File not found: ${filePath}`);
  }
  const mediaType = getMediaType(filePath);
  let content;
  if (mediaType.startsWith("image/")) {
    // Binary read, embedded as a data URL so it can be sent as an image part.
    const bytes = (0, import_fs.readFileSync)(absolutePath);
    content = `data:${mediaType};base64,${bytes.toString("base64")}`;
  } else {
    content = (0, import_fs.readFileSync)(absolutePath, "utf8");
  }
  // name keeps the user-supplied (possibly relative) path for display.
  return { name: filePath, content, mediaType };
}
4354
/**
 * Parses process.argv into CLI options.
 * Environment variables supply defaults (AI_MODEL, AI_SYSTEM, AI_VERBOSE);
 * bare arguments are collected and joined with spaces into options.prompt
 * (left undefined when there are none).
 * @returns {object} parsed options.
 * @throws {Error} on an unknown flag or a flag missing its required value.
 */
function parseArgs() {
  const args = process.argv.slice(2);
  const options = {
    model: process.env.AI_MODEL || "openai/gpt-4",
    files: [],
    help: false,
    version: false,
    verbose: process.env.AI_VERBOSE === "true",
    system: process.env.AI_SYSTEM
  };
  const promptArgs = [];
  let index = 0;
  // Consumes and returns the value that must follow a flag such as -m.
  const nextValue = (label) => {
    if (index + 1 >= args.length) {
      throw new Error(`${label} option requires a value`);
    }
    index++;
    return args[index];
  };
  while (index < args.length) {
    const arg = args[index];
    if (arg === "-h" || arg === "--help") {
      options.help = true;
    } else if (arg === "-V" || arg === "--version") {
      options.version = true;
    } else if (arg === "-v" || arg === "--verbose") {
      options.verbose = true;
    } else if (arg === "-m" || arg === "--model") {
      options.model = nextValue("Model");
    } else if (arg === "-f" || arg === "--file") {
      options.files.push(nextValue("File"));
    } else if (arg === "-s" || arg === "--system") {
      options.system = nextValue("System");
    } else if (arg.startsWith("-")) {
      throw new Error(`Unknown option: ${arg}`);
    } else {
      promptArgs.push(arg);
    }
    index++;
  }
  if (promptArgs.length > 0) {
    options.prompt = promptArgs.join(" ");
  }
  return options;
}
4422
/**
 * Prints CLI usage, options, environment variables, and authentication
 * setup instructions to stdout.
 * NOTE(review): the interior whitespace of this template literal is
 * user-visible output; column alignment below is reconstructed from the
 * diff rendering and should be verified against the published CLI output.
 */
function showHelp() {
  console.log(`Usage: ai [options] [prompt]

AI CLI - Stream text generation from various AI models

Arguments:
  prompt                  The prompt to send to the AI model (optional if using stdin)

Options:
  -m, --model <model>     Model to use. Format: provider/model or just model name.
                          Examples: openai/gpt-4o, anthropic/claude-3-5-sonnet-20241022
                          (default: "openai/gpt-4")
  -f, --file <file>       Attach a file to the prompt (can be used multiple times)
  -s, --system <message>  System message to set context
  -v, --verbose           Show detailed information (model, usage, etc.)
  -h, --help              Display help for command
  -V, --version           Output the version number

Environment Variables:
  - AI_MODEL: Default model to use
  - AI_SYSTEM: Default system message
  - AI_VERBOSE: Set to 'true' to enable verbose output

Authentication (choose one):
  - VERCEL_OIDC_TOKEN: Vercel OIDC token (for Vercel projects)
  - AI_GATEWAY_API_KEY: AI Gateway API key

Setting Environment Variables:
  # Option 1: Export in current session
  export AI_GATEWAY_API_KEY="your-key-here"
  export AI_MODEL="anthropic/claude-3-5-sonnet-20241022"

  # Option 2: Inline for single command
  AI_GATEWAY_API_KEY="your-key" ai "Hello world"

  # Option 3: Add to shell profile (~/.bashrc, ~/.zshrc)
  echo 'export AI_GATEWAY_API_KEY="your-key"' >> ~/.bashrc

Examples:
  npx ai "Hello, world!"
  npx ai "Write a poem" -m anthropic/claude-3-5-sonnet-20241022
  npx ai "Explain quantum physics" -m groq/llama-3.1-8b-instant
  npx ai "Explain this code" -f script.js -f README.md
  echo "What is life?" | npx ai
  cat file.txt | npx ai "Summarize this content"
  npx ai -f package.json "What dependencies does this project have?"

Unix-style piping:
  echo "Hello world" | npx ai "Translate to French"
  cat README.md | npx ai "Summarize this"
  curl -s https://api.github.com/repos/vercel/ai | npx ai "What is this repository about?"

Authentication Setup:
  This CLI uses the Vercel AI Gateway. You need ONE of these for authentication:

  OIDC Token (for Vercel projects):
    - Automatically available in Vercel deployments
    - For local development: run 'vercel env pull' or use 'vercel dev'

  API Key (for any environment):
    - Get your key from the AI Gateway dashboard
    - Set: export AI_GATEWAY_API_KEY="your-key-here"

  The gateway supports OpenAI, Anthropic, Google, Groq, and more providers.`);
}
4487
/**
 * Prints the CLI version to stdout.
 * Fix: the original printed a hard-coded "1.0.0", which drifts from the
 * actual published package version. The version is now resolved from the
 * package manifest (dist/bin/ai.js -> ../../package.json), keeping "1.0.0"
 * only as a last-resort fallback when the manifest cannot be read.
 */
function showVersion() {
  let version = "1.0.0";
  try {
    version = require("../../package.json").version || version;
  } catch (error) {
    // Best-effort: keep the fallback when the manifest is not locatable.
  }
  console.log(version);
}
4490
/**
 * Resolves a "provider/model" identifier to an AI Gateway language model.
 * @param {string} modelString - e.g. "openai/gpt-4o".
 * @returns {object} the gateway language model handle.
 */
function resolveModel(modelString) {
  const { gateway } = import_gateway3;
  return gateway.languageModel(modelString);
}
4493
/**
 * Renders text attachments as a block to append to the prompt.
 * Image attachments are skipped here (they are sent separately as image
 * parts); files without a mediaType are treated as text.
 * @param {Array<{name: string, content: string, mediaType?: string}>} files
 * @returns {string} formatted block, or "" when there is nothing to inline.
 */
function formatAttachedFiles(files) {
  const isImage = (file) => Boolean(file.mediaType && file.mediaType.startsWith("image/"));
  const textFiles = files.filter((file) => !isImage(file));
  if (textFiles.length === 0) {
    return "";
  }
  const sections = textFiles.map(
    (file) => `\n--- ${file.name} ---\n${file.content}\n`
  );
  return "\n\nAttached files:\n" + sections.join("");
}
4512
/**
 * CLI entry point: parse arguments, assemble the prompt (argv + piped stdin +
 * attached files), stream the model's text to stdout, and exit non-zero on
 * any error. All diagnostics go to stderr so stdout stays pipeable.
 */
async function main() {
  try {
    const options = parseArgs();
    // --help / --version short-circuit before any model work.
    if (options.help) {
      showHelp();
      return;
    }
    if (options.version) {
      showVersion();
      return;
    }
    let prompt = options.prompt || "";
    // Piped stdin content is prepended to any argv prompt, separated by a
    // blank line; stdin alone becomes the whole prompt.
    if (isStdinAvailable()) {
      const stdinContent = await readStdin();
      if (stdinContent) {
        prompt = prompt ? `${stdinContent}

${prompt}` : stdinContent;
      }
    }
    if (!prompt.trim()) {
      console.error(
        "Error: No prompt provided. Use --help for usage information."
      );
      process.exit(1);
    }
    // Read every -f/--file attachment up front; any unreadable file aborts.
    const attachedFiles = [];
    for (const filePath of options.files) {
      try {
        const file = readFileContent(filePath);
        attachedFiles.push(file);
      } catch (error) {
        console.error(
          `Error reading file ${filePath}: ${error instanceof Error ? error.message : "Unknown error"}`
        );
        process.exit(1);
      }
    }
    // Text attachments are inlined into the prompt; images are split out to
    // be sent as image content parts below.
    const textPrompt = prompt + formatAttachedFiles(attachedFiles);
    const imageFiles = attachedFiles.filter(
      (f) => {
        var _a9;
        return (_a9 = f.mediaType) == null ? void 0 : _a9.startsWith("image/");
      }
    );
    // The default gpt-4 is upgraded to gpt-4o when images are attached
    // (only when the user did not pick a model themselves).
    if (imageFiles.length > 0 && options.model === "openai/gpt-4") {
      options.model = "openai/gpt-4o";
    }
    // Verbose diagnostics go to stderr to keep stdout clean for piping.
    if (options.verbose) {
      console.error(`Using model: ${options.model}`);
      if (attachedFiles.length > 0) {
        console.error(
          `Attached files: ${attachedFiles.map((f) => f.name).join(", ")}`
        );
      }
      console.error("");
    }
    const model = resolveModel(options.model);
    // With images, build a single user message of text + image parts;
    // otherwise the plain prompt string is used instead.
    let messages;
    if (imageFiles.length > 0) {
      const content = [{ type: "text", text: textPrompt }];
      for (const img of imageFiles) {
        content.push({
          type: "image",
          image: img.content
        });
      }
      messages = [{ role: "user", content }];
    }
    const result = await streamText(
      messages ? {
        model,
        messages,
        system: options.system
      } : {
        model,
        prompt: textPrompt,
        system: options.system
      }
    );
    // Stream chunks to stdout as they arrive; final newline terminates output.
    for await (const chunk of result.textStream) {
      process.stdout.write(chunk);
    }
    process.stdout.write("\n");
    if (options.verbose) {
      // usage resolves only after the stream has finished.
      const usage = await result.usage;
      if (usage) {
        console.error(
          `
Usage: ${usage.inputTokens} prompt + ${usage.outputTokens} completion = ${usage.totalTokens} total tokens`
        );
      }
    }
  } catch (error) {
    console.error(
      `Error: ${error instanceof Error ? error.message : "Unknown error"}`
    );
    process.exit(1);
  }
}
4612
// Exit cleanly on Ctrl-C / termination signals (exit code 0, no stack trace).
process.on("SIGINT", () => {
  process.exit(0);
});
process.on("SIGTERM", () => {
  process.exit(0);
});
// main() handles its own errors; this catch guards against anything that
// escapes (e.g. a rejection before main's try block is entered).
main().catch((error) => {
  console.error(
    `Fatal error: ${error instanceof Error ? error.message : "Unknown error"}`
  );
  process.exit(1);
});
// Annotate the CommonJS export names for ESM import in node:
// (the `0 &&` expression never runs; bundler emits it purely for Node's
// CJS-named-exports static analysis)
0 && (module.exports = {
  formatAttachedFiles,
  getMediaType,
  isStdinAvailable,
  parseArgs,
  readFileContent,
  resolveModel,
  showHelp,
  showVersion
});
//# sourceMappingURL=ai.js.map