ai 5.0.0-beta.25 → 5.0.0-beta.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/ai.js DELETED
@@ -1,4736 +0,0 @@
1
- #!/usr/bin/env node
2
- "use strict";
3
- var __defProp = Object.defineProperty;
4
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
- var __getOwnPropNames = Object.getOwnPropertyNames;
6
- var __hasOwnProp = Object.prototype.hasOwnProperty;
7
- var __export = (target, all) => {
8
- for (var name9 in all)
9
- __defProp(target, name9, { get: all[name9], enumerable: true });
10
- };
11
- var __copyProps = (to, from, except, desc) => {
12
- if (from && typeof from === "object" || typeof from === "function") {
13
- for (let key of __getOwnPropNames(from))
14
- if (!__hasOwnProp.call(to, key) && key !== except)
15
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
- }
17
- return to;
18
- };
19
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
20
-
21
- // src/bin/ai.ts
22
- var ai_exports = {};
23
- __export(ai_exports, {
24
- formatAttachedFiles: () => formatAttachedFiles,
25
- getMediaType: () => getMediaType,
26
- isStdinAvailable: () => isStdinAvailable,
27
- main: () => main,
28
- parseArgs: () => parseArgs,
29
- readFileContent: () => readFileContent,
30
- resolveModel: () => resolveModel,
31
- showHelp: () => showHelp,
32
- showVersion: () => showVersion
33
- });
34
- module.exports = __toCommonJS(ai_exports);
35
-
36
- // src/generate-text/stream-text.ts
37
- var import_provider16 = require("@ai-sdk/provider");
38
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
39
-
40
- // src/error/no-output-specified-error.ts
41
- var import_provider = require("@ai-sdk/provider");
42
- var name = "AI_NoOutputSpecifiedError";
43
- var marker = `vercel.ai.error.${name}`;
44
- var symbol = Symbol.for(marker);
45
- var _a;
46
- var NoOutputSpecifiedError = class extends import_provider.AISDKError {
47
- // used in isInstance
48
- constructor({ message = "No output specified." } = {}) {
49
- super({ name, message });
50
- this[_a] = true;
51
- }
52
- static isInstance(error) {
53
- return import_provider.AISDKError.hasMarker(error, marker);
54
- }
55
- };
56
- _a = symbol;
57
-
58
- // src/util/prepare-headers.ts
59
- function prepareHeaders(headers, defaultHeaders) {
60
- const responseHeaders = new Headers(headers != null ? headers : {});
61
- for (const [key, value] of Object.entries(defaultHeaders)) {
62
- if (!responseHeaders.has(key)) {
63
- responseHeaders.set(key, value);
64
- }
65
- }
66
- return responseHeaders;
67
- }
68
-
69
- // src/text-stream/create-text-stream-response.ts
70
- function createTextStreamResponse({
71
- status,
72
- statusText,
73
- headers,
74
- textStream
75
- }) {
76
- return new Response(textStream.pipeThrough(new TextEncoderStream()), {
77
- status: status != null ? status : 200,
78
- statusText,
79
- headers: prepareHeaders(headers, {
80
- "content-type": "text/plain; charset=utf-8"
81
- })
82
- });
83
- }
84
-
85
- // src/util/write-to-server-response.ts
86
- function writeToServerResponse({
87
- response,
88
- status,
89
- statusText,
90
- headers,
91
- stream
92
- }) {
93
- response.writeHead(status != null ? status : 200, statusText, headers);
94
- const reader = stream.getReader();
95
- const read = async () => {
96
- try {
97
- while (true) {
98
- const { done, value } = await reader.read();
99
- if (done)
100
- break;
101
- response.write(value);
102
- }
103
- } catch (error) {
104
- throw error;
105
- } finally {
106
- response.end();
107
- }
108
- };
109
- read();
110
- }
111
-
112
- // src/text-stream/pipe-text-stream-to-response.ts
113
- function pipeTextStreamToResponse({
114
- response,
115
- status,
116
- statusText,
117
- headers,
118
- textStream
119
- }) {
120
- writeToServerResponse({
121
- response,
122
- status,
123
- statusText,
124
- headers: Object.fromEntries(
125
- prepareHeaders(headers, {
126
- "content-type": "text/plain; charset=utf-8"
127
- }).entries()
128
- ),
129
- stream: textStream.pipeThrough(new TextEncoderStream())
130
- });
131
- }
132
-
133
- // src/ui-message-stream/json-to-sse-transform-stream.ts
134
- var JsonToSseTransformStream = class extends TransformStream {
135
- constructor() {
136
- super({
137
- transform(part, controller) {
138
- controller.enqueue(`data: ${JSON.stringify(part)}
139
-
140
- `);
141
- },
142
- flush(controller) {
143
- controller.enqueue("data: [DONE]\n\n");
144
- }
145
- });
146
- }
147
- };
148
-
149
- // src/ui-message-stream/ui-message-stream-headers.ts
150
- var UI_MESSAGE_STREAM_HEADERS = {
151
- "content-type": "text/event-stream",
152
- "cache-control": "no-cache",
153
- connection: "keep-alive",
154
- "x-vercel-ai-ui-message-stream": "v1",
155
- "x-accel-buffering": "no"
156
- // disable nginx buffering
157
- };
158
-
159
- // src/ui-message-stream/create-ui-message-stream-response.ts
160
- function createUIMessageStreamResponse({
161
- status,
162
- statusText,
163
- headers,
164
- stream,
165
- consumeSseStream
166
- }) {
167
- let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
168
- if (consumeSseStream) {
169
- const [stream1, stream2] = sseStream.tee();
170
- sseStream = stream1;
171
- consumeSseStream({ stream: stream2 });
172
- }
173
- return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
174
- status,
175
- statusText,
176
- headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
177
- });
178
- }
179
-
180
- // src/ui-message-stream/get-response-ui-message-id.ts
181
- function getResponseUIMessageId({
182
- originalMessages,
183
- responseMessageId
184
- }) {
185
- if (originalMessages == null) {
186
- return void 0;
187
- }
188
- const lastMessage = originalMessages[originalMessages.length - 1];
189
- return (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
190
- }
191
-
192
- // src/ui/process-ui-message-stream.ts
193
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
194
-
195
- // src/ui-message-stream/ui-message-chunks.ts
196
- var import_v43 = require("zod/v4");
197
-
198
- // src/types/provider-metadata.ts
199
- var import_v42 = require("zod/v4");
200
-
201
- // src/types/json-value.ts
202
- var import_v4 = require("zod/v4");
203
- var jsonValueSchema = import_v4.z.lazy(
204
- () => import_v4.z.union([
205
- import_v4.z.null(),
206
- import_v4.z.string(),
207
- import_v4.z.number(),
208
- import_v4.z.boolean(),
209
- import_v4.z.record(import_v4.z.string(), jsonValueSchema),
210
- import_v4.z.array(jsonValueSchema)
211
- ])
212
- );
213
-
214
- // src/types/provider-metadata.ts
215
- var providerMetadataSchema = import_v42.z.record(
216
- import_v42.z.string(),
217
- import_v42.z.record(import_v42.z.string(), jsonValueSchema)
218
- );
219
-
220
- // src/ui-message-stream/ui-message-chunks.ts
221
- var uiMessageChunkSchema = import_v43.z.union([
222
- import_v43.z.strictObject({
223
- type: import_v43.z.literal("text-start"),
224
- id: import_v43.z.string(),
225
- providerMetadata: providerMetadataSchema.optional()
226
- }),
227
- import_v43.z.strictObject({
228
- type: import_v43.z.literal("text-delta"),
229
- id: import_v43.z.string(),
230
- delta: import_v43.z.string(),
231
- providerMetadata: providerMetadataSchema.optional()
232
- }),
233
- import_v43.z.strictObject({
234
- type: import_v43.z.literal("text-end"),
235
- id: import_v43.z.string(),
236
- providerMetadata: providerMetadataSchema.optional()
237
- }),
238
- import_v43.z.strictObject({
239
- type: import_v43.z.literal("error"),
240
- errorText: import_v43.z.string()
241
- }),
242
- import_v43.z.strictObject({
243
- type: import_v43.z.literal("tool-input-start"),
244
- toolCallId: import_v43.z.string(),
245
- toolName: import_v43.z.string(),
246
- providerExecuted: import_v43.z.boolean().optional()
247
- }),
248
- import_v43.z.strictObject({
249
- type: import_v43.z.literal("tool-input-delta"),
250
- toolCallId: import_v43.z.string(),
251
- inputTextDelta: import_v43.z.string()
252
- }),
253
- import_v43.z.strictObject({
254
- type: import_v43.z.literal("tool-input-available"),
255
- toolCallId: import_v43.z.string(),
256
- toolName: import_v43.z.string(),
257
- input: import_v43.z.unknown(),
258
- providerExecuted: import_v43.z.boolean().optional(),
259
- providerMetadata: providerMetadataSchema.optional()
260
- }),
261
- import_v43.z.strictObject({
262
- type: import_v43.z.literal("tool-output-available"),
263
- toolCallId: import_v43.z.string(),
264
- output: import_v43.z.unknown(),
265
- providerExecuted: import_v43.z.boolean().optional()
266
- }),
267
- import_v43.z.strictObject({
268
- type: import_v43.z.literal("tool-output-error"),
269
- toolCallId: import_v43.z.string(),
270
- errorText: import_v43.z.string(),
271
- providerExecuted: import_v43.z.boolean().optional()
272
- }),
273
- import_v43.z.strictObject({
274
- type: import_v43.z.literal("reasoning"),
275
- text: import_v43.z.string(),
276
- providerMetadata: providerMetadataSchema.optional()
277
- }),
278
- import_v43.z.strictObject({
279
- type: import_v43.z.literal("reasoning-start"),
280
- id: import_v43.z.string(),
281
- providerMetadata: providerMetadataSchema.optional()
282
- }),
283
- import_v43.z.strictObject({
284
- type: import_v43.z.literal("reasoning-delta"),
285
- id: import_v43.z.string(),
286
- delta: import_v43.z.string(),
287
- providerMetadata: providerMetadataSchema.optional()
288
- }),
289
- import_v43.z.strictObject({
290
- type: import_v43.z.literal("reasoning-end"),
291
- id: import_v43.z.string(),
292
- providerMetadata: providerMetadataSchema.optional()
293
- }),
294
- import_v43.z.strictObject({
295
- type: import_v43.z.literal("reasoning-part-finish")
296
- }),
297
- import_v43.z.strictObject({
298
- type: import_v43.z.literal("source-url"),
299
- sourceId: import_v43.z.string(),
300
- url: import_v43.z.string(),
301
- title: import_v43.z.string().optional(),
302
- providerMetadata: providerMetadataSchema.optional()
303
- }),
304
- import_v43.z.strictObject({
305
- type: import_v43.z.literal("source-document"),
306
- sourceId: import_v43.z.string(),
307
- mediaType: import_v43.z.string(),
308
- title: import_v43.z.string(),
309
- filename: import_v43.z.string().optional(),
310
- providerMetadata: providerMetadataSchema.optional()
311
- }),
312
- import_v43.z.strictObject({
313
- type: import_v43.z.literal("file"),
314
- url: import_v43.z.string(),
315
- mediaType: import_v43.z.string(),
316
- providerMetadata: providerMetadataSchema.optional()
317
- }),
318
- import_v43.z.strictObject({
319
- type: import_v43.z.string().startsWith("data-"),
320
- id: import_v43.z.string().optional(),
321
- data: import_v43.z.unknown(),
322
- transient: import_v43.z.boolean().optional()
323
- }),
324
- import_v43.z.strictObject({
325
- type: import_v43.z.literal("start-step")
326
- }),
327
- import_v43.z.strictObject({
328
- type: import_v43.z.literal("finish-step")
329
- }),
330
- import_v43.z.strictObject({
331
- type: import_v43.z.literal("start"),
332
- messageId: import_v43.z.string().optional(),
333
- messageMetadata: import_v43.z.unknown().optional()
334
- }),
335
- import_v43.z.strictObject({
336
- type: import_v43.z.literal("finish"),
337
- messageMetadata: import_v43.z.unknown().optional()
338
- }),
339
- import_v43.z.strictObject({
340
- type: import_v43.z.literal("abort")
341
- }),
342
- import_v43.z.strictObject({
343
- type: import_v43.z.literal("message-metadata"),
344
- messageMetadata: import_v43.z.unknown()
345
- })
346
- ]);
347
- function isDataUIMessageChunk(chunk) {
348
- return chunk.type.startsWith("data-");
349
- }
350
-
351
- // src/util/merge-objects.ts
352
- function mergeObjects(base, overrides) {
353
- if (base === void 0 && overrides === void 0) {
354
- return void 0;
355
- }
356
- if (base === void 0) {
357
- return overrides;
358
- }
359
- if (overrides === void 0) {
360
- return base;
361
- }
362
- const result = { ...base };
363
- for (const key in overrides) {
364
- if (Object.prototype.hasOwnProperty.call(overrides, key)) {
365
- const overridesValue = overrides[key];
366
- if (overridesValue === void 0)
367
- continue;
368
- const baseValue = key in base ? base[key] : void 0;
369
- const isSourceObject = overridesValue !== null && typeof overridesValue === "object" && !Array.isArray(overridesValue) && !(overridesValue instanceof Date) && !(overridesValue instanceof RegExp);
370
- const isTargetObject = baseValue !== null && baseValue !== void 0 && typeof baseValue === "object" && !Array.isArray(baseValue) && !(baseValue instanceof Date) && !(baseValue instanceof RegExp);
371
- if (isSourceObject && isTargetObject) {
372
- result[key] = mergeObjects(
373
- baseValue,
374
- overridesValue
375
- );
376
- } else {
377
- result[key] = overridesValue;
378
- }
379
- }
380
- }
381
- return result;
382
- }
383
-
384
- // src/util/parse-partial-json.ts
385
- var import_provider_utils = require("@ai-sdk/provider-utils");
386
-
387
- // src/util/fix-json.ts
388
- function fixJson(input) {
389
- const stack = ["ROOT"];
390
- let lastValidIndex = -1;
391
- let literalStart = null;
392
- function processValueStart(char, i, swapState) {
393
- {
394
- switch (char) {
395
- case '"': {
396
- lastValidIndex = i;
397
- stack.pop();
398
- stack.push(swapState);
399
- stack.push("INSIDE_STRING");
400
- break;
401
- }
402
- case "f":
403
- case "t":
404
- case "n": {
405
- lastValidIndex = i;
406
- literalStart = i;
407
- stack.pop();
408
- stack.push(swapState);
409
- stack.push("INSIDE_LITERAL");
410
- break;
411
- }
412
- case "-": {
413
- stack.pop();
414
- stack.push(swapState);
415
- stack.push("INSIDE_NUMBER");
416
- break;
417
- }
418
- case "0":
419
- case "1":
420
- case "2":
421
- case "3":
422
- case "4":
423
- case "5":
424
- case "6":
425
- case "7":
426
- case "8":
427
- case "9": {
428
- lastValidIndex = i;
429
- stack.pop();
430
- stack.push(swapState);
431
- stack.push("INSIDE_NUMBER");
432
- break;
433
- }
434
- case "{": {
435
- lastValidIndex = i;
436
- stack.pop();
437
- stack.push(swapState);
438
- stack.push("INSIDE_OBJECT_START");
439
- break;
440
- }
441
- case "[": {
442
- lastValidIndex = i;
443
- stack.pop();
444
- stack.push(swapState);
445
- stack.push("INSIDE_ARRAY_START");
446
- break;
447
- }
448
- }
449
- }
450
- }
451
- function processAfterObjectValue(char, i) {
452
- switch (char) {
453
- case ",": {
454
- stack.pop();
455
- stack.push("INSIDE_OBJECT_AFTER_COMMA");
456
- break;
457
- }
458
- case "}": {
459
- lastValidIndex = i;
460
- stack.pop();
461
- break;
462
- }
463
- }
464
- }
465
- function processAfterArrayValue(char, i) {
466
- switch (char) {
467
- case ",": {
468
- stack.pop();
469
- stack.push("INSIDE_ARRAY_AFTER_COMMA");
470
- break;
471
- }
472
- case "]": {
473
- lastValidIndex = i;
474
- stack.pop();
475
- break;
476
- }
477
- }
478
- }
479
- for (let i = 0; i < input.length; i++) {
480
- const char = input[i];
481
- const currentState = stack[stack.length - 1];
482
- switch (currentState) {
483
- case "ROOT":
484
- processValueStart(char, i, "FINISH");
485
- break;
486
- case "INSIDE_OBJECT_START": {
487
- switch (char) {
488
- case '"': {
489
- stack.pop();
490
- stack.push("INSIDE_OBJECT_KEY");
491
- break;
492
- }
493
- case "}": {
494
- lastValidIndex = i;
495
- stack.pop();
496
- break;
497
- }
498
- }
499
- break;
500
- }
501
- case "INSIDE_OBJECT_AFTER_COMMA": {
502
- switch (char) {
503
- case '"': {
504
- stack.pop();
505
- stack.push("INSIDE_OBJECT_KEY");
506
- break;
507
- }
508
- }
509
- break;
510
- }
511
- case "INSIDE_OBJECT_KEY": {
512
- switch (char) {
513
- case '"': {
514
- stack.pop();
515
- stack.push("INSIDE_OBJECT_AFTER_KEY");
516
- break;
517
- }
518
- }
519
- break;
520
- }
521
- case "INSIDE_OBJECT_AFTER_KEY": {
522
- switch (char) {
523
- case ":": {
524
- stack.pop();
525
- stack.push("INSIDE_OBJECT_BEFORE_VALUE");
526
- break;
527
- }
528
- }
529
- break;
530
- }
531
- case "INSIDE_OBJECT_BEFORE_VALUE": {
532
- processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
533
- break;
534
- }
535
- case "INSIDE_OBJECT_AFTER_VALUE": {
536
- processAfterObjectValue(char, i);
537
- break;
538
- }
539
- case "INSIDE_STRING": {
540
- switch (char) {
541
- case '"': {
542
- stack.pop();
543
- lastValidIndex = i;
544
- break;
545
- }
546
- case "\\": {
547
- stack.push("INSIDE_STRING_ESCAPE");
548
- break;
549
- }
550
- default: {
551
- lastValidIndex = i;
552
- }
553
- }
554
- break;
555
- }
556
- case "INSIDE_ARRAY_START": {
557
- switch (char) {
558
- case "]": {
559
- lastValidIndex = i;
560
- stack.pop();
561
- break;
562
- }
563
- default: {
564
- lastValidIndex = i;
565
- processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
566
- break;
567
- }
568
- }
569
- break;
570
- }
571
- case "INSIDE_ARRAY_AFTER_VALUE": {
572
- switch (char) {
573
- case ",": {
574
- stack.pop();
575
- stack.push("INSIDE_ARRAY_AFTER_COMMA");
576
- break;
577
- }
578
- case "]": {
579
- lastValidIndex = i;
580
- stack.pop();
581
- break;
582
- }
583
- default: {
584
- lastValidIndex = i;
585
- break;
586
- }
587
- }
588
- break;
589
- }
590
- case "INSIDE_ARRAY_AFTER_COMMA": {
591
- processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
592
- break;
593
- }
594
- case "INSIDE_STRING_ESCAPE": {
595
- stack.pop();
596
- lastValidIndex = i;
597
- break;
598
- }
599
- case "INSIDE_NUMBER": {
600
- switch (char) {
601
- case "0":
602
- case "1":
603
- case "2":
604
- case "3":
605
- case "4":
606
- case "5":
607
- case "6":
608
- case "7":
609
- case "8":
610
- case "9": {
611
- lastValidIndex = i;
612
- break;
613
- }
614
- case "e":
615
- case "E":
616
- case "-":
617
- case ".": {
618
- break;
619
- }
620
- case ",": {
621
- stack.pop();
622
- if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
623
- processAfterArrayValue(char, i);
624
- }
625
- if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
626
- processAfterObjectValue(char, i);
627
- }
628
- break;
629
- }
630
- case "}": {
631
- stack.pop();
632
- if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
633
- processAfterObjectValue(char, i);
634
- }
635
- break;
636
- }
637
- case "]": {
638
- stack.pop();
639
- if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
640
- processAfterArrayValue(char, i);
641
- }
642
- break;
643
- }
644
- default: {
645
- stack.pop();
646
- break;
647
- }
648
- }
649
- break;
650
- }
651
- case "INSIDE_LITERAL": {
652
- const partialLiteral = input.substring(literalStart, i + 1);
653
- if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
654
- stack.pop();
655
- if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
656
- processAfterObjectValue(char, i);
657
- } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
658
- processAfterArrayValue(char, i);
659
- }
660
- } else {
661
- lastValidIndex = i;
662
- }
663
- break;
664
- }
665
- }
666
- }
667
- let result = input.slice(0, lastValidIndex + 1);
668
- for (let i = stack.length - 1; i >= 0; i--) {
669
- const state = stack[i];
670
- switch (state) {
671
- case "INSIDE_STRING": {
672
- result += '"';
673
- break;
674
- }
675
- case "INSIDE_OBJECT_KEY":
676
- case "INSIDE_OBJECT_AFTER_KEY":
677
- case "INSIDE_OBJECT_AFTER_COMMA":
678
- case "INSIDE_OBJECT_START":
679
- case "INSIDE_OBJECT_BEFORE_VALUE":
680
- case "INSIDE_OBJECT_AFTER_VALUE": {
681
- result += "}";
682
- break;
683
- }
684
- case "INSIDE_ARRAY_START":
685
- case "INSIDE_ARRAY_AFTER_COMMA":
686
- case "INSIDE_ARRAY_AFTER_VALUE": {
687
- result += "]";
688
- break;
689
- }
690
- case "INSIDE_LITERAL": {
691
- const partialLiteral = input.substring(literalStart, input.length);
692
- if ("true".startsWith(partialLiteral)) {
693
- result += "true".slice(partialLiteral.length);
694
- } else if ("false".startsWith(partialLiteral)) {
695
- result += "false".slice(partialLiteral.length);
696
- } else if ("null".startsWith(partialLiteral)) {
697
- result += "null".slice(partialLiteral.length);
698
- }
699
- }
700
- }
701
- }
702
- return result;
703
- }
704
-
705
- // src/util/parse-partial-json.ts
706
- async function parsePartialJson(jsonText) {
707
- if (jsonText === void 0) {
708
- return { value: void 0, state: "undefined-input" };
709
- }
710
- let result = await (0, import_provider_utils.safeParseJSON)({ text: jsonText });
711
- if (result.success) {
712
- return { value: result.value, state: "successful-parse" };
713
- }
714
- result = await (0, import_provider_utils.safeParseJSON)({ text: fixJson(jsonText) });
715
- if (result.success) {
716
- return { value: result.value, state: "repaired-parse" };
717
- }
718
- return { value: void 0, state: "failed-parse" };
719
- }
720
-
721
- // src/ui/ui-messages.ts
722
- function isToolUIPart(part) {
723
- return part.type.startsWith("tool-");
724
- }
725
- function getToolName(part) {
726
- return part.type.split("-").slice(1).join("-");
727
- }
728
-
729
- // src/ui/process-ui-message-stream.ts
730
- function createStreamingUIMessageState({
731
- lastMessage,
732
- messageId
733
- }) {
734
- return {
735
- message: (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage : {
736
- id: messageId,
737
- metadata: void 0,
738
- role: "assistant",
739
- parts: []
740
- },
741
- activeTextParts: {},
742
- activeReasoningParts: {},
743
- partialToolCalls: {}
744
- };
745
- }
746
- function processUIMessageStream({
747
- stream,
748
- onToolCall,
749
- messageMetadataSchema,
750
- dataPartSchemas,
751
- runUpdateMessageJob,
752
- onError,
753
- onData
754
- }) {
755
- return stream.pipeThrough(
756
- new TransformStream({
757
- async transform(chunk, controller) {
758
- await runUpdateMessageJob(async ({ state, write }) => {
759
- var _a9, _b, _c, _d;
760
- function updateToolInvocationPart(options) {
761
- var _a10;
762
- const part = state.message.parts.find(
763
- (part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
764
- );
765
- const anyOptions = options;
766
- const anyPart = part;
767
- if (part != null) {
768
- part.state = options.state;
769
- anyPart.input = anyOptions.input;
770
- anyPart.output = anyOptions.output;
771
- anyPart.errorText = anyOptions.errorText;
772
- anyPart.providerExecuted = (_a10 = anyOptions.providerExecuted) != null ? _a10 : part.providerExecuted;
773
- if (anyOptions.providerMetadata != null && part.state === "input-available") {
774
- part.callProviderMetadata = anyOptions.providerMetadata;
775
- }
776
- } else {
777
- state.message.parts.push({
778
- type: `tool-${options.toolName}`,
779
- toolCallId: options.toolCallId,
780
- state: options.state,
781
- input: anyOptions.input,
782
- output: anyOptions.output,
783
- errorText: anyOptions.errorText,
784
- providerExecuted: anyOptions.providerExecuted,
785
- ...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
786
- });
787
- }
788
- }
789
- async function updateMessageMetadata(metadata) {
790
- if (metadata != null) {
791
- const mergedMetadata = state.message.metadata != null ? mergeObjects(state.message.metadata, metadata) : metadata;
792
- if (messageMetadataSchema != null) {
793
- await (0, import_provider_utils2.validateTypes)({
794
- value: mergedMetadata,
795
- schema: messageMetadataSchema
796
- });
797
- }
798
- state.message.metadata = mergedMetadata;
799
- }
800
- }
801
- switch (chunk.type) {
802
- case "text-start": {
803
- const textPart = {
804
- type: "text",
805
- text: "",
806
- providerMetadata: chunk.providerMetadata,
807
- state: "streaming"
808
- };
809
- state.activeTextParts[chunk.id] = textPart;
810
- state.message.parts.push(textPart);
811
- write();
812
- break;
813
- }
814
- case "text-delta": {
815
- const textPart = state.activeTextParts[chunk.id];
816
- textPart.text += chunk.delta;
817
- textPart.providerMetadata = (_a9 = chunk.providerMetadata) != null ? _a9 : textPart.providerMetadata;
818
- write();
819
- break;
820
- }
821
- case "text-end": {
822
- const textPart = state.activeTextParts[chunk.id];
823
- textPart.state = "done";
824
- textPart.providerMetadata = (_b = chunk.providerMetadata) != null ? _b : textPart.providerMetadata;
825
- delete state.activeTextParts[chunk.id];
826
- write();
827
- break;
828
- }
829
- case "reasoning-start": {
830
- const reasoningPart = {
831
- type: "reasoning",
832
- text: "",
833
- providerMetadata: chunk.providerMetadata,
834
- state: "streaming"
835
- };
836
- state.activeReasoningParts[chunk.id] = reasoningPart;
837
- state.message.parts.push(reasoningPart);
838
- write();
839
- break;
840
- }
841
- case "reasoning-delta": {
842
- const reasoningPart = state.activeReasoningParts[chunk.id];
843
- reasoningPart.text += chunk.delta;
844
- reasoningPart.providerMetadata = (_c = chunk.providerMetadata) != null ? _c : reasoningPart.providerMetadata;
845
- write();
846
- break;
847
- }
848
- case "reasoning-end": {
849
- const reasoningPart = state.activeReasoningParts[chunk.id];
850
- reasoningPart.providerMetadata = (_d = chunk.providerMetadata) != null ? _d : reasoningPart.providerMetadata;
851
- reasoningPart.state = "done";
852
- delete state.activeReasoningParts[chunk.id];
853
- write();
854
- break;
855
- }
856
- case "file": {
857
- state.message.parts.push({
858
- type: "file",
859
- mediaType: chunk.mediaType,
860
- url: chunk.url
861
- });
862
- write();
863
- break;
864
- }
865
- case "source-url": {
866
- state.message.parts.push({
867
- type: "source-url",
868
- sourceId: chunk.sourceId,
869
- url: chunk.url,
870
- title: chunk.title,
871
- providerMetadata: chunk.providerMetadata
872
- });
873
- write();
874
- break;
875
- }
876
- case "source-document": {
877
- state.message.parts.push({
878
- type: "source-document",
879
- sourceId: chunk.sourceId,
880
- mediaType: chunk.mediaType,
881
- title: chunk.title,
882
- filename: chunk.filename,
883
- providerMetadata: chunk.providerMetadata
884
- });
885
- write();
886
- break;
887
- }
888
- case "tool-input-start": {
889
- const toolInvocations = state.message.parts.filter(isToolUIPart);
890
- state.partialToolCalls[chunk.toolCallId] = {
891
- text: "",
892
- toolName: chunk.toolName,
893
- index: toolInvocations.length
894
- };
895
- updateToolInvocationPart({
896
- toolCallId: chunk.toolCallId,
897
- toolName: chunk.toolName,
898
- state: "input-streaming",
899
- input: void 0,
900
- providerExecuted: chunk.providerExecuted
901
- });
902
- write();
903
- break;
904
- }
905
- case "tool-input-delta": {
906
- const partialToolCall = state.partialToolCalls[chunk.toolCallId];
907
- partialToolCall.text += chunk.inputTextDelta;
908
- const { value: partialArgs } = await parsePartialJson(
909
- partialToolCall.text
910
- );
911
- updateToolInvocationPart({
912
- toolCallId: chunk.toolCallId,
913
- toolName: partialToolCall.toolName,
914
- state: "input-streaming",
915
- input: partialArgs
916
- });
917
- write();
918
- break;
919
- }
920
- case "tool-input-available": {
921
- updateToolInvocationPart({
922
- toolCallId: chunk.toolCallId,
923
- toolName: chunk.toolName,
924
- state: "input-available",
925
- input: chunk.input,
926
- providerExecuted: chunk.providerExecuted,
927
- providerMetadata: chunk.providerMetadata
928
- });
929
- write();
930
- if (onToolCall && !chunk.providerExecuted) {
931
- const result = await onToolCall({
932
- toolCall: chunk
933
- });
934
- if (result != null) {
935
- updateToolInvocationPart({
936
- toolCallId: chunk.toolCallId,
937
- toolName: chunk.toolName,
938
- state: "output-available",
939
- input: chunk.input,
940
- output: result
941
- });
942
- write();
943
- }
944
- }
945
- break;
946
- }
947
- case "tool-output-available": {
948
- const toolInvocations = state.message.parts.filter(isToolUIPart);
949
- if (toolInvocations == null) {
950
- throw new Error("tool_result must be preceded by a tool_call");
951
- }
952
- const toolInvocationIndex = toolInvocations.findIndex(
953
- (invocation) => invocation.toolCallId === chunk.toolCallId
954
- );
955
- if (toolInvocationIndex === -1) {
956
- throw new Error(
957
- "tool_result must be preceded by a tool_call with the same toolCallId"
958
- );
959
- }
960
- const toolName = getToolName(
961
- toolInvocations[toolInvocationIndex]
962
- );
963
- updateToolInvocationPart({
964
- toolCallId: chunk.toolCallId,
965
- toolName,
966
- state: "output-available",
967
- input: toolInvocations[toolInvocationIndex].input,
968
- output: chunk.output,
969
- providerExecuted: chunk.providerExecuted
970
- });
971
- write();
972
- break;
973
- }
974
- case "tool-output-error": {
975
- const toolInvocations = state.message.parts.filter(isToolUIPart);
976
- if (toolInvocations == null) {
977
- throw new Error("tool_result must be preceded by a tool_call");
978
- }
979
- const toolInvocationIndex = toolInvocations.findIndex(
980
- (invocation) => invocation.toolCallId === chunk.toolCallId
981
- );
982
- if (toolInvocationIndex === -1) {
983
- throw new Error(
984
- "tool_result must be preceded by a tool_call with the same toolCallId"
985
- );
986
- }
987
- const toolName = getToolName(
988
- toolInvocations[toolInvocationIndex]
989
- );
990
- updateToolInvocationPart({
991
- toolCallId: chunk.toolCallId,
992
- toolName,
993
- state: "output-error",
994
- input: toolInvocations[toolInvocationIndex].input,
995
- errorText: chunk.errorText,
996
- providerExecuted: chunk.providerExecuted
997
- });
998
- write();
999
- break;
1000
- }
1001
- case "start-step": {
1002
- state.message.parts.push({ type: "step-start" });
1003
- break;
1004
- }
1005
- case "finish-step": {
1006
- state.activeTextParts = {};
1007
- state.activeReasoningParts = {};
1008
- break;
1009
- }
1010
- case "start": {
1011
- if (chunk.messageId != null) {
1012
- state.message.id = chunk.messageId;
1013
- }
1014
- await updateMessageMetadata(chunk.messageMetadata);
1015
- if (chunk.messageId != null || chunk.messageMetadata != null) {
1016
- write();
1017
- }
1018
- break;
1019
- }
1020
- case "finish": {
1021
- await updateMessageMetadata(chunk.messageMetadata);
1022
- if (chunk.messageMetadata != null) {
1023
- write();
1024
- }
1025
- break;
1026
- }
1027
- case "message-metadata": {
1028
- await updateMessageMetadata(chunk.messageMetadata);
1029
- if (chunk.messageMetadata != null) {
1030
- write();
1031
- }
1032
- break;
1033
- }
1034
- case "error": {
1035
- onError == null ? void 0 : onError(new Error(chunk.errorText));
1036
- break;
1037
- }
1038
- default: {
1039
- if (isDataUIMessageChunk(chunk)) {
1040
- if ((dataPartSchemas == null ? void 0 : dataPartSchemas[chunk.type]) != null) {
1041
- await (0, import_provider_utils2.validateTypes)({
1042
- value: chunk.data,
1043
- schema: dataPartSchemas[chunk.type]
1044
- });
1045
- }
1046
- const dataChunk = chunk;
1047
- if (dataChunk.transient) {
1048
- onData == null ? void 0 : onData(dataChunk);
1049
- break;
1050
- }
1051
- const existingUIPart = dataChunk.id != null ? state.message.parts.find(
1052
- (chunkArg) => dataChunk.type === chunkArg.type && dataChunk.id === chunkArg.id
1053
- ) : void 0;
1054
- if (existingUIPart != null) {
1055
- existingUIPart.data = dataChunk.data;
1056
- } else {
1057
- state.message.parts.push(dataChunk);
1058
- }
1059
- onData == null ? void 0 : onData(dataChunk);
1060
- write();
1061
- }
1062
- }
1063
- }
1064
- controller.enqueue(chunk);
1065
- });
1066
- }
1067
- })
1068
- );
1069
- }
1070
-
1071
- // src/ui-message-stream/handle-ui-message-stream-finish.ts
1072
- function handleUIMessageStreamFinish({
1073
- messageId,
1074
- originalMessages = [],
1075
- onFinish,
1076
- onError,
1077
- stream
1078
- }) {
1079
- let lastMessage = originalMessages == null ? void 0 : originalMessages[originalMessages.length - 1];
1080
- if ((lastMessage == null ? void 0 : lastMessage.role) !== "assistant") {
1081
- lastMessage = void 0;
1082
- } else {
1083
- messageId = lastMessage.id;
1084
- }
1085
- let isAborted = false;
1086
- const idInjectedStream = stream.pipeThrough(
1087
- new TransformStream({
1088
- transform(chunk, controller) {
1089
- if (chunk.type === "start") {
1090
- const startChunk = chunk;
1091
- if (startChunk.messageId == null && messageId != null) {
1092
- startChunk.messageId = messageId;
1093
- }
1094
- }
1095
- if (chunk.type === "abort") {
1096
- isAborted = true;
1097
- }
1098
- controller.enqueue(chunk);
1099
- }
1100
- })
1101
- );
1102
- if (onFinish == null) {
1103
- return idInjectedStream;
1104
- }
1105
- const state = createStreamingUIMessageState({
1106
- lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
1107
- messageId: messageId != null ? messageId : ""
1108
- // will be overridden by the stream
1109
- });
1110
- const runUpdateMessageJob = async (job) => {
1111
- await job({ state, write: () => {
1112
- } });
1113
- };
1114
- return processUIMessageStream({
1115
- stream: idInjectedStream,
1116
- runUpdateMessageJob,
1117
- onError
1118
- }).pipeThrough(
1119
- new TransformStream({
1120
- transform(chunk, controller) {
1121
- controller.enqueue(chunk);
1122
- },
1123
- async flush() {
1124
- const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
1125
- await onFinish({
1126
- isAborted,
1127
- isContinuation,
1128
- responseMessage: state.message,
1129
- messages: [
1130
- ...isContinuation ? originalMessages.slice(0, -1) : originalMessages,
1131
- state.message
1132
- ]
1133
- });
1134
- }
1135
- })
1136
- );
1137
- }
1138
-
1139
- // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
1140
- function pipeUIMessageStreamToResponse({
1141
- response,
1142
- status,
1143
- statusText,
1144
- headers,
1145
- stream,
1146
- consumeSseStream
1147
- }) {
1148
- let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
1149
- if (consumeSseStream) {
1150
- const [stream1, stream2] = sseStream.tee();
1151
- sseStream = stream1;
1152
- consumeSseStream({ stream: stream2 });
1153
- }
1154
- writeToServerResponse({
1155
- response,
1156
- status,
1157
- statusText,
1158
- headers: Object.fromEntries(
1159
- prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
1160
- ),
1161
- stream: sseStream.pipeThrough(new TextEncoderStream())
1162
- });
1163
- }
1164
-
1165
- // src/util/as-array.ts
1166
- function asArray(value) {
1167
- return value === void 0 ? [] : Array.isArray(value) ? value : [value];
1168
- }
1169
-
1170
- // src/util/async-iterable-stream.ts
1171
- function createAsyncIterableStream(source) {
1172
- const stream = source.pipeThrough(new TransformStream());
1173
- stream[Symbol.asyncIterator] = () => {
1174
- const reader = stream.getReader();
1175
- return {
1176
- async next() {
1177
- const { done, value } = await reader.read();
1178
- return done ? { done: true, value: void 0 } : { done: false, value };
1179
- }
1180
- };
1181
- };
1182
- return stream;
1183
- }
1184
-
1185
- // src/util/consume-stream.ts
1186
- async function consumeStream({
1187
- stream,
1188
- onError
1189
- }) {
1190
- const reader = stream.getReader();
1191
- try {
1192
- while (true) {
1193
- const { done } = await reader.read();
1194
- if (done)
1195
- break;
1196
- }
1197
- } catch (error) {
1198
- onError == null ? void 0 : onError(error);
1199
- } finally {
1200
- reader.releaseLock();
1201
- }
1202
- }
1203
-
1204
- // src/util/create-resolvable-promise.ts
1205
- function createResolvablePromise() {
1206
- let resolve2;
1207
- let reject;
1208
- const promise = new Promise((res, rej) => {
1209
- resolve2 = res;
1210
- reject = rej;
1211
- });
1212
- return {
1213
- promise,
1214
- resolve: resolve2,
1215
- reject
1216
- };
1217
- }
1218
-
1219
- // src/util/create-stitchable-stream.ts
1220
- function createStitchableStream() {
1221
- let innerStreamReaders = [];
1222
- let controller = null;
1223
- let isClosed = false;
1224
- let waitForNewStream = createResolvablePromise();
1225
- const terminate = () => {
1226
- isClosed = true;
1227
- waitForNewStream.resolve();
1228
- innerStreamReaders.forEach((reader) => reader.cancel());
1229
- innerStreamReaders = [];
1230
- controller == null ? void 0 : controller.close();
1231
- };
1232
- const processPull = async () => {
1233
- if (isClosed && innerStreamReaders.length === 0) {
1234
- controller == null ? void 0 : controller.close();
1235
- return;
1236
- }
1237
- if (innerStreamReaders.length === 0) {
1238
- waitForNewStream = createResolvablePromise();
1239
- await waitForNewStream.promise;
1240
- return processPull();
1241
- }
1242
- try {
1243
- const { value, done } = await innerStreamReaders[0].read();
1244
- if (done) {
1245
- innerStreamReaders.shift();
1246
- if (innerStreamReaders.length > 0) {
1247
- await processPull();
1248
- } else if (isClosed) {
1249
- controller == null ? void 0 : controller.close();
1250
- }
1251
- } else {
1252
- controller == null ? void 0 : controller.enqueue(value);
1253
- }
1254
- } catch (error) {
1255
- controller == null ? void 0 : controller.error(error);
1256
- innerStreamReaders.shift();
1257
- terminate();
1258
- }
1259
- };
1260
- return {
1261
- stream: new ReadableStream({
1262
- start(controllerParam) {
1263
- controller = controllerParam;
1264
- },
1265
- pull: processPull,
1266
- async cancel() {
1267
- for (const reader of innerStreamReaders) {
1268
- await reader.cancel();
1269
- }
1270
- innerStreamReaders = [];
1271
- isClosed = true;
1272
- }
1273
- }),
1274
- addStream: (innerStream) => {
1275
- if (isClosed) {
1276
- throw new Error("Cannot add inner stream: outer stream is closed");
1277
- }
1278
- innerStreamReaders.push(innerStream.getReader());
1279
- waitForNewStream.resolve();
1280
- },
1281
- /**
1282
- * Gracefully close the outer stream. This will let the inner streams
1283
- * finish processing and then close the outer stream.
1284
- */
1285
- close: () => {
1286
- isClosed = true;
1287
- waitForNewStream.resolve();
1288
- if (innerStreamReaders.length === 0) {
1289
- controller == null ? void 0 : controller.close();
1290
- }
1291
- },
1292
- /**
1293
- * Immediately close the outer stream. This will cancel all inner streams
1294
- * and close the outer stream.
1295
- */
1296
- terminate
1297
- };
1298
- }
1299
-
1300
- // src/util/delayed-promise.ts
1301
- var DelayedPromise = class {
1302
- constructor() {
1303
- this.status = { type: "pending" };
1304
- this._resolve = void 0;
1305
- this._reject = void 0;
1306
- }
1307
- get promise() {
1308
- if (this._promise) {
1309
- return this._promise;
1310
- }
1311
- this._promise = new Promise((resolve2, reject) => {
1312
- if (this.status.type === "resolved") {
1313
- resolve2(this.status.value);
1314
- } else if (this.status.type === "rejected") {
1315
- reject(this.status.error);
1316
- }
1317
- this._resolve = resolve2;
1318
- this._reject = reject;
1319
- });
1320
- return this._promise;
1321
- }
1322
- resolve(value) {
1323
- var _a9;
1324
- this.status = { type: "resolved", value };
1325
- if (this._promise) {
1326
- (_a9 = this._resolve) == null ? void 0 : _a9.call(this, value);
1327
- }
1328
- }
1329
- reject(error) {
1330
- var _a9;
1331
- this.status = { type: "rejected", error };
1332
- if (this._promise) {
1333
- (_a9 = this._reject) == null ? void 0 : _a9.call(this, error);
1334
- }
1335
- }
1336
- };
1337
-
1338
- // src/util/now.ts
1339
- function now() {
1340
- var _a9, _b;
1341
- return (_b = (_a9 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a9.now()) != null ? _b : Date.now();
1342
- }
1343
-
1344
- // src/error/invalid-argument-error.ts
1345
- var import_provider2 = require("@ai-sdk/provider");
1346
- var name2 = "AI_InvalidArgumentError";
1347
- var marker2 = `vercel.ai.error.${name2}`;
1348
- var symbol2 = Symbol.for(marker2);
1349
- var _a2;
1350
- var InvalidArgumentError = class extends import_provider2.AISDKError {
1351
- constructor({
1352
- parameter,
1353
- value,
1354
- message
1355
- }) {
1356
- super({
1357
- name: name2,
1358
- message: `Invalid argument for parameter ${parameter}: ${message}`
1359
- });
1360
- this[_a2] = true;
1361
- this.parameter = parameter;
1362
- this.value = value;
1363
- }
1364
- static isInstance(error) {
1365
- return import_provider2.AISDKError.hasMarker(error, marker2);
1366
- }
1367
- };
1368
- _a2 = symbol2;
1369
-
1370
- // src/util/retry-with-exponential-backoff.ts
1371
- var import_provider4 = require("@ai-sdk/provider");
1372
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
1373
-
1374
- // src/util/retry-error.ts
1375
- var import_provider3 = require("@ai-sdk/provider");
1376
- var name3 = "AI_RetryError";
1377
- var marker3 = `vercel.ai.error.${name3}`;
1378
- var symbol3 = Symbol.for(marker3);
1379
- var _a3;
1380
- var RetryError = class extends import_provider3.AISDKError {
1381
- constructor({
1382
- message,
1383
- reason,
1384
- errors
1385
- }) {
1386
- super({ name: name3, message });
1387
- this[_a3] = true;
1388
- this.reason = reason;
1389
- this.errors = errors;
1390
- this.lastError = errors[errors.length - 1];
1391
- }
1392
- static isInstance(error) {
1393
- return import_provider3.AISDKError.hasMarker(error, marker3);
1394
- }
1395
- };
1396
- _a3 = symbol3;
1397
-
1398
- // src/util/retry-with-exponential-backoff.ts
1399
- function getRetryDelay(error, exponentialBackoffDelay) {
1400
- const headers = error.responseHeaders;
1401
- if (!headers)
1402
- return exponentialBackoffDelay;
1403
- let timeoutMillis;
1404
- const retryAfterMs = headers["retry-after-ms"];
1405
- if (retryAfterMs) {
1406
- const timeoutMs = parseFloat(retryAfterMs);
1407
- if (!Number.isNaN(timeoutMs)) {
1408
- timeoutMillis = timeoutMs;
1409
- }
1410
- }
1411
- const retryAfter = headers["retry-after"];
1412
- if (retryAfter && timeoutMillis === void 0) {
1413
- const timeoutSeconds = parseFloat(retryAfter);
1414
- if (!Number.isNaN(timeoutSeconds)) {
1415
- timeoutMillis = timeoutSeconds * 1e3;
1416
- } else {
1417
- timeoutMillis = Date.parse(retryAfter) - Date.now();
1418
- }
1419
- }
1420
- if (timeoutMillis !== void 0 && 0 <= timeoutMillis && timeoutMillis < 60 * 1e3) {
1421
- return timeoutMillis;
1422
- }
1423
- return exponentialBackoffDelay;
1424
- }
1425
- var retryWithExponentialBackoffRespectingRetryHeaders = ({
1426
- maxRetries = 2,
1427
- initialDelayInMs = 2e3,
1428
- backoffFactor = 2
1429
- } = {}) => async (f) => _retryWithExponentialBackoff(f, {
1430
- maxRetries,
1431
- delayInMs: initialDelayInMs,
1432
- backoffFactor
1433
- });
1434
- async function _retryWithExponentialBackoff(f, {
1435
- maxRetries,
1436
- delayInMs,
1437
- backoffFactor
1438
- }, errors = []) {
1439
- try {
1440
- return await f();
1441
- } catch (error) {
1442
- if ((0, import_provider_utils3.isAbortError)(error)) {
1443
- throw error;
1444
- }
1445
- if (maxRetries === 0) {
1446
- throw error;
1447
- }
1448
- const errorMessage = (0, import_provider_utils3.getErrorMessage)(error);
1449
- const newErrors = [...errors, error];
1450
- const tryNumber = newErrors.length;
1451
- if (tryNumber > maxRetries) {
1452
- throw new RetryError({
1453
- message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
1454
- reason: "maxRetriesExceeded",
1455
- errors: newErrors
1456
- });
1457
- }
1458
- if (error instanceof Error && import_provider4.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
1459
- const actualDelay = getRetryDelay(error, delayInMs);
1460
- await (0, import_provider_utils3.delay)(actualDelay);
1461
- return _retryWithExponentialBackoff(
1462
- f,
1463
- { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
1464
- newErrors
1465
- );
1466
- }
1467
- if (tryNumber === 1) {
1468
- throw error;
1469
- }
1470
- throw new RetryError({
1471
- message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
1472
- reason: "errorNotRetryable",
1473
- errors: newErrors
1474
- });
1475
- }
1476
- }
1477
-
1478
- // src/util/prepare-retries.ts
1479
- function prepareRetries({
1480
- maxRetries
1481
- }) {
1482
- if (maxRetries != null) {
1483
- if (!Number.isInteger(maxRetries)) {
1484
- throw new InvalidArgumentError({
1485
- parameter: "maxRetries",
1486
- value: maxRetries,
1487
- message: "maxRetries must be an integer"
1488
- });
1489
- }
1490
- if (maxRetries < 0) {
1491
- throw new InvalidArgumentError({
1492
- parameter: "maxRetries",
1493
- value: maxRetries,
1494
- message: "maxRetries must be >= 0"
1495
- });
1496
- }
1497
- }
1498
- const maxRetriesResult = maxRetries != null ? maxRetries : 2;
1499
- return {
1500
- maxRetries: maxRetriesResult,
1501
- retry: retryWithExponentialBackoffRespectingRetryHeaders({
1502
- maxRetries: maxRetriesResult
1503
- })
1504
- };
1505
- }
1506
-
1507
- // src/prompt/convert-to-language-model-prompt.ts
1508
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
1509
-
1510
- // src/util/detect-media-type.ts
1511
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
1512
- var imageMediaTypeSignatures = [
1513
- {
1514
- mediaType: "image/gif",
1515
- bytesPrefix: [71, 73, 70],
1516
- base64Prefix: "R0lG"
1517
- },
1518
- {
1519
- mediaType: "image/png",
1520
- bytesPrefix: [137, 80, 78, 71],
1521
- base64Prefix: "iVBORw"
1522
- },
1523
- {
1524
- mediaType: "image/jpeg",
1525
- bytesPrefix: [255, 216],
1526
- base64Prefix: "/9j/"
1527
- },
1528
- {
1529
- mediaType: "image/webp",
1530
- bytesPrefix: [82, 73, 70, 70],
1531
- base64Prefix: "UklGRg"
1532
- },
1533
- {
1534
- mediaType: "image/bmp",
1535
- bytesPrefix: [66, 77],
1536
- base64Prefix: "Qk"
1537
- },
1538
- {
1539
- mediaType: "image/tiff",
1540
- bytesPrefix: [73, 73, 42, 0],
1541
- base64Prefix: "SUkqAA"
1542
- },
1543
- {
1544
- mediaType: "image/tiff",
1545
- bytesPrefix: [77, 77, 0, 42],
1546
- base64Prefix: "TU0AKg"
1547
- },
1548
- {
1549
- mediaType: "image/avif",
1550
- bytesPrefix: [
1551
- 0,
1552
- 0,
1553
- 0,
1554
- 32,
1555
- 102,
1556
- 116,
1557
- 121,
1558
- 112,
1559
- 97,
1560
- 118,
1561
- 105,
1562
- 102
1563
- ],
1564
- base64Prefix: "AAAAIGZ0eXBhdmlm"
1565
- },
1566
- {
1567
- mediaType: "image/heic",
1568
- bytesPrefix: [
1569
- 0,
1570
- 0,
1571
- 0,
1572
- 32,
1573
- 102,
1574
- 116,
1575
- 121,
1576
- 112,
1577
- 104,
1578
- 101,
1579
- 105,
1580
- 99
1581
- ],
1582
- base64Prefix: "AAAAIGZ0eXBoZWlj"
1583
- }
1584
- ];
1585
- var stripID3 = (data) => {
1586
- const bytes = typeof data === "string" ? (0, import_provider_utils4.convertBase64ToUint8Array)(data) : data;
1587
- const id3Size = (bytes[6] & 127) << 21 | (bytes[7] & 127) << 14 | (bytes[8] & 127) << 7 | bytes[9] & 127;
1588
- return bytes.slice(id3Size + 10);
1589
- };
1590
- function stripID3TagsIfPresent(data) {
1591
- const hasId3 = typeof data === "string" && data.startsWith("SUQz") || typeof data !== "string" && data.length > 10 && data[0] === 73 && // 'I'
1592
- data[1] === 68 && // 'D'
1593
- data[2] === 51;
1594
- return hasId3 ? stripID3(data) : data;
1595
- }
1596
- function detectMediaType({
1597
- data,
1598
- signatures
1599
- }) {
1600
- const processedData = stripID3TagsIfPresent(data);
1601
- for (const signature of signatures) {
1602
- if (typeof processedData === "string" ? processedData.startsWith(signature.base64Prefix) : processedData.length >= signature.bytesPrefix.length && signature.bytesPrefix.every(
1603
- (byte, index) => processedData[index] === byte
1604
- )) {
1605
- return signature.mediaType;
1606
- }
1607
- }
1608
- return void 0;
1609
- }
1610
-
1611
- // src/util/download-error.ts
1612
- var import_provider5 = require("@ai-sdk/provider");
1613
- var name4 = "AI_DownloadError";
1614
- var marker4 = `vercel.ai.error.${name4}`;
1615
- var symbol4 = Symbol.for(marker4);
1616
- var _a4;
1617
- var DownloadError = class extends import_provider5.AISDKError {
1618
- constructor({
1619
- url,
1620
- statusCode,
1621
- statusText,
1622
- cause,
1623
- message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
1624
- }) {
1625
- super({ name: name4, message, cause });
1626
- this[_a4] = true;
1627
- this.url = url;
1628
- this.statusCode = statusCode;
1629
- this.statusText = statusText;
1630
- }
1631
- static isInstance(error) {
1632
- return import_provider5.AISDKError.hasMarker(error, marker4);
1633
- }
1634
- };
1635
- _a4 = symbol4;
1636
-
1637
- // src/util/download.ts
1638
- async function download({ url }) {
1639
- var _a9;
1640
- const urlText = url.toString();
1641
- try {
1642
- const response = await fetch(urlText);
1643
- if (!response.ok) {
1644
- throw new DownloadError({
1645
- url: urlText,
1646
- statusCode: response.status,
1647
- statusText: response.statusText
1648
- });
1649
- }
1650
- return {
1651
- data: new Uint8Array(await response.arrayBuffer()),
1652
- mediaType: (_a9 = response.headers.get("content-type")) != null ? _a9 : void 0
1653
- };
1654
- } catch (error) {
1655
- if (DownloadError.isInstance(error)) {
1656
- throw error;
1657
- }
1658
- throw new DownloadError({ url: urlText, cause: error });
1659
- }
1660
- }
1661
-
1662
- // src/prompt/data-content.ts
1663
- var import_provider6 = require("@ai-sdk/provider");
1664
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
1665
- var import_v44 = require("zod/v4");
1666
-
1667
- // src/prompt/split-data-url.ts
1668
- function splitDataUrl(dataUrl) {
1669
- try {
1670
- const [header, base64Content] = dataUrl.split(",");
1671
- return {
1672
- mediaType: header.split(";")[0].split(":")[1],
1673
- base64Content
1674
- };
1675
- } catch (error) {
1676
- return {
1677
- mediaType: void 0,
1678
- base64Content: void 0
1679
- };
1680
- }
1681
- }
1682
-
1683
- // src/prompt/data-content.ts
1684
- var dataContentSchema = import_v44.z.union([
1685
- import_v44.z.string(),
1686
- import_v44.z.instanceof(Uint8Array),
1687
- import_v44.z.instanceof(ArrayBuffer),
1688
- import_v44.z.custom(
1689
- // Buffer might not be available in some environments such as CloudFlare:
1690
- (value) => {
1691
- var _a9, _b;
1692
- return (_b = (_a9 = globalThis.Buffer) == null ? void 0 : _a9.isBuffer(value)) != null ? _b : false;
1693
- },
1694
- { message: "Must be a Buffer" }
1695
- )
1696
- ]);
1697
- function convertToLanguageModelV2DataContent(content) {
1698
- if (content instanceof Uint8Array) {
1699
- return { data: content, mediaType: void 0 };
1700
- }
1701
- if (content instanceof ArrayBuffer) {
1702
- return { data: new Uint8Array(content), mediaType: void 0 };
1703
- }
1704
- if (typeof content === "string") {
1705
- try {
1706
- content = new URL(content);
1707
- } catch (error) {
1708
- }
1709
- }
1710
- if (content instanceof URL && content.protocol === "data:") {
1711
- const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
1712
- content.toString()
1713
- );
1714
- if (dataUrlMediaType == null || base64Content == null) {
1715
- throw new import_provider6.AISDKError({
1716
- name: "InvalidDataContentError",
1717
- message: `Invalid data URL format in content ${content.toString()}`
1718
- });
1719
- }
1720
- return { data: base64Content, mediaType: dataUrlMediaType };
1721
- }
1722
- return { data: content, mediaType: void 0 };
1723
- }
1724
- function convertDataContentToBase64String(content) {
1725
- if (typeof content === "string") {
1726
- return content;
1727
- }
1728
- if (content instanceof ArrayBuffer) {
1729
- return (0, import_provider_utils5.convertUint8ArrayToBase64)(new Uint8Array(content));
1730
- }
1731
- return (0, import_provider_utils5.convertUint8ArrayToBase64)(content);
1732
- }
1733
-
1734
- // src/prompt/invalid-message-role-error.ts
1735
- var import_provider7 = require("@ai-sdk/provider");
1736
- var name5 = "AI_InvalidMessageRoleError";
1737
- var marker5 = `vercel.ai.error.${name5}`;
1738
- var symbol5 = Symbol.for(marker5);
1739
- var _a5;
1740
- var InvalidMessageRoleError = class extends import_provider7.AISDKError {
1741
- constructor({
1742
- role,
1743
- message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
1744
- }) {
1745
- super({ name: name5, message });
1746
- this[_a5] = true;
1747
- this.role = role;
1748
- }
1749
- static isInstance(error) {
1750
- return import_provider7.AISDKError.hasMarker(error, marker5);
1751
- }
1752
- };
1753
- _a5 = symbol5;
1754
-
1755
- // src/prompt/convert-to-language-model-prompt.ts
1756
- async function convertToLanguageModelPrompt({
1757
- prompt,
1758
- supportedUrls,
1759
- downloadImplementation = download
1760
- }) {
1761
- const downloadedAssets = await downloadAssets(
1762
- prompt.messages,
1763
- downloadImplementation,
1764
- supportedUrls
1765
- );
1766
- return [
1767
- ...prompt.system != null ? [{ role: "system", content: prompt.system }] : [],
1768
- ...prompt.messages.map(
1769
- (message) => convertToLanguageModelMessage({ message, downloadedAssets })
1770
- )
1771
- ];
1772
- }
1773
- function convertToLanguageModelMessage({
1774
- message,
1775
- downloadedAssets
1776
- }) {
1777
- const role = message.role;
1778
- switch (role) {
1779
- case "system": {
1780
- return {
1781
- role: "system",
1782
- content: message.content,
1783
- providerOptions: message.providerOptions
1784
- };
1785
- }
1786
- case "user": {
1787
- if (typeof message.content === "string") {
1788
- return {
1789
- role: "user",
1790
- content: [{ type: "text", text: message.content }],
1791
- providerOptions: message.providerOptions
1792
- };
1793
- }
1794
- return {
1795
- role: "user",
1796
- content: message.content.map((part) => convertPartToLanguageModelPart(part, downloadedAssets)).filter((part) => part.type !== "text" || part.text !== ""),
1797
- providerOptions: message.providerOptions
1798
- };
1799
- }
1800
- case "assistant": {
1801
- if (typeof message.content === "string") {
1802
- return {
1803
- role: "assistant",
1804
- content: [{ type: "text", text: message.content }],
1805
- providerOptions: message.providerOptions
1806
- };
1807
- }
1808
- return {
1809
- role: "assistant",
1810
- content: message.content.filter(
1811
- // remove empty text parts:
1812
- (part) => part.type !== "text" || part.text !== ""
1813
- ).map((part) => {
1814
- const providerOptions = part.providerOptions;
1815
- switch (part.type) {
1816
- case "file": {
1817
- const { data, mediaType } = convertToLanguageModelV2DataContent(
1818
- part.data
1819
- );
1820
- return {
1821
- type: "file",
1822
- data,
1823
- filename: part.filename,
1824
- mediaType: mediaType != null ? mediaType : part.mediaType,
1825
- providerOptions
1826
- };
1827
- }
1828
- case "reasoning": {
1829
- return {
1830
- type: "reasoning",
1831
- text: part.text,
1832
- providerOptions
1833
- };
1834
- }
1835
- case "text": {
1836
- return {
1837
- type: "text",
1838
- text: part.text,
1839
- providerOptions
1840
- };
1841
- }
1842
- case "tool-call": {
1843
- return {
1844
- type: "tool-call",
1845
- toolCallId: part.toolCallId,
1846
- toolName: part.toolName,
1847
- input: part.input,
1848
- providerExecuted: part.providerExecuted,
1849
- providerOptions
1850
- };
1851
- }
1852
- case "tool-result": {
1853
- return {
1854
- type: "tool-result",
1855
- toolCallId: part.toolCallId,
1856
- toolName: part.toolName,
1857
- output: part.output,
1858
- providerOptions
1859
- };
1860
- }
1861
- }
1862
- }),
1863
- providerOptions: message.providerOptions
1864
- };
1865
- }
1866
- case "tool": {
1867
- return {
1868
- role: "tool",
1869
- content: message.content.map((part) => ({
1870
- type: "tool-result",
1871
- toolCallId: part.toolCallId,
1872
- toolName: part.toolName,
1873
- output: part.output,
1874
- providerOptions: part.providerOptions
1875
- })),
1876
- providerOptions: message.providerOptions
1877
- };
1878
- }
1879
- default: {
1880
- const _exhaustiveCheck = role;
1881
- throw new InvalidMessageRoleError({ role: _exhaustiveCheck });
1882
- }
1883
- }
1884
- }
1885
- async function downloadAssets(messages, downloadImplementation, supportedUrls) {
1886
- const urls = messages.filter((message) => message.role === "user").map((message) => message.content).filter(
1887
- (content) => Array.isArray(content)
1888
- ).flat().filter(
1889
- (part) => part.type === "image" || part.type === "file"
1890
- ).map((part) => {
1891
- var _a9;
1892
- const mediaType = (_a9 = part.mediaType) != null ? _a9 : part.type === "image" ? "image/*" : void 0;
1893
- let data = part.type === "image" ? part.image : part.data;
1894
- if (typeof data === "string") {
1895
- try {
1896
- data = new URL(data);
1897
- } catch (ignored) {
1898
- }
1899
- }
1900
- return { mediaType, data };
1901
- }).filter(
1902
- (part) => part.data instanceof URL && part.mediaType != null && !(0, import_provider_utils6.isUrlSupported)({
1903
- url: part.data.toString(),
1904
- mediaType: part.mediaType,
1905
- supportedUrls
1906
- })
1907
- ).map((part) => part.data);
1908
- const downloadedImages = await Promise.all(
1909
- urls.map(async (url) => ({
1910
- url,
1911
- data: await downloadImplementation({ url })
1912
- }))
1913
- );
1914
- return Object.fromEntries(
1915
- downloadedImages.map(({ url, data }) => [url.toString(), data])
1916
- );
1917
- }
1918
- function convertPartToLanguageModelPart(part, downloadedAssets) {
1919
- var _a9;
1920
- if (part.type === "text") {
1921
- return {
1922
- type: "text",
1923
- text: part.text,
1924
- providerOptions: part.providerOptions
1925
- };
1926
- }
1927
- let originalData;
1928
- const type = part.type;
1929
- switch (type) {
1930
- case "image":
1931
- originalData = part.image;
1932
- break;
1933
- case "file":
1934
- originalData = part.data;
1935
- break;
1936
- default:
1937
- throw new Error(`Unsupported part type: ${type}`);
1938
- }
1939
- const { data: convertedData, mediaType: convertedMediaType } = convertToLanguageModelV2DataContent(originalData);
1940
- let mediaType = convertedMediaType != null ? convertedMediaType : part.mediaType;
1941
- let data = convertedData;
1942
- if (data instanceof URL) {
1943
- const downloadedFile = downloadedAssets[data.toString()];
1944
- if (downloadedFile) {
1945
- data = downloadedFile.data;
1946
- mediaType != null ? mediaType : mediaType = downloadedFile.mediaType;
1947
- }
1948
- }
1949
- switch (type) {
1950
- case "image": {
1951
- if (data instanceof Uint8Array || typeof data === "string") {
1952
- mediaType = (_a9 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a9 : mediaType;
1953
- }
1954
- return {
1955
- type: "file",
1956
- mediaType: mediaType != null ? mediaType : "image/*",
1957
- // any image
1958
- filename: void 0,
1959
- data,
1960
- providerOptions: part.providerOptions
1961
- };
1962
- }
1963
- case "file": {
1964
- if (mediaType == null) {
1965
- throw new Error(`Media type is missing for file part`);
1966
- }
1967
- return {
1968
- type: "file",
1969
- mediaType,
1970
- filename: part.filename,
1971
- data,
1972
- providerOptions: part.providerOptions
1973
- };
1974
- }
1975
- }
1976
- }
1977
-
1978
- // src/prompt/prepare-call-settings.ts
1979
- function prepareCallSettings({
1980
- maxOutputTokens,
1981
- temperature,
1982
- topP,
1983
- topK,
1984
- presencePenalty,
1985
- frequencyPenalty,
1986
- seed,
1987
- stopSequences
1988
- }) {
1989
- if (maxOutputTokens != null) {
1990
- if (!Number.isInteger(maxOutputTokens)) {
1991
- throw new InvalidArgumentError({
1992
- parameter: "maxOutputTokens",
1993
- value: maxOutputTokens,
1994
- message: "maxOutputTokens must be an integer"
1995
- });
1996
- }
1997
- if (maxOutputTokens < 1) {
1998
- throw new InvalidArgumentError({
1999
- parameter: "maxOutputTokens",
2000
- value: maxOutputTokens,
2001
- message: "maxOutputTokens must be >= 1"
2002
- });
2003
- }
2004
- }
2005
- if (temperature != null) {
2006
- if (typeof temperature !== "number") {
2007
- throw new InvalidArgumentError({
2008
- parameter: "temperature",
2009
- value: temperature,
2010
- message: "temperature must be a number"
2011
- });
2012
- }
2013
- }
2014
- if (topP != null) {
2015
- if (typeof topP !== "number") {
2016
- throw new InvalidArgumentError({
2017
- parameter: "topP",
2018
- value: topP,
2019
- message: "topP must be a number"
2020
- });
2021
- }
2022
- }
2023
- if (topK != null) {
2024
- if (typeof topK !== "number") {
2025
- throw new InvalidArgumentError({
2026
- parameter: "topK",
2027
- value: topK,
2028
- message: "topK must be a number"
2029
- });
2030
- }
2031
- }
2032
- if (presencePenalty != null) {
2033
- if (typeof presencePenalty !== "number") {
2034
- throw new InvalidArgumentError({
2035
- parameter: "presencePenalty",
2036
- value: presencePenalty,
2037
- message: "presencePenalty must be a number"
2038
- });
2039
- }
2040
- }
2041
- if (frequencyPenalty != null) {
2042
- if (typeof frequencyPenalty !== "number") {
2043
- throw new InvalidArgumentError({
2044
- parameter: "frequencyPenalty",
2045
- value: frequencyPenalty,
2046
- message: "frequencyPenalty must be a number"
2047
- });
2048
- }
2049
- }
2050
- if (seed != null) {
2051
- if (!Number.isInteger(seed)) {
2052
- throw new InvalidArgumentError({
2053
- parameter: "seed",
2054
- value: seed,
2055
- message: "seed must be an integer"
2056
- });
2057
- }
2058
- }
2059
- return {
2060
- maxOutputTokens,
2061
- temperature,
2062
- topP,
2063
- topK,
2064
- presencePenalty,
2065
- frequencyPenalty,
2066
- stopSequences,
2067
- seed
2068
- };
2069
- }
2070
-
2071
- // src/prompt/prepare-tools-and-tool-choice.ts
2072
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
2073
-
2074
- // src/util/is-non-empty-object.ts
2075
- function isNonEmptyObject(object) {
2076
- return object != null && Object.keys(object).length > 0;
2077
- }
2078
-
2079
- // src/prompt/prepare-tools-and-tool-choice.ts
2080
- function prepareToolsAndToolChoice({
2081
- tools,
2082
- toolChoice,
2083
- activeTools
2084
- }) {
2085
- if (!isNonEmptyObject(tools)) {
2086
- return {
2087
- tools: void 0,
2088
- toolChoice: void 0
2089
- };
2090
- }
2091
- const filteredTools = activeTools != null ? Object.entries(tools).filter(
2092
- ([name9]) => activeTools.includes(name9)
2093
- ) : Object.entries(tools);
2094
- return {
2095
- tools: filteredTools.map(([name9, tool]) => {
2096
- const toolType = tool.type;
2097
- switch (toolType) {
2098
- case void 0:
2099
- case "function":
2100
- return {
2101
- type: "function",
2102
- name: name9,
2103
- description: tool.description,
2104
- inputSchema: (0, import_provider_utils7.asSchema)(tool.inputSchema).jsonSchema
2105
- };
2106
- case "provider-defined":
2107
- return {
2108
- type: "provider-defined",
2109
- name: name9,
2110
- id: tool.id,
2111
- args: tool.args
2112
- };
2113
- default: {
2114
- const exhaustiveCheck = toolType;
2115
- throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);
2116
- }
2117
- }
2118
- }),
2119
- toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
2120
- };
2121
- }
2122
-
2123
- // src/prompt/resolve-language-model.ts
2124
- var import_gateway = require("@ai-sdk/gateway");
2125
-
2126
- // src/error/index.ts
2127
- var import_provider12 = require("@ai-sdk/provider");
2128
-
2129
- // src/error/invalid-tool-input-error.ts
2130
- var import_provider8 = require("@ai-sdk/provider");
2131
- var name6 = "AI_InvalidToolInputError";
2132
- var marker6 = `vercel.ai.error.${name6}`;
2133
- var symbol6 = Symbol.for(marker6);
2134
- var _a6;
2135
- var InvalidToolInputError = class extends import_provider8.AISDKError {
2136
- constructor({
2137
- toolInput,
2138
- toolName,
2139
- cause,
2140
- message = `Invalid input for tool ${toolName}: ${(0, import_provider8.getErrorMessage)(cause)}`
2141
- }) {
2142
- super({ name: name6, message, cause });
2143
- this[_a6] = true;
2144
- this.toolInput = toolInput;
2145
- this.toolName = toolName;
2146
- }
2147
- static isInstance(error) {
2148
- return import_provider8.AISDKError.hasMarker(error, marker6);
2149
- }
2150
- };
2151
- _a6 = symbol6;
2152
-
2153
- // src/error/no-such-tool-error.ts
2154
- var import_provider9 = require("@ai-sdk/provider");
2155
- var name7 = "AI_NoSuchToolError";
2156
- var marker7 = `vercel.ai.error.${name7}`;
2157
- var symbol7 = Symbol.for(marker7);
2158
- var _a7;
2159
- var NoSuchToolError = class extends import_provider9.AISDKError {
2160
- constructor({
2161
- toolName,
2162
- availableTools = void 0,
2163
- message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
2164
- }) {
2165
- super({ name: name7, message });
2166
- this[_a7] = true;
2167
- this.toolName = toolName;
2168
- this.availableTools = availableTools;
2169
- }
2170
- static isInstance(error) {
2171
- return import_provider9.AISDKError.hasMarker(error, marker7);
2172
- }
2173
- };
2174
- _a7 = symbol7;
2175
-
2176
- // src/error/tool-call-repair-error.ts
2177
- var import_provider10 = require("@ai-sdk/provider");
2178
- var name8 = "AI_ToolCallRepairError";
2179
- var marker8 = `vercel.ai.error.${name8}`;
2180
- var symbol8 = Symbol.for(marker8);
2181
- var _a8;
2182
- var ToolCallRepairError = class extends import_provider10.AISDKError {
2183
- constructor({
2184
- cause,
2185
- originalError,
2186
- message = `Error repairing tool call: ${(0, import_provider10.getErrorMessage)(cause)}`
2187
- }) {
2188
- super({ name: name8, message, cause });
2189
- this[_a8] = true;
2190
- this.originalError = originalError;
2191
- }
2192
- static isInstance(error) {
2193
- return import_provider10.AISDKError.hasMarker(error, marker8);
2194
- }
2195
- };
2196
- _a8 = symbol8;
2197
-
2198
- // src/error/unsupported-model-version-error.ts
2199
- var import_provider11 = require("@ai-sdk/provider");
2200
- var UnsupportedModelVersionError = class extends import_provider11.AISDKError {
2201
- constructor(options) {
2202
- super({
2203
- name: "AI_UnsupportedModelVersionError",
2204
- message: `Unsupported model version ${options.version} for provider "${options.provider}" and model "${options.modelId}". AI SDK 5 only supports models that implement specification version "v2".`
2205
- });
2206
- this.version = options.version;
2207
- this.provider = options.provider;
2208
- this.modelId = options.modelId;
2209
- }
2210
- };
2211
-
2212
- // src/prompt/resolve-language-model.ts
2213
- function resolveLanguageModel(model) {
2214
- if (typeof model !== "string") {
2215
- if (model.specificationVersion !== "v2") {
2216
- throw new UnsupportedModelVersionError({
2217
- version: model.specificationVersion,
2218
- provider: model.provider,
2219
- modelId: model.modelId
2220
- });
2221
- }
2222
- return model;
2223
- }
2224
- const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
2225
- return (globalProvider != null ? globalProvider : import_gateway.gateway).languageModel(model);
2226
- }
2227
-
2228
- // src/prompt/standardize-prompt.ts
2229
- var import_provider13 = require("@ai-sdk/provider");
2230
- var import_provider_utils8 = require("@ai-sdk/provider-utils");
2231
- var import_v47 = require("zod/v4");
2232
-
2233
- // src/prompt/message.ts
2234
- var import_v46 = require("zod/v4");
2235
-
2236
- // src/prompt/content-part.ts
2237
- var import_v45 = require("zod/v4");
2238
- var textPartSchema = import_v45.z.object({
2239
- type: import_v45.z.literal("text"),
2240
- text: import_v45.z.string(),
2241
- providerOptions: providerMetadataSchema.optional()
2242
- });
2243
- var imagePartSchema = import_v45.z.object({
2244
- type: import_v45.z.literal("image"),
2245
- image: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2246
- mediaType: import_v45.z.string().optional(),
2247
- providerOptions: providerMetadataSchema.optional()
2248
- });
2249
- var filePartSchema = import_v45.z.object({
2250
- type: import_v45.z.literal("file"),
2251
- data: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2252
- filename: import_v45.z.string().optional(),
2253
- mediaType: import_v45.z.string(),
2254
- providerOptions: providerMetadataSchema.optional()
2255
- });
2256
- var reasoningPartSchema = import_v45.z.object({
2257
- type: import_v45.z.literal("reasoning"),
2258
- text: import_v45.z.string(),
2259
- providerOptions: providerMetadataSchema.optional()
2260
- });
2261
- var toolCallPartSchema = import_v45.z.object({
2262
- type: import_v45.z.literal("tool-call"),
2263
- toolCallId: import_v45.z.string(),
2264
- toolName: import_v45.z.string(),
2265
- input: import_v45.z.unknown(),
2266
- providerOptions: providerMetadataSchema.optional(),
2267
- providerExecuted: import_v45.z.boolean().optional()
2268
- });
2269
- var outputSchema = import_v45.z.discriminatedUnion("type", [
2270
- import_v45.z.object({
2271
- type: import_v45.z.literal("text"),
2272
- value: import_v45.z.string()
2273
- }),
2274
- import_v45.z.object({
2275
- type: import_v45.z.literal("json"),
2276
- value: jsonValueSchema
2277
- }),
2278
- import_v45.z.object({
2279
- type: import_v45.z.literal("error-text"),
2280
- value: import_v45.z.string()
2281
- }),
2282
- import_v45.z.object({
2283
- type: import_v45.z.literal("error-json"),
2284
- value: jsonValueSchema
2285
- }),
2286
- import_v45.z.object({
2287
- type: import_v45.z.literal("content"),
2288
- value: import_v45.z.array(
2289
- import_v45.z.union([
2290
- import_v45.z.object({
2291
- type: import_v45.z.literal("text"),
2292
- text: import_v45.z.string()
2293
- }),
2294
- import_v45.z.object({
2295
- type: import_v45.z.literal("media"),
2296
- data: import_v45.z.string(),
2297
- mediaType: import_v45.z.string()
2298
- })
2299
- ])
2300
- )
2301
- })
2302
- ]);
2303
- var toolResultPartSchema = import_v45.z.object({
2304
- type: import_v45.z.literal("tool-result"),
2305
- toolCallId: import_v45.z.string(),
2306
- toolName: import_v45.z.string(),
2307
- output: outputSchema,
2308
- providerOptions: providerMetadataSchema.optional()
2309
- });
2310
-
2311
- // src/prompt/message.ts
2312
- var systemModelMessageSchema = import_v46.z.object(
2313
- {
2314
- role: import_v46.z.literal("system"),
2315
- content: import_v46.z.string(),
2316
- providerOptions: providerMetadataSchema.optional()
2317
- }
2318
- );
2319
- var userModelMessageSchema = import_v46.z.object({
2320
- role: import_v46.z.literal("user"),
2321
- content: import_v46.z.union([
2322
- import_v46.z.string(),
2323
- import_v46.z.array(import_v46.z.union([textPartSchema, imagePartSchema, filePartSchema]))
2324
- ]),
2325
- providerOptions: providerMetadataSchema.optional()
2326
- });
2327
- var assistantModelMessageSchema = import_v46.z.object({
2328
- role: import_v46.z.literal("assistant"),
2329
- content: import_v46.z.union([
2330
- import_v46.z.string(),
2331
- import_v46.z.array(
2332
- import_v46.z.union([
2333
- textPartSchema,
2334
- filePartSchema,
2335
- reasoningPartSchema,
2336
- toolCallPartSchema,
2337
- toolResultPartSchema
2338
- ])
2339
- )
2340
- ]),
2341
- providerOptions: providerMetadataSchema.optional()
2342
- });
2343
- var toolModelMessageSchema = import_v46.z.object({
2344
- role: import_v46.z.literal("tool"),
2345
- content: import_v46.z.array(toolResultPartSchema),
2346
- providerOptions: providerMetadataSchema.optional()
2347
- });
2348
- var modelMessageSchema = import_v46.z.union([
2349
- systemModelMessageSchema,
2350
- userModelMessageSchema,
2351
- assistantModelMessageSchema,
2352
- toolModelMessageSchema
2353
- ]);
2354
-
2355
- // src/prompt/standardize-prompt.ts
2356
- async function standardizePrompt(prompt) {
2357
- if (prompt.prompt == null && prompt.messages == null) {
2358
- throw new import_provider13.InvalidPromptError({
2359
- prompt,
2360
- message: "prompt or messages must be defined"
2361
- });
2362
- }
2363
- if (prompt.prompt != null && prompt.messages != null) {
2364
- throw new import_provider13.InvalidPromptError({
2365
- prompt,
2366
- message: "prompt and messages cannot be defined at the same time"
2367
- });
2368
- }
2369
- if (prompt.system != null && typeof prompt.system !== "string") {
2370
- throw new import_provider13.InvalidPromptError({
2371
- prompt,
2372
- message: "system must be a string"
2373
- });
2374
- }
2375
- let messages;
2376
- if (prompt.prompt != null && typeof prompt.prompt === "string") {
2377
- messages = [{ role: "user", content: prompt.prompt }];
2378
- } else if (prompt.prompt != null && Array.isArray(prompt.prompt)) {
2379
- messages = prompt.prompt;
2380
- } else if (prompt.messages != null) {
2381
- messages = prompt.messages;
2382
- } else {
2383
- throw new import_provider13.InvalidPromptError({
2384
- prompt,
2385
- message: "prompt or messages must be defined"
2386
- });
2387
- }
2388
- if (messages.length === 0) {
2389
- throw new import_provider13.InvalidPromptError({
2390
- prompt,
2391
- message: "messages must not be empty"
2392
- });
2393
- }
2394
- const validationResult = await (0, import_provider_utils8.safeValidateTypes)({
2395
- value: messages,
2396
- schema: import_v47.z.array(modelMessageSchema)
2397
- });
2398
- if (!validationResult.success) {
2399
- throw new import_provider13.InvalidPromptError({
2400
- prompt,
2401
- message: "The messages must be a ModelMessage[]. If you have passed a UIMessage[], you can use convertToModelMessages to convert them.",
2402
- cause: validationResult.error
2403
- });
2404
- }
2405
- return {
2406
- messages,
2407
- system: prompt.system
2408
- };
2409
- }
2410
-
2411
- // src/prompt/wrap-gateway-error.ts
2412
- var import_gateway2 = require("@ai-sdk/gateway");
2413
- var import_provider14 = require("@ai-sdk/provider");
2414
- function wrapGatewayError(error) {
2415
- if (import_gateway2.GatewayAuthenticationError.isInstance(error) || import_gateway2.GatewayModelNotFoundError.isInstance(error)) {
2416
- return new import_provider14.AISDKError({
2417
- name: "GatewayError",
2418
- message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
2419
- cause: error
2420
- });
2421
- }
2422
- return error;
2423
- }
2424
-
2425
- // src/telemetry/assemble-operation-name.ts
2426
- function assembleOperationName({
2427
- operationId,
2428
- telemetry
2429
- }) {
2430
- return {
2431
- // standardized operation and resource name:
2432
- "operation.name": `${operationId}${(telemetry == null ? void 0 : telemetry.functionId) != null ? ` ${telemetry.functionId}` : ""}`,
2433
- "resource.name": telemetry == null ? void 0 : telemetry.functionId,
2434
- // detailed, AI SDK specific data:
2435
- "ai.operationId": operationId,
2436
- "ai.telemetry.functionId": telemetry == null ? void 0 : telemetry.functionId
2437
- };
2438
- }
2439
-
2440
- // src/telemetry/get-base-telemetry-attributes.ts
2441
- function getBaseTelemetryAttributes({
2442
- model,
2443
- settings,
2444
- telemetry,
2445
- headers
2446
- }) {
2447
- var _a9;
2448
- return {
2449
- "ai.model.provider": model.provider,
2450
- "ai.model.id": model.modelId,
2451
- // settings:
2452
- ...Object.entries(settings).reduce((attributes, [key, value]) => {
2453
- attributes[`ai.settings.${key}`] = value;
2454
- return attributes;
2455
- }, {}),
2456
- // add metadata as attributes:
2457
- ...Object.entries((_a9 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a9 : {}).reduce(
2458
- (attributes, [key, value]) => {
2459
- attributes[`ai.telemetry.metadata.${key}`] = value;
2460
- return attributes;
2461
- },
2462
- {}
2463
- ),
2464
- // request headers
2465
- ...Object.entries(headers != null ? headers : {}).reduce((attributes, [key, value]) => {
2466
- if (value !== void 0) {
2467
- attributes[`ai.request.headers.${key}`] = value;
2468
- }
2469
- return attributes;
2470
- }, {})
2471
- };
2472
- }
2473
-
2474
- // src/telemetry/get-tracer.ts
2475
- var import_api = require("@opentelemetry/api");
2476
-
2477
- // src/telemetry/noop-tracer.ts
2478
- var noopTracer = {
2479
- startSpan() {
2480
- return noopSpan;
2481
- },
2482
- startActiveSpan(name9, arg1, arg2, arg3) {
2483
- if (typeof arg1 === "function") {
2484
- return arg1(noopSpan);
2485
- }
2486
- if (typeof arg2 === "function") {
2487
- return arg2(noopSpan);
2488
- }
2489
- if (typeof arg3 === "function") {
2490
- return arg3(noopSpan);
2491
- }
2492
- }
2493
- };
2494
- var noopSpan = {
2495
- spanContext() {
2496
- return noopSpanContext;
2497
- },
2498
- setAttribute() {
2499
- return this;
2500
- },
2501
- setAttributes() {
2502
- return this;
2503
- },
2504
- addEvent() {
2505
- return this;
2506
- },
2507
- addLink() {
2508
- return this;
2509
- },
2510
- addLinks() {
2511
- return this;
2512
- },
2513
- setStatus() {
2514
- return this;
2515
- },
2516
- updateName() {
2517
- return this;
2518
- },
2519
- end() {
2520
- return this;
2521
- },
2522
- isRecording() {
2523
- return false;
2524
- },
2525
- recordException() {
2526
- return this;
2527
- }
2528
- };
2529
- var noopSpanContext = {
2530
- traceId: "",
2531
- spanId: "",
2532
- traceFlags: 0
2533
- };
2534
-
2535
- // src/telemetry/get-tracer.ts
2536
- function getTracer({
2537
- isEnabled = false,
2538
- tracer
2539
- } = {}) {
2540
- if (!isEnabled) {
2541
- return noopTracer;
2542
- }
2543
- if (tracer) {
2544
- return tracer;
2545
- }
2546
- return import_api.trace.getTracer("ai");
2547
- }
2548
-
2549
- // src/telemetry/record-span.ts
2550
- var import_api2 = require("@opentelemetry/api");
2551
- function recordSpan({
2552
- name: name9,
2553
- tracer,
2554
- attributes,
2555
- fn,
2556
- endWhenDone = true
2557
- }) {
2558
- return tracer.startActiveSpan(name9, { attributes }, async (span) => {
2559
- try {
2560
- const result = await fn(span);
2561
- if (endWhenDone) {
2562
- span.end();
2563
- }
2564
- return result;
2565
- } catch (error) {
2566
- try {
2567
- recordErrorOnSpan(span, error);
2568
- } finally {
2569
- span.end();
2570
- }
2571
- throw error;
2572
- }
2573
- });
2574
- }
2575
- function recordErrorOnSpan(span, error) {
2576
- if (error instanceof Error) {
2577
- span.recordException({
2578
- name: error.name,
2579
- message: error.message,
2580
- stack: error.stack
2581
- });
2582
- span.setStatus({
2583
- code: import_api2.SpanStatusCode.ERROR,
2584
- message: error.message
2585
- });
2586
- } else {
2587
- span.setStatus({ code: import_api2.SpanStatusCode.ERROR });
2588
- }
2589
- }
2590
-
2591
- // src/telemetry/select-telemetry-attributes.ts
2592
- function selectTelemetryAttributes({
2593
- telemetry,
2594
- attributes
2595
- }) {
2596
- if ((telemetry == null ? void 0 : telemetry.isEnabled) !== true) {
2597
- return {};
2598
- }
2599
- return Object.entries(attributes).reduce((attributes2, [key, value]) => {
2600
- if (value == null) {
2601
- return attributes2;
2602
- }
2603
- if (typeof value === "object" && "input" in value && typeof value.input === "function") {
2604
- if ((telemetry == null ? void 0 : telemetry.recordInputs) === false) {
2605
- return attributes2;
2606
- }
2607
- const result = value.input();
2608
- return result == null ? attributes2 : { ...attributes2, [key]: result };
2609
- }
2610
- if (typeof value === "object" && "output" in value && typeof value.output === "function") {
2611
- if ((telemetry == null ? void 0 : telemetry.recordOutputs) === false) {
2612
- return attributes2;
2613
- }
2614
- const result = value.output();
2615
- return result == null ? attributes2 : { ...attributes2, [key]: result };
2616
- }
2617
- return { ...attributes2, [key]: value };
2618
- }, {});
2619
- }
2620
-
2621
- // src/telemetry/stringify-for-telemetry.ts
2622
- function stringifyForTelemetry(prompt) {
2623
- return JSON.stringify(
2624
- prompt.map((message) => ({
2625
- ...message,
2626
- content: typeof message.content === "string" ? message.content : message.content.map(
2627
- (part) => part.type === "file" ? {
2628
- ...part,
2629
- data: part.data instanceof Uint8Array ? convertDataContentToBase64String(part.data) : part.data
2630
- } : part
2631
- )
2632
- }))
2633
- );
2634
- }
2635
-
2636
- // src/types/usage.ts
2637
- function addLanguageModelUsage(usage1, usage2) {
2638
- return {
2639
- inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
2640
- outputTokens: addTokenCounts(usage1.outputTokens, usage2.outputTokens),
2641
- totalTokens: addTokenCounts(usage1.totalTokens, usage2.totalTokens),
2642
- reasoningTokens: addTokenCounts(
2643
- usage1.reasoningTokens,
2644
- usage2.reasoningTokens
2645
- ),
2646
- cachedInputTokens: addTokenCounts(
2647
- usage1.cachedInputTokens,
2648
- usage2.cachedInputTokens
2649
- )
2650
- };
2651
- }
2652
- function addTokenCounts(tokenCount1, tokenCount2) {
2653
- return tokenCount1 == null && tokenCount2 == null ? void 0 : (tokenCount1 != null ? tokenCount1 : 0) + (tokenCount2 != null ? tokenCount2 : 0);
2654
- }
2655
-
2656
- // src/util/filter-stream-errors.ts
2657
- function filterStreamErrors(readable, onError) {
2658
- return new ReadableStream({
2659
- async start(controller) {
2660
- const reader = readable.getReader();
2661
- try {
2662
- while (true) {
2663
- const { done, value } = await reader.read();
2664
- if (done) {
2665
- controller.close();
2666
- break;
2667
- }
2668
- controller.enqueue(value);
2669
- }
2670
- } catch (error) {
2671
- await onError({ error, controller });
2672
- }
2673
- },
2674
- cancel(reason) {
2675
- return readable.cancel(reason);
2676
- }
2677
- });
2678
- }
2679
-
2680
- // src/generate-text/run-tools-transformation.ts
2681
- var import_provider_utils11 = require("@ai-sdk/provider-utils");
2682
-
2683
- // src/generate-text/generated-file.ts
2684
- var import_provider_utils9 = require("@ai-sdk/provider-utils");
2685
- var DefaultGeneratedFile = class {
2686
- constructor({
2687
- data,
2688
- mediaType
2689
- }) {
2690
- const isUint8Array = data instanceof Uint8Array;
2691
- this.base64Data = isUint8Array ? void 0 : data;
2692
- this.uint8ArrayData = isUint8Array ? data : void 0;
2693
- this.mediaType = mediaType;
2694
- }
2695
- // lazy conversion with caching to avoid unnecessary conversion overhead:
2696
- get base64() {
2697
- if (this.base64Data == null) {
2698
- this.base64Data = (0, import_provider_utils9.convertUint8ArrayToBase64)(this.uint8ArrayData);
2699
- }
2700
- return this.base64Data;
2701
- }
2702
- // lazy conversion with caching to avoid unnecessary conversion overhead:
2703
- get uint8Array() {
2704
- if (this.uint8ArrayData == null) {
2705
- this.uint8ArrayData = (0, import_provider_utils9.convertBase64ToUint8Array)(this.base64Data);
2706
- }
2707
- return this.uint8ArrayData;
2708
- }
2709
- };
2710
- var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
2711
- constructor(options) {
2712
- super(options);
2713
- this.type = "file";
2714
- }
2715
- };
2716
-
2717
- // src/generate-text/parse-tool-call.ts
2718
- var import_provider_utils10 = require("@ai-sdk/provider-utils");
2719
- async function parseToolCall({
2720
- toolCall,
2721
- tools,
2722
- repairToolCall,
2723
- system,
2724
- messages
2725
- }) {
2726
- if (tools == null) {
2727
- throw new NoSuchToolError({ toolName: toolCall.toolName });
2728
- }
2729
- try {
2730
- return await doParseToolCall({ toolCall, tools });
2731
- } catch (error) {
2732
- if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolInputError.isInstance(error))) {
2733
- throw error;
2734
- }
2735
- let repairedToolCall = null;
2736
- try {
2737
- repairedToolCall = await repairToolCall({
2738
- toolCall,
2739
- tools,
2740
- inputSchema: ({ toolName }) => {
2741
- const { inputSchema } = tools[toolName];
2742
- return (0, import_provider_utils10.asSchema)(inputSchema).jsonSchema;
2743
- },
2744
- system,
2745
- messages,
2746
- error
2747
- });
2748
- } catch (repairError) {
2749
- throw new ToolCallRepairError({
2750
- cause: repairError,
2751
- originalError: error
2752
- });
2753
- }
2754
- if (repairedToolCall == null) {
2755
- throw error;
2756
- }
2757
- return await doParseToolCall({ toolCall: repairedToolCall, tools });
2758
- }
2759
- }
2760
- async function doParseToolCall({
2761
- toolCall,
2762
- tools
2763
- }) {
2764
- const toolName = toolCall.toolName;
2765
- const tool = tools[toolName];
2766
- if (tool == null) {
2767
- throw new NoSuchToolError({
2768
- toolName: toolCall.toolName,
2769
- availableTools: Object.keys(tools)
2770
- });
2771
- }
2772
- const schema = (0, import_provider_utils10.asSchema)(tool.inputSchema);
2773
- const parseResult = toolCall.input.trim() === "" ? await (0, import_provider_utils10.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils10.safeParseJSON)({ text: toolCall.input, schema });
2774
- if (parseResult.success === false) {
2775
- throw new InvalidToolInputError({
2776
- toolName,
2777
- toolInput: toolCall.input,
2778
- cause: parseResult.error
2779
- });
2780
- }
2781
- return {
2782
- type: "tool-call",
2783
- toolCallId: toolCall.toolCallId,
2784
- toolName,
2785
- input: parseResult.value,
2786
- providerExecuted: toolCall.providerExecuted,
2787
- providerMetadata: toolCall.providerMetadata
2788
- };
2789
- }
2790
-
2791
- // src/generate-text/run-tools-transformation.ts
2792
- function runToolsTransformation({
2793
- tools,
2794
- generatorStream,
2795
- tracer,
2796
- telemetry,
2797
- system,
2798
- messages,
2799
- abortSignal,
2800
- repairToolCall
2801
- }) {
2802
- let toolResultsStreamController = null;
2803
- const toolResultsStream = new ReadableStream({
2804
- start(controller) {
2805
- toolResultsStreamController = controller;
2806
- }
2807
- });
2808
- const outstandingToolResults = /* @__PURE__ */ new Set();
2809
- const toolInputs = /* @__PURE__ */ new Map();
2810
- let canClose = false;
2811
- let finishChunk = void 0;
2812
- function attemptClose() {
2813
- if (canClose && outstandingToolResults.size === 0) {
2814
- if (finishChunk != null) {
2815
- toolResultsStreamController.enqueue(finishChunk);
2816
- }
2817
- toolResultsStreamController.close();
2818
- }
2819
- }
2820
- const forwardStream = new TransformStream({
2821
- async transform(chunk, controller) {
2822
- const chunkType = chunk.type;
2823
- switch (chunkType) {
2824
- case "stream-start":
2825
- case "text-start":
2826
- case "text-delta":
2827
- case "text-end":
2828
- case "reasoning-start":
2829
- case "reasoning-delta":
2830
- case "reasoning-end":
2831
- case "tool-input-start":
2832
- case "tool-input-delta":
2833
- case "tool-input-end":
2834
- case "source":
2835
- case "response-metadata":
2836
- case "error":
2837
- case "raw": {
2838
- controller.enqueue(chunk);
2839
- break;
2840
- }
2841
- case "file": {
2842
- controller.enqueue({
2843
- type: "file",
2844
- file: new DefaultGeneratedFileWithType({
2845
- data: chunk.data,
2846
- mediaType: chunk.mediaType
2847
- })
2848
- });
2849
- break;
2850
- }
2851
- case "finish": {
2852
- finishChunk = {
2853
- type: "finish",
2854
- finishReason: chunk.finishReason,
2855
- usage: chunk.usage,
2856
- providerMetadata: chunk.providerMetadata
2857
- };
2858
- break;
2859
- }
2860
- case "tool-call": {
2861
- try {
2862
- const toolCall = await parseToolCall({
2863
- toolCall: chunk,
2864
- tools,
2865
- repairToolCall,
2866
- system,
2867
- messages
2868
- });
2869
- controller.enqueue(toolCall);
2870
- const tool = tools[toolCall.toolName];
2871
- toolInputs.set(toolCall.toolCallId, toolCall.input);
2872
- if (tool.onInputAvailable != null) {
2873
- await tool.onInputAvailable({
2874
- input: toolCall.input,
2875
- toolCallId: toolCall.toolCallId,
2876
- messages,
2877
- abortSignal
2878
- });
2879
- }
2880
- if (tool.execute != null && toolCall.providerExecuted !== true) {
2881
- const toolExecutionId = (0, import_provider_utils11.generateId)();
2882
- outstandingToolResults.add(toolExecutionId);
2883
- recordSpan({
2884
- name: "ai.toolCall",
2885
- attributes: selectTelemetryAttributes({
2886
- telemetry,
2887
- attributes: {
2888
- ...assembleOperationName({
2889
- operationId: "ai.toolCall",
2890
- telemetry
2891
- }),
2892
- "ai.toolCall.name": toolCall.toolName,
2893
- "ai.toolCall.id": toolCall.toolCallId,
2894
- "ai.toolCall.input": {
2895
- output: () => JSON.stringify(toolCall.input)
2896
- }
2897
- }
2898
- }),
2899
- tracer,
2900
- fn: async (span) => {
2901
- let output;
2902
- try {
2903
- output = await tool.execute(toolCall.input, {
2904
- toolCallId: toolCall.toolCallId,
2905
- messages,
2906
- abortSignal
2907
- });
2908
- } catch (error) {
2909
- recordErrorOnSpan(span, error);
2910
- toolResultsStreamController.enqueue({
2911
- ...toolCall,
2912
- type: "tool-error",
2913
- error
2914
- });
2915
- outstandingToolResults.delete(toolExecutionId);
2916
- attemptClose();
2917
- return;
2918
- }
2919
- toolResultsStreamController.enqueue({
2920
- ...toolCall,
2921
- type: "tool-result",
2922
- output
2923
- });
2924
- outstandingToolResults.delete(toolExecutionId);
2925
- attemptClose();
2926
- try {
2927
- span.setAttributes(
2928
- selectTelemetryAttributes({
2929
- telemetry,
2930
- attributes: {
2931
- "ai.toolCall.output": {
2932
- output: () => JSON.stringify(output)
2933
- }
2934
- }
2935
- })
2936
- );
2937
- } catch (ignored) {
2938
- }
2939
- }
2940
- });
2941
- }
2942
- } catch (error) {
2943
- toolResultsStreamController.enqueue({ type: "error", error });
2944
- }
2945
- break;
2946
- }
2947
- case "tool-result": {
2948
- const toolName = chunk.toolName;
2949
- if (chunk.isError) {
2950
- toolResultsStreamController.enqueue({
2951
- type: "tool-error",
2952
- toolCallId: chunk.toolCallId,
2953
- toolName,
2954
- input: toolInputs.get(chunk.toolCallId),
2955
- providerExecuted: chunk.providerExecuted,
2956
- error: chunk.result
2957
- });
2958
- } else {
2959
- controller.enqueue({
2960
- type: "tool-result",
2961
- toolCallId: chunk.toolCallId,
2962
- toolName,
2963
- input: toolInputs.get(chunk.toolCallId),
2964
- output: chunk.result,
2965
- providerExecuted: chunk.providerExecuted
2966
- });
2967
- }
2968
- break;
2969
- }
2970
- default: {
2971
- const _exhaustiveCheck = chunkType;
2972
- throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
2973
- }
2974
- }
2975
- },
2976
- flush() {
2977
- canClose = true;
2978
- attemptClose();
2979
- }
2980
- });
2981
- return new ReadableStream({
2982
- async start(controller) {
2983
- return Promise.all([
2984
- generatorStream.pipeThrough(forwardStream).pipeTo(
2985
- new WritableStream({
2986
- write(chunk) {
2987
- controller.enqueue(chunk);
2988
- },
2989
- close() {
2990
- }
2991
- })
2992
- ),
2993
- toolResultsStream.pipeTo(
2994
- new WritableStream({
2995
- write(chunk) {
2996
- controller.enqueue(chunk);
2997
- },
2998
- close() {
2999
- controller.close();
3000
- }
3001
- })
3002
- )
3003
- ]);
3004
- }
3005
- });
3006
- }
3007
-
3008
- // src/generate-text/step-result.ts
3009
- var DefaultStepResult = class {
3010
- constructor({
3011
- content,
3012
- finishReason,
3013
- usage,
3014
- warnings,
3015
- request,
3016
- response,
3017
- providerMetadata
3018
- }) {
3019
- this.content = content;
3020
- this.finishReason = finishReason;
3021
- this.usage = usage;
3022
- this.warnings = warnings;
3023
- this.request = request;
3024
- this.response = response;
3025
- this.providerMetadata = providerMetadata;
3026
- }
3027
- get text() {
3028
- return this.content.filter((part) => part.type === "text").map((part) => part.text).join("");
3029
- }
3030
- get reasoning() {
3031
- return this.content.filter((part) => part.type === "reasoning");
3032
- }
3033
- get reasoningText() {
3034
- return this.reasoning.length === 0 ? void 0 : this.reasoning.map((part) => part.text).join("");
3035
- }
3036
- get files() {
3037
- return this.content.filter((part) => part.type === "file").map((part) => part.file);
3038
- }
3039
- get sources() {
3040
- return this.content.filter((part) => part.type === "source");
3041
- }
3042
- get toolCalls() {
3043
- return this.content.filter((part) => part.type === "tool-call");
3044
- }
3045
- get toolResults() {
3046
- return this.content.filter((part) => part.type === "tool-result");
3047
- }
3048
- };
3049
-
3050
- // src/generate-text/stop-condition.ts
3051
- function stepCountIs(stepCount) {
3052
- return ({ steps }) => steps.length === stepCount;
3053
- }
3054
- async function isStopConditionMet({
3055
- stopConditions,
3056
- steps
3057
- }) {
3058
- return (await Promise.all(stopConditions.map((condition) => condition({ steps })))).some((result) => result);
3059
- }
3060
-
3061
- // src/prompt/create-tool-model-output.ts
3062
- var import_provider15 = require("@ai-sdk/provider");
3063
- function createToolModelOutput({
3064
- output,
3065
- tool,
3066
- errorMode
3067
- }) {
3068
- if (errorMode === "text") {
3069
- return { type: "error-text", value: (0, import_provider15.getErrorMessage)(output) };
3070
- } else if (errorMode === "json") {
3071
- return { type: "error-json", value: output };
3072
- }
3073
- if (tool == null ? void 0 : tool.toModelOutput) {
3074
- return tool.toModelOutput(output);
3075
- }
3076
- return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: output };
3077
- }
3078
-
3079
- // src/generate-text/to-response-messages.ts
3080
- function toResponseMessages({
3081
- content: inputContent,
3082
- tools
3083
- }) {
3084
- const responseMessages = [];
3085
- const content = inputContent.filter((part) => part.type !== "source").filter(
3086
- (part) => (part.type !== "tool-result" || part.providerExecuted) && (part.type !== "tool-error" || part.providerExecuted)
3087
- ).filter((part) => part.type !== "text" || part.text.length > 0).map((part) => {
3088
- switch (part.type) {
3089
- case "text":
3090
- return {
3091
- type: "text",
3092
- text: part.text,
3093
- providerOptions: part.providerMetadata
3094
- };
3095
- case "reasoning":
3096
- return {
3097
- type: "reasoning",
3098
- text: part.text,
3099
- providerOptions: part.providerMetadata
3100
- };
3101
- case "file":
3102
- return {
3103
- type: "file",
3104
- data: part.file.base64,
3105
- mediaType: part.file.mediaType,
3106
- providerOptions: part.providerMetadata
3107
- };
3108
- case "tool-call":
3109
- return {
3110
- type: "tool-call",
3111
- toolCallId: part.toolCallId,
3112
- toolName: part.toolName,
3113
- input: part.input,
3114
- providerExecuted: part.providerExecuted,
3115
- providerOptions: part.providerMetadata
3116
- };
3117
- case "tool-result":
3118
- return {
3119
- type: "tool-result",
3120
- toolCallId: part.toolCallId,
3121
- toolName: part.toolName,
3122
- output: createToolModelOutput({
3123
- tool: tools == null ? void 0 : tools[part.toolName],
3124
- output: part.output,
3125
- errorMode: "none"
3126
- }),
3127
- providerExecuted: true,
3128
- providerOptions: part.providerMetadata
3129
- };
3130
- case "tool-error":
3131
- return {
3132
- type: "tool-result",
3133
- toolCallId: part.toolCallId,
3134
- toolName: part.toolName,
3135
- output: createToolModelOutput({
3136
- tool: tools == null ? void 0 : tools[part.toolName],
3137
- output: part.error,
3138
- errorMode: "json"
3139
- }),
3140
- providerOptions: part.providerMetadata
3141
- };
3142
- }
3143
- });
3144
- if (content.length > 0) {
3145
- responseMessages.push({
3146
- role: "assistant",
3147
- content
3148
- });
3149
- }
3150
- const toolResultContent = inputContent.filter((part) => part.type === "tool-result" || part.type === "tool-error").filter((part) => !part.providerExecuted).map((toolResult) => ({
3151
- type: "tool-result",
3152
- toolCallId: toolResult.toolCallId,
3153
- toolName: toolResult.toolName,
3154
- output: createToolModelOutput({
3155
- tool: tools == null ? void 0 : tools[toolResult.toolName],
3156
- output: toolResult.type === "tool-result" ? toolResult.output : toolResult.error,
3157
- errorMode: toolResult.type === "tool-error" ? "text" : "none"
3158
- })
3159
- }));
3160
- if (toolResultContent.length > 0) {
3161
- responseMessages.push({
3162
- role: "tool",
3163
- content: toolResultContent
3164
- });
3165
- }
3166
- return responseMessages;
3167
- }
3168
-
3169
- // src/generate-text/stream-text.ts
3170
- var originalGenerateId = (0, import_provider_utils12.createIdGenerator)({
3171
- prefix: "aitxt",
3172
- size: 24
3173
- });
3174
- function streamText({
3175
- model,
3176
- tools,
3177
- toolChoice,
3178
- system,
3179
- prompt,
3180
- messages,
3181
- maxRetries,
3182
- abortSignal,
3183
- headers,
3184
- stopWhen = stepCountIs(1),
3185
- experimental_output: output,
3186
- experimental_telemetry: telemetry,
3187
- prepareStep,
3188
- providerOptions,
3189
- experimental_activeTools,
3190
- activeTools = experimental_activeTools,
3191
- experimental_repairToolCall: repairToolCall,
3192
- experimental_transform: transform,
3193
- includeRawChunks = false,
3194
- onChunk,
3195
- onError = ({ error }) => {
3196
- console.error(error);
3197
- },
3198
- onFinish,
3199
- onAbort,
3200
- onStepFinish,
3201
- _internal: {
3202
- now: now2 = now,
3203
- generateId: generateId2 = originalGenerateId,
3204
- currentDate = () => /* @__PURE__ */ new Date()
3205
- } = {},
3206
- ...settings
3207
- }) {
3208
- return new DefaultStreamTextResult({
3209
- model: resolveLanguageModel(model),
3210
- telemetry,
3211
- headers,
3212
- settings,
3213
- maxRetries,
3214
- abortSignal,
3215
- system,
3216
- prompt,
3217
- messages,
3218
- tools,
3219
- toolChoice,
3220
- transforms: asArray(transform),
3221
- activeTools,
3222
- repairToolCall,
3223
- stopConditions: asArray(stopWhen),
3224
- output,
3225
- providerOptions,
3226
- prepareStep,
3227
- includeRawChunks,
3228
- onChunk,
3229
- onError,
3230
- onFinish,
3231
- onAbort,
3232
- onStepFinish,
3233
- now: now2,
3234
- currentDate,
3235
- generateId: generateId2
3236
- });
3237
- }
3238
- function createOutputTransformStream(output) {
3239
- if (!output) {
3240
- return new TransformStream({
3241
- transform(chunk, controller) {
3242
- controller.enqueue({ part: chunk, partialOutput: void 0 });
3243
- }
3244
- });
3245
- }
3246
- let firstTextChunkId = void 0;
3247
- let text = "";
3248
- let textChunk = "";
3249
- let lastPublishedJson = "";
3250
- function publishTextChunk({
3251
- controller,
3252
- partialOutput = void 0
3253
- }) {
3254
- controller.enqueue({
3255
- part: {
3256
- type: "text",
3257
- id: firstTextChunkId,
3258
- text: textChunk
3259
- },
3260
- partialOutput
3261
- });
3262
- textChunk = "";
3263
- }
3264
- return new TransformStream({
3265
- async transform(chunk, controller) {
3266
- if (chunk.type === "finish-step" && textChunk.length > 0) {
3267
- publishTextChunk({ controller });
3268
- }
3269
- if (chunk.type !== "text" && chunk.type !== "text-start" && chunk.type !== "text-end") {
3270
- controller.enqueue({ part: chunk, partialOutput: void 0 });
3271
- return;
3272
- }
3273
- if (firstTextChunkId == null) {
3274
- firstTextChunkId = chunk.id;
3275
- } else if (chunk.id !== firstTextChunkId) {
3276
- controller.enqueue({ part: chunk, partialOutput: void 0 });
3277
- return;
3278
- }
3279
- if (chunk.type === "text-start") {
3280
- controller.enqueue({ part: chunk, partialOutput: void 0 });
3281
- return;
3282
- }
3283
- if (chunk.type === "text-end") {
3284
- if (textChunk.length > 0) {
3285
- publishTextChunk({ controller });
3286
- }
3287
- controller.enqueue({ part: chunk, partialOutput: void 0 });
3288
- return;
3289
- }
3290
- text += chunk.text;
3291
- textChunk += chunk.text;
3292
- const result = await output.parsePartial({ text });
3293
- if (result != null) {
3294
- const currentJson = JSON.stringify(result.partial);
3295
- if (currentJson !== lastPublishedJson) {
3296
- publishTextChunk({ controller, partialOutput: result.partial });
3297
- lastPublishedJson = currentJson;
3298
- }
3299
- }
3300
- }
3301
- });
3302
- }
3303
- var DefaultStreamTextResult = class {
3304
- constructor({
3305
- model,
3306
- telemetry,
3307
- headers,
3308
- settings,
3309
- maxRetries: maxRetriesArg,
3310
- abortSignal,
3311
- system,
3312
- prompt,
3313
- messages,
3314
- tools,
3315
- toolChoice,
3316
- transforms,
3317
- activeTools,
3318
- repairToolCall,
3319
- stopConditions,
3320
- output,
3321
- providerOptions,
3322
- prepareStep,
3323
- includeRawChunks,
3324
- now: now2,
3325
- currentDate,
3326
- generateId: generateId2,
3327
- onChunk,
3328
- onError,
3329
- onFinish,
3330
- onAbort,
3331
- onStepFinish
3332
- }) {
3333
- this._totalUsage = new DelayedPromise();
3334
- this._finishReason = new DelayedPromise();
3335
- this._steps = new DelayedPromise();
3336
- this.output = output;
3337
- this.includeRawChunks = includeRawChunks;
3338
- let stepFinish;
3339
- let recordedContent = [];
3340
- const recordedResponseMessages = [];
3341
- let recordedFinishReason = void 0;
3342
- let recordedTotalUsage = void 0;
3343
- let recordedRequest = {};
3344
- let recordedWarnings = [];
3345
- const recordedSteps = [];
3346
- let rootSpan;
3347
- let activeTextContent = {};
3348
- let activeReasoningContent = {};
3349
- const eventProcessor = new TransformStream({
3350
- async transform(chunk, controller) {
3351
- var _a9, _b, _c;
3352
- controller.enqueue(chunk);
3353
- const { part } = chunk;
3354
- if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
3355
- await (onChunk == null ? void 0 : onChunk({ chunk: part }));
3356
- }
3357
- if (part.type === "error") {
3358
- await onError({ error: wrapGatewayError(part.error) });
3359
- }
3360
- if (part.type === "text-start") {
3361
- activeTextContent[part.id] = {
3362
- type: "text",
3363
- text: "",
3364
- providerMetadata: part.providerMetadata
3365
- };
3366
- recordedContent.push(activeTextContent[part.id]);
3367
- }
3368
- if (part.type === "text") {
3369
- const activeText = activeTextContent[part.id];
3370
- if (activeText == null) {
3371
- controller.enqueue({
3372
- part: {
3373
- type: "error",
3374
- error: `text part ${part.id} not found`
3375
- },
3376
- partialOutput: void 0
3377
- });
3378
- return;
3379
- }
3380
- activeText.text += part.text;
3381
- activeText.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : activeText.providerMetadata;
3382
- }
3383
- if (part.type === "text-end") {
3384
- delete activeTextContent[part.id];
3385
- }
3386
- if (part.type === "reasoning-start") {
3387
- activeReasoningContent[part.id] = {
3388
- type: "reasoning",
3389
- text: "",
3390
- providerMetadata: part.providerMetadata
3391
- };
3392
- recordedContent.push(activeReasoningContent[part.id]);
3393
- }
3394
- if (part.type === "reasoning") {
3395
- const activeReasoning = activeReasoningContent[part.id];
3396
- if (activeReasoning == null) {
3397
- controller.enqueue({
3398
- part: {
3399
- type: "error",
3400
- error: `reasoning part ${part.id} not found`
3401
- },
3402
- partialOutput: void 0
3403
- });
3404
- return;
3405
- }
3406
- activeReasoning.text += part.text;
3407
- activeReasoning.providerMetadata = (_b = part.providerMetadata) != null ? _b : activeReasoning.providerMetadata;
3408
- }
3409
- if (part.type === "reasoning-end") {
3410
- const activeReasoning = activeReasoningContent[part.id];
3411
- if (activeReasoning == null) {
3412
- controller.enqueue({
3413
- part: {
3414
- type: "error",
3415
- error: `reasoning part ${part.id} not found`
3416
- },
3417
- partialOutput: void 0
3418
- });
3419
- return;
3420
- }
3421
- activeReasoning.providerMetadata = (_c = part.providerMetadata) != null ? _c : activeReasoning.providerMetadata;
3422
- delete activeReasoningContent[part.id];
3423
- }
3424
- if (part.type === "file") {
3425
- recordedContent.push({ type: "file", file: part.file });
3426
- }
3427
- if (part.type === "source") {
3428
- recordedContent.push(part);
3429
- }
3430
- if (part.type === "tool-call") {
3431
- recordedContent.push(part);
3432
- }
3433
- if (part.type === "tool-result") {
3434
- recordedContent.push(part);
3435
- }
3436
- if (part.type === "tool-error") {
3437
- recordedContent.push(part);
3438
- }
3439
- if (part.type === "start-step") {
3440
- recordedRequest = part.request;
3441
- recordedWarnings = part.warnings;
3442
- }
3443
- if (part.type === "finish-step") {
3444
- const stepMessages = toResponseMessages({
3445
- content: recordedContent,
3446
- tools
3447
- });
3448
- const currentStepResult = new DefaultStepResult({
3449
- content: recordedContent,
3450
- finishReason: part.finishReason,
3451
- usage: part.usage,
3452
- warnings: recordedWarnings,
3453
- request: recordedRequest,
3454
- response: {
3455
- ...part.response,
3456
- messages: [...recordedResponseMessages, ...stepMessages]
3457
- },
3458
- providerMetadata: part.providerMetadata
3459
- });
3460
- await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
3461
- recordedSteps.push(currentStepResult);
3462
- recordedContent = [];
3463
- activeReasoningContent = {};
3464
- activeTextContent = {};
3465
- recordedResponseMessages.push(...stepMessages);
3466
- stepFinish.resolve();
3467
- }
3468
- if (part.type === "finish") {
3469
- recordedTotalUsage = part.totalUsage;
3470
- recordedFinishReason = part.finishReason;
3471
- }
3472
- },
3473
- async flush(controller) {
3474
- try {
3475
- if (recordedSteps.length === 0) {
3476
- return;
3477
- }
3478
- const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
3479
- const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : {
3480
- inputTokens: void 0,
3481
- outputTokens: void 0,
3482
- totalTokens: void 0
3483
- };
3484
- self._finishReason.resolve(finishReason);
3485
- self._totalUsage.resolve(totalUsage);
3486
- self._steps.resolve(recordedSteps);
3487
- const finalStep = recordedSteps[recordedSteps.length - 1];
3488
- await (onFinish == null ? void 0 : onFinish({
3489
- finishReason,
3490
- totalUsage,
3491
- usage: finalStep.usage,
3492
- content: finalStep.content,
3493
- text: finalStep.text,
3494
- reasoningText: finalStep.reasoningText,
3495
- reasoning: finalStep.reasoning,
3496
- files: finalStep.files,
3497
- sources: finalStep.sources,
3498
- toolCalls: finalStep.toolCalls,
3499
- toolResults: finalStep.toolResults,
3500
- request: finalStep.request,
3501
- response: finalStep.response,
3502
- warnings: finalStep.warnings,
3503
- providerMetadata: finalStep.providerMetadata,
3504
- steps: recordedSteps
3505
- }));
3506
- rootSpan.setAttributes(
3507
- selectTelemetryAttributes({
3508
- telemetry,
3509
- attributes: {
3510
- "ai.response.finishReason": finishReason,
3511
- "ai.response.text": { output: () => finalStep.text },
3512
- "ai.response.toolCalls": {
3513
- output: () => {
3514
- var _a9;
3515
- return ((_a9 = finalStep.toolCalls) == null ? void 0 : _a9.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
3516
- }
3517
- },
3518
- "ai.response.providerMetadata": JSON.stringify(
3519
- finalStep.providerMetadata
3520
- ),
3521
- "ai.usage.inputTokens": totalUsage.inputTokens,
3522
- "ai.usage.outputTokens": totalUsage.outputTokens,
3523
- "ai.usage.totalTokens": totalUsage.totalTokens,
3524
- "ai.usage.reasoningTokens": totalUsage.reasoningTokens,
3525
- "ai.usage.cachedInputTokens": totalUsage.cachedInputTokens
3526
- }
3527
- })
3528
- );
3529
- } catch (error) {
3530
- controller.error(error);
3531
- } finally {
3532
- rootSpan.end();
3533
- }
3534
- }
3535
- });
3536
- const stitchableStream = createStitchableStream();
3537
- this.addStream = stitchableStream.addStream;
3538
- this.closeStream = stitchableStream.close;
3539
- let stream = stitchableStream.stream;
3540
- stream = filterStreamErrors(stream, ({ error, controller }) => {
3541
- if ((0, import_provider_utils12.isAbortError)(error) && (abortSignal == null ? void 0 : abortSignal.aborted)) {
3542
- onAbort == null ? void 0 : onAbort({ steps: recordedSteps });
3543
- controller.enqueue({ type: "abort" });
3544
- controller.close();
3545
- } else {
3546
- controller.error(error);
3547
- }
3548
- });
3549
- stream = stream.pipeThrough(
3550
- new TransformStream({
3551
- start(controller) {
3552
- controller.enqueue({ type: "start" });
3553
- }
3554
- })
3555
- );
3556
- for (const transform of transforms) {
3557
- stream = stream.pipeThrough(
3558
- transform({
3559
- tools,
3560
- stopStream() {
3561
- stitchableStream.terminate();
3562
- }
3563
- })
3564
- );
3565
- }
3566
- this.baseStream = stream.pipeThrough(createOutputTransformStream(output)).pipeThrough(eventProcessor);
3567
- const { maxRetries, retry } = prepareRetries({
3568
- maxRetries: maxRetriesArg
3569
- });
3570
- const tracer = getTracer(telemetry);
3571
- const callSettings = prepareCallSettings(settings);
3572
- const baseTelemetryAttributes = getBaseTelemetryAttributes({
3573
- model,
3574
- telemetry,
3575
- headers,
3576
- settings: { ...callSettings, maxRetries }
3577
- });
3578
- const self = this;
3579
- recordSpan({
3580
- name: "ai.streamText",
3581
- attributes: selectTelemetryAttributes({
3582
- telemetry,
3583
- attributes: {
3584
- ...assembleOperationName({ operationId: "ai.streamText", telemetry }),
3585
- ...baseTelemetryAttributes,
3586
- // specific settings that only make sense on the outer level:
3587
- "ai.prompt": {
3588
- input: () => JSON.stringify({ system, prompt, messages })
3589
- }
3590
- }
3591
- }),
3592
- tracer,
3593
- endWhenDone: false,
3594
- fn: async (rootSpanArg) => {
3595
- rootSpan = rootSpanArg;
3596
- async function streamStep({
3597
- currentStep,
3598
- responseMessages,
3599
- usage
3600
- }) {
3601
- var _a9, _b, _c, _d, _e;
3602
- const includeRawChunks2 = self.includeRawChunks;
3603
- stepFinish = new DelayedPromise();
3604
- const initialPrompt = await standardizePrompt({
3605
- system,
3606
- prompt,
3607
- messages
3608
- });
3609
- const stepInputMessages = [
3610
- ...initialPrompt.messages,
3611
- ...responseMessages
3612
- ];
3613
- const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
3614
- model,
3615
- steps: recordedSteps,
3616
- stepNumber: recordedSteps.length,
3617
- messages: stepInputMessages
3618
- }));
3619
- const promptMessages = await convertToLanguageModelPrompt({
3620
- prompt: {
3621
- system: (_a9 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a9 : initialPrompt.system,
3622
- messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
3623
- },
3624
- supportedUrls: await model.supportedUrls
3625
- });
3626
- const stepModel = resolveLanguageModel(
3627
- (_c = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _c : model
3628
- );
3629
- const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
3630
- tools,
3631
- toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
3632
- activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
3633
- });
3634
- const {
3635
- result: { stream: stream2, response, request },
3636
- doStreamSpan,
3637
- startTimestampMs
3638
- } = await retry(
3639
- () => recordSpan({
3640
- name: "ai.streamText.doStream",
3641
- attributes: selectTelemetryAttributes({
3642
- telemetry,
3643
- attributes: {
3644
- ...assembleOperationName({
3645
- operationId: "ai.streamText.doStream",
3646
- telemetry
3647
- }),
3648
- ...baseTelemetryAttributes,
3649
- // model:
3650
- "ai.model.provider": stepModel.provider,
3651
- "ai.model.id": stepModel.modelId,
3652
- // prompt:
3653
- "ai.prompt.messages": {
3654
- input: () => stringifyForTelemetry(promptMessages)
3655
- },
3656
- "ai.prompt.tools": {
3657
- // convert the language model level tools:
3658
- input: () => stepTools == null ? void 0 : stepTools.map((tool) => JSON.stringify(tool))
3659
- },
3660
- "ai.prompt.toolChoice": {
3661
- input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
3662
- },
3663
- // standardized gen-ai llm span attributes:
3664
- "gen_ai.system": stepModel.provider,
3665
- "gen_ai.request.model": stepModel.modelId,
3666
- "gen_ai.request.frequency_penalty": callSettings.frequencyPenalty,
3667
- "gen_ai.request.max_tokens": callSettings.maxOutputTokens,
3668
- "gen_ai.request.presence_penalty": callSettings.presencePenalty,
3669
- "gen_ai.request.stop_sequences": callSettings.stopSequences,
3670
- "gen_ai.request.temperature": callSettings.temperature,
3671
- "gen_ai.request.top_k": callSettings.topK,
3672
- "gen_ai.request.top_p": callSettings.topP
3673
- }
3674
- }),
3675
- tracer,
3676
- endWhenDone: false,
3677
- fn: async (doStreamSpan2) => {
3678
- return {
3679
- startTimestampMs: now2(),
3680
- // get before the call
3681
- doStreamSpan: doStreamSpan2,
3682
- result: await stepModel.doStream({
3683
- ...callSettings,
3684
- tools: stepTools,
3685
- toolChoice: stepToolChoice,
3686
- responseFormat: output == null ? void 0 : output.responseFormat,
3687
- prompt: promptMessages,
3688
- providerOptions,
3689
- abortSignal,
3690
- headers,
3691
- includeRawChunks: includeRawChunks2
3692
- })
3693
- };
3694
- }
3695
- })
3696
- );
3697
- const streamWithToolResults = runToolsTransformation({
3698
- tools,
3699
- generatorStream: stream2,
3700
- tracer,
3701
- telemetry,
3702
- system,
3703
- messages: stepInputMessages,
3704
- repairToolCall,
3705
- abortSignal
3706
- });
3707
- const stepRequest = request != null ? request : {};
3708
- const stepToolCalls = [];
3709
- const stepToolOutputs = [];
3710
- let warnings;
3711
- const activeToolCallToolNames = {};
3712
- let stepFinishReason = "unknown";
3713
- let stepUsage = {
3714
- inputTokens: void 0,
3715
- outputTokens: void 0,
3716
- totalTokens: void 0
3717
- };
3718
- let stepProviderMetadata;
3719
- let stepFirstChunk = true;
3720
- let stepResponse = {
3721
- id: generateId2(),
3722
- timestamp: currentDate(),
3723
- modelId: model.modelId
3724
- };
3725
- let activeText = "";
3726
- self.addStream(
3727
- streamWithToolResults.pipeThrough(
3728
- new TransformStream({
3729
- async transform(chunk, controller) {
3730
- var _a10, _b2, _c2, _d2;
3731
- if (chunk.type === "stream-start") {
3732
- warnings = chunk.warnings;
3733
- return;
3734
- }
3735
- if (stepFirstChunk) {
3736
- const msToFirstChunk = now2() - startTimestampMs;
3737
- stepFirstChunk = false;
3738
- doStreamSpan.addEvent("ai.stream.firstChunk", {
3739
- "ai.response.msToFirstChunk": msToFirstChunk
3740
- });
3741
- doStreamSpan.setAttributes({
3742
- "ai.response.msToFirstChunk": msToFirstChunk
3743
- });
3744
- controller.enqueue({
3745
- type: "start-step",
3746
- request: stepRequest,
3747
- warnings: warnings != null ? warnings : []
3748
- });
3749
- }
3750
- const chunkType = chunk.type;
3751
- switch (chunkType) {
3752
- case "text-start":
3753
- case "text-end": {
3754
- controller.enqueue(chunk);
3755
- break;
3756
- }
3757
- case "text-delta": {
3758
- if (chunk.delta.length > 0) {
3759
- controller.enqueue({
3760
- type: "text",
3761
- id: chunk.id,
3762
- text: chunk.delta,
3763
- providerMetadata: chunk.providerMetadata
3764
- });
3765
- activeText += chunk.delta;
3766
- }
3767
- break;
3768
- }
3769
- case "reasoning-start":
3770
- case "reasoning-end": {
3771
- controller.enqueue(chunk);
3772
- break;
3773
- }
3774
- case "reasoning-delta": {
3775
- controller.enqueue({
3776
- type: "reasoning",
3777
- id: chunk.id,
3778
- text: chunk.delta,
3779
- providerMetadata: chunk.providerMetadata
3780
- });
3781
- break;
3782
- }
3783
- case "tool-call": {
3784
- controller.enqueue(chunk);
3785
- stepToolCalls.push(chunk);
3786
- break;
3787
- }
3788
- case "tool-result": {
3789
- controller.enqueue(chunk);
3790
- stepToolOutputs.push(chunk);
3791
- break;
3792
- }
3793
- case "tool-error": {
3794
- controller.enqueue(chunk);
3795
- stepToolOutputs.push(chunk);
3796
- break;
3797
- }
3798
- case "response-metadata": {
3799
- stepResponse = {
3800
- id: (_a10 = chunk.id) != null ? _a10 : stepResponse.id,
3801
- timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
3802
- modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
3803
- };
3804
- break;
3805
- }
3806
- case "finish": {
3807
- stepUsage = chunk.usage;
3808
- stepFinishReason = chunk.finishReason;
3809
- stepProviderMetadata = chunk.providerMetadata;
3810
- const msToFinish = now2() - startTimestampMs;
3811
- doStreamSpan.addEvent("ai.stream.finish");
3812
- doStreamSpan.setAttributes({
3813
- "ai.response.msToFinish": msToFinish,
3814
- "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
3815
- });
3816
- break;
3817
- }
3818
- case "file": {
3819
- controller.enqueue(chunk);
3820
- break;
3821
- }
3822
- case "source": {
3823
- controller.enqueue(chunk);
3824
- break;
3825
- }
3826
- case "tool-input-start": {
3827
- activeToolCallToolNames[chunk.id] = chunk.toolName;
3828
- const tool = tools == null ? void 0 : tools[chunk.toolName];
3829
- if ((tool == null ? void 0 : tool.onInputStart) != null) {
3830
- await tool.onInputStart({
3831
- toolCallId: chunk.id,
3832
- messages: stepInputMessages,
3833
- abortSignal
3834
- });
3835
- }
3836
- controller.enqueue(chunk);
3837
- break;
3838
- }
3839
- case "tool-input-end": {
3840
- delete activeToolCallToolNames[chunk.id];
3841
- controller.enqueue(chunk);
3842
- break;
3843
- }
3844
- case "tool-input-delta": {
3845
- const toolName = activeToolCallToolNames[chunk.id];
3846
- const tool = tools == null ? void 0 : tools[toolName];
3847
- if ((tool == null ? void 0 : tool.onInputDelta) != null) {
3848
- await tool.onInputDelta({
3849
- inputTextDelta: chunk.delta,
3850
- toolCallId: chunk.id,
3851
- messages: stepInputMessages,
3852
- abortSignal
3853
- });
3854
- }
3855
- controller.enqueue(chunk);
3856
- break;
3857
- }
3858
- case "error": {
3859
- controller.enqueue(chunk);
3860
- stepFinishReason = "error";
3861
- break;
3862
- }
3863
- case "raw": {
3864
- if (includeRawChunks2) {
3865
- controller.enqueue(chunk);
3866
- }
3867
- break;
3868
- }
3869
- default: {
3870
- const exhaustiveCheck = chunkType;
3871
- throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
3872
- }
3873
- }
3874
- },
3875
- // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
3876
- async flush(controller) {
3877
- const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
3878
- try {
3879
- doStreamSpan.setAttributes(
3880
- selectTelemetryAttributes({
3881
- telemetry,
3882
- attributes: {
3883
- "ai.response.finishReason": stepFinishReason,
3884
- "ai.response.text": {
3885
- output: () => activeText
3886
- },
3887
- "ai.response.toolCalls": {
3888
- output: () => stepToolCallsJson
3889
- },
3890
- "ai.response.id": stepResponse.id,
3891
- "ai.response.model": stepResponse.modelId,
3892
- "ai.response.timestamp": stepResponse.timestamp.toISOString(),
3893
- "ai.response.providerMetadata": JSON.stringify(stepProviderMetadata),
3894
- "ai.usage.inputTokens": stepUsage.inputTokens,
3895
- "ai.usage.outputTokens": stepUsage.outputTokens,
3896
- "ai.usage.totalTokens": stepUsage.totalTokens,
3897
- "ai.usage.reasoningTokens": stepUsage.reasoningTokens,
3898
- "ai.usage.cachedInputTokens": stepUsage.cachedInputTokens,
3899
- // standardized gen-ai llm span attributes:
3900
- "gen_ai.response.finish_reasons": [stepFinishReason],
3901
- "gen_ai.response.id": stepResponse.id,
3902
- "gen_ai.response.model": stepResponse.modelId,
3903
- "gen_ai.usage.input_tokens": stepUsage.inputTokens,
3904
- "gen_ai.usage.output_tokens": stepUsage.outputTokens
3905
- }
3906
- })
3907
- );
3908
- } catch (error) {
3909
- } finally {
3910
- doStreamSpan.end();
3911
- }
3912
- controller.enqueue({
3913
- type: "finish-step",
3914
- finishReason: stepFinishReason,
3915
- usage: stepUsage,
3916
- providerMetadata: stepProviderMetadata,
3917
- response: {
3918
- ...stepResponse,
3919
- headers: response == null ? void 0 : response.headers
3920
- }
3921
- });
3922
- const combinedUsage = addLanguageModelUsage(usage, stepUsage);
3923
- await stepFinish.promise;
3924
- const clientToolCalls = stepToolCalls.filter(
3925
- (toolCall) => toolCall.providerExecuted !== true
3926
- );
3927
- const clientToolOutputs = stepToolOutputs.filter(
3928
- (toolOutput) => toolOutput.providerExecuted !== true
3929
- );
3930
- if (clientToolCalls.length > 0 && // all current tool calls have outputs (incl. execution errors):
3931
- clientToolOutputs.length === clientToolCalls.length && // continue until a stop condition is met:
3932
- !await isStopConditionMet({
3933
- stopConditions,
3934
- steps: recordedSteps
3935
- })) {
3936
- responseMessages.push(
3937
- ...toResponseMessages({
3938
- content: (
3939
- // use transformed content to create the messages for the next step:
3940
- recordedSteps[recordedSteps.length - 1].content
3941
- ),
3942
- tools
3943
- })
3944
- );
3945
- try {
3946
- await streamStep({
3947
- currentStep: currentStep + 1,
3948
- responseMessages,
3949
- usage: combinedUsage
3950
- });
3951
- } catch (error) {
3952
- controller.enqueue({
3953
- type: "error",
3954
- error
3955
- });
3956
- self.closeStream();
3957
- }
3958
- } else {
3959
- controller.enqueue({
3960
- type: "finish",
3961
- finishReason: stepFinishReason,
3962
- totalUsage: combinedUsage
3963
- });
3964
- self.closeStream();
3965
- }
3966
- }
3967
- })
3968
- )
3969
- );
3970
- }
3971
- await streamStep({
3972
- currentStep: 0,
3973
- responseMessages: [],
3974
- usage: {
3975
- inputTokens: void 0,
3976
- outputTokens: void 0,
3977
- totalTokens: void 0
3978
- }
3979
- });
3980
- }
3981
- }).catch((error) => {
3982
- self.addStream(
3983
- new ReadableStream({
3984
- start(controller) {
3985
- controller.enqueue({ type: "error", error });
3986
- controller.close();
3987
- }
3988
- })
3989
- );
3990
- self.closeStream();
3991
- });
3992
- }
3993
- get steps() {
3994
- return this._steps.promise;
3995
- }
3996
- get finalStep() {
3997
- return this.steps.then((steps) => steps[steps.length - 1]);
3998
- }
3999
- get content() {
4000
- return this.finalStep.then((step) => step.content);
4001
- }
4002
- get warnings() {
4003
- return this.finalStep.then((step) => step.warnings);
4004
- }
4005
- get providerMetadata() {
4006
- return this.finalStep.then((step) => step.providerMetadata);
4007
- }
4008
- get text() {
4009
- return this.finalStep.then((step) => step.text);
4010
- }
4011
- get reasoningText() {
4012
- return this.finalStep.then((step) => step.reasoningText);
4013
- }
4014
- get reasoning() {
4015
- return this.finalStep.then((step) => step.reasoning);
4016
- }
4017
- get sources() {
4018
- return this.finalStep.then((step) => step.sources);
4019
- }
4020
- get files() {
4021
- return this.finalStep.then((step) => step.files);
4022
- }
4023
- get toolCalls() {
4024
- return this.finalStep.then((step) => step.toolCalls);
4025
- }
4026
- get toolResults() {
4027
- return this.finalStep.then((step) => step.toolResults);
4028
- }
4029
- get usage() {
4030
- return this.finalStep.then((step) => step.usage);
4031
- }
4032
- get request() {
4033
- return this.finalStep.then((step) => step.request);
4034
- }
4035
- get response() {
4036
- return this.finalStep.then((step) => step.response);
4037
- }
4038
- get totalUsage() {
4039
- return this._totalUsage.promise;
4040
- }
4041
- get finishReason() {
4042
- return this._finishReason.promise;
4043
- }
4044
- /**
4045
- Split out a new stream from the original stream.
4046
- The original stream is replaced to allow for further splitting,
4047
- since we do not know how many times the stream will be split.
4048
-
4049
- Note: this leads to buffering the stream content on the server.
4050
- However, the LLM results are expected to be small enough to not cause issues.
4051
- */
4052
- teeStream() {
4053
- const [stream1, stream2] = this.baseStream.tee();
4054
- this.baseStream = stream2;
4055
- return stream1;
4056
- }
4057
- get textStream() {
4058
- return createAsyncIterableStream(
4059
- this.teeStream().pipeThrough(
4060
- new TransformStream({
4061
- transform({ part }, controller) {
4062
- if (part.type === "text") {
4063
- controller.enqueue(part.text);
4064
- }
4065
- }
4066
- })
4067
- )
4068
- );
4069
- }
4070
- get fullStream() {
4071
- return createAsyncIterableStream(
4072
- this.teeStream().pipeThrough(
4073
- new TransformStream({
4074
- transform({ part }, controller) {
4075
- controller.enqueue(part);
4076
- }
4077
- })
4078
- )
4079
- );
4080
- }
4081
- async consumeStream(options) {
4082
- var _a9;
4083
- try {
4084
- await consumeStream({
4085
- stream: this.fullStream,
4086
- onError: options == null ? void 0 : options.onError
4087
- });
4088
- } catch (error) {
4089
- (_a9 = options == null ? void 0 : options.onError) == null ? void 0 : _a9.call(options, error);
4090
- }
4091
- }
4092
- get experimental_partialOutputStream() {
4093
- if (this.output == null) {
4094
- throw new NoOutputSpecifiedError();
4095
- }
4096
- return createAsyncIterableStream(
4097
- this.teeStream().pipeThrough(
4098
- new TransformStream({
4099
- transform({ partialOutput }, controller) {
4100
- if (partialOutput != null) {
4101
- controller.enqueue(partialOutput);
4102
- }
4103
- }
4104
- })
4105
- )
4106
- );
4107
- }
4108
- toUIMessageStream({
4109
- originalMessages,
4110
- generateMessageId,
4111
- onFinish,
4112
- messageMetadata,
4113
- sendReasoning = true,
4114
- sendSources = false,
4115
- sendStart = true,
4116
- sendFinish = true,
4117
- onError = import_provider16.getErrorMessage
4118
- } = {}) {
4119
- const responseMessageId = generateMessageId != null ? getResponseUIMessageId({
4120
- originalMessages,
4121
- responseMessageId: generateMessageId
4122
- }) : void 0;
4123
- const baseStream = this.fullStream.pipeThrough(
4124
- new TransformStream({
4125
- transform: async (part, controller) => {
4126
- const messageMetadataValue = messageMetadata == null ? void 0 : messageMetadata({ part });
4127
- const partType = part.type;
4128
- switch (partType) {
4129
- case "text-start": {
4130
- controller.enqueue({
4131
- type: "text-start",
4132
- id: part.id,
4133
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4134
- });
4135
- break;
4136
- }
4137
- case "text": {
4138
- controller.enqueue({
4139
- type: "text-delta",
4140
- id: part.id,
4141
- delta: part.text,
4142
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4143
- });
4144
- break;
4145
- }
4146
- case "text-end": {
4147
- controller.enqueue({
4148
- type: "text-end",
4149
- id: part.id,
4150
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4151
- });
4152
- break;
4153
- }
4154
- case "reasoning-start": {
4155
- controller.enqueue({
4156
- type: "reasoning-start",
4157
- id: part.id,
4158
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4159
- });
4160
- break;
4161
- }
4162
- case "reasoning": {
4163
- if (sendReasoning) {
4164
- controller.enqueue({
4165
- type: "reasoning-delta",
4166
- id: part.id,
4167
- delta: part.text,
4168
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4169
- });
4170
- }
4171
- break;
4172
- }
4173
- case "reasoning-end": {
4174
- controller.enqueue({
4175
- type: "reasoning-end",
4176
- id: part.id,
4177
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4178
- });
4179
- break;
4180
- }
4181
- case "file": {
4182
- controller.enqueue({
4183
- type: "file",
4184
- mediaType: part.file.mediaType,
4185
- url: `data:${part.file.mediaType};base64,${part.file.base64}`
4186
- });
4187
- break;
4188
- }
4189
- case "source": {
4190
- if (sendSources && part.sourceType === "url") {
4191
- controller.enqueue({
4192
- type: "source-url",
4193
- sourceId: part.id,
4194
- url: part.url,
4195
- title: part.title,
4196
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4197
- });
4198
- }
4199
- if (sendSources && part.sourceType === "document") {
4200
- controller.enqueue({
4201
- type: "source-document",
4202
- sourceId: part.id,
4203
- mediaType: part.mediaType,
4204
- title: part.title,
4205
- filename: part.filename,
4206
- ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4207
- });
4208
- }
4209
- break;
4210
- }
4211
- case "tool-input-start": {
4212
- controller.enqueue({
4213
- type: "tool-input-start",
4214
- toolCallId: part.id,
4215
- toolName: part.toolName,
4216
- providerExecuted: part.providerExecuted
4217
- });
4218
- break;
4219
- }
4220
- case "tool-input-delta": {
4221
- controller.enqueue({
4222
- type: "tool-input-delta",
4223
- toolCallId: part.id,
4224
- inputTextDelta: part.delta
4225
- });
4226
- break;
4227
- }
4228
- case "tool-call": {
4229
- controller.enqueue({
4230
- type: "tool-input-available",
4231
- toolCallId: part.toolCallId,
4232
- toolName: part.toolName,
4233
- input: part.input,
4234
- providerExecuted: part.providerExecuted,
4235
- providerMetadata: part.providerMetadata
4236
- });
4237
- break;
4238
- }
4239
- case "tool-result": {
4240
- controller.enqueue({
4241
- type: "tool-output-available",
4242
- toolCallId: part.toolCallId,
4243
- output: part.output,
4244
- providerExecuted: part.providerExecuted
4245
- });
4246
- break;
4247
- }
4248
- case "tool-error": {
4249
- controller.enqueue({
4250
- type: "tool-output-error",
4251
- toolCallId: part.toolCallId,
4252
- errorText: onError(part.error),
4253
- providerExecuted: part.providerExecuted
4254
- });
4255
- break;
4256
- }
4257
- case "error": {
4258
- controller.enqueue({
4259
- type: "error",
4260
- errorText: onError(part.error)
4261
- });
4262
- break;
4263
- }
4264
- case "start-step": {
4265
- controller.enqueue({ type: "start-step" });
4266
- break;
4267
- }
4268
- case "finish-step": {
4269
- controller.enqueue({ type: "finish-step" });
4270
- break;
4271
- }
4272
- case "start": {
4273
- if (sendStart) {
4274
- controller.enqueue({
4275
- type: "start",
4276
- ...messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {},
4277
- ...responseMessageId != null ? { messageId: responseMessageId } : {}
4278
- });
4279
- }
4280
- break;
4281
- }
4282
- case "finish": {
4283
- if (sendFinish) {
4284
- controller.enqueue({
4285
- type: "finish",
4286
- ...messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {}
4287
- });
4288
- }
4289
- break;
4290
- }
4291
- case "abort": {
4292
- controller.enqueue(part);
4293
- break;
4294
- }
4295
- case "tool-input-end": {
4296
- break;
4297
- }
4298
- case "raw": {
4299
- break;
4300
- }
4301
- default: {
4302
- const exhaustiveCheck = partType;
4303
- throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
4304
- }
4305
- }
4306
- if (messageMetadataValue != null && partType !== "start" && partType !== "finish") {
4307
- controller.enqueue({
4308
- type: "message-metadata",
4309
- messageMetadata: messageMetadataValue
4310
- });
4311
- }
4312
- }
4313
- })
4314
- );
4315
- return createAsyncIterableStream(
4316
- handleUIMessageStreamFinish({
4317
- stream: baseStream,
4318
- messageId: responseMessageId != null ? responseMessageId : generateMessageId == null ? void 0 : generateMessageId(),
4319
- originalMessages,
4320
- onFinish,
4321
- onError
4322
- })
4323
- );
4324
- }
4325
- pipeUIMessageStreamToResponse(response, {
4326
- originalMessages,
4327
- generateMessageId,
4328
- onFinish,
4329
- messageMetadata,
4330
- sendReasoning,
4331
- sendSources,
4332
- sendFinish,
4333
- sendStart,
4334
- onError,
4335
- ...init
4336
- } = {}) {
4337
- pipeUIMessageStreamToResponse({
4338
- response,
4339
- stream: this.toUIMessageStream({
4340
- originalMessages,
4341
- generateMessageId,
4342
- onFinish,
4343
- messageMetadata,
4344
- sendReasoning,
4345
- sendSources,
4346
- sendFinish,
4347
- sendStart,
4348
- onError
4349
- }),
4350
- ...init
4351
- });
4352
- }
4353
- pipeTextStreamToResponse(response, init) {
4354
- pipeTextStreamToResponse({
4355
- response,
4356
- textStream: this.textStream,
4357
- ...init
4358
- });
4359
- }
4360
- toUIMessageStreamResponse({
4361
- originalMessages,
4362
- generateMessageId,
4363
- onFinish,
4364
- messageMetadata,
4365
- sendReasoning,
4366
- sendSources,
4367
- sendFinish,
4368
- sendStart,
4369
- onError,
4370
- ...init
4371
- } = {}) {
4372
- return createUIMessageStreamResponse({
4373
- stream: this.toUIMessageStream({
4374
- originalMessages,
4375
- generateMessageId,
4376
- onFinish,
4377
- messageMetadata,
4378
- sendReasoning,
4379
- sendSources,
4380
- sendFinish,
4381
- sendStart,
4382
- onError
4383
- }),
4384
- ...init
4385
- });
4386
- }
4387
- toTextStreamResponse(init) {
4388
- return createTextStreamResponse({
4389
- textStream: this.textStream,
4390
- ...init
4391
- });
4392
- }
4393
- };
4394
-
4395
- // src/bin/ai.ts
4396
- var import_fs = require("fs");
4397
- var import_path = require("path");
4398
- var import_gateway3 = require("@ai-sdk/gateway");
4399
function isStdinAvailable() {
  // stdin counts as "available" when it is not attached to a terminal,
  // i.e. data is being piped or redirected into the process.
  return process.stdin.isTTY !== true;
}
4402
async function readStdin() {
  // Collect all of stdin as UTF-8 text and resolve with the trimmed result.
  // Fix: the original registered only "data"/"end" listeners, so a stream
  // "error" left the promise pending forever and hung the CLI. We now reject
  // on error; callers await this inside main()'s try/catch, which reports
  // the failure and exits.
  return new Promise((resolve2, reject) => {
    let data = "";
    process.stdin.setEncoding("utf8");
    process.stdin.on("data", (chunk) => {
      data += chunk;
    });
    process.stdin.on("end", () => {
      resolve2(data.trim());
    });
    process.stdin.on("error", (error) => {
      reject(error);
    });
  });
}
4414
function getMediaType(filePath) {
  // Infer a MIME type from the file extension (case-insensitive);
  // anything unrecognized falls back to plain text.
  const MIME_BY_EXTENSION = {
    js: "application/javascript",
    ts: "application/typescript",
    jsx: "text/jsx",
    tsx: "text/tsx",
    json: "application/json",
    md: "text/markdown",
    txt: "text/plain",
    py: "text/x-python",
    html: "text/html",
    css: "text/css",
    xml: "application/xml",
    yaml: "application/yaml",
    yml: "application/yaml",
    jpg: "image/jpeg",
    jpeg: "image/jpeg",
    png: "image/png",
    gif: "image/gif",
    webp: "image/webp",
    svg: "image/svg+xml",
    bmp: "image/bmp",
    tiff: "image/tiff",
    tif: "image/tiff"
  };
  // split(".").pop() yields the whole name when there is no dot, which is
  // never a map key, so extensionless files also get "text/plain".
  const extension = filePath.split(".").pop().toLowerCase();
  return MIME_BY_EXTENSION[extension] || "text/plain";
}
4443
function readFileContent(filePath) {
  // Resolve relative to the current working directory and fail fast with a
  // readable message when the file does not exist.
  const absolutePath = (0, import_path.resolve)(filePath);
  if (!(0, import_fs.existsSync)(absolutePath)) {
    throw new Error(`File not found: ${filePath}`);
  }
  const mediaType = getMediaType(filePath);
  // Images are encoded as base64 data URLs; everything else is read as
  // UTF-8 text and inlined verbatim.
  const content = mediaType.startsWith("image/")
    ? `data:${mediaType};base64,${(0, import_fs.readFileSync)(absolutePath).toString("base64")}`
    : (0, import_fs.readFileSync)(absolutePath, "utf8");
  // `name` keeps the user-supplied (possibly relative) path for display.
  return { name: filePath, content, mediaType };
}
4463
function parseArgs() {
  // Parse CLI flags into an options object. Non-flag tokens are collected
  // and joined into the prompt; environment variables supply defaults for
  // model, verbosity, and system message.
  const argv = process.argv.slice(2);
  const options = {
    model: process.env.AI_DEFAULT_MODEL || "openai/gpt-4",
    files: [],
    help: false,
    version: false,
    verbose: process.env.AI_VERBOSE === "true",
    system: process.env.AI_SYSTEM
  };
  const promptParts = [];
  let index = 0;
  // Consume the value that must follow a value-taking flag, or throw the
  // same "<Label> option requires a value" error as before.
  const nextValue = (label) => {
    if (index + 1 >= argv.length) {
      throw new Error(`${label} option requires a value`);
    }
    index += 1;
    return argv[index];
  };
  while (index < argv.length) {
    const token = argv[index];
    if (token === "-h" || token === "--help") {
      options.help = true;
    } else if (token === "-V" || token === "--version") {
      options.version = true;
    } else if (token === "-v" || token === "--verbose") {
      options.verbose = true;
    } else if (token === "-m" || token === "--model") {
      options.model = nextValue("Model");
    } else if (token === "-f" || token === "--file") {
      options.files.push(nextValue("File"));
    } else if (token === "-s" || token === "--system") {
      options.system = nextValue("System");
    } else if (token.startsWith("-")) {
      throw new Error(`Unknown option: ${token}`);
    } else {
      promptParts.push(token);
    }
    index += 1;
  }
  if (promptParts.length > 0) {
    options.prompt = promptParts.join(" ");
  }
  return options;
}
4531
function showHelp() {
  // Print CLI usage, authentication setup, environment variables, and
  // examples to stdout.
  // NOTE(review): all whitespace inside this template literal is
  // user-visible terminal output — the original column alignment was lost
  // in extraction; confirm against the published package before relying on
  // exact spacing.
  console.log(`Usage: ai [options] [prompt]

AI CLI - Stream text generation from various AI models

Options:
  -m, --model <model>     Model to use (default: "openai/gpt-4")
                          Format: provider/model (e.g., anthropic/claude-3-5-sonnet)
  -f, --file <file>       Attach file(s) to prompt
  -s, --system <message>  System message
  -v, --verbose           Show detailed output
  -h, --help              Show help
  -V, --version           Show version

Authentication (required):
  export AI_GATEWAY_API_KEY="your-key"    # Get from Vercel Dashboard (AI tab)

Environment Variables:
  AI_DEFAULT_MODEL: Default model to use
  AI_SYSTEM: Default system message
  AI_VERBOSE: Set to 'true' for detailed output

Examples:
  npx ai "Hello, world!"
  npx ai "Write a poem" -m anthropic/claude-3-5-sonnet
  npx ai "Explain this code" -f script.js -f README.md
  echo "What is life?" | npx ai
  cat file.txt | npx ai "Summarize this content"
  npx ai -f package.json "What dependencies does this project have?"

Unix-style piping:
  echo "Hello world" | npx ai "Translate to French"
  cat README.md | npx ai "Summarize this"
  curl -s https://api.github.com/repos/vercel/ai | npx ai "What is this repository about?"

The gateway supports OpenAI, Anthropic, Google, Groq, and more providers.

For detailed setup instructions, visit: https://ai-sdk.dev/docs/cli/authentication`);
}
4570
function showVersion() {
  // NOTE(review): hardcoded version string; the enclosing package is
  // versioned independently (5.0.0-beta.x), so this likely drifts from
  // package.json — confirm whether it should be read from there instead.
  const CLI_VERSION = "1.0.0";
  console.log(CLI_VERSION);
}
4573
function resolveModel(modelString) {
  // Resolve a "provider/model" string (e.g. "anthropic/claude-3-5-sonnet")
  // through the Vercel AI Gateway registry.
  const { gateway } = import_gateway3;
  return gateway.languageModel(modelString);
}
4576
function formatAttachedFiles(files) {
  // Render non-image attachments as labelled sections to append to the
  // text prompt. Image attachments are excluded here — they are sent as
  // separate image parts by the caller.
  const textFiles = files.filter(
    (file) => !(file.mediaType && file.mediaType.startsWith("image/"))
  );
  if (textFiles.length === 0) {
    return "";
  }
  const sections = textFiles.map(
    (file) => `
--- ${file.name} ---
${file.content}
`
  );
  return `

Attached files:
${sections.join("")}`;
}
4595
async function main() {
  // CLI entry point: parse flags, assemble the prompt (positional args +
  // piped stdin + file attachments), resolve the model through the gateway,
  // and stream the completion to stdout. All failure paths exit(1) with a
  // message on stderr.
  try {
    const options = parseArgs();
    if (options.help) {
      showHelp();
      return;
    }
    if (options.version) {
      showVersion();
      return;
    }
    // Piped stdin content is prepended to the positional prompt (or used
    // alone when no prompt argument was given).
    let prompt = options.prompt || "";
    if (isStdinAvailable()) {
      const stdinContent = await readStdin();
      if (stdinContent) {
        prompt = prompt ? `${stdinContent}

${prompt}` : stdinContent;
      }
    }
    if (!prompt.trim()) {
      console.error(
        "Error: No prompt provided. Use --help for usage information."
      );
      process.exit(1);
    }
    // Read every --file attachment up front; any unreadable file is fatal.
    const attachedFiles = [];
    for (const filePath of options.files) {
      try {
        const file = readFileContent(filePath);
        attachedFiles.push(file);
      } catch (error) {
        console.error(
          `Error reading file ${filePath}: ${error instanceof Error ? error.message : "Unknown error"}`
        );
        process.exit(1);
      }
    }
    // Text attachments are inlined into the prompt; images are collected
    // separately and sent as structured image parts below.
    const textPrompt = prompt + formatAttachedFiles(attachedFiles);
    const imageFiles = attachedFiles.filter(
      (f) => {
        var _a9;
        return (_a9 = f.mediaType) == null ? void 0 : _a9.startsWith("image/");
      }
    );
    // When images are attached and the user kept the default model, upgrade
    // to gpt-4o (presumably for vision support — user-chosen models are
    // never overridden).
    if (imageFiles.length > 0 && options.model === "openai/gpt-4") {
      options.model = "openai/gpt-4o";
    }
    // Diagnostics go to stderr so stdout stays clean for Unix-style piping.
    if (options.verbose) {
      console.error(`Using model: ${options.model}`);
      if (attachedFiles.length > 0) {
        console.error(
          `Attached files: ${attachedFiles.map((f) => f.name).join(", ")}`
        );
      }
      console.error("");
    }
    // Fail early with setup instructions when no gateway API key is set.
    const hasApiKey = process.env.AI_GATEWAY_API_KEY;
    if (!hasApiKey) {
      console.error(`Error: Authentication required.

Set up authentication with one of these options:

  # Option 1: Export in current session
  export AI_GATEWAY_API_KEY="your-key-here"
  export AI_DEFAULT_MODEL="anthropic/claude-3-5-sonnet"

  # Option 2: Add to shell profile (~/.bashrc, ~/.zshrc)
  echo 'export AI_GATEWAY_API_KEY="your-key"' >> ~/.bashrc

Get your API key from the Vercel Dashboard (AI tab > API keys).
Use --help for more details and examples.`);
      process.exit(1);
    }
    const model = resolveModel(options.model);
    // With images we must use the structured `messages` form (text part +
    // one image part per attachment); otherwise a plain prompt suffices.
    let messages;
    if (imageFiles.length > 0) {
      const content = [{ type: "text", text: textPrompt }];
      for (const img of imageFiles) {
        content.push({
          type: "image",
          image: img.content
        });
      }
      messages = [{ role: "user", content }];
    }
    const result = await streamText(
      messages ? {
        model,
        messages,
        system: options.system
      } : {
        model,
        prompt: textPrompt,
        system: options.system
      }
    );
    // Stream the completion to stdout as chunks arrive.
    for await (const chunk of result.textStream) {
      process.stdout.write(chunk);
    }
    process.stdout.write("\n");
    // Token usage summary (stderr) — `result.usage` is a promise that
    // settles once the stream has finished.
    if (options.verbose) {
      const usage = await result.usage;
      if (usage) {
        console.error(
          `
Usage: ${usage.inputTokens} prompt + ${usage.outputTokens} completion = ${usage.totalTokens} total tokens`
        );
      }
    }
  } catch (error) {
    console.error(
      `Error: ${error instanceof Error ? error.message : "Unknown error"}`
    );
    process.exit(1);
  }
}
4712
// Exit cleanly on Ctrl-C / termination signals instead of surfacing an
// error or leaving a partially-written line.
process.on("SIGINT", () => {
  process.exit(0);
});
process.on("SIGTERM", () => {
  process.exit(0);
});
// Entry point. main() handles its own errors; this catch covers anything
// that escapes (e.g. a rejection thrown before the try block is entered).
main().catch((error) => {
  console.error(
    `Fatal error: ${error instanceof Error ? error.message : "Unknown error"}`
  );
  process.exit(1);
});
4724
// Annotate the CommonJS export names for ESM import in node:
// (the `0 &&` makes this dead code — it never runs; the bundler emits it so
// Node's CJS named-export detection can statically see the export list)
0 && (module.exports = {
  formatAttachedFiles,
  getMediaType,
  isStdinAvailable,
  main,
  parseArgs,
  readFileContent,
  resolveModel,
  showHelp,
  showVersion
});
//# sourceMappingURL=ai.js.map