ai 5.0.0-beta.2 → 5.0.0-beta.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/ai.js ADDED
@@ -0,0 +1,4686 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
7
+ var __export = (target, all) => {
8
+ for (var name9 in all)
9
+ __defProp(target, name9, { get: all[name9], enumerable: true });
10
+ };
11
+ var __copyProps = (to, from, except, desc) => {
12
+ if (from && typeof from === "object" || typeof from === "function") {
13
+ for (let key of __getOwnPropNames(from))
14
+ if (!__hasOwnProp.call(to, key) && key !== except)
15
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
+ }
17
+ return to;
18
+ };
19
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
20
+
21
+ // src/bin/ai.ts
22
+ var ai_exports = {};
23
+ __export(ai_exports, {
24
+ formatAttachedFiles: () => formatAttachedFiles,
25
+ getMediaType: () => getMediaType,
26
+ isStdinAvailable: () => isStdinAvailable,
27
+ main: () => main,
28
+ parseArgs: () => parseArgs,
29
+ readFileContent: () => readFileContent,
30
+ resolveModel: () => resolveModel,
31
+ showHelp: () => showHelp,
32
+ showVersion: () => showVersion
33
+ });
34
+ module.exports = __toCommonJS(ai_exports);
35
+
36
+ // src/generate-text/stream-text.ts
37
+ var import_provider16 = require("@ai-sdk/provider");
38
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
39
+
40
+ // src/error/no-output-specified-error.ts
41
+ var import_provider = require("@ai-sdk/provider");
42
+ var name = "AI_NoOutputSpecifiedError";
43
+ var marker = `vercel.ai.error.${name}`;
44
+ var symbol = Symbol.for(marker);
45
+ var _a;
46
+ var NoOutputSpecifiedError = class extends import_provider.AISDKError {
47
+ // used in isInstance
48
+ constructor({ message = "No output specified." } = {}) {
49
+ super({ name, message });
50
+ this[_a] = true;
51
+ }
52
+ static isInstance(error) {
53
+ return import_provider.AISDKError.hasMarker(error, marker);
54
+ }
55
+ };
56
+ _a = symbol;
57
+
58
+ // src/util/prepare-headers.ts
59
+ function prepareHeaders(headers, defaultHeaders) {
60
+ const responseHeaders = new Headers(headers != null ? headers : {});
61
+ for (const [key, value] of Object.entries(defaultHeaders)) {
62
+ if (!responseHeaders.has(key)) {
63
+ responseHeaders.set(key, value);
64
+ }
65
+ }
66
+ return responseHeaders;
67
+ }
68
+
69
+ // src/text-stream/create-text-stream-response.ts
70
+ function createTextStreamResponse({
71
+ status,
72
+ statusText,
73
+ headers,
74
+ textStream
75
+ }) {
76
+ return new Response(textStream.pipeThrough(new TextEncoderStream()), {
77
+ status: status != null ? status : 200,
78
+ statusText,
79
+ headers: prepareHeaders(headers, {
80
+ "content-type": "text/plain; charset=utf-8"
81
+ })
82
+ });
83
+ }
84
+
85
+ // src/util/write-to-server-response.ts
86
+ function writeToServerResponse({
87
+ response,
88
+ status,
89
+ statusText,
90
+ headers,
91
+ stream
92
+ }) {
93
+ response.writeHead(status != null ? status : 200, statusText, headers);
94
+ const reader = stream.getReader();
95
+ const read = async () => {
96
+ try {
97
+ while (true) {
98
+ const { done, value } = await reader.read();
99
+ if (done)
100
+ break;
101
+ response.write(value);
102
+ }
103
+ } catch (error) {
104
+ throw error;
105
+ } finally {
106
+ response.end();
107
+ }
108
+ };
109
+ read();
110
+ }
111
+
112
+ // src/text-stream/pipe-text-stream-to-response.ts
113
+ function pipeTextStreamToResponse({
114
+ response,
115
+ status,
116
+ statusText,
117
+ headers,
118
+ textStream
119
+ }) {
120
+ writeToServerResponse({
121
+ response,
122
+ status,
123
+ statusText,
124
+ headers: Object.fromEntries(
125
+ prepareHeaders(headers, {
126
+ "content-type": "text/plain; charset=utf-8"
127
+ }).entries()
128
+ ),
129
+ stream: textStream.pipeThrough(new TextEncoderStream())
130
+ });
131
+ }
132
+
133
+ // src/ui-message-stream/json-to-sse-transform-stream.ts
134
+ var JsonToSseTransformStream = class extends TransformStream {
135
+ constructor() {
136
+ super({
137
+ transform(part, controller) {
138
+ controller.enqueue(`data: ${JSON.stringify(part)}
139
+
140
+ `);
141
+ },
142
+ flush(controller) {
143
+ controller.enqueue("data: [DONE]\n\n");
144
+ }
145
+ });
146
+ }
147
+ };
148
+
149
+ // src/ui-message-stream/ui-message-stream-headers.ts
150
+ var UI_MESSAGE_STREAM_HEADERS = {
151
+ "content-type": "text/event-stream",
152
+ "cache-control": "no-cache",
153
+ connection: "keep-alive",
154
+ "x-vercel-ai-ui-message-stream": "v1",
155
+ "x-accel-buffering": "no"
156
+ // disable nginx buffering
157
+ };
158
+
159
+ // src/ui-message-stream/create-ui-message-stream-response.ts
160
+ function createUIMessageStreamResponse({
161
+ status,
162
+ statusText,
163
+ headers,
164
+ stream,
165
+ consumeSseStream
166
+ }) {
167
+ let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
168
+ if (consumeSseStream) {
169
+ const [stream1, stream2] = sseStream.tee();
170
+ sseStream = stream1;
171
+ consumeSseStream({ stream: stream2 });
172
+ }
173
+ return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
174
+ status,
175
+ statusText,
176
+ headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
177
+ });
178
+ }
179
+
180
+ // src/ui-message-stream/get-response-ui-message-id.ts
181
+ function getResponseUIMessageId({
182
+ originalMessages,
183
+ responseMessageId
184
+ }) {
185
+ if (originalMessages == null) {
186
+ return void 0;
187
+ }
188
+ const lastMessage = originalMessages[originalMessages.length - 1];
189
+ return (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
190
+ }
191
+
192
+ // src/ui/process-ui-message-stream.ts
193
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
194
+
195
+ // src/ui-message-stream/ui-message-chunks.ts
196
+ var import_v43 = require("zod/v4");
197
+
198
+ // src/types/provider-metadata.ts
199
+ var import_v42 = require("zod/v4");
200
+
201
+ // src/types/json-value.ts
202
+ var import_v4 = require("zod/v4");
203
+ var jsonValueSchema = import_v4.z.lazy(
204
+ () => import_v4.z.union([
205
+ import_v4.z.null(),
206
+ import_v4.z.string(),
207
+ import_v4.z.number(),
208
+ import_v4.z.boolean(),
209
+ import_v4.z.record(import_v4.z.string(), jsonValueSchema),
210
+ import_v4.z.array(jsonValueSchema)
211
+ ])
212
+ );
213
+
214
+ // src/types/provider-metadata.ts
215
+ var providerMetadataSchema = import_v42.z.record(
216
+ import_v42.z.string(),
217
+ import_v42.z.record(import_v42.z.string(), jsonValueSchema)
218
+ );
219
+
220
+ // src/ui-message-stream/ui-message-chunks.ts
221
+ var uiMessageChunkSchema = import_v43.z.union([
222
+ import_v43.z.strictObject({
223
+ type: import_v43.z.literal("text-start"),
224
+ id: import_v43.z.string(),
225
+ providerMetadata: providerMetadataSchema.optional()
226
+ }),
227
+ import_v43.z.strictObject({
228
+ type: import_v43.z.literal("text-delta"),
229
+ id: import_v43.z.string(),
230
+ delta: import_v43.z.string(),
231
+ providerMetadata: providerMetadataSchema.optional()
232
+ }),
233
+ import_v43.z.strictObject({
234
+ type: import_v43.z.literal("text-end"),
235
+ id: import_v43.z.string(),
236
+ providerMetadata: providerMetadataSchema.optional()
237
+ }),
238
+ import_v43.z.strictObject({
239
+ type: import_v43.z.literal("error"),
240
+ errorText: import_v43.z.string()
241
+ }),
242
+ import_v43.z.strictObject({
243
+ type: import_v43.z.literal("tool-input-start"),
244
+ toolCallId: import_v43.z.string(),
245
+ toolName: import_v43.z.string(),
246
+ providerExecuted: import_v43.z.boolean().optional()
247
+ }),
248
+ import_v43.z.strictObject({
249
+ type: import_v43.z.literal("tool-input-delta"),
250
+ toolCallId: import_v43.z.string(),
251
+ inputTextDelta: import_v43.z.string()
252
+ }),
253
+ import_v43.z.strictObject({
254
+ type: import_v43.z.literal("tool-input-available"),
255
+ toolCallId: import_v43.z.string(),
256
+ toolName: import_v43.z.string(),
257
+ input: import_v43.z.unknown(),
258
+ providerExecuted: import_v43.z.boolean().optional(),
259
+ providerMetadata: providerMetadataSchema.optional()
260
+ }),
261
+ import_v43.z.strictObject({
262
+ type: import_v43.z.literal("tool-output-available"),
263
+ toolCallId: import_v43.z.string(),
264
+ output: import_v43.z.unknown(),
265
+ providerExecuted: import_v43.z.boolean().optional()
266
+ }),
267
+ import_v43.z.strictObject({
268
+ type: import_v43.z.literal("tool-output-error"),
269
+ toolCallId: import_v43.z.string(),
270
+ errorText: import_v43.z.string(),
271
+ providerExecuted: import_v43.z.boolean().optional()
272
+ }),
273
+ import_v43.z.strictObject({
274
+ type: import_v43.z.literal("reasoning"),
275
+ text: import_v43.z.string(),
276
+ providerMetadata: providerMetadataSchema.optional()
277
+ }),
278
+ import_v43.z.strictObject({
279
+ type: import_v43.z.literal("reasoning-start"),
280
+ id: import_v43.z.string(),
281
+ providerMetadata: providerMetadataSchema.optional()
282
+ }),
283
+ import_v43.z.strictObject({
284
+ type: import_v43.z.literal("reasoning-delta"),
285
+ id: import_v43.z.string(),
286
+ delta: import_v43.z.string(),
287
+ providerMetadata: providerMetadataSchema.optional()
288
+ }),
289
+ import_v43.z.strictObject({
290
+ type: import_v43.z.literal("reasoning-end"),
291
+ id: import_v43.z.string(),
292
+ providerMetadata: providerMetadataSchema.optional()
293
+ }),
294
+ import_v43.z.strictObject({
295
+ type: import_v43.z.literal("reasoning-part-finish")
296
+ }),
297
+ import_v43.z.strictObject({
298
+ type: import_v43.z.literal("source-url"),
299
+ sourceId: import_v43.z.string(),
300
+ url: import_v43.z.string(),
301
+ title: import_v43.z.string().optional(),
302
+ providerMetadata: providerMetadataSchema.optional()
303
+ }),
304
+ import_v43.z.strictObject({
305
+ type: import_v43.z.literal("source-document"),
306
+ sourceId: import_v43.z.string(),
307
+ mediaType: import_v43.z.string(),
308
+ title: import_v43.z.string(),
309
+ filename: import_v43.z.string().optional(),
310
+ providerMetadata: providerMetadataSchema.optional()
311
+ }),
312
+ import_v43.z.strictObject({
313
+ type: import_v43.z.literal("file"),
314
+ url: import_v43.z.string(),
315
+ mediaType: import_v43.z.string(),
316
+ providerMetadata: providerMetadataSchema.optional()
317
+ }),
318
+ import_v43.z.strictObject({
319
+ type: import_v43.z.string().startsWith("data-"),
320
+ id: import_v43.z.string().optional(),
321
+ data: import_v43.z.unknown(),
322
+ transient: import_v43.z.boolean().optional()
323
+ }),
324
+ import_v43.z.strictObject({
325
+ type: import_v43.z.literal("start-step")
326
+ }),
327
+ import_v43.z.strictObject({
328
+ type: import_v43.z.literal("finish-step")
329
+ }),
330
+ import_v43.z.strictObject({
331
+ type: import_v43.z.literal("start"),
332
+ messageId: import_v43.z.string().optional(),
333
+ messageMetadata: import_v43.z.unknown().optional()
334
+ }),
335
+ import_v43.z.strictObject({
336
+ type: import_v43.z.literal("finish"),
337
+ messageMetadata: import_v43.z.unknown().optional()
338
+ }),
339
+ import_v43.z.strictObject({
340
+ type: import_v43.z.literal("message-metadata"),
341
+ messageMetadata: import_v43.z.unknown()
342
+ })
343
+ ]);
344
+ function isDataUIMessageChunk(chunk) {
345
+ return chunk.type.startsWith("data-");
346
+ }
347
+
348
+ // src/util/merge-objects.ts
349
+ function mergeObjects(base, overrides) {
350
+ if (base === void 0 && overrides === void 0) {
351
+ return void 0;
352
+ }
353
+ if (base === void 0) {
354
+ return overrides;
355
+ }
356
+ if (overrides === void 0) {
357
+ return base;
358
+ }
359
+ const result = { ...base };
360
+ for (const key in overrides) {
361
+ if (Object.prototype.hasOwnProperty.call(overrides, key)) {
362
+ const overridesValue = overrides[key];
363
+ if (overridesValue === void 0)
364
+ continue;
365
+ const baseValue = key in base ? base[key] : void 0;
366
+ const isSourceObject = overridesValue !== null && typeof overridesValue === "object" && !Array.isArray(overridesValue) && !(overridesValue instanceof Date) && !(overridesValue instanceof RegExp);
367
+ const isTargetObject = baseValue !== null && baseValue !== void 0 && typeof baseValue === "object" && !Array.isArray(baseValue) && !(baseValue instanceof Date) && !(baseValue instanceof RegExp);
368
+ if (isSourceObject && isTargetObject) {
369
+ result[key] = mergeObjects(
370
+ baseValue,
371
+ overridesValue
372
+ );
373
+ } else {
374
+ result[key] = overridesValue;
375
+ }
376
+ }
377
+ }
378
+ return result;
379
+ }
380
+
381
+ // src/util/parse-partial-json.ts
382
+ var import_provider_utils = require("@ai-sdk/provider-utils");
383
+
384
+ // src/util/fix-json.ts
385
+ function fixJson(input) {
386
+ const stack = ["ROOT"];
387
+ let lastValidIndex = -1;
388
+ let literalStart = null;
389
+ function processValueStart(char, i, swapState) {
390
+ {
391
+ switch (char) {
392
+ case '"': {
393
+ lastValidIndex = i;
394
+ stack.pop();
395
+ stack.push(swapState);
396
+ stack.push("INSIDE_STRING");
397
+ break;
398
+ }
399
+ case "f":
400
+ case "t":
401
+ case "n": {
402
+ lastValidIndex = i;
403
+ literalStart = i;
404
+ stack.pop();
405
+ stack.push(swapState);
406
+ stack.push("INSIDE_LITERAL");
407
+ break;
408
+ }
409
+ case "-": {
410
+ stack.pop();
411
+ stack.push(swapState);
412
+ stack.push("INSIDE_NUMBER");
413
+ break;
414
+ }
415
+ case "0":
416
+ case "1":
417
+ case "2":
418
+ case "3":
419
+ case "4":
420
+ case "5":
421
+ case "6":
422
+ case "7":
423
+ case "8":
424
+ case "9": {
425
+ lastValidIndex = i;
426
+ stack.pop();
427
+ stack.push(swapState);
428
+ stack.push("INSIDE_NUMBER");
429
+ break;
430
+ }
431
+ case "{": {
432
+ lastValidIndex = i;
433
+ stack.pop();
434
+ stack.push(swapState);
435
+ stack.push("INSIDE_OBJECT_START");
436
+ break;
437
+ }
438
+ case "[": {
439
+ lastValidIndex = i;
440
+ stack.pop();
441
+ stack.push(swapState);
442
+ stack.push("INSIDE_ARRAY_START");
443
+ break;
444
+ }
445
+ }
446
+ }
447
+ }
448
+ function processAfterObjectValue(char, i) {
449
+ switch (char) {
450
+ case ",": {
451
+ stack.pop();
452
+ stack.push("INSIDE_OBJECT_AFTER_COMMA");
453
+ break;
454
+ }
455
+ case "}": {
456
+ lastValidIndex = i;
457
+ stack.pop();
458
+ break;
459
+ }
460
+ }
461
+ }
462
+ function processAfterArrayValue(char, i) {
463
+ switch (char) {
464
+ case ",": {
465
+ stack.pop();
466
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
467
+ break;
468
+ }
469
+ case "]": {
470
+ lastValidIndex = i;
471
+ stack.pop();
472
+ break;
473
+ }
474
+ }
475
+ }
476
+ for (let i = 0; i < input.length; i++) {
477
+ const char = input[i];
478
+ const currentState = stack[stack.length - 1];
479
+ switch (currentState) {
480
+ case "ROOT":
481
+ processValueStart(char, i, "FINISH");
482
+ break;
483
+ case "INSIDE_OBJECT_START": {
484
+ switch (char) {
485
+ case '"': {
486
+ stack.pop();
487
+ stack.push("INSIDE_OBJECT_KEY");
488
+ break;
489
+ }
490
+ case "}": {
491
+ lastValidIndex = i;
492
+ stack.pop();
493
+ break;
494
+ }
495
+ }
496
+ break;
497
+ }
498
+ case "INSIDE_OBJECT_AFTER_COMMA": {
499
+ switch (char) {
500
+ case '"': {
501
+ stack.pop();
502
+ stack.push("INSIDE_OBJECT_KEY");
503
+ break;
504
+ }
505
+ }
506
+ break;
507
+ }
508
+ case "INSIDE_OBJECT_KEY": {
509
+ switch (char) {
510
+ case '"': {
511
+ stack.pop();
512
+ stack.push("INSIDE_OBJECT_AFTER_KEY");
513
+ break;
514
+ }
515
+ }
516
+ break;
517
+ }
518
+ case "INSIDE_OBJECT_AFTER_KEY": {
519
+ switch (char) {
520
+ case ":": {
521
+ stack.pop();
522
+ stack.push("INSIDE_OBJECT_BEFORE_VALUE");
523
+ break;
524
+ }
525
+ }
526
+ break;
527
+ }
528
+ case "INSIDE_OBJECT_BEFORE_VALUE": {
529
+ processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
530
+ break;
531
+ }
532
+ case "INSIDE_OBJECT_AFTER_VALUE": {
533
+ processAfterObjectValue(char, i);
534
+ break;
535
+ }
536
+ case "INSIDE_STRING": {
537
+ switch (char) {
538
+ case '"': {
539
+ stack.pop();
540
+ lastValidIndex = i;
541
+ break;
542
+ }
543
+ case "\\": {
544
+ stack.push("INSIDE_STRING_ESCAPE");
545
+ break;
546
+ }
547
+ default: {
548
+ lastValidIndex = i;
549
+ }
550
+ }
551
+ break;
552
+ }
553
+ case "INSIDE_ARRAY_START": {
554
+ switch (char) {
555
+ case "]": {
556
+ lastValidIndex = i;
557
+ stack.pop();
558
+ break;
559
+ }
560
+ default: {
561
+ lastValidIndex = i;
562
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
563
+ break;
564
+ }
565
+ }
566
+ break;
567
+ }
568
+ case "INSIDE_ARRAY_AFTER_VALUE": {
569
+ switch (char) {
570
+ case ",": {
571
+ stack.pop();
572
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
573
+ break;
574
+ }
575
+ case "]": {
576
+ lastValidIndex = i;
577
+ stack.pop();
578
+ break;
579
+ }
580
+ default: {
581
+ lastValidIndex = i;
582
+ break;
583
+ }
584
+ }
585
+ break;
586
+ }
587
+ case "INSIDE_ARRAY_AFTER_COMMA": {
588
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
589
+ break;
590
+ }
591
+ case "INSIDE_STRING_ESCAPE": {
592
+ stack.pop();
593
+ lastValidIndex = i;
594
+ break;
595
+ }
596
+ case "INSIDE_NUMBER": {
597
+ switch (char) {
598
+ case "0":
599
+ case "1":
600
+ case "2":
601
+ case "3":
602
+ case "4":
603
+ case "5":
604
+ case "6":
605
+ case "7":
606
+ case "8":
607
+ case "9": {
608
+ lastValidIndex = i;
609
+ break;
610
+ }
611
+ case "e":
612
+ case "E":
613
+ case "-":
614
+ case ".": {
615
+ break;
616
+ }
617
+ case ",": {
618
+ stack.pop();
619
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
620
+ processAfterArrayValue(char, i);
621
+ }
622
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
623
+ processAfterObjectValue(char, i);
624
+ }
625
+ break;
626
+ }
627
+ case "}": {
628
+ stack.pop();
629
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
630
+ processAfterObjectValue(char, i);
631
+ }
632
+ break;
633
+ }
634
+ case "]": {
635
+ stack.pop();
636
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
637
+ processAfterArrayValue(char, i);
638
+ }
639
+ break;
640
+ }
641
+ default: {
642
+ stack.pop();
643
+ break;
644
+ }
645
+ }
646
+ break;
647
+ }
648
+ case "INSIDE_LITERAL": {
649
+ const partialLiteral = input.substring(literalStart, i + 1);
650
+ if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
651
+ stack.pop();
652
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
653
+ processAfterObjectValue(char, i);
654
+ } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
655
+ processAfterArrayValue(char, i);
656
+ }
657
+ } else {
658
+ lastValidIndex = i;
659
+ }
660
+ break;
661
+ }
662
+ }
663
+ }
664
+ let result = input.slice(0, lastValidIndex + 1);
665
+ for (let i = stack.length - 1; i >= 0; i--) {
666
+ const state = stack[i];
667
+ switch (state) {
668
+ case "INSIDE_STRING": {
669
+ result += '"';
670
+ break;
671
+ }
672
+ case "INSIDE_OBJECT_KEY":
673
+ case "INSIDE_OBJECT_AFTER_KEY":
674
+ case "INSIDE_OBJECT_AFTER_COMMA":
675
+ case "INSIDE_OBJECT_START":
676
+ case "INSIDE_OBJECT_BEFORE_VALUE":
677
+ case "INSIDE_OBJECT_AFTER_VALUE": {
678
+ result += "}";
679
+ break;
680
+ }
681
+ case "INSIDE_ARRAY_START":
682
+ case "INSIDE_ARRAY_AFTER_COMMA":
683
+ case "INSIDE_ARRAY_AFTER_VALUE": {
684
+ result += "]";
685
+ break;
686
+ }
687
+ case "INSIDE_LITERAL": {
688
+ const partialLiteral = input.substring(literalStart, input.length);
689
+ if ("true".startsWith(partialLiteral)) {
690
+ result += "true".slice(partialLiteral.length);
691
+ } else if ("false".startsWith(partialLiteral)) {
692
+ result += "false".slice(partialLiteral.length);
693
+ } else if ("null".startsWith(partialLiteral)) {
694
+ result += "null".slice(partialLiteral.length);
695
+ }
696
+ }
697
+ }
698
+ }
699
+ return result;
700
+ }
701
+
702
+ // src/util/parse-partial-json.ts
703
+ async function parsePartialJson(jsonText) {
704
+ if (jsonText === void 0) {
705
+ return { value: void 0, state: "undefined-input" };
706
+ }
707
+ let result = await (0, import_provider_utils.safeParseJSON)({ text: jsonText });
708
+ if (result.success) {
709
+ return { value: result.value, state: "successful-parse" };
710
+ }
711
+ result = await (0, import_provider_utils.safeParseJSON)({ text: fixJson(jsonText) });
712
+ if (result.success) {
713
+ return { value: result.value, state: "repaired-parse" };
714
+ }
715
+ return { value: void 0, state: "failed-parse" };
716
+ }
717
+
718
+ // src/ui/ui-messages.ts
719
+ function isToolUIPart(part) {
720
+ return part.type.startsWith("tool-");
721
+ }
722
+ function getToolName(part) {
723
+ return part.type.split("-")[1];
724
+ }
725
+
726
+ // src/ui/process-ui-message-stream.ts
727
+ function createStreamingUIMessageState({
728
+ lastMessage,
729
+ messageId
730
+ }) {
731
+ return {
732
+ message: (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage : {
733
+ id: messageId,
734
+ metadata: void 0,
735
+ role: "assistant",
736
+ parts: []
737
+ },
738
+ activeTextParts: {},
739
+ activeReasoningParts: {},
740
+ partialToolCalls: {}
741
+ };
742
+ }
743
+ function processUIMessageStream({
744
+ stream,
745
+ onToolCall,
746
+ messageMetadataSchema,
747
+ dataPartSchemas,
748
+ runUpdateMessageJob,
749
+ onError,
750
+ onData
751
+ }) {
752
+ return stream.pipeThrough(
753
+ new TransformStream({
754
+ async transform(part, controller) {
755
+ await runUpdateMessageJob(async ({ state, write }) => {
756
+ var _a9, _b, _c, _d;
757
+ function updateToolInvocationPart(options) {
758
+ var _a10;
759
+ const part2 = state.message.parts.find(
760
+ (part3) => isToolUIPart(part3) && part3.toolCallId === options.toolCallId
761
+ );
762
+ const anyOptions = options;
763
+ const anyPart = part2;
764
+ if (part2 != null) {
765
+ part2.state = options.state;
766
+ anyPart.input = anyOptions.input;
767
+ anyPart.output = anyOptions.output;
768
+ anyPart.errorText = anyOptions.errorText;
769
+ anyPart.providerExecuted = (_a10 = anyOptions.providerExecuted) != null ? _a10 : part2.providerExecuted;
770
+ if (anyOptions.providerMetadata != null && part2.state === "input-available") {
771
+ part2.callProviderMetadata = anyOptions.providerMetadata;
772
+ }
773
+ } else {
774
+ state.message.parts.push({
775
+ type: `tool-${options.toolName}`,
776
+ toolCallId: options.toolCallId,
777
+ state: options.state,
778
+ input: anyOptions.input,
779
+ output: anyOptions.output,
780
+ errorText: anyOptions.errorText,
781
+ providerExecuted: anyOptions.providerExecuted,
782
+ ...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
783
+ });
784
+ }
785
+ }
786
+ async function updateMessageMetadata(metadata) {
787
+ if (metadata != null) {
788
+ const mergedMetadata = state.message.metadata != null ? mergeObjects(state.message.metadata, metadata) : metadata;
789
+ if (messageMetadataSchema != null) {
790
+ await (0, import_provider_utils2.validateTypes)({
791
+ value: mergedMetadata,
792
+ schema: messageMetadataSchema
793
+ });
794
+ }
795
+ state.message.metadata = mergedMetadata;
796
+ }
797
+ }
798
+ switch (part.type) {
799
+ case "text-start": {
800
+ const textPart = {
801
+ type: "text",
802
+ text: "",
803
+ providerMetadata: part.providerMetadata,
804
+ state: "streaming"
805
+ };
806
+ state.activeTextParts[part.id] = textPart;
807
+ state.message.parts.push(textPart);
808
+ write();
809
+ break;
810
+ }
811
+ case "text-delta": {
812
+ const textPart = state.activeTextParts[part.id];
813
+ textPart.text += part.delta;
814
+ textPart.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : textPart.providerMetadata;
815
+ write();
816
+ break;
817
+ }
818
+ case "text-end": {
819
+ const textPart = state.activeTextParts[part.id];
820
+ textPart.state = "done";
821
+ textPart.providerMetadata = (_b = part.providerMetadata) != null ? _b : textPart.providerMetadata;
822
+ delete state.activeTextParts[part.id];
823
+ write();
824
+ break;
825
+ }
826
+ case "reasoning-start": {
827
+ const reasoningPart = {
828
+ type: "reasoning",
829
+ text: "",
830
+ providerMetadata: part.providerMetadata,
831
+ state: "streaming"
832
+ };
833
+ state.activeReasoningParts[part.id] = reasoningPart;
834
+ state.message.parts.push(reasoningPart);
835
+ write();
836
+ break;
837
+ }
838
+ case "reasoning-delta": {
839
+ const reasoningPart = state.activeReasoningParts[part.id];
840
+ reasoningPart.text += part.delta;
841
+ reasoningPart.providerMetadata = (_c = part.providerMetadata) != null ? _c : reasoningPart.providerMetadata;
842
+ write();
843
+ break;
844
+ }
845
+ case "reasoning-end": {
846
+ const reasoningPart = state.activeReasoningParts[part.id];
847
+ reasoningPart.providerMetadata = (_d = part.providerMetadata) != null ? _d : reasoningPart.providerMetadata;
848
+ reasoningPart.state = "done";
849
+ delete state.activeReasoningParts[part.id];
850
+ write();
851
+ break;
852
+ }
853
+ case "file": {
854
+ state.message.parts.push({
855
+ type: "file",
856
+ mediaType: part.mediaType,
857
+ url: part.url
858
+ });
859
+ write();
860
+ break;
861
+ }
862
+ case "source-url": {
863
+ state.message.parts.push({
864
+ type: "source-url",
865
+ sourceId: part.sourceId,
866
+ url: part.url,
867
+ title: part.title,
868
+ providerMetadata: part.providerMetadata
869
+ });
870
+ write();
871
+ break;
872
+ }
873
+ case "source-document": {
874
+ state.message.parts.push({
875
+ type: "source-document",
876
+ sourceId: part.sourceId,
877
+ mediaType: part.mediaType,
878
+ title: part.title,
879
+ filename: part.filename,
880
+ providerMetadata: part.providerMetadata
881
+ });
882
+ write();
883
+ break;
884
+ }
885
+ case "tool-input-start": {
886
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
887
+ state.partialToolCalls[part.toolCallId] = {
888
+ text: "",
889
+ toolName: part.toolName,
890
+ index: toolInvocations.length
891
+ };
892
+ updateToolInvocationPart({
893
+ toolCallId: part.toolCallId,
894
+ toolName: part.toolName,
895
+ state: "input-streaming",
896
+ input: void 0,
897
+ providerExecuted: part.providerExecuted
898
+ });
899
+ write();
900
+ break;
901
+ }
902
+ case "tool-input-delta": {
903
+ const partialToolCall = state.partialToolCalls[part.toolCallId];
904
+ partialToolCall.text += part.inputTextDelta;
905
+ const { value: partialArgs } = await parsePartialJson(
906
+ partialToolCall.text
907
+ );
908
+ updateToolInvocationPart({
909
+ toolCallId: part.toolCallId,
910
+ toolName: partialToolCall.toolName,
911
+ state: "input-streaming",
912
+ input: partialArgs
913
+ });
914
+ write();
915
+ break;
916
+ }
917
+ case "tool-input-available": {
918
+ updateToolInvocationPart({
919
+ toolCallId: part.toolCallId,
920
+ toolName: part.toolName,
921
+ state: "input-available",
922
+ input: part.input,
923
+ providerExecuted: part.providerExecuted,
924
+ providerMetadata: part.providerMetadata
925
+ });
926
+ write();
927
+ if (onToolCall && !part.providerExecuted) {
928
+ const result = await onToolCall({
929
+ toolCall: part
930
+ });
931
+ if (result != null) {
932
+ updateToolInvocationPart({
933
+ toolCallId: part.toolCallId,
934
+ toolName: part.toolName,
935
+ state: "output-available",
936
+ input: part.input,
937
+ output: result
938
+ });
939
+ write();
940
+ }
941
+ }
942
+ break;
943
+ }
944
+ case "tool-output-available": {
945
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
946
+ if (toolInvocations == null) {
947
+ throw new Error("tool_result must be preceded by a tool_call");
948
+ }
949
+ const toolInvocationIndex = toolInvocations.findIndex(
950
+ (invocation) => invocation.toolCallId === part.toolCallId
951
+ );
952
+ if (toolInvocationIndex === -1) {
953
+ throw new Error(
954
+ "tool_result must be preceded by a tool_call with the same toolCallId"
955
+ );
956
+ }
957
+ const toolName = getToolName(
958
+ toolInvocations[toolInvocationIndex]
959
+ );
960
+ updateToolInvocationPart({
961
+ toolCallId: part.toolCallId,
962
+ toolName,
963
+ state: "output-available",
964
+ input: toolInvocations[toolInvocationIndex].input,
965
+ output: part.output,
966
+ providerExecuted: part.providerExecuted
967
+ });
968
+ write();
969
+ break;
970
+ }
971
+ case "tool-output-error": {
972
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
973
+ if (toolInvocations == null) {
974
+ throw new Error("tool_result must be preceded by a tool_call");
975
+ }
976
+ const toolInvocationIndex = toolInvocations.findIndex(
977
+ (invocation) => invocation.toolCallId === part.toolCallId
978
+ );
979
+ if (toolInvocationIndex === -1) {
980
+ throw new Error(
981
+ "tool_result must be preceded by a tool_call with the same toolCallId"
982
+ );
983
+ }
984
+ const toolName = getToolName(
985
+ toolInvocations[toolInvocationIndex]
986
+ );
987
+ updateToolInvocationPart({
988
+ toolCallId: part.toolCallId,
989
+ toolName,
990
+ state: "output-error",
991
+ input: toolInvocations[toolInvocationIndex].input,
992
+ errorText: part.errorText,
993
+ providerExecuted: part.providerExecuted
994
+ });
995
+ write();
996
+ break;
997
+ }
998
+ case "start-step": {
999
+ state.message.parts.push({ type: "step-start" });
1000
+ break;
1001
+ }
1002
+ case "finish-step": {
1003
+ state.activeTextParts = {};
1004
+ state.activeReasoningParts = {};
1005
+ break;
1006
+ }
1007
+ case "start": {
1008
+ if (part.messageId != null) {
1009
+ state.message.id = part.messageId;
1010
+ }
1011
+ await updateMessageMetadata(part.messageMetadata);
1012
+ if (part.messageId != null || part.messageMetadata != null) {
1013
+ write();
1014
+ }
1015
+ break;
1016
+ }
1017
+ case "finish": {
1018
+ await updateMessageMetadata(part.messageMetadata);
1019
+ if (part.messageMetadata != null) {
1020
+ write();
1021
+ }
1022
+ break;
1023
+ }
1024
+ case "message-metadata": {
1025
+ await updateMessageMetadata(part.messageMetadata);
1026
+ if (part.messageMetadata != null) {
1027
+ write();
1028
+ }
1029
+ break;
1030
+ }
1031
+ case "error": {
1032
+ onError == null ? void 0 : onError(new Error(part.errorText));
1033
+ break;
1034
+ }
1035
+ default: {
1036
+ if (isDataUIMessageChunk(part)) {
1037
+ const dataPart = part;
1038
+ if (dataPart.transient) {
1039
+ onData == null ? void 0 : onData(dataPart);
1040
+ break;
1041
+ }
1042
+ const existingPart = dataPart.id != null ? state.message.parts.find(
1043
+ (partArg) => dataPart.type === partArg.type && dataPart.id === partArg.id
1044
+ ) : void 0;
1045
+ if (existingPart != null) {
1046
+ existingPart.data = isObject(existingPart.data) && isObject(dataPart.data) ? mergeObjects(existingPart.data, dataPart.data) : dataPart.data;
1047
+ } else {
1048
+ state.message.parts.push(dataPart);
1049
+ }
1050
+ onData == null ? void 0 : onData(dataPart);
1051
+ write();
1052
+ }
1053
+ }
1054
+ }
1055
+ controller.enqueue(part);
1056
+ });
1057
+ }
1058
+ })
1059
+ );
1060
+ }
1061
+ function isObject(value) {
1062
+ return typeof value === "object" && value !== null;
1063
+ }
1064
+
1065
+ // src/ui-message-stream/handle-ui-message-stream-finish.ts
1066
// Post-processes a UI message stream before it reaches the client:
// - injects `messageId` into the "start" chunk when that chunk has none
//   (a trailing assistant message in `originalMessages` is treated as a
//   continuation and its id is reused);
// - when `onFinish` is given, mirrors the stream through the UI-message
//   state machine so the callback receives the fully assembled response
//   message (and the updated message list) once the stream ends.
function handleUIMessageStreamFinish({
  messageId,
  originalMessages = [],
  onFinish,
  onError,
  stream
}) {
  // A trailing assistant message means this response continues it: reuse
  // its id. Otherwise drop the reference and start a fresh message.
  let lastMessage = originalMessages == null ? void 0 : originalMessages[originalMessages.length - 1];
  if ((lastMessage == null ? void 0 : lastMessage.role) !== "assistant") {
    lastMessage = void 0;
  } else {
    messageId = lastMessage.id;
  }
  // Pass-through transform that only fills in a missing messageId on the
  // "start" chunk; all other chunks are forwarded untouched.
  const idInjectedStream = stream.pipeThrough(
    new TransformStream({
      transform(chunk, controller) {
        if (chunk.type === "start") {
          const startChunk = chunk;
          if (startChunk.messageId == null && messageId != null) {
            startChunk.messageId = messageId;
          }
        }
        controller.enqueue(chunk);
      }
    })
  );
  // Without a finish callback there is nothing to assemble; return early.
  if (onFinish == null) {
    return idInjectedStream;
  }
  // Accumulator for the streamed chunks; clone the last message so the
  // caller's copy is never mutated.
  const state = createStreamingUIMessageState({
    lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
    messageId: messageId != null ? messageId : ""
    // will be overridden by the stream
  });
  // Jobs run sequentially against the shared state; `write` is a no-op
  // because no consumer observes intermediate states here.
  const runUpdateMessageJob = async (job) => {
    await job({ state, write: () => {
    } });
  };
  return processUIMessageStream({
    stream: idInjectedStream,
    runUpdateMessageJob,
    onError
  }).pipeThrough(
    new TransformStream({
      transform(chunk, controller) {
        controller.enqueue(chunk);
      },
      // Stream exhausted: report the assembled message. If it continued
      // the last assistant message, that message is replaced in the
      // returned list rather than appended.
      flush() {
        const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
        onFinish({
          isContinuation,
          responseMessage: state.message,
          messages: [
            ...isContinuation ? originalMessages.slice(0, -1) : originalMessages,
            state.message
          ]
        });
      }
    })
  );
}
1127
+
1128
+ // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
1129
// Converts a UI message stream to SSE and pipes it into a Node.js server
// response. When `consumeSseStream` is provided, the SSE stream is teed so
// the callback observes a copy of everything sent to the client.
function pipeUIMessageStreamToResponse({
  response,
  status,
  statusText,
  headers,
  stream,
  consumeSseStream
}) {
  let outgoing = stream.pipeThrough(new JsonToSseTransformStream());
  if (consumeSseStream) {
    const [forResponse, forConsumer] = outgoing.tee();
    consumeSseStream({ stream: forConsumer });
    outgoing = forResponse;
  }
  // Merge caller headers with the standard UI message stream headers.
  const mergedHeaders = Object.fromEntries(
    prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
  );
  writeToServerResponse({
    response,
    status,
    statusText,
    headers: mergedHeaders,
    stream: outgoing.pipeThrough(new TextEncoderStream())
  });
}
1153
+
1154
+ // src/util/as-array.ts
1155
// Normalizes a value to an array: undefined -> [], an array is returned
// as-is (same reference), any other value is wrapped in a one-element
// array (note: null is wrapped, not dropped).
function asArray(value) {
  if (value === void 0) {
    return [];
  }
  return Array.isArray(value) ? value : [value];
}
1158
+
1159
+ // src/util/async-iterable-stream.ts
1160
// Wraps a ReadableStream so it can also be consumed with `for await`.
// The returned value is still a regular ReadableStream (pipeable and
// readable) but additionally exposes Symbol.asyncIterator.
//
// Fix: the iterator now implements `return()`, which `for await` invokes
// on early exit (break / return / throw). Without it the reader lock was
// never released after a partial iteration, so the underlying stream
// could never be cancelled or read again.
function createAsyncIterableStream(source) {
  const stream = source.pipeThrough(new TransformStream());
  stream[Symbol.asyncIterator] = () => {
    const reader = stream.getReader();
    return {
      async next() {
        const { done, value } = await reader.read();
        return done ? { done: true, value: void 0 } : { done: false, value };
      },
      async return() {
        // Early loop exit: cancel the remainder and release the lock so
        // the stream does not leak.
        await reader.cancel();
        reader.releaseLock();
        return { done: true, value: void 0 };
      }
    };
  };
  return stream;
}
1173
+
1174
+ // src/util/consume-stream.ts
1175
// Drains a ReadableStream to completion, discarding every chunk. Read
// errors are routed to `onError` (when given) instead of being thrown;
// the reader lock is always released.
async function consumeStream({
  stream,
  onError
}) {
  const reader = stream.getReader();
  try {
    for (;;) {
      const result = await reader.read();
      if (result.done) {
        break;
      }
    }
  } catch (error) {
    onError?.(error);
  } finally {
    reader.releaseLock();
  }
}
1192
+
1193
+ // src/util/create-resolvable-promise.ts
1194
// Creates a promise together with externally callable resolve/reject
// handles (the classic "deferred" pattern).
function createResolvablePromise() {
  let resolveFn;
  let rejectFn;
  const promise = new Promise((res, rej) => {
    resolveFn = res;
    rejectFn = rej;
  });
  return {
    promise,
    resolve: resolveFn,
    reject: rejectFn
  };
}
1207
+
1208
+ // src/util/create-stitchable-stream.ts
1209
// Creates a stream that concatenates ("stitches") multiple inner streams
// into one outer ReadableStream. Inner streams may be added while the
// outer stream is already being consumed; chunks are emitted strictly in
// the order the inner streams were added.
function createStitchableStream() {
  let innerStreamReaders = [];
  let controller = null;
  let isClosed = false;
  // Resolved whenever a new inner stream arrives (or on close/terminate)
  // so a pending pull can re-check the queue.
  let waitForNewStream = createResolvablePromise();
  const processPull = async () => {
    // Closed with no pending inner streams: finish the outer stream.
    if (isClosed && innerStreamReaders.length === 0) {
      controller == null ? void 0 : controller.close();
      return;
    }
    // Nothing to read yet: park until addStream/close wakes us up.
    if (innerStreamReaders.length === 0) {
      waitForNewStream = createResolvablePromise();
      await waitForNewStream.promise;
      return processPull();
    }
    try {
      // Always read from the oldest inner stream to preserve order.
      const { value, done } = await innerStreamReaders[0].read();
      if (done) {
        // Inner stream exhausted: drop it and continue with the next one
        // (or close if close() was requested and none remain).
        innerStreamReaders.shift();
        if (innerStreamReaders.length > 0) {
          await processPull();
        } else if (isClosed) {
          controller == null ? void 0 : controller.close();
        }
      } else {
        controller == null ? void 0 : controller.enqueue(value);
      }
    } catch (error) {
      // Propagate the failure to the consumer and drop the broken reader.
      controller == null ? void 0 : controller.error(error);
      innerStreamReaders.shift();
      if (isClosed && innerStreamReaders.length === 0) {
        controller == null ? void 0 : controller.close();
      }
    }
  };
  return {
    stream: new ReadableStream({
      start(controllerParam) {
        controller = controllerParam;
      },
      pull: processPull,
      // Consumer cancelled the outer stream: cancel all inner streams too.
      async cancel() {
        for (const reader of innerStreamReaders) {
          await reader.cancel();
        }
        innerStreamReaders = [];
        isClosed = true;
      }
    }),
    // Appends another inner stream; throws after close()/terminate().
    addStream: (innerStream) => {
      if (isClosed) {
        throw new Error("Cannot add inner stream: outer stream is closed");
      }
      innerStreamReaders.push(innerStream.getReader());
      waitForNewStream.resolve();
    },
    /**
     * Gracefully close the outer stream. This will let the inner streams
     * finish processing and then close the outer stream.
     */
    close: () => {
      isClosed = true;
      waitForNewStream.resolve();
      if (innerStreamReaders.length === 0) {
        controller == null ? void 0 : controller.close();
      }
    },
    /**
     * Immediately close the outer stream. This will cancel all inner streams
     * and close the outer stream.
     */
    terminate: () => {
      isClosed = true;
      waitForNewStream.resolve();
      innerStreamReaders.forEach((reader) => reader.cancel());
      innerStreamReaders = [];
      controller == null ? void 0 : controller.close();
    }
  };
}
1289
+
1290
+ // src/util/delayed-promise.ts
1291
// A promise whose settlement may happen before anyone asks for the
// promise object. The settled state is recorded in `status`; the
// underlying Promise is created lazily on first access to `.promise` and
// replayed from the recorded state, so the order of settle vs. consume
// does not matter.
var DelayedPromise = class {
  constructor() {
    this.status = { type: "pending" };
    this._resolve = void 0;
    this._reject = void 0;
  }
  get promise() {
    if (this._promise) {
      return this._promise;
    }
    this._promise = new Promise((resolve, reject) => {
      // Replay a settlement that happened before the promise was
      // requested.
      const current = this.status;
      if (current.type === "resolved") {
        resolve(current.value);
      } else if (current.type === "rejected") {
        reject(current.error);
      }
      this._resolve = resolve;
      this._reject = reject;
    });
    return this._promise;
  }
  resolve(value) {
    this.status = { type: "resolved", value };
    if (this._promise) {
      this._resolve?.(value);
    }
  }
  reject(error) {
    this.status = { type: "rejected", error };
    if (this._promise) {
      this._reject?.(error);
    }
  }
};
1327
+
1328
+ // src/util/now.ts
1329
// Current timestamp in milliseconds: high-resolution performance.now()
// when available, falling back to Date.now().
function now() {
  return globalThis?.performance?.now() ?? Date.now();
}
1333
+
1334
+ // src/error/invalid-argument-error.ts
1335
+ var import_provider2 = require("@ai-sdk/provider");
1336
// Error thrown when a call setting (e.g. temperature, maxRetries) has an
// invalid value. The Symbol.for marker enables instanceof-style checks
// via `isInstance` that survive duplicated package copies.
var name2 = "AI_InvalidArgumentError";
var marker2 = `vercel.ai.error.${name2}`;
var symbol2 = Symbol.for(marker2);
var _a2;
var InvalidArgumentError = class extends import_provider2.AISDKError {
  constructor({
    parameter,
    value,
    message
  }) {
    super({
      name: name2,
      message: `Invalid argument for parameter ${parameter}: ${message}`
    });
    // Marker property keyed by the shared symbol (see isInstance).
    this[_a2] = true;
    this.parameter = parameter;
    this.value = value;
  }
  // Duck-typed instanceof check based on the shared marker.
  static isInstance(error) {
    return import_provider2.AISDKError.hasMarker(error, marker2);
  }
};
_a2 = symbol2;
1359
+
1360
+ // src/util/retry-with-exponential-backoff.ts
1361
+ var import_provider4 = require("@ai-sdk/provider");
1362
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
1363
+
1364
+ // src/util/retry-error.ts
1365
+ var import_provider3 = require("@ai-sdk/provider");
1366
// Error thrown when retrying fails: either all attempts were used up or a
// non-retryable error occurred after the first attempt. Carries every
// error seen across attempts (`errors`) plus the `reason` string
// ("maxRetriesExceeded" / "errorNotRetryable" in this file).
var name3 = "AI_RetryError";
var marker3 = `vercel.ai.error.${name3}`;
var symbol3 = Symbol.for(marker3);
var _a3;
var RetryError = class extends import_provider3.AISDKError {
  constructor({
    message,
    reason,
    errors
  }) {
    super({ name: name3, message });
    // Marker property keyed by the shared symbol (see isInstance).
    this[_a3] = true;
    this.reason = reason;
    this.errors = errors;
    // Convenience accessor for the most recent failure.
    this.lastError = errors[errors.length - 1];
  }
  // Duck-typed instanceof check based on the shared marker.
  static isInstance(error) {
    return import_provider3.AISDKError.hasMarker(error, marker3);
  }
};
_a3 = symbol3;
1387
+
1388
+ // src/util/retry-with-exponential-backoff.ts
1389
// Picks the retry delay for a failed API call. A server-provided
// `retry-after-ms` header (milliseconds) takes precedence over
// `retry-after` (seconds, or an HTTP date). The server value is honored
// only when it lands in the window [0, 60s); otherwise the computed
// exponential backoff delay is used.
function getRetryDelay(error, exponentialBackoffDelay) {
  const headers = error.responseHeaders;
  if (!headers) {
    return exponentialBackoffDelay;
  }
  let serverDelayMs;
  const msHeader = headers["retry-after-ms"];
  if (msHeader) {
    const parsedMs = parseFloat(msHeader);
    if (!Number.isNaN(parsedMs)) {
      serverDelayMs = parsedMs;
    }
  }
  const secondsHeader = headers["retry-after"];
  if (secondsHeader && serverDelayMs === void 0) {
    const parsedSeconds = parseFloat(secondsHeader);
    serverDelayMs = Number.isNaN(parsedSeconds) ? Date.parse(secondsHeader) - Date.now() : parsedSeconds * 1e3;
  }
  const isSane = serverDelayMs !== void 0 && serverDelayMs >= 0 && serverDelayMs < 60 * 1e3;
  return isSane ? serverDelayMs : exponentialBackoffDelay;
}
1415
// Factory for a retry function with exponential backoff that also honors
// server retry-after headers (see getRetryDelay). Defaults: 2 retries,
// 2s initial delay, doubling each attempt.
var retryWithExponentialBackoffRespectingRetryHeaders = ({
  maxRetries = 2,
  initialDelayInMs = 2e3,
  backoffFactor = 2
} = {}) => {
  return async (f) => {
    return _retryWithExponentialBackoff(f, {
      maxRetries,
      delayInMs: initialDelayInMs,
      backoffFactor
    });
  };
};
1424
// Recursive retry driver. Runs `f`; on failure retries up to `maxRetries`
// times with an exponentially growing delay (respecting server
// retry-after headers via getRetryDelay). Only API call errors explicitly
// flagged `isRetryable` are retried; abort errors and other failures are
// rethrown. `errors` accumulates every failure across attempts and ends
// up on the RetryError.
async function _retryWithExponentialBackoff(f, {
  maxRetries,
  delayInMs,
  backoffFactor
}, errors = []) {
  try {
    return await f();
  } catch (error) {
    // User-initiated aborts are never retried.
    if ((0, import_provider_utils3.isAbortError)(error)) {
      throw error;
    }
    // Retries disabled: surface the original error untouched.
    if (maxRetries === 0) {
      throw error;
    }
    const errorMessage = (0, import_provider_utils3.getErrorMessage)(error);
    const newErrors = [...errors, error];
    const tryNumber = newErrors.length;
    if (tryNumber > maxRetries) {
      throw new RetryError({
        message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
        reason: "maxRetriesExceeded",
        errors: newErrors
      });
    }
    // Retryable API error with attempts remaining: wait, then recurse
    // with the delay scaled up by backoffFactor.
    if (error instanceof Error && import_provider4.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
      const actualDelay = getRetryDelay(error, delayInMs);
      await (0, import_provider_utils3.delay)(actualDelay);
      return _retryWithExponentialBackoff(
        f,
        { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
        newErrors
      );
    }
    // Non-retryable error on the very first attempt: rethrow as-is so
    // the caller sees the original error type.
    if (tryNumber === 1) {
      throw error;
    }
    throw new RetryError({
      message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
      reason: "errorNotRetryable",
      errors: newErrors
    });
  }
}
1467
+
1468
+ // src/util/prepare-retries.ts
1469
// Validates the user-supplied `maxRetries` setting (must be a
// non-negative integer) and returns it together with a preconfigured
// retry function. Defaults to 2 retries when unset.
function prepareRetries({
  maxRetries
}) {
  if (maxRetries != null) {
    if (!Number.isInteger(maxRetries)) {
      throw new InvalidArgumentError({
        parameter: "maxRetries",
        value: maxRetries,
        message: "maxRetries must be an integer"
      });
    }
    if (maxRetries < 0) {
      throw new InvalidArgumentError({
        parameter: "maxRetries",
        value: maxRetries,
        message: "maxRetries must be >= 0"
      });
    }
  }
  const effectiveMaxRetries = maxRetries ?? 2;
  return {
    maxRetries: effectiveMaxRetries,
    retry: retryWithExponentialBackoffRespectingRetryHeaders({
      maxRetries: effectiveMaxRetries
    })
  };
}
1496
+
1497
+ // src/prompt/convert-to-language-model-prompt.ts
1498
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
1499
+
1500
+ // src/util/detect-media-type.ts
1501
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
1502
// File signatures ("magic numbers") used to sniff image media types from
// raw bytes or base64 data. `bytesPrefix` matches binary input and
// `base64Prefix` is the base64 rendering of the same prefix for string
// input. NOTE(review): only the listed prefix bytes are compared — e.g.
// the RIFF prefix used for image/webp is shared by other RIFF-based
// formats; confirm inputs are images before relying on the result.
var imageMediaTypeSignatures = [
  {
    mediaType: "image/gif",
    bytesPrefix: [71, 73, 70],
    // "GIF"
    base64Prefix: "R0lG"
  },
  {
    mediaType: "image/png",
    bytesPrefix: [137, 80, 78, 71],
    // \x89 "PNG"
    base64Prefix: "iVBORw"
  },
  {
    mediaType: "image/jpeg",
    bytesPrefix: [255, 216],
    // JPEG SOI marker
    base64Prefix: "/9j/"
  },
  {
    mediaType: "image/webp",
    bytesPrefix: [82, 73, 70, 70],
    // "RIFF"
    base64Prefix: "UklGRg"
  },
  {
    mediaType: "image/bmp",
    bytesPrefix: [66, 77],
    // "BM"
    base64Prefix: "Qk"
  },
  {
    // TIFF, little-endian byte order ("II")
    mediaType: "image/tiff",
    bytesPrefix: [73, 73, 42, 0],
    base64Prefix: "SUkqAA"
  },
  {
    // TIFF, big-endian byte order ("MM")
    mediaType: "image/tiff",
    bytesPrefix: [77, 77, 0, 42],
    base64Prefix: "TU0AKg"
  },
  {
    // ISO BMFF box header with "ftypavif" brand
    mediaType: "image/avif",
    bytesPrefix: [0, 0, 0, 32, 102, 116, 121, 112, 97, 118, 105, 102],
    base64Prefix: "AAAAIGZ0eXBhdmlm"
  },
  {
    // ISO BMFF box header with "ftypheic" brand
    mediaType: "image/heic",
    bytesPrefix: [0, 0, 0, 32, 102, 116, 121, 112, 104, 101, 105, 99],
    base64Prefix: "AAAAIGZ0eXBoZWlj"
  }
];
1575
// Removes a leading ID3v2 tag from MP3 data. The tag size is stored in
// bytes 6-9 as a 28-bit synchsafe integer (7 bits per byte); that many
// bytes follow the 10-byte header. Base64 string input is decoded first.
var stripID3 = (data) => {
  const bytes = typeof data === "string" ? (0, import_provider_utils4.convertBase64ToUint8Array)(data) : data;
  let tagSize = 0;
  for (let i = 6; i <= 9; i++) {
    tagSize = tagSize << 7 | bytes[i] & 127;
  }
  return bytes.slice(tagSize + 10);
};
1580
// Strips an ID3v2 tag when the data starts with the "ID3" magic ("SUQz"
// in base64); any other input is returned untouched.
function stripID3TagsIfPresent(data) {
  if (typeof data === "string") {
    return data.startsWith("SUQz") ? stripID3(data) : data;
  }
  const hasId3Header = data.length > 10 && data[0] === 73 && // 'I'
  data[1] === 68 && // 'D'
  data[2] === 51; // '3'
  return hasId3Header ? stripID3(data) : data;
}
1586
// Detects the media type of binary or base64 data by comparing its
// leading bytes (after removing any ID3 tag) against the supplied
// signature table. Returns undefined when nothing matches.
function detectMediaType({
  data,
  signatures
}) {
  const cleaned = stripID3TagsIfPresent(data);
  const matchesSignature = typeof cleaned === "string" ? (sig) => cleaned.startsWith(sig.base64Prefix) : (sig) => cleaned.length >= sig.bytesPrefix.length && sig.bytesPrefix.every((byte, index) => cleaned[index] === byte);
  const match = signatures.find(matchesSignature);
  return match?.mediaType;
}
1600
+
1601
+ // src/util/download-error.ts
1602
+ var import_provider5 = require("@ai-sdk/provider");
1603
// Error thrown when fetching a URL-referenced asset fails, either with an
// HTTP error status (statusCode/statusText) or a lower-level failure
// (cause).
var name4 = "AI_DownloadError";
var marker4 = `vercel.ai.error.${name4}`;
var symbol4 = Symbol.for(marker4);
var _a4;
var DownloadError = class extends import_provider5.AISDKError {
  constructor({
    url,
    statusCode,
    statusText,
    cause,
    message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
  }) {
    super({ name: name4, message, cause });
    // Marker property keyed by the shared symbol (see isInstance).
    this[_a4] = true;
    this.url = url;
    this.statusCode = statusCode;
    this.statusText = statusText;
  }
  // Duck-typed instanceof check based on the shared marker.
  static isInstance(error) {
    return import_provider5.AISDKError.hasMarker(error, marker4);
  }
};
_a4 = symbol4;
1626
+
1627
+ // src/util/download.ts
1628
// Fetches a URL and returns its body as bytes plus the response
// content-type. Failures (non-2xx status or network error) are wrapped in
// a DownloadError; an already-wrapped DownloadError is rethrown as-is.
async function download({ url }) {
  const urlText = url.toString();
  try {
    const response = await fetch(urlText);
    if (!response.ok) {
      throw new DownloadError({
        url: urlText,
        statusCode: response.status,
        statusText: response.statusText
      });
    }
    const body = await response.arrayBuffer();
    return {
      data: new Uint8Array(body),
      mediaType: response.headers.get("content-type") ?? void 0
    };
  } catch (error) {
    if (DownloadError.isInstance(error)) {
      throw error;
    }
    throw new DownloadError({ url: urlText, cause: error });
  }
}
1651
+
1652
+ // src/prompt/data-content.ts
1653
+ var import_provider6 = require("@ai-sdk/provider");
1654
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
1655
+ var import_v44 = require("zod/v4");
1656
+
1657
+ // src/prompt/split-data-url.ts
1658
// Splits a data URL ("data:<mediaType>;base64,<content>") into its media
// type and base64 payload. Returns undefined fields when the input cannot
// be parsed (e.g. null input or a URL without a comma).
function splitDataUrl(dataUrl) {
  try {
    const segments = dataUrl.split(",");
    const [header, base64Content] = segments;
    return {
      mediaType: header.split(";")[0].split(":")[1],
      base64Content
    };
  } catch {
    return {
      mediaType: void 0,
      base64Content: void 0
    };
  }
}
1672
+
1673
+ // src/prompt/data-content.ts
1674
// Zod schema for user-supplied data content: a string, raw bytes
// (Uint8Array / ArrayBuffer), or a Node.js Buffer. The Buffer case uses a
// custom check because the Buffer global may be absent.
var dataContentSchema = import_v44.z.union([
  import_v44.z.string(),
  import_v44.z.instanceof(Uint8Array),
  import_v44.z.instanceof(ArrayBuffer),
  import_v44.z.custom(
    // Buffer might not be available in some environments such as CloudFlare:
    (value) => {
      var _a9, _b;
      return (_b = (_a9 = globalThis.Buffer) == null ? void 0 : _a9.isBuffer(value)) != null ? _b : false;
    },
    { message: "Must be a Buffer" }
  )
]);
1687
// Normalizes user-supplied data content into the { data, mediaType }
// wire shape:
// - Uint8Array / ArrayBuffer are returned as bytes with no media type;
// - strings that parse as URLs are promoted to URL objects;
// - data: URLs are unpacked into base64 content + media type (throws on
//   malformed data URLs);
// - anything else passes through with no media type.
function convertToLanguageModelV2DataContent(content) {
  if (content instanceof Uint8Array) {
    return { data: content, mediaType: void 0 };
  }
  if (content instanceof ArrayBuffer) {
    return { data: new Uint8Array(content), mediaType: void 0 };
  }
  let normalized = content;
  if (typeof normalized === "string") {
    try {
      normalized = new URL(normalized);
    } catch {
      // Not a URL: keep the raw string.
    }
  }
  if (normalized instanceof URL && normalized.protocol === "data:") {
    const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
      normalized.toString()
    );
    if (dataUrlMediaType == null || base64Content == null) {
      throw new import_provider6.AISDKError({
        name: "InvalidDataContentError",
        message: `Invalid data URL format in content ${normalized.toString()}`
      });
    }
    return { data: base64Content, mediaType: dataUrlMediaType };
  }
  return { data: normalized, mediaType: void 0 };
}
1714
// Encodes data content as a base64 string. String input is returned
// unchanged (assumed to already be base64 or a URL); binary input is
// base64-encoded.
function convertDataContentToBase64String(content) {
  if (typeof content === "string") {
    return content;
  }
  const bytes = content instanceof ArrayBuffer ? new Uint8Array(content) : content;
  return (0, import_provider_utils5.convertUint8ArrayToBase64)(bytes);
}
1723
+
1724
+ // src/prompt/invalid-message-role-error.ts
1725
+ var import_provider7 = require("@ai-sdk/provider");
1726
// Error thrown when a prompt message has a role outside the supported
// set ("system" / "user" / "assistant" / "tool").
var name5 = "AI_InvalidMessageRoleError";
var marker5 = `vercel.ai.error.${name5}`;
var symbol5 = Symbol.for(marker5);
var _a5;
var InvalidMessageRoleError = class extends import_provider7.AISDKError {
  constructor({
    role,
    message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
  }) {
    super({ name: name5, message });
    // Marker property keyed by the shared symbol (see isInstance).
    this[_a5] = true;
    this.role = role;
  }
  // Duck-typed instanceof check based on the shared marker.
  static isInstance(error) {
    return import_provider7.AISDKError.hasMarker(error, marker5);
  }
};
_a5 = symbol5;
1744
+
1745
+ // src/prompt/convert-to-language-model-prompt.ts
1746
// Converts the standardized prompt (optional system text + messages) into
// the language model wire format. URL-referenced assets the provider
// cannot fetch itself (per `supportedUrls`) are downloaded up front and
// inlined during message conversion.
async function convertToLanguageModelPrompt({
  prompt,
  supportedUrls,
  downloadImplementation = download
}) {
  const downloadedAssets = await downloadAssets(
    prompt.messages,
    downloadImplementation,
    supportedUrls
  );
  const systemMessages = prompt.system != null ? [{ role: "system", content: prompt.system }] : [];
  const modelMessages = prompt.messages.map(
    (message) => convertToLanguageModelMessage({ message, downloadedAssets })
  );
  return [...systemMessages, ...modelMessages];
}
1763
// Converts a single prompt message into the language model wire format.
// Handles all four roles: string content is wrapped in a single text
// part, empty text parts are dropped, and file data is normalized via
// convertToLanguageModelV2DataContent. Throws InvalidMessageRoleError for
// unknown roles.
function convertToLanguageModelMessage({
  message,
  downloadedAssets
}) {
  const role = message.role;
  switch (role) {
    case "system": {
      return {
        role: "system",
        content: message.content,
        providerOptions: message.providerOptions
      };
    }
    case "user": {
      // Plain-string content becomes a single text part.
      if (typeof message.content === "string") {
        return {
          role: "user",
          content: [{ type: "text", text: message.content }],
          providerOptions: message.providerOptions
        };
      }
      // Convert each part (text/image/file) and drop empty text parts.
      return {
        role: "user",
        content: message.content.map((part) => convertPartToLanguageModelPart(part, downloadedAssets)).filter((part) => part.type !== "text" || part.text !== ""),
        providerOptions: message.providerOptions
      };
    }
    case "assistant": {
      if (typeof message.content === "string") {
        return {
          role: "assistant",
          content: [{ type: "text", text: message.content }],
          providerOptions: message.providerOptions
        };
      }
      return {
        role: "assistant",
        content: message.content.filter(
          // remove empty text parts:
          (part) => part.type !== "text" || part.text !== ""
        ).map((part) => {
          const providerOptions = part.providerOptions;
          switch (part.type) {
            case "file": {
              // Normalize file data (string/URL/bytes); a media type
              // derived from the data wins over the declared one.
              const { data, mediaType } = convertToLanguageModelV2DataContent(
                part.data
              );
              return {
                type: "file",
                data,
                filename: part.filename,
                mediaType: mediaType != null ? mediaType : part.mediaType,
                providerOptions
              };
            }
            case "reasoning": {
              return {
                type: "reasoning",
                text: part.text,
                providerOptions
              };
            }
            case "text": {
              return {
                type: "text",
                text: part.text,
                providerOptions
              };
            }
            case "tool-call": {
              return {
                type: "tool-call",
                toolCallId: part.toolCallId,
                toolName: part.toolName,
                input: part.input,
                providerExecuted: part.providerExecuted,
                providerOptions
              };
            }
            case "tool-result": {
              return {
                type: "tool-result",
                toolCallId: part.toolCallId,
                toolName: part.toolName,
                output: part.output,
                providerOptions
              };
            }
          }
        }),
        providerOptions: message.providerOptions
      };
    }
    case "tool": {
      return {
        role: "tool",
        content: message.content.map((part) => ({
          type: "tool-result",
          toolCallId: part.toolCallId,
          toolName: part.toolName,
          output: part.output,
          providerOptions: part.providerOptions
        })),
        providerOptions: message.providerOptions
      };
    }
    default: {
      // Exhaustiveness guard for unknown roles.
      const _exhaustiveCheck = role;
      throw new InvalidMessageRoleError({ role: _exhaustiveCheck });
    }
  }
}
1875
// Collects URL-referenced image/file parts from user messages that the
// provider cannot fetch natively (per `supportedUrls`), downloads them in
// parallel, and returns a map from URL string to the downloaded
// { data, mediaType }.
async function downloadAssets(messages, downloadImplementation, supportedUrls) {
  const urls = messages.filter((message) => message.role === "user").map((message) => message.content).filter(
    (content) => Array.isArray(content)
  ).flat().filter(
    (part) => part.type === "image" || part.type === "file"
  ).map((part) => {
    var _a9;
    // Image parts without an explicit media type default to the
    // "image/*" wildcard so URL-support matching still applies.
    const mediaType = (_a9 = part.mediaType) != null ? _a9 : part.type === "image" ? "image/*" : void 0;
    let data = part.type === "image" ? part.image : part.data;
    if (typeof data === "string") {
      try {
        data = new URL(data);
      } catch (ignored) {
      }
    }
    return { mediaType, data };
  }).filter(
    // Keep only URLs that the provider cannot handle natively.
    (part) => part.data instanceof URL && part.mediaType != null && !(0, import_provider_utils6.isUrlSupported)({
      url: part.data.toString(),
      mediaType: part.mediaType,
      supportedUrls
    })
  ).map((part) => part.data);
  // Download everything in parallel; failures reject the whole batch.
  const downloadedImages = await Promise.all(
    urls.map(async (url) => ({
      url,
      data: await downloadImplementation({ url })
    }))
  );
  return Object.fromEntries(
    downloadedImages.map(({ url, data }) => [url.toString(), data])
  );
}
1908
// Converts a user-message content part (text / image / file) into a
// language model part. URLs that were pre-downloaded (see downloadAssets)
// are replaced with their bytes; image media types are sniffed from the
// data when possible. Throws when a file part ends up with no media type
// or the part type is unsupported.
function convertPartToLanguageModelPart(part, downloadedAssets) {
  var _a9;
  if (part.type === "text") {
    return {
      type: "text",
      text: part.text,
      providerOptions: part.providerOptions
    };
  }
  let originalData;
  const type = part.type;
  switch (type) {
    case "image":
      originalData = part.image;
      break;
    case "file":
      originalData = part.data;
      break;
    default:
      throw new Error(`Unsupported part type: ${type}`);
  }
  const { data: convertedData, mediaType: convertedMediaType } = convertToLanguageModelV2DataContent(originalData);
  // Media type derived from the data wins over the declared one.
  let mediaType = convertedMediaType != null ? convertedMediaType : part.mediaType;
  let data = convertedData;
  // Replace URLs with previously downloaded bytes where available.
  if (data instanceof URL) {
    const downloadedFile = downloadedAssets[data.toString()];
    if (downloadedFile) {
      data = downloadedFile.data;
      mediaType != null ? mediaType : mediaType = downloadedFile.mediaType;
    }
  }
  switch (type) {
    case "image": {
      // Sniff the actual image type from the bytes; a detected type
      // overrides whatever was declared.
      if (data instanceof Uint8Array || typeof data === "string") {
        mediaType = (_a9 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a9 : mediaType;
      }
      return {
        type: "file",
        mediaType: mediaType != null ? mediaType : "image/*",
        // any image
        filename: void 0,
        data,
        providerOptions: part.providerOptions
      };
    }
    case "file": {
      if (mediaType == null) {
        throw new Error(`Media type is missing for file part`);
      }
      return {
        type: "file",
        mediaType,
        filename: part.filename,
        data,
        providerOptions: part.providerOptions
      };
    }
  }
}
1967
+
1968
+ // src/prompt/prepare-call-settings.ts
1969
// Validates the numeric generation settings and returns a normalized
// settings object (every key present, possibly undefined). The first
// invalid setting throws an InvalidArgumentError; settings are checked in
// the order: maxOutputTokens, temperature, topP, topK, presencePenalty,
// frequencyPenalty, seed.
function prepareCallSettings({
  maxOutputTokens,
  temperature,
  topP,
  topK,
  presencePenalty,
  frequencyPenalty,
  seed,
  stopSequences
}) {
  // Rejects non-integer values for integer-only parameters.
  const requireInteger = (parameter, value) => {
    if (value != null && !Number.isInteger(value)) {
      throw new InvalidArgumentError({
        parameter,
        value,
        message: `${parameter} must be an integer`
      });
    }
  };
  // Rejects non-number values for numeric parameters.
  const requireNumber = (parameter, value) => {
    if (value != null && typeof value !== "number") {
      throw new InvalidArgumentError({
        parameter,
        value,
        message: `${parameter} must be a number`
      });
    }
  };
  requireInteger("maxOutputTokens", maxOutputTokens);
  if (maxOutputTokens != null && maxOutputTokens < 1) {
    throw new InvalidArgumentError({
      parameter: "maxOutputTokens",
      value: maxOutputTokens,
      message: "maxOutputTokens must be >= 1"
    });
  }
  requireNumber("temperature", temperature);
  requireNumber("topP", topP);
  requireNumber("topK", topK);
  requireNumber("presencePenalty", presencePenalty);
  requireNumber("frequencyPenalty", frequencyPenalty);
  requireInteger("seed", seed);
  return {
    maxOutputTokens,
    temperature,
    topP,
    topK,
    presencePenalty,
    frequencyPenalty,
    stopSequences,
    seed
  };
}
2060
+
2061
+ // src/prompt/prepare-tools-and-tool-choice.ts
2062
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
2063
+
2064
+ // src/util/is-non-empty-object.ts
2065
// True when the argument is non-null/undefined and has at least one own
// enumerable key.
function isNonEmptyObject(object) {
  if (object == null) {
    return false;
  }
  return Object.keys(object).length > 0;
}
2068
+
2069
+ // src/prompt/prepare-tools-and-tool-choice.ts
2070
// Maps the user-facing tools map and tool choice into the language model
// wire format. Tools may be restricted via `activeTools`; an empty or
// missing tools map yields undefined for both fields.
function prepareToolsAndToolChoice({
  tools,
  toolChoice,
  activeTools
}) {
  if (!isNonEmptyObject(tools)) {
    return {
      tools: void 0,
      toolChoice: void 0
    };
  }
  // When activeTools is provided, only the listed tools are exposed.
  const toolEntries = Object.entries(tools).filter(
    ([toolName]) => activeTools == null || activeTools.includes(toolName)
  );
  const mappedTools = toolEntries.map(([toolName, tool]) => {
    const toolType = tool.type;
    switch (toolType) {
      case void 0:
      case "function":
        return {
          type: "function",
          name: toolName,
          description: tool.description,
          inputSchema: (0, import_provider_utils7.asSchema)(tool.inputSchema).jsonSchema
        };
      case "provider-defined":
        return {
          type: "provider-defined",
          name: toolName,
          id: tool.id,
          args: tool.args
        };
      default: {
        const exhaustiveCheck = toolType;
        throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);
      }
    }
  });
  let mappedToolChoice;
  if (toolChoice == null) {
    mappedToolChoice = { type: "auto" };
  } else if (typeof toolChoice === "string") {
    mappedToolChoice = { type: toolChoice };
  } else {
    mappedToolChoice = { type: "tool", toolName: toolChoice.toolName };
  }
  return {
    tools: mappedTools,
    toolChoice: mappedToolChoice
  };
}
2112
+
2113
+ // src/prompt/resolve-language-model.ts
2114
+ var import_gateway = require("@ai-sdk/gateway");
2115
+
2116
+ // src/error/index.ts
2117
+ var import_provider12 = require("@ai-sdk/provider");
2118
+
2119
+ // src/error/invalid-tool-input-error.ts
2120
+ var import_provider8 = require("@ai-sdk/provider");
2121
// Error thrown when a model-generated tool call carries input that fails
// the tool's input validation; `cause` holds the underlying validation
// error.
var name6 = "AI_InvalidToolInputError";
var marker6 = `vercel.ai.error.${name6}`;
var symbol6 = Symbol.for(marker6);
var _a6;
var InvalidToolInputError = class extends import_provider8.AISDKError {
  constructor({
    toolInput,
    toolName,
    cause,
    message = `Invalid input for tool ${toolName}: ${(0, import_provider8.getErrorMessage)(cause)}`
  }) {
    super({ name: name6, message, cause });
    // Marker property keyed by the shared symbol (see isInstance).
    this[_a6] = true;
    this.toolInput = toolInput;
    this.toolName = toolName;
  }
  // Duck-typed instanceof check based on the shared marker.
  static isInstance(error) {
    return import_provider8.AISDKError.hasMarker(error, marker6);
  }
};
_a6 = symbol6;
2142
+
2143
+ // src/error/no-such-tool-error.ts
2144
+ var import_provider9 = require("@ai-sdk/provider");
2145
+ var name7 = "AI_NoSuchToolError";
2146
+ var marker7 = `vercel.ai.error.${name7}`;
2147
+ var symbol7 = Symbol.for(marker7);
2148
+ var _a7;
2149
+ var NoSuchToolError = class extends import_provider9.AISDKError {
2150
+ constructor({
2151
+ toolName,
2152
+ availableTools = void 0,
2153
+ message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
2154
+ }) {
2155
+ super({ name: name7, message });
2156
+ this[_a7] = true;
2157
+ this.toolName = toolName;
2158
+ this.availableTools = availableTools;
2159
+ }
2160
+ static isInstance(error) {
2161
+ return import_provider9.AISDKError.hasMarker(error, marker7);
2162
+ }
2163
+ };
2164
+ _a7 = symbol7;
2165
+
2166
+ // src/error/tool-call-repair-error.ts
2167
+ var import_provider10 = require("@ai-sdk/provider");
2168
+ var name8 = "AI_ToolCallRepairError";
2169
+ var marker8 = `vercel.ai.error.${name8}`;
2170
+ var symbol8 = Symbol.for(marker8);
2171
+ var _a8;
2172
+ var ToolCallRepairError = class extends import_provider10.AISDKError {
2173
+ constructor({
2174
+ cause,
2175
+ originalError,
2176
+ message = `Error repairing tool call: ${(0, import_provider10.getErrorMessage)(cause)}`
2177
+ }) {
2178
+ super({ name: name8, message, cause });
2179
+ this[_a8] = true;
2180
+ this.originalError = originalError;
2181
+ }
2182
+ static isInstance(error) {
2183
+ return import_provider10.AISDKError.hasMarker(error, marker8);
2184
+ }
2185
+ };
2186
+ _a8 = symbol8;
2187
+
2188
+ // src/error/unsupported-model-version-error.ts
2189
+ var import_provider11 = require("@ai-sdk/provider");
2190
+ var UnsupportedModelVersionError = class extends import_provider11.AISDKError {
2191
+ constructor(options) {
2192
+ super({
2193
+ name: "AI_UnsupportedModelVersionError",
2194
+ message: `Unsupported model version ${options.version} for provider "${options.provider}" and model "${options.modelId}". AI SDK 5 only supports models that implement specification version "v2".`
2195
+ });
2196
+ this.version = options.version;
2197
+ this.provider = options.provider;
2198
+ this.modelId = options.modelId;
2199
+ }
2200
+ };
2201
+
2202
+ // src/prompt/resolve-language-model.ts
2203
+ function resolveLanguageModel(model) {
2204
+ if (typeof model !== "string") {
2205
+ if (model.specificationVersion !== "v2") {
2206
+ throw new UnsupportedModelVersionError({
2207
+ version: model.specificationVersion,
2208
+ provider: model.provider,
2209
+ modelId: model.modelId
2210
+ });
2211
+ }
2212
+ return model;
2213
+ }
2214
+ const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
2215
+ return (globalProvider != null ? globalProvider : import_gateway.gateway).languageModel(model);
2216
+ }
2217
+
2218
+ // src/prompt/standardize-prompt.ts
2219
+ var import_provider13 = require("@ai-sdk/provider");
2220
+ var import_provider_utils8 = require("@ai-sdk/provider-utils");
2221
+ var import_v47 = require("zod/v4");
2222
+
2223
+ // src/prompt/message.ts
2224
+ var import_v46 = require("zod/v4");
2225
+
2226
+ // src/prompt/content-part.ts
2227
+ var import_v45 = require("zod/v4");
2228
+ var textPartSchema = import_v45.z.object({
2229
+ type: import_v45.z.literal("text"),
2230
+ text: import_v45.z.string(),
2231
+ providerOptions: providerMetadataSchema.optional()
2232
+ });
2233
+ var imagePartSchema = import_v45.z.object({
2234
+ type: import_v45.z.literal("image"),
2235
+ image: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2236
+ mediaType: import_v45.z.string().optional(),
2237
+ providerOptions: providerMetadataSchema.optional()
2238
+ });
2239
+ var filePartSchema = import_v45.z.object({
2240
+ type: import_v45.z.literal("file"),
2241
+ data: import_v45.z.union([dataContentSchema, import_v45.z.instanceof(URL)]),
2242
+ filename: import_v45.z.string().optional(),
2243
+ mediaType: import_v45.z.string(),
2244
+ providerOptions: providerMetadataSchema.optional()
2245
+ });
2246
+ var reasoningPartSchema = import_v45.z.object({
2247
+ type: import_v45.z.literal("reasoning"),
2248
+ text: import_v45.z.string(),
2249
+ providerOptions: providerMetadataSchema.optional()
2250
+ });
2251
+ var toolCallPartSchema = import_v45.z.object({
2252
+ type: import_v45.z.literal("tool-call"),
2253
+ toolCallId: import_v45.z.string(),
2254
+ toolName: import_v45.z.string(),
2255
+ input: import_v45.z.unknown(),
2256
+ providerOptions: providerMetadataSchema.optional(),
2257
+ providerExecuted: import_v45.z.boolean().optional()
2258
+ });
2259
+ var outputSchema = import_v45.z.discriminatedUnion("type", [
2260
+ import_v45.z.object({
2261
+ type: import_v45.z.literal("text"),
2262
+ value: import_v45.z.string()
2263
+ }),
2264
+ import_v45.z.object({
2265
+ type: import_v45.z.literal("json"),
2266
+ value: jsonValueSchema
2267
+ }),
2268
+ import_v45.z.object({
2269
+ type: import_v45.z.literal("error-text"),
2270
+ value: import_v45.z.string()
2271
+ }),
2272
+ import_v45.z.object({
2273
+ type: import_v45.z.literal("error-json"),
2274
+ value: jsonValueSchema
2275
+ }),
2276
+ import_v45.z.object({
2277
+ type: import_v45.z.literal("content"),
2278
+ value: import_v45.z.array(
2279
+ import_v45.z.union([
2280
+ import_v45.z.object({
2281
+ type: import_v45.z.literal("text"),
2282
+ text: import_v45.z.string()
2283
+ }),
2284
+ import_v45.z.object({
2285
+ type: import_v45.z.literal("media"),
2286
+ data: import_v45.z.string(),
2287
+ mediaType: import_v45.z.string()
2288
+ })
2289
+ ])
2290
+ )
2291
+ })
2292
+ ]);
2293
+ var toolResultPartSchema = import_v45.z.object({
2294
+ type: import_v45.z.literal("tool-result"),
2295
+ toolCallId: import_v45.z.string(),
2296
+ toolName: import_v45.z.string(),
2297
+ output: outputSchema,
2298
+ providerOptions: providerMetadataSchema.optional()
2299
+ });
2300
+
2301
+ // src/prompt/message.ts
2302
+ var systemModelMessageSchema = import_v46.z.object(
2303
+ {
2304
+ role: import_v46.z.literal("system"),
2305
+ content: import_v46.z.string(),
2306
+ providerOptions: providerMetadataSchema.optional()
2307
+ }
2308
+ );
2309
+ var userModelMessageSchema = import_v46.z.object({
2310
+ role: import_v46.z.literal("user"),
2311
+ content: import_v46.z.union([
2312
+ import_v46.z.string(),
2313
+ import_v46.z.array(import_v46.z.union([textPartSchema, imagePartSchema, filePartSchema]))
2314
+ ]),
2315
+ providerOptions: providerMetadataSchema.optional()
2316
+ });
2317
+ var assistantModelMessageSchema = import_v46.z.object({
2318
+ role: import_v46.z.literal("assistant"),
2319
+ content: import_v46.z.union([
2320
+ import_v46.z.string(),
2321
+ import_v46.z.array(
2322
+ import_v46.z.union([
2323
+ textPartSchema,
2324
+ filePartSchema,
2325
+ reasoningPartSchema,
2326
+ toolCallPartSchema,
2327
+ toolResultPartSchema
2328
+ ])
2329
+ )
2330
+ ]),
2331
+ providerOptions: providerMetadataSchema.optional()
2332
+ });
2333
+ var toolModelMessageSchema = import_v46.z.object({
2334
+ role: import_v46.z.literal("tool"),
2335
+ content: import_v46.z.array(toolResultPartSchema),
2336
+ providerOptions: providerMetadataSchema.optional()
2337
+ });
2338
+ var modelMessageSchema = import_v46.z.union([
2339
+ systemModelMessageSchema,
2340
+ userModelMessageSchema,
2341
+ assistantModelMessageSchema,
2342
+ toolModelMessageSchema
2343
+ ]);
2344
+
2345
+ // src/prompt/standardize-prompt.ts
2346
+ async function standardizePrompt(prompt) {
2347
+ if (prompt.prompt == null && prompt.messages == null) {
2348
+ throw new import_provider13.InvalidPromptError({
2349
+ prompt,
2350
+ message: "prompt or messages must be defined"
2351
+ });
2352
+ }
2353
+ if (prompt.prompt != null && prompt.messages != null) {
2354
+ throw new import_provider13.InvalidPromptError({
2355
+ prompt,
2356
+ message: "prompt and messages cannot be defined at the same time"
2357
+ });
2358
+ }
2359
+ if (prompt.system != null && typeof prompt.system !== "string") {
2360
+ throw new import_provider13.InvalidPromptError({
2361
+ prompt,
2362
+ message: "system must be a string"
2363
+ });
2364
+ }
2365
+ let messages;
2366
+ if (prompt.prompt != null && typeof prompt.prompt === "string") {
2367
+ messages = [{ role: "user", content: prompt.prompt }];
2368
+ } else if (prompt.prompt != null && Array.isArray(prompt.prompt)) {
2369
+ messages = prompt.prompt;
2370
+ } else if (prompt.messages != null) {
2371
+ messages = prompt.messages;
2372
+ } else {
2373
+ throw new import_provider13.InvalidPromptError({
2374
+ prompt,
2375
+ message: "prompt or messages must be defined"
2376
+ });
2377
+ }
2378
+ if (messages.length === 0) {
2379
+ throw new import_provider13.InvalidPromptError({
2380
+ prompt,
2381
+ message: "messages must not be empty"
2382
+ });
2383
+ }
2384
+ const validationResult = await (0, import_provider_utils8.safeValidateTypes)({
2385
+ value: messages,
2386
+ schema: import_v47.z.array(modelMessageSchema)
2387
+ });
2388
+ if (!validationResult.success) {
2389
+ throw new import_provider13.InvalidPromptError({
2390
+ prompt,
2391
+ message: "The messages must be a ModelMessage[]. If you have passed a UIMessage[], you can use convertToModelMessages to convert them.",
2392
+ cause: validationResult.error
2393
+ });
2394
+ }
2395
+ return {
2396
+ messages,
2397
+ system: prompt.system
2398
+ };
2399
+ }
2400
+
2401
+ // src/prompt/wrap-gateway-error.ts
2402
+ var import_gateway2 = require("@ai-sdk/gateway");
2403
+ var import_provider14 = require("@ai-sdk/provider");
2404
+ function wrapGatewayError(error) {
2405
+ if (import_gateway2.GatewayAuthenticationError.isInstance(error) || import_gateway2.GatewayModelNotFoundError.isInstance(error)) {
2406
+ return new import_provider14.AISDKError({
2407
+ name: "GatewayError",
2408
+ message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
2409
+ cause: error
2410
+ });
2411
+ }
2412
+ return error;
2413
+ }
2414
+
2415
+ // src/telemetry/assemble-operation-name.ts
2416
+ function assembleOperationName({
2417
+ operationId,
2418
+ telemetry
2419
+ }) {
2420
+ return {
2421
+ // standardized operation and resource name:
2422
+ "operation.name": `${operationId}${(telemetry == null ? void 0 : telemetry.functionId) != null ? ` ${telemetry.functionId}` : ""}`,
2423
+ "resource.name": telemetry == null ? void 0 : telemetry.functionId,
2424
+ // detailed, AI SDK specific data:
2425
+ "ai.operationId": operationId,
2426
+ "ai.telemetry.functionId": telemetry == null ? void 0 : telemetry.functionId
2427
+ };
2428
+ }
2429
+
2430
+ // src/telemetry/get-base-telemetry-attributes.ts
2431
+ function getBaseTelemetryAttributes({
2432
+ model,
2433
+ settings,
2434
+ telemetry,
2435
+ headers
2436
+ }) {
2437
+ var _a9;
2438
+ return {
2439
+ "ai.model.provider": model.provider,
2440
+ "ai.model.id": model.modelId,
2441
+ // settings:
2442
+ ...Object.entries(settings).reduce((attributes, [key, value]) => {
2443
+ attributes[`ai.settings.${key}`] = value;
2444
+ return attributes;
2445
+ }, {}),
2446
+ // add metadata as attributes:
2447
+ ...Object.entries((_a9 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a9 : {}).reduce(
2448
+ (attributes, [key, value]) => {
2449
+ attributes[`ai.telemetry.metadata.${key}`] = value;
2450
+ return attributes;
2451
+ },
2452
+ {}
2453
+ ),
2454
+ // request headers
2455
+ ...Object.entries(headers != null ? headers : {}).reduce((attributes, [key, value]) => {
2456
+ if (value !== void 0) {
2457
+ attributes[`ai.request.headers.${key}`] = value;
2458
+ }
2459
+ return attributes;
2460
+ }, {})
2461
+ };
2462
+ }
2463
+
2464
+ // src/telemetry/get-tracer.ts
2465
+ var import_api = require("@opentelemetry/api");
2466
+
2467
+ // src/telemetry/noop-tracer.ts
2468
+ var noopTracer = {
2469
+ startSpan() {
2470
+ return noopSpan;
2471
+ },
2472
+ startActiveSpan(name9, arg1, arg2, arg3) {
2473
+ if (typeof arg1 === "function") {
2474
+ return arg1(noopSpan);
2475
+ }
2476
+ if (typeof arg2 === "function") {
2477
+ return arg2(noopSpan);
2478
+ }
2479
+ if (typeof arg3 === "function") {
2480
+ return arg3(noopSpan);
2481
+ }
2482
+ }
2483
+ };
2484
+ var noopSpan = {
2485
+ spanContext() {
2486
+ return noopSpanContext;
2487
+ },
2488
+ setAttribute() {
2489
+ return this;
2490
+ },
2491
+ setAttributes() {
2492
+ return this;
2493
+ },
2494
+ addEvent() {
2495
+ return this;
2496
+ },
2497
+ addLink() {
2498
+ return this;
2499
+ },
2500
+ addLinks() {
2501
+ return this;
2502
+ },
2503
+ setStatus() {
2504
+ return this;
2505
+ },
2506
+ updateName() {
2507
+ return this;
2508
+ },
2509
+ end() {
2510
+ return this;
2511
+ },
2512
+ isRecording() {
2513
+ return false;
2514
+ },
2515
+ recordException() {
2516
+ return this;
2517
+ }
2518
+ };
2519
+ var noopSpanContext = {
2520
+ traceId: "",
2521
+ spanId: "",
2522
+ traceFlags: 0
2523
+ };
2524
+
2525
+ // src/telemetry/get-tracer.ts
2526
+ function getTracer({
2527
+ isEnabled = false,
2528
+ tracer
2529
+ } = {}) {
2530
+ if (!isEnabled) {
2531
+ return noopTracer;
2532
+ }
2533
+ if (tracer) {
2534
+ return tracer;
2535
+ }
2536
+ return import_api.trace.getTracer("ai");
2537
+ }
2538
+
2539
+ // src/telemetry/record-span.ts
2540
+ var import_api2 = require("@opentelemetry/api");
2541
+ function recordSpan({
2542
+ name: name9,
2543
+ tracer,
2544
+ attributes,
2545
+ fn,
2546
+ endWhenDone = true
2547
+ }) {
2548
+ return tracer.startActiveSpan(name9, { attributes }, async (span) => {
2549
+ try {
2550
+ const result = await fn(span);
2551
+ if (endWhenDone) {
2552
+ span.end();
2553
+ }
2554
+ return result;
2555
+ } catch (error) {
2556
+ try {
2557
+ recordErrorOnSpan(span, error);
2558
+ } finally {
2559
+ span.end();
2560
+ }
2561
+ throw error;
2562
+ }
2563
+ });
2564
+ }
2565
+ function recordErrorOnSpan(span, error) {
2566
+ if (error instanceof Error) {
2567
+ span.recordException({
2568
+ name: error.name,
2569
+ message: error.message,
2570
+ stack: error.stack
2571
+ });
2572
+ span.setStatus({
2573
+ code: import_api2.SpanStatusCode.ERROR,
2574
+ message: error.message
2575
+ });
2576
+ } else {
2577
+ span.setStatus({ code: import_api2.SpanStatusCode.ERROR });
2578
+ }
2579
+ }
2580
+
2581
+ // src/telemetry/select-telemetry-attributes.ts
2582
+ function selectTelemetryAttributes({
2583
+ telemetry,
2584
+ attributes
2585
+ }) {
2586
+ if ((telemetry == null ? void 0 : telemetry.isEnabled) !== true) {
2587
+ return {};
2588
+ }
2589
+ return Object.entries(attributes).reduce((attributes2, [key, value]) => {
2590
+ if (value == null) {
2591
+ return attributes2;
2592
+ }
2593
+ if (typeof value === "object" && "input" in value && typeof value.input === "function") {
2594
+ if ((telemetry == null ? void 0 : telemetry.recordInputs) === false) {
2595
+ return attributes2;
2596
+ }
2597
+ const result = value.input();
2598
+ return result == null ? attributes2 : { ...attributes2, [key]: result };
2599
+ }
2600
+ if (typeof value === "object" && "output" in value && typeof value.output === "function") {
2601
+ if ((telemetry == null ? void 0 : telemetry.recordOutputs) === false) {
2602
+ return attributes2;
2603
+ }
2604
+ const result = value.output();
2605
+ return result == null ? attributes2 : { ...attributes2, [key]: result };
2606
+ }
2607
+ return { ...attributes2, [key]: value };
2608
+ }, {});
2609
+ }
2610
+
2611
+ // src/telemetry/stringify-for-telemetry.ts
2612
+ function stringifyForTelemetry(prompt) {
2613
+ return JSON.stringify(
2614
+ prompt.map((message) => ({
2615
+ ...message,
2616
+ content: typeof message.content === "string" ? message.content : message.content.map(
2617
+ (part) => part.type === "file" ? {
2618
+ ...part,
2619
+ data: part.data instanceof Uint8Array ? convertDataContentToBase64String(part.data) : part.data
2620
+ } : part
2621
+ )
2622
+ }))
2623
+ );
2624
+ }
2625
+
2626
+ // src/types/usage.ts
2627
+ function addLanguageModelUsage(usage1, usage2) {
2628
+ return {
2629
+ inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
2630
+ outputTokens: addTokenCounts(usage1.outputTokens, usage2.outputTokens),
2631
+ totalTokens: addTokenCounts(usage1.totalTokens, usage2.totalTokens),
2632
+ reasoningTokens: addTokenCounts(
2633
+ usage1.reasoningTokens,
2634
+ usage2.reasoningTokens
2635
+ ),
2636
+ cachedInputTokens: addTokenCounts(
2637
+ usage1.cachedInputTokens,
2638
+ usage2.cachedInputTokens
2639
+ )
2640
+ };
2641
+ }
2642
+ function addTokenCounts(tokenCount1, tokenCount2) {
2643
+ return tokenCount1 == null && tokenCount2 == null ? void 0 : (tokenCount1 != null ? tokenCount1 : 0) + (tokenCount2 != null ? tokenCount2 : 0);
2644
+ }
2645
+
2646
+ // src/generate-text/run-tools-transformation.ts
2647
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");
2648
+
2649
+ // src/generate-text/generated-file.ts
2650
+ var import_provider_utils9 = require("@ai-sdk/provider-utils");
2651
+ var DefaultGeneratedFile = class {
2652
+ constructor({
2653
+ data,
2654
+ mediaType
2655
+ }) {
2656
+ const isUint8Array = data instanceof Uint8Array;
2657
+ this.base64Data = isUint8Array ? void 0 : data;
2658
+ this.uint8ArrayData = isUint8Array ? data : void 0;
2659
+ this.mediaType = mediaType;
2660
+ }
2661
+ // lazy conversion with caching to avoid unnecessary conversion overhead:
2662
+ get base64() {
2663
+ if (this.base64Data == null) {
2664
+ this.base64Data = (0, import_provider_utils9.convertUint8ArrayToBase64)(this.uint8ArrayData);
2665
+ }
2666
+ return this.base64Data;
2667
+ }
2668
+ // lazy conversion with caching to avoid unnecessary conversion overhead:
2669
+ get uint8Array() {
2670
+ if (this.uint8ArrayData == null) {
2671
+ this.uint8ArrayData = (0, import_provider_utils9.convertBase64ToUint8Array)(this.base64Data);
2672
+ }
2673
+ return this.uint8ArrayData;
2674
+ }
2675
+ };
2676
+ var DefaultGeneratedFileWithType = class extends DefaultGeneratedFile {
2677
+ constructor(options) {
2678
+ super(options);
2679
+ this.type = "file";
2680
+ }
2681
+ };
2682
+
2683
+ // src/generate-text/parse-tool-call.ts
2684
+ var import_provider_utils10 = require("@ai-sdk/provider-utils");
2685
+ async function parseToolCall({
2686
+ toolCall,
2687
+ tools,
2688
+ repairToolCall,
2689
+ system,
2690
+ messages
2691
+ }) {
2692
+ if (tools == null) {
2693
+ throw new NoSuchToolError({ toolName: toolCall.toolName });
2694
+ }
2695
+ try {
2696
+ return await doParseToolCall({ toolCall, tools });
2697
+ } catch (error) {
2698
+ if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolInputError.isInstance(error))) {
2699
+ throw error;
2700
+ }
2701
+ let repairedToolCall = null;
2702
+ try {
2703
+ repairedToolCall = await repairToolCall({
2704
+ toolCall,
2705
+ tools,
2706
+ inputSchema: ({ toolName }) => {
2707
+ const { inputSchema } = tools[toolName];
2708
+ return (0, import_provider_utils10.asSchema)(inputSchema).jsonSchema;
2709
+ },
2710
+ system,
2711
+ messages,
2712
+ error
2713
+ });
2714
+ } catch (repairError) {
2715
+ throw new ToolCallRepairError({
2716
+ cause: repairError,
2717
+ originalError: error
2718
+ });
2719
+ }
2720
+ if (repairedToolCall == null) {
2721
+ throw error;
2722
+ }
2723
+ return await doParseToolCall({ toolCall: repairedToolCall, tools });
2724
+ }
2725
+ }
2726
+ async function doParseToolCall({
2727
+ toolCall,
2728
+ tools
2729
+ }) {
2730
+ const toolName = toolCall.toolName;
2731
+ const tool = tools[toolName];
2732
+ if (tool == null) {
2733
+ throw new NoSuchToolError({
2734
+ toolName: toolCall.toolName,
2735
+ availableTools: Object.keys(tools)
2736
+ });
2737
+ }
2738
+ const schema = (0, import_provider_utils10.asSchema)(tool.inputSchema);
2739
+ const parseResult = toolCall.input.trim() === "" ? await (0, import_provider_utils10.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils10.safeParseJSON)({ text: toolCall.input, schema });
2740
+ if (parseResult.success === false) {
2741
+ throw new InvalidToolInputError({
2742
+ toolName,
2743
+ toolInput: toolCall.input,
2744
+ cause: parseResult.error
2745
+ });
2746
+ }
2747
+ return {
2748
+ type: "tool-call",
2749
+ toolCallId: toolCall.toolCallId,
2750
+ toolName,
2751
+ input: parseResult.value,
2752
+ providerExecuted: toolCall.providerExecuted,
2753
+ providerMetadata: toolCall.providerMetadata
2754
+ };
2755
+ }
2756
+
2757
+ // src/generate-text/run-tools-transformation.ts
2758
+ function runToolsTransformation({
2759
+ tools,
2760
+ generatorStream,
2761
+ tracer,
2762
+ telemetry,
2763
+ system,
2764
+ messages,
2765
+ abortSignal,
2766
+ repairToolCall
2767
+ }) {
2768
+ let toolResultsStreamController = null;
2769
+ const toolResultsStream = new ReadableStream({
2770
+ start(controller) {
2771
+ toolResultsStreamController = controller;
2772
+ }
2773
+ });
2774
+ const outstandingToolResults = /* @__PURE__ */ new Set();
2775
+ const toolInputs = /* @__PURE__ */ new Map();
2776
+ let canClose = false;
2777
+ let finishChunk = void 0;
2778
+ function attemptClose() {
2779
+ if (canClose && outstandingToolResults.size === 0) {
2780
+ if (finishChunk != null) {
2781
+ toolResultsStreamController.enqueue(finishChunk);
2782
+ }
2783
+ toolResultsStreamController.close();
2784
+ }
2785
+ }
2786
+ const forwardStream = new TransformStream({
2787
+ async transform(chunk, controller) {
2788
+ const chunkType = chunk.type;
2789
+ switch (chunkType) {
2790
+ case "stream-start":
2791
+ case "text-start":
2792
+ case "text-delta":
2793
+ case "text-end":
2794
+ case "reasoning-start":
2795
+ case "reasoning-delta":
2796
+ case "reasoning-end":
2797
+ case "tool-input-start":
2798
+ case "tool-input-delta":
2799
+ case "tool-input-end":
2800
+ case "source":
2801
+ case "response-metadata":
2802
+ case "error":
2803
+ case "raw": {
2804
+ controller.enqueue(chunk);
2805
+ break;
2806
+ }
2807
+ case "file": {
2808
+ controller.enqueue({
2809
+ type: "file",
2810
+ file: new DefaultGeneratedFileWithType({
2811
+ data: chunk.data,
2812
+ mediaType: chunk.mediaType
2813
+ })
2814
+ });
2815
+ break;
2816
+ }
2817
+ case "finish": {
2818
+ finishChunk = {
2819
+ type: "finish",
2820
+ finishReason: chunk.finishReason,
2821
+ usage: chunk.usage,
2822
+ providerMetadata: chunk.providerMetadata
2823
+ };
2824
+ break;
2825
+ }
2826
+ case "tool-call": {
2827
+ try {
2828
+ const toolCall = await parseToolCall({
2829
+ toolCall: chunk,
2830
+ tools,
2831
+ repairToolCall,
2832
+ system,
2833
+ messages
2834
+ });
2835
+ controller.enqueue(toolCall);
2836
+ const tool = tools[toolCall.toolName];
2837
+ toolInputs.set(toolCall.toolCallId, toolCall.input);
2838
+ if (tool.onInputAvailable != null) {
2839
+ await tool.onInputAvailable({
2840
+ input: toolCall.input,
2841
+ toolCallId: toolCall.toolCallId,
2842
+ messages,
2843
+ abortSignal
2844
+ });
2845
+ }
2846
+ if (tool.execute != null && toolCall.providerExecuted !== true) {
2847
+ const toolExecutionId = (0, import_provider_utils11.generateId)();
2848
+ outstandingToolResults.add(toolExecutionId);
2849
+ recordSpan({
2850
+ name: "ai.toolCall",
2851
+ attributes: selectTelemetryAttributes({
2852
+ telemetry,
2853
+ attributes: {
2854
+ ...assembleOperationName({
2855
+ operationId: "ai.toolCall",
2856
+ telemetry
2857
+ }),
2858
+ "ai.toolCall.name": toolCall.toolName,
2859
+ "ai.toolCall.id": toolCall.toolCallId,
2860
+ "ai.toolCall.input": {
2861
+ output: () => JSON.stringify(toolCall.input)
2862
+ }
2863
+ }
2864
+ }),
2865
+ tracer,
2866
+ fn: async (span) => {
2867
+ let output;
2868
+ try {
2869
+ output = await tool.execute(toolCall.input, {
2870
+ toolCallId: toolCall.toolCallId,
2871
+ messages,
2872
+ abortSignal
2873
+ });
2874
+ } catch (error) {
2875
+ recordErrorOnSpan(span, error);
2876
+ toolResultsStreamController.enqueue({
2877
+ ...toolCall,
2878
+ type: "tool-error",
2879
+ error
2880
+ });
2881
+ outstandingToolResults.delete(toolExecutionId);
2882
+ attemptClose();
2883
+ return;
2884
+ }
2885
+ toolResultsStreamController.enqueue({
2886
+ ...toolCall,
2887
+ type: "tool-result",
2888
+ output
2889
+ });
2890
+ outstandingToolResults.delete(toolExecutionId);
2891
+ attemptClose();
2892
+ try {
2893
+ span.setAttributes(
2894
+ selectTelemetryAttributes({
2895
+ telemetry,
2896
+ attributes: {
2897
+ "ai.toolCall.output": {
2898
+ output: () => JSON.stringify(output)
2899
+ }
2900
+ }
2901
+ })
2902
+ );
2903
+ } catch (ignored) {
2904
+ }
2905
+ }
2906
+ });
2907
+ }
2908
+ } catch (error) {
2909
+ toolResultsStreamController.enqueue({ type: "error", error });
2910
+ }
2911
+ break;
2912
+ }
2913
+ case "tool-result": {
2914
+ const toolName = chunk.toolName;
2915
+ if (chunk.isError) {
2916
+ toolResultsStreamController.enqueue({
2917
+ type: "tool-error",
2918
+ toolCallId: chunk.toolCallId,
2919
+ toolName,
2920
+ input: toolInputs.get(chunk.toolCallId),
2921
+ providerExecuted: chunk.providerExecuted,
2922
+ error: chunk.result
2923
+ });
2924
+ } else {
2925
+ controller.enqueue({
2926
+ type: "tool-result",
2927
+ toolCallId: chunk.toolCallId,
2928
+ toolName,
2929
+ input: toolInputs.get(chunk.toolCallId),
2930
+ output: chunk.result,
2931
+ providerExecuted: chunk.providerExecuted
2932
+ });
2933
+ }
2934
+ break;
2935
+ }
2936
+ default: {
2937
+ const _exhaustiveCheck = chunkType;
2938
+ throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
2939
+ }
2940
+ }
2941
+ },
2942
+ flush() {
2943
+ canClose = true;
2944
+ attemptClose();
2945
+ }
2946
+ });
2947
+ return new ReadableStream({
2948
+ async start(controller) {
2949
+ return Promise.all([
2950
+ generatorStream.pipeThrough(forwardStream).pipeTo(
2951
+ new WritableStream({
2952
+ write(chunk) {
2953
+ controller.enqueue(chunk);
2954
+ },
2955
+ close() {
2956
+ }
2957
+ })
2958
+ ),
2959
+ toolResultsStream.pipeTo(
2960
+ new WritableStream({
2961
+ write(chunk) {
2962
+ controller.enqueue(chunk);
2963
+ },
2964
+ close() {
2965
+ controller.close();
2966
+ }
2967
+ })
2968
+ )
2969
+ ]);
2970
+ }
2971
+ });
2972
+ }
2973
+
2974
+ // src/generate-text/step-result.ts
2975
+ var DefaultStepResult = class {
2976
+ constructor({
2977
+ content,
2978
+ finishReason,
2979
+ usage,
2980
+ warnings,
2981
+ request,
2982
+ response,
2983
+ providerMetadata
2984
+ }) {
2985
+ this.content = content;
2986
+ this.finishReason = finishReason;
2987
+ this.usage = usage;
2988
+ this.warnings = warnings;
2989
+ this.request = request;
2990
+ this.response = response;
2991
+ this.providerMetadata = providerMetadata;
2992
+ }
2993
+ get text() {
2994
+ return this.content.filter((part) => part.type === "text").map((part) => part.text).join("");
2995
+ }
2996
+ get reasoning() {
2997
+ return this.content.filter((part) => part.type === "reasoning");
2998
+ }
2999
+ get reasoningText() {
3000
+ return this.reasoning.length === 0 ? void 0 : this.reasoning.map((part) => part.text).join("");
3001
+ }
3002
+ get files() {
3003
+ return this.content.filter((part) => part.type === "file").map((part) => part.file);
3004
+ }
3005
+ get sources() {
3006
+ return this.content.filter((part) => part.type === "source");
3007
+ }
3008
+ get toolCalls() {
3009
+ return this.content.filter((part) => part.type === "tool-call");
3010
+ }
3011
+ get toolResults() {
3012
+ return this.content.filter((part) => part.type === "tool-result");
3013
+ }
3014
+ };
3015
+
3016
+ // src/generate-text/stop-condition.ts
3017
+ function stepCountIs(stepCount) {
3018
+ return ({ steps }) => steps.length === stepCount;
3019
+ }
3020
+ async function isStopConditionMet({
3021
+ stopConditions,
3022
+ steps
3023
+ }) {
3024
+ return (await Promise.all(stopConditions.map((condition) => condition({ steps })))).some((result) => result);
3025
+ }
3026
+
3027
+ // src/prompt/create-tool-model-output.ts
3028
+ var import_provider15 = require("@ai-sdk/provider");
3029
+ function createToolModelOutput({
3030
+ output,
3031
+ tool,
3032
+ errorMode
3033
+ }) {
3034
+ if (errorMode === "text") {
3035
+ return { type: "error-text", value: (0, import_provider15.getErrorMessage)(output) };
3036
+ } else if (errorMode === "json") {
3037
+ return { type: "error-json", value: output };
3038
+ }
3039
+ if (tool == null ? void 0 : tool.toModelOutput) {
3040
+ return tool.toModelOutput(output);
3041
+ }
3042
+ return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: output };
3043
+ }
3044
+
3045
+ // src/generate-text/to-response-messages.ts
3046
+ function toResponseMessages({
3047
+ content: inputContent,
3048
+ tools
3049
+ }) {
3050
+ const responseMessages = [];
3051
+ const content = inputContent.filter((part) => part.type !== "source").filter(
3052
+ (part) => (part.type !== "tool-result" || part.providerExecuted) && (part.type !== "tool-error" || part.providerExecuted)
3053
+ ).filter((part) => part.type !== "text" || part.text.length > 0).map((part) => {
3054
+ switch (part.type) {
3055
+ case "text":
3056
+ return {
3057
+ type: "text",
3058
+ text: part.text,
3059
+ providerOptions: part.providerMetadata
3060
+ };
3061
+ case "reasoning":
3062
+ return {
3063
+ type: "reasoning",
3064
+ text: part.text,
3065
+ providerOptions: part.providerMetadata
3066
+ };
3067
+ case "file":
3068
+ return {
3069
+ type: "file",
3070
+ data: part.file.base64,
3071
+ mediaType: part.file.mediaType,
3072
+ providerOptions: part.providerMetadata
3073
+ };
3074
+ case "tool-call":
3075
+ return {
3076
+ type: "tool-call",
3077
+ toolCallId: part.toolCallId,
3078
+ toolName: part.toolName,
3079
+ input: part.input,
3080
+ providerExecuted: part.providerExecuted,
3081
+ providerOptions: part.providerMetadata
3082
+ };
3083
+ case "tool-result":
3084
+ return {
3085
+ type: "tool-result",
3086
+ toolCallId: part.toolCallId,
3087
+ toolName: part.toolName,
3088
+ output: createToolModelOutput({
3089
+ tool: tools == null ? void 0 : tools[part.toolName],
3090
+ output: part.output,
3091
+ errorMode: "none"
3092
+ }),
3093
+ providerExecuted: true,
3094
+ providerOptions: part.providerMetadata
3095
+ };
3096
+ case "tool-error":
3097
+ return {
3098
+ type: "tool-result",
3099
+ toolCallId: part.toolCallId,
3100
+ toolName: part.toolName,
3101
+ output: createToolModelOutput({
3102
+ tool: tools == null ? void 0 : tools[part.toolName],
3103
+ output: part.error,
3104
+ errorMode: "json"
3105
+ }),
3106
+ providerOptions: part.providerMetadata
3107
+ };
3108
+ }
3109
+ });
3110
+ if (content.length > 0) {
3111
+ responseMessages.push({
3112
+ role: "assistant",
3113
+ content
3114
+ });
3115
+ }
3116
+ const toolResultContent = inputContent.filter((part) => part.type === "tool-result" || part.type === "tool-error").filter((part) => !part.providerExecuted).map((toolResult) => ({
3117
+ type: "tool-result",
3118
+ toolCallId: toolResult.toolCallId,
3119
+ toolName: toolResult.toolName,
3120
+ output: createToolModelOutput({
3121
+ tool: tools == null ? void 0 : tools[toolResult.toolName],
3122
+ output: toolResult.type === "tool-result" ? toolResult.output : toolResult.error,
3123
+ errorMode: toolResult.type === "tool-error" ? "text" : "none"
3124
+ })
3125
+ }));
3126
+ if (toolResultContent.length > 0) {
3127
+ responseMessages.push({
3128
+ role: "tool",
3129
+ content: toolResultContent
3130
+ });
3131
+ }
3132
+ return responseMessages;
3133
+ }
3134
+
3135
+ // src/generate-text/stream-text.ts
3136
+ var originalGenerateId = (0, import_provider_utils12.createIdGenerator)({
3137
+ prefix: "aitxt",
3138
+ size: 24
3139
+ });
3140
+ function streamText({
3141
+ model,
3142
+ tools,
3143
+ toolChoice,
3144
+ system,
3145
+ prompt,
3146
+ messages,
3147
+ maxRetries,
3148
+ abortSignal,
3149
+ headers,
3150
+ stopWhen = stepCountIs(1),
3151
+ experimental_output: output,
3152
+ experimental_telemetry: telemetry,
3153
+ prepareStep,
3154
+ providerOptions,
3155
+ experimental_activeTools,
3156
+ activeTools = experimental_activeTools,
3157
+ experimental_repairToolCall: repairToolCall,
3158
+ experimental_transform: transform,
3159
+ includeRawChunks = false,
3160
+ onChunk,
3161
+ onError = ({ error }) => {
3162
+ console.error(error);
3163
+ },
3164
+ onFinish,
3165
+ onStepFinish,
3166
+ _internal: {
3167
+ now: now2 = now,
3168
+ generateId: generateId2 = originalGenerateId,
3169
+ currentDate = () => /* @__PURE__ */ new Date()
3170
+ } = {},
3171
+ ...settings
3172
+ }) {
3173
+ return new DefaultStreamTextResult({
3174
+ model: resolveLanguageModel(model),
3175
+ telemetry,
3176
+ headers,
3177
+ settings,
3178
+ maxRetries,
3179
+ abortSignal,
3180
+ system,
3181
+ prompt,
3182
+ messages,
3183
+ tools,
3184
+ toolChoice,
3185
+ transforms: asArray(transform),
3186
+ activeTools,
3187
+ repairToolCall,
3188
+ stopConditions: asArray(stopWhen),
3189
+ output,
3190
+ providerOptions,
3191
+ prepareStep,
3192
+ includeRawChunks,
3193
+ onChunk,
3194
+ onError,
3195
+ onFinish,
3196
+ onStepFinish,
3197
+ now: now2,
3198
+ currentDate,
3199
+ generateId: generateId2
3200
+ });
3201
+ }
3202
+ function createOutputTransformStream(output) {
3203
+ if (!output) {
3204
+ return new TransformStream({
3205
+ transform(chunk, controller) {
3206
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3207
+ }
3208
+ });
3209
+ }
3210
+ let firstTextChunkId = void 0;
3211
+ let text = "";
3212
+ let textChunk = "";
3213
+ let lastPublishedJson = "";
3214
+ function publishTextChunk({
3215
+ controller,
3216
+ partialOutput = void 0
3217
+ }) {
3218
+ controller.enqueue({
3219
+ part: {
3220
+ type: "text",
3221
+ id: firstTextChunkId,
3222
+ text: textChunk
3223
+ },
3224
+ partialOutput
3225
+ });
3226
+ textChunk = "";
3227
+ }
3228
+ return new TransformStream({
3229
+ async transform(chunk, controller) {
3230
+ if (chunk.type === "finish-step" && textChunk.length > 0) {
3231
+ publishTextChunk({ controller });
3232
+ }
3233
+ if (chunk.type !== "text" && chunk.type !== "text-start" && chunk.type !== "text-end") {
3234
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3235
+ return;
3236
+ }
3237
+ if (firstTextChunkId == null) {
3238
+ firstTextChunkId = chunk.id;
3239
+ } else if (chunk.id !== firstTextChunkId) {
3240
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3241
+ return;
3242
+ }
3243
+ if (chunk.type === "text-start") {
3244
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3245
+ return;
3246
+ }
3247
+ if (chunk.type === "text-end") {
3248
+ if (textChunk.length > 0) {
3249
+ publishTextChunk({ controller });
3250
+ }
3251
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
3252
+ return;
3253
+ }
3254
+ text += chunk.text;
3255
+ textChunk += chunk.text;
3256
+ const result = await output.parsePartial({ text });
3257
+ if (result != null) {
3258
+ const currentJson = JSON.stringify(result.partial);
3259
+ if (currentJson !== lastPublishedJson) {
3260
+ publishTextChunk({ controller, partialOutput: result.partial });
3261
+ lastPublishedJson = currentJson;
3262
+ }
3263
+ }
3264
+ }
3265
+ });
3266
+ }
3267
+ var DefaultStreamTextResult = class {
3268
+ constructor({
3269
+ model,
3270
+ telemetry,
3271
+ headers,
3272
+ settings,
3273
+ maxRetries: maxRetriesArg,
3274
+ abortSignal,
3275
+ system,
3276
+ prompt,
3277
+ messages,
3278
+ tools,
3279
+ toolChoice,
3280
+ transforms,
3281
+ activeTools,
3282
+ repairToolCall,
3283
+ stopConditions,
3284
+ output,
3285
+ providerOptions,
3286
+ prepareStep,
3287
+ includeRawChunks,
3288
+ now: now2,
3289
+ currentDate,
3290
+ generateId: generateId2,
3291
+ onChunk,
3292
+ onError,
3293
+ onFinish,
3294
+ onStepFinish
3295
+ }) {
3296
+ this._totalUsage = new DelayedPromise();
3297
+ this._finishReason = new DelayedPromise();
3298
+ this._steps = new DelayedPromise();
3299
+ this.output = output;
3300
+ this.includeRawChunks = includeRawChunks;
3301
+ this.generateId = generateId2;
3302
+ let stepFinish;
3303
+ let recordedContent = [];
3304
+ const recordedResponseMessages = [];
3305
+ let recordedFinishReason = void 0;
3306
+ let recordedTotalUsage = void 0;
3307
+ let recordedRequest = {};
3308
+ let recordedWarnings = [];
3309
+ const recordedSteps = [];
3310
+ let rootSpan;
3311
+ let activeTextContent = {};
3312
+ let activeReasoningContent = {};
3313
+ const eventProcessor = new TransformStream({
3314
+ async transform(chunk, controller) {
3315
+ var _a9, _b, _c;
3316
+ controller.enqueue(chunk);
3317
+ const { part } = chunk;
3318
+ if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
3319
+ await (onChunk == null ? void 0 : onChunk({ chunk: part }));
3320
+ }
3321
+ if (part.type === "error") {
3322
+ await onError({ error: wrapGatewayError(part.error) });
3323
+ }
3324
+ if (part.type === "text-start") {
3325
+ activeTextContent[part.id] = {
3326
+ type: "text",
3327
+ text: "",
3328
+ providerMetadata: part.providerMetadata
3329
+ };
3330
+ recordedContent.push(activeTextContent[part.id]);
3331
+ }
3332
+ if (part.type === "text") {
3333
+ const activeText = activeTextContent[part.id];
3334
+ if (activeText == null) {
3335
+ controller.enqueue({
3336
+ part: {
3337
+ type: "error",
3338
+ error: `text part ${part.id} not found`
3339
+ },
3340
+ partialOutput: void 0
3341
+ });
3342
+ return;
3343
+ }
3344
+ activeText.text += part.text;
3345
+ activeText.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : activeText.providerMetadata;
3346
+ }
3347
+ if (part.type === "text-end") {
3348
+ delete activeTextContent[part.id];
3349
+ }
3350
+ if (part.type === "reasoning-start") {
3351
+ activeReasoningContent[part.id] = {
3352
+ type: "reasoning",
3353
+ text: "",
3354
+ providerMetadata: part.providerMetadata
3355
+ };
3356
+ recordedContent.push(activeReasoningContent[part.id]);
3357
+ }
3358
+ if (part.type === "reasoning") {
3359
+ const activeReasoning = activeReasoningContent[part.id];
3360
+ if (activeReasoning == null) {
3361
+ controller.enqueue({
3362
+ part: {
3363
+ type: "error",
3364
+ error: `reasoning part ${part.id} not found`
3365
+ },
3366
+ partialOutput: void 0
3367
+ });
3368
+ return;
3369
+ }
3370
+ activeReasoning.text += part.text;
3371
+ activeReasoning.providerMetadata = (_b = part.providerMetadata) != null ? _b : activeReasoning.providerMetadata;
3372
+ }
3373
+ if (part.type === "reasoning-end") {
3374
+ const activeReasoning = activeReasoningContent[part.id];
3375
+ if (activeReasoning == null) {
3376
+ controller.enqueue({
3377
+ part: {
3378
+ type: "error",
3379
+ error: `reasoning part ${part.id} not found`
3380
+ },
3381
+ partialOutput: void 0
3382
+ });
3383
+ return;
3384
+ }
3385
+ activeReasoning.providerMetadata = (_c = part.providerMetadata) != null ? _c : activeReasoning.providerMetadata;
3386
+ delete activeReasoningContent[part.id];
3387
+ }
3388
+ if (part.type === "file") {
3389
+ recordedContent.push({ type: "file", file: part.file });
3390
+ }
3391
+ if (part.type === "source") {
3392
+ recordedContent.push(part);
3393
+ }
3394
+ if (part.type === "tool-call") {
3395
+ recordedContent.push(part);
3396
+ }
3397
+ if (part.type === "tool-result") {
3398
+ recordedContent.push(part);
3399
+ }
3400
+ if (part.type === "tool-error") {
3401
+ recordedContent.push(part);
3402
+ }
3403
+ if (part.type === "start-step") {
3404
+ recordedRequest = part.request;
3405
+ recordedWarnings = part.warnings;
3406
+ }
3407
+ if (part.type === "finish-step") {
3408
+ const stepMessages = toResponseMessages({
3409
+ content: recordedContent,
3410
+ tools
3411
+ });
3412
+ const currentStepResult = new DefaultStepResult({
3413
+ content: recordedContent,
3414
+ finishReason: part.finishReason,
3415
+ usage: part.usage,
3416
+ warnings: recordedWarnings,
3417
+ request: recordedRequest,
3418
+ response: {
3419
+ ...part.response,
3420
+ messages: [...recordedResponseMessages, ...stepMessages]
3421
+ },
3422
+ providerMetadata: part.providerMetadata
3423
+ });
3424
+ await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
3425
+ recordedSteps.push(currentStepResult);
3426
+ recordedContent = [];
3427
+ activeReasoningContent = {};
3428
+ activeTextContent = {};
3429
+ recordedResponseMessages.push(...stepMessages);
3430
+ stepFinish.resolve();
3431
+ }
3432
+ if (part.type === "finish") {
3433
+ recordedTotalUsage = part.totalUsage;
3434
+ recordedFinishReason = part.finishReason;
3435
+ }
3436
+ },
3437
+ async flush(controller) {
3438
+ try {
3439
+ if (recordedSteps.length === 0) {
3440
+ return;
3441
+ }
3442
+ const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
3443
+ const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : {
3444
+ inputTokens: void 0,
3445
+ outputTokens: void 0,
3446
+ totalTokens: void 0
3447
+ };
3448
+ self._finishReason.resolve(finishReason);
3449
+ self._totalUsage.resolve(totalUsage);
3450
+ self._steps.resolve(recordedSteps);
3451
+ const finalStep = recordedSteps[recordedSteps.length - 1];
3452
+ await (onFinish == null ? void 0 : onFinish({
3453
+ finishReason,
3454
+ totalUsage,
3455
+ usage: finalStep.usage,
3456
+ content: finalStep.content,
3457
+ text: finalStep.text,
3458
+ reasoningText: finalStep.reasoningText,
3459
+ reasoning: finalStep.reasoning,
3460
+ files: finalStep.files,
3461
+ sources: finalStep.sources,
3462
+ toolCalls: finalStep.toolCalls,
3463
+ toolResults: finalStep.toolResults,
3464
+ request: finalStep.request,
3465
+ response: finalStep.response,
3466
+ warnings: finalStep.warnings,
3467
+ providerMetadata: finalStep.providerMetadata,
3468
+ steps: recordedSteps
3469
+ }));
3470
+ rootSpan.setAttributes(
3471
+ selectTelemetryAttributes({
3472
+ telemetry,
3473
+ attributes: {
3474
+ "ai.response.finishReason": finishReason,
3475
+ "ai.response.text": { output: () => finalStep.text },
3476
+ "ai.response.toolCalls": {
3477
+ output: () => {
3478
+ var _a9;
3479
+ return ((_a9 = finalStep.toolCalls) == null ? void 0 : _a9.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
3480
+ }
3481
+ },
3482
+ "ai.response.providerMetadata": JSON.stringify(
3483
+ finalStep.providerMetadata
3484
+ ),
3485
+ "ai.usage.inputTokens": totalUsage.inputTokens,
3486
+ "ai.usage.outputTokens": totalUsage.outputTokens,
3487
+ "ai.usage.totalTokens": totalUsage.totalTokens,
3488
+ "ai.usage.reasoningTokens": totalUsage.reasoningTokens,
3489
+ "ai.usage.cachedInputTokens": totalUsage.cachedInputTokens
3490
+ }
3491
+ })
3492
+ );
3493
+ } catch (error) {
3494
+ controller.error(error);
3495
+ } finally {
3496
+ rootSpan.end();
3497
+ }
3498
+ }
3499
+ });
3500
+ const stitchableStream = createStitchableStream();
3501
+ this.addStream = stitchableStream.addStream;
3502
+ this.closeStream = stitchableStream.close;
3503
+ let stream = stitchableStream.stream;
3504
+ stream = stream.pipeThrough(
3505
+ new TransformStream({
3506
+ start(controller) {
3507
+ controller.enqueue({ type: "start" });
3508
+ }
3509
+ })
3510
+ );
3511
+ for (const transform of transforms) {
3512
+ stream = stream.pipeThrough(
3513
+ transform({
3514
+ tools,
3515
+ stopStream() {
3516
+ stitchableStream.terminate();
3517
+ }
3518
+ })
3519
+ );
3520
+ }
3521
+ this.baseStream = stream.pipeThrough(createOutputTransformStream(output)).pipeThrough(eventProcessor);
3522
+ const { maxRetries, retry } = prepareRetries({
3523
+ maxRetries: maxRetriesArg
3524
+ });
3525
+ const tracer = getTracer(telemetry);
3526
+ const callSettings = prepareCallSettings(settings);
3527
+ const baseTelemetryAttributes = getBaseTelemetryAttributes({
3528
+ model,
3529
+ telemetry,
3530
+ headers,
3531
+ settings: { ...callSettings, maxRetries }
3532
+ });
3533
+ const self = this;
3534
+ recordSpan({
3535
+ name: "ai.streamText",
3536
+ attributes: selectTelemetryAttributes({
3537
+ telemetry,
3538
+ attributes: {
3539
+ ...assembleOperationName({ operationId: "ai.streamText", telemetry }),
3540
+ ...baseTelemetryAttributes,
3541
+ // specific settings that only make sense on the outer level:
3542
+ "ai.prompt": {
3543
+ input: () => JSON.stringify({ system, prompt, messages })
3544
+ }
3545
+ }
3546
+ }),
3547
+ tracer,
3548
+ endWhenDone: false,
3549
+ fn: async (rootSpanArg) => {
3550
+ rootSpan = rootSpanArg;
3551
+ async function streamStep({
3552
+ currentStep,
3553
+ responseMessages,
3554
+ usage
3555
+ }) {
3556
+ var _a9, _b, _c, _d, _e;
3557
+ const includeRawChunks2 = self.includeRawChunks;
3558
+ stepFinish = new DelayedPromise();
3559
+ const initialPrompt = await standardizePrompt({
3560
+ system,
3561
+ prompt,
3562
+ messages
3563
+ });
3564
+ const stepInputMessages = [
3565
+ ...initialPrompt.messages,
3566
+ ...responseMessages
3567
+ ];
3568
+ const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
3569
+ model,
3570
+ steps: recordedSteps,
3571
+ stepNumber: recordedSteps.length,
3572
+ messages: stepInputMessages
3573
+ }));
3574
+ const promptMessages = await convertToLanguageModelPrompt({
3575
+ prompt: {
3576
+ system: (_a9 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a9 : initialPrompt.system,
3577
+ messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
3578
+ },
3579
+ supportedUrls: await model.supportedUrls
3580
+ });
3581
+ const stepModel = resolveLanguageModel(
3582
+ (_c = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _c : model
3583
+ );
3584
+ const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
3585
+ tools,
3586
+ toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
3587
+ activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
3588
+ });
3589
+ const {
3590
+ result: { stream: stream2, response, request },
3591
+ doStreamSpan,
3592
+ startTimestampMs
3593
+ } = await retry(
3594
+ () => recordSpan({
3595
+ name: "ai.streamText.doStream",
3596
+ attributes: selectTelemetryAttributes({
3597
+ telemetry,
3598
+ attributes: {
3599
+ ...assembleOperationName({
3600
+ operationId: "ai.streamText.doStream",
3601
+ telemetry
3602
+ }),
3603
+ ...baseTelemetryAttributes,
3604
+ // model:
3605
+ "ai.model.provider": stepModel.provider,
3606
+ "ai.model.id": stepModel.modelId,
3607
+ // prompt:
3608
+ "ai.prompt.messages": {
3609
+ input: () => stringifyForTelemetry(promptMessages)
3610
+ },
3611
+ "ai.prompt.tools": {
3612
+ // convert the language model level tools:
3613
+ input: () => stepTools == null ? void 0 : stepTools.map((tool) => JSON.stringify(tool))
3614
+ },
3615
+ "ai.prompt.toolChoice": {
3616
+ input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
3617
+ },
3618
+ // standardized gen-ai llm span attributes:
3619
+ "gen_ai.system": stepModel.provider,
3620
+ "gen_ai.request.model": stepModel.modelId,
3621
+ "gen_ai.request.frequency_penalty": callSettings.frequencyPenalty,
3622
+ "gen_ai.request.max_tokens": callSettings.maxOutputTokens,
3623
+ "gen_ai.request.presence_penalty": callSettings.presencePenalty,
3624
+ "gen_ai.request.stop_sequences": callSettings.stopSequences,
3625
+ "gen_ai.request.temperature": callSettings.temperature,
3626
+ "gen_ai.request.top_k": callSettings.topK,
3627
+ "gen_ai.request.top_p": callSettings.topP
3628
+ }
3629
+ }),
3630
+ tracer,
3631
+ endWhenDone: false,
3632
+ fn: async (doStreamSpan2) => {
3633
+ return {
3634
+ startTimestampMs: now2(),
3635
+ // get before the call
3636
+ doStreamSpan: doStreamSpan2,
3637
+ result: await stepModel.doStream({
3638
+ ...callSettings,
3639
+ tools: stepTools,
3640
+ toolChoice: stepToolChoice,
3641
+ responseFormat: output == null ? void 0 : output.responseFormat,
3642
+ prompt: promptMessages,
3643
+ providerOptions,
3644
+ abortSignal,
3645
+ headers,
3646
+ includeRawChunks: includeRawChunks2
3647
+ })
3648
+ };
3649
+ }
3650
+ })
3651
+ );
3652
+ const streamWithToolResults = runToolsTransformation({
3653
+ tools,
3654
+ generatorStream: stream2,
3655
+ tracer,
3656
+ telemetry,
3657
+ system,
3658
+ messages: stepInputMessages,
3659
+ repairToolCall,
3660
+ abortSignal
3661
+ });
3662
+ const stepRequest = request != null ? request : {};
3663
+ const stepToolCalls = [];
3664
+ const stepToolOutputs = [];
3665
+ let warnings;
3666
+ const activeToolCallToolNames = {};
3667
+ let stepFinishReason = "unknown";
3668
+ let stepUsage = {
3669
+ inputTokens: void 0,
3670
+ outputTokens: void 0,
3671
+ totalTokens: void 0
3672
+ };
3673
+ let stepProviderMetadata;
3674
+ let stepFirstChunk = true;
3675
+ let stepResponse = {
3676
+ id: generateId2(),
3677
+ timestamp: currentDate(),
3678
+ modelId: model.modelId
3679
+ };
3680
+ let activeText = "";
3681
+ self.addStream(
3682
+ streamWithToolResults.pipeThrough(
3683
+ new TransformStream({
3684
+ async transform(chunk, controller) {
3685
+ var _a10, _b2, _c2, _d2;
3686
+ if (chunk.type === "stream-start") {
3687
+ warnings = chunk.warnings;
3688
+ return;
3689
+ }
3690
+ if (stepFirstChunk) {
3691
+ const msToFirstChunk = now2() - startTimestampMs;
3692
+ stepFirstChunk = false;
3693
+ doStreamSpan.addEvent("ai.stream.firstChunk", {
3694
+ "ai.response.msToFirstChunk": msToFirstChunk
3695
+ });
3696
+ doStreamSpan.setAttributes({
3697
+ "ai.response.msToFirstChunk": msToFirstChunk
3698
+ });
3699
+ controller.enqueue({
3700
+ type: "start-step",
3701
+ request: stepRequest,
3702
+ warnings: warnings != null ? warnings : []
3703
+ });
3704
+ }
3705
+ const chunkType = chunk.type;
3706
+ switch (chunkType) {
3707
+ case "text-start":
3708
+ case "text-end": {
3709
+ controller.enqueue(chunk);
3710
+ break;
3711
+ }
3712
+ case "text-delta": {
3713
+ if (chunk.delta.length > 0) {
3714
+ controller.enqueue({
3715
+ type: "text",
3716
+ id: chunk.id,
3717
+ text: chunk.delta,
3718
+ providerMetadata: chunk.providerMetadata
3719
+ });
3720
+ activeText += chunk.delta;
3721
+ }
3722
+ break;
3723
+ }
3724
+ case "reasoning-start":
3725
+ case "reasoning-end": {
3726
+ controller.enqueue(chunk);
3727
+ break;
3728
+ }
3729
+ case "reasoning-delta": {
3730
+ controller.enqueue({
3731
+ type: "reasoning",
3732
+ id: chunk.id,
3733
+ text: chunk.delta,
3734
+ providerMetadata: chunk.providerMetadata
3735
+ });
3736
+ break;
3737
+ }
3738
+ case "tool-call": {
3739
+ controller.enqueue(chunk);
3740
+ stepToolCalls.push(chunk);
3741
+ break;
3742
+ }
3743
+ case "tool-result": {
3744
+ controller.enqueue(chunk);
3745
+ stepToolOutputs.push(chunk);
3746
+ break;
3747
+ }
3748
+ case "tool-error": {
3749
+ controller.enqueue(chunk);
3750
+ stepToolOutputs.push(chunk);
3751
+ break;
3752
+ }
3753
+ case "response-metadata": {
3754
+ stepResponse = {
3755
+ id: (_a10 = chunk.id) != null ? _a10 : stepResponse.id,
3756
+ timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
3757
+ modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
3758
+ };
3759
+ break;
3760
+ }
3761
+ case "finish": {
3762
+ stepUsage = chunk.usage;
3763
+ stepFinishReason = chunk.finishReason;
3764
+ stepProviderMetadata = chunk.providerMetadata;
3765
+ const msToFinish = now2() - startTimestampMs;
3766
+ doStreamSpan.addEvent("ai.stream.finish");
3767
+ doStreamSpan.setAttributes({
3768
+ "ai.response.msToFinish": msToFinish,
3769
+ "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
3770
+ });
3771
+ break;
3772
+ }
3773
+ case "file": {
3774
+ controller.enqueue(chunk);
3775
+ break;
3776
+ }
3777
+ case "source": {
3778
+ controller.enqueue(chunk);
3779
+ break;
3780
+ }
3781
+ case "tool-input-start": {
3782
+ activeToolCallToolNames[chunk.id] = chunk.toolName;
3783
+ const tool = tools == null ? void 0 : tools[chunk.toolName];
3784
+ if ((tool == null ? void 0 : tool.onInputStart) != null) {
3785
+ await tool.onInputStart({
3786
+ toolCallId: chunk.id,
3787
+ messages: stepInputMessages,
3788
+ abortSignal
3789
+ });
3790
+ }
3791
+ controller.enqueue(chunk);
3792
+ break;
3793
+ }
3794
+ case "tool-input-end": {
3795
+ delete activeToolCallToolNames[chunk.id];
3796
+ controller.enqueue(chunk);
3797
+ break;
3798
+ }
3799
+ case "tool-input-delta": {
3800
+ const toolName = activeToolCallToolNames[chunk.id];
3801
+ const tool = tools == null ? void 0 : tools[toolName];
3802
+ if ((tool == null ? void 0 : tool.onInputDelta) != null) {
3803
+ await tool.onInputDelta({
3804
+ inputTextDelta: chunk.delta,
3805
+ toolCallId: chunk.id,
3806
+ messages: stepInputMessages,
3807
+ abortSignal
3808
+ });
3809
+ }
3810
+ controller.enqueue(chunk);
3811
+ break;
3812
+ }
3813
+ case "error": {
3814
+ controller.enqueue(chunk);
3815
+ stepFinishReason = "error";
3816
+ break;
3817
+ }
3818
+ case "raw": {
3819
+ if (includeRawChunks2) {
3820
+ controller.enqueue(chunk);
3821
+ }
3822
+ break;
3823
+ }
3824
+ default: {
3825
+ const exhaustiveCheck = chunkType;
3826
+ throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
3827
+ }
3828
+ }
3829
+ },
3830
+ // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
3831
+ async flush(controller) {
3832
+ const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
3833
+ try {
3834
+ doStreamSpan.setAttributes(
3835
+ selectTelemetryAttributes({
3836
+ telemetry,
3837
+ attributes: {
3838
+ "ai.response.finishReason": stepFinishReason,
3839
+ "ai.response.text": {
3840
+ output: () => activeText
3841
+ },
3842
+ "ai.response.toolCalls": {
3843
+ output: () => stepToolCallsJson
3844
+ },
3845
+ "ai.response.id": stepResponse.id,
3846
+ "ai.response.model": stepResponse.modelId,
3847
+ "ai.response.timestamp": stepResponse.timestamp.toISOString(),
3848
+ "ai.response.providerMetadata": JSON.stringify(stepProviderMetadata),
3849
+ "ai.usage.inputTokens": stepUsage.inputTokens,
3850
+ "ai.usage.outputTokens": stepUsage.outputTokens,
3851
+ "ai.usage.totalTokens": stepUsage.totalTokens,
3852
+ "ai.usage.reasoningTokens": stepUsage.reasoningTokens,
3853
+ "ai.usage.cachedInputTokens": stepUsage.cachedInputTokens,
3854
+ // standardized gen-ai llm span attributes:
3855
+ "gen_ai.response.finish_reasons": [stepFinishReason],
3856
+ "gen_ai.response.id": stepResponse.id,
3857
+ "gen_ai.response.model": stepResponse.modelId,
3858
+ "gen_ai.usage.input_tokens": stepUsage.inputTokens,
3859
+ "gen_ai.usage.output_tokens": stepUsage.outputTokens
3860
+ }
3861
+ })
3862
+ );
3863
+ } catch (error) {
3864
+ } finally {
3865
+ doStreamSpan.end();
3866
+ }
3867
+ controller.enqueue({
3868
+ type: "finish-step",
3869
+ finishReason: stepFinishReason,
3870
+ usage: stepUsage,
3871
+ providerMetadata: stepProviderMetadata,
3872
+ response: {
3873
+ ...stepResponse,
3874
+ headers: response == null ? void 0 : response.headers
3875
+ }
3876
+ });
3877
+ const combinedUsage = addLanguageModelUsage(usage, stepUsage);
3878
+ await stepFinish.promise;
3879
+ const clientToolCalls = stepToolCalls.filter(
3880
+ (toolCall) => toolCall.providerExecuted !== true
3881
+ );
3882
+ const clientToolOutputs = stepToolOutputs.filter(
3883
+ (toolOutput) => toolOutput.providerExecuted !== true
3884
+ );
3885
+ if (clientToolCalls.length > 0 && // all current tool calls have outputs (incl. execution errors):
3886
+ clientToolOutputs.length === clientToolCalls.length && // continue until a stop condition is met:
3887
+ !await isStopConditionMet({
3888
+ stopConditions,
3889
+ steps: recordedSteps
3890
+ })) {
3891
+ responseMessages.push(
3892
+ ...toResponseMessages({
3893
+ content: (
3894
+ // use transformed content to create the messages for the next step:
3895
+ recordedSteps[recordedSteps.length - 1].content
3896
+ ),
3897
+ tools
3898
+ })
3899
+ );
3900
+ try {
3901
+ await streamStep({
3902
+ currentStep: currentStep + 1,
3903
+ responseMessages,
3904
+ usage: combinedUsage
3905
+ });
3906
+ } catch (error) {
3907
+ controller.enqueue({
3908
+ type: "error",
3909
+ error
3910
+ });
3911
+ self.closeStream();
3912
+ }
3913
+ } else {
3914
+ controller.enqueue({
3915
+ type: "finish",
3916
+ finishReason: stepFinishReason,
3917
+ totalUsage: combinedUsage
3918
+ });
3919
+ self.closeStream();
3920
+ }
3921
+ }
3922
+ })
3923
+ )
3924
+ );
3925
+ }
3926
+ await streamStep({
3927
+ currentStep: 0,
3928
+ responseMessages: [],
3929
+ usage: {
3930
+ inputTokens: void 0,
3931
+ outputTokens: void 0,
3932
+ totalTokens: void 0
3933
+ }
3934
+ });
3935
+ }
3936
+ }).catch((error) => {
3937
+ self.addStream(
3938
+ new ReadableStream({
3939
+ start(controller) {
3940
+ controller.enqueue({ type: "error", error });
3941
+ controller.close();
3942
+ }
3943
+ })
3944
+ );
3945
+ self.closeStream();
3946
+ });
3947
+ }
3948
+ get steps() {
3949
+ return this._steps.promise;
3950
+ }
3951
+ get finalStep() {
3952
+ return this.steps.then((steps) => steps[steps.length - 1]);
3953
+ }
3954
+ get content() {
3955
+ return this.finalStep.then((step) => step.content);
3956
+ }
3957
+ get warnings() {
3958
+ return this.finalStep.then((step) => step.warnings);
3959
+ }
3960
+ get providerMetadata() {
3961
+ return this.finalStep.then((step) => step.providerMetadata);
3962
+ }
3963
+ get text() {
3964
+ return this.finalStep.then((step) => step.text);
3965
+ }
3966
+ get reasoningText() {
3967
+ return this.finalStep.then((step) => step.reasoningText);
3968
+ }
3969
+ get reasoning() {
3970
+ return this.finalStep.then((step) => step.reasoning);
3971
+ }
3972
+ get sources() {
3973
+ return this.finalStep.then((step) => step.sources);
3974
+ }
3975
+ get files() {
3976
+ return this.finalStep.then((step) => step.files);
3977
+ }
3978
+ get toolCalls() {
3979
+ return this.finalStep.then((step) => step.toolCalls);
3980
+ }
3981
+ get toolResults() {
3982
+ return this.finalStep.then((step) => step.toolResults);
3983
+ }
3984
+ get usage() {
3985
+ return this.finalStep.then((step) => step.usage);
3986
+ }
3987
+ get request() {
3988
+ return this.finalStep.then((step) => step.request);
3989
+ }
3990
+ get response() {
3991
+ return this.finalStep.then((step) => step.response);
3992
+ }
3993
+ get totalUsage() {
3994
+ return this._totalUsage.promise;
3995
+ }
3996
+ get finishReason() {
3997
+ return this._finishReason.promise;
3998
+ }
3999
+ /**
4000
+ Split out a new stream from the original stream.
4001
+ The original stream is replaced to allow for further splitting,
4002
+ since we do not know how many times the stream will be split.
4003
+
4004
+ Note: this leads to buffering the stream content on the server.
4005
+ However, the LLM results are expected to be small enough to not cause issues.
4006
+ */
4007
+ teeStream() {
4008
+ const [stream1, stream2] = this.baseStream.tee();
4009
+ this.baseStream = stream2;
4010
+ return stream1;
4011
+ }
4012
+ get textStream() {
4013
+ return createAsyncIterableStream(
4014
+ this.teeStream().pipeThrough(
4015
+ new TransformStream({
4016
+ transform({ part }, controller) {
4017
+ if (part.type === "text") {
4018
+ controller.enqueue(part.text);
4019
+ }
4020
+ }
4021
+ })
4022
+ )
4023
+ );
4024
+ }
4025
+ get fullStream() {
4026
+ return createAsyncIterableStream(
4027
+ this.teeStream().pipeThrough(
4028
+ new TransformStream({
4029
+ transform({ part }, controller) {
4030
+ controller.enqueue(part);
4031
+ }
4032
+ })
4033
+ )
4034
+ );
4035
+ }
4036
+ async consumeStream(options) {
4037
+ var _a9;
4038
+ try {
4039
+ await consumeStream({
4040
+ stream: this.fullStream,
4041
+ onError: options == null ? void 0 : options.onError
4042
+ });
4043
+ } catch (error) {
4044
+ (_a9 = options == null ? void 0 : options.onError) == null ? void 0 : _a9.call(options, error);
4045
+ }
4046
+ }
4047
+ get experimental_partialOutputStream() {
4048
+ if (this.output == null) {
4049
+ throw new NoOutputSpecifiedError();
4050
+ }
4051
+ return createAsyncIterableStream(
4052
+ this.teeStream().pipeThrough(
4053
+ new TransformStream({
4054
+ transform({ partialOutput }, controller) {
4055
+ if (partialOutput != null) {
4056
+ controller.enqueue(partialOutput);
4057
+ }
4058
+ }
4059
+ })
4060
+ )
4061
+ );
4062
+ }
4063
+ toUIMessageStream({
4064
+ originalMessages,
4065
+ generateMessageId,
4066
+ onFinish,
4067
+ messageMetadata,
4068
+ sendReasoning = true,
4069
+ sendSources = false,
4070
+ sendStart = true,
4071
+ sendFinish = true,
4072
+ onError = import_provider16.getErrorMessage
4073
+ } = {}) {
4074
+ const responseMessageId = getResponseUIMessageId({
4075
+ originalMessages,
4076
+ responseMessageId: this.generateId
4077
+ });
4078
+ const baseStream = this.fullStream.pipeThrough(
4079
+ new TransformStream({
4080
+ transform: async (part, controller) => {
4081
+ const messageMetadataValue = messageMetadata == null ? void 0 : messageMetadata({ part });
4082
+ const partType = part.type;
4083
+ switch (partType) {
4084
+ case "text-start": {
4085
+ controller.enqueue({
4086
+ type: "text-start",
4087
+ id: part.id,
4088
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4089
+ });
4090
+ break;
4091
+ }
4092
+ case "text": {
4093
+ controller.enqueue({
4094
+ type: "text-delta",
4095
+ id: part.id,
4096
+ delta: part.text,
4097
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4098
+ });
4099
+ break;
4100
+ }
4101
+ case "text-end": {
4102
+ controller.enqueue({
4103
+ type: "text-end",
4104
+ id: part.id,
4105
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4106
+ });
4107
+ break;
4108
+ }
4109
+ case "reasoning-start": {
4110
+ controller.enqueue({
4111
+ type: "reasoning-start",
4112
+ id: part.id,
4113
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4114
+ });
4115
+ break;
4116
+ }
4117
+ case "reasoning": {
4118
+ if (sendReasoning) {
4119
+ controller.enqueue({
4120
+ type: "reasoning-delta",
4121
+ id: part.id,
4122
+ delta: part.text,
4123
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4124
+ });
4125
+ }
4126
+ break;
4127
+ }
4128
+ case "reasoning-end": {
4129
+ controller.enqueue({
4130
+ type: "reasoning-end",
4131
+ id: part.id,
4132
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4133
+ });
4134
+ break;
4135
+ }
4136
+ case "file": {
4137
+ controller.enqueue({
4138
+ type: "file",
4139
+ mediaType: part.file.mediaType,
4140
+ url: `data:${part.file.mediaType};base64,${part.file.base64}`
4141
+ });
4142
+ break;
4143
+ }
4144
+ case "source": {
4145
+ if (sendSources && part.sourceType === "url") {
4146
+ controller.enqueue({
4147
+ type: "source-url",
4148
+ sourceId: part.id,
4149
+ url: part.url,
4150
+ title: part.title,
4151
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4152
+ });
4153
+ }
4154
+ if (sendSources && part.sourceType === "document") {
4155
+ controller.enqueue({
4156
+ type: "source-document",
4157
+ sourceId: part.id,
4158
+ mediaType: part.mediaType,
4159
+ title: part.title,
4160
+ filename: part.filename,
4161
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
4162
+ });
4163
+ }
4164
+ break;
4165
+ }
4166
+ case "tool-input-start": {
4167
+ controller.enqueue({
4168
+ type: "tool-input-start",
4169
+ toolCallId: part.id,
4170
+ toolName: part.toolName,
4171
+ providerExecuted: part.providerExecuted
4172
+ });
4173
+ break;
4174
+ }
4175
+ case "tool-input-delta": {
4176
+ controller.enqueue({
4177
+ type: "tool-input-delta",
4178
+ toolCallId: part.id,
4179
+ inputTextDelta: part.delta
4180
+ });
4181
+ break;
4182
+ }
4183
+ case "tool-call": {
4184
+ controller.enqueue({
4185
+ type: "tool-input-available",
4186
+ toolCallId: part.toolCallId,
4187
+ toolName: part.toolName,
4188
+ input: part.input,
4189
+ providerExecuted: part.providerExecuted,
4190
+ providerMetadata: part.providerMetadata
4191
+ });
4192
+ break;
4193
+ }
4194
+ case "tool-result": {
4195
+ controller.enqueue({
4196
+ type: "tool-output-available",
4197
+ toolCallId: part.toolCallId,
4198
+ output: part.output,
4199
+ providerExecuted: part.providerExecuted
4200
+ });
4201
+ break;
4202
+ }
4203
+ case "tool-error": {
4204
+ controller.enqueue({
4205
+ type: "tool-output-error",
4206
+ toolCallId: part.toolCallId,
4207
+ errorText: onError(part.error),
4208
+ providerExecuted: part.providerExecuted
4209
+ });
4210
+ break;
4211
+ }
4212
+ case "error": {
4213
+ controller.enqueue({
4214
+ type: "error",
4215
+ errorText: onError(part.error)
4216
+ });
4217
+ break;
4218
+ }
4219
+ case "start-step": {
4220
+ controller.enqueue({ type: "start-step" });
4221
+ break;
4222
+ }
4223
+ case "finish-step": {
4224
+ controller.enqueue({ type: "finish-step" });
4225
+ break;
4226
+ }
4227
+ case "start": {
4228
+ if (sendStart) {
4229
+ controller.enqueue({
4230
+ type: "start",
4231
+ messageId: responseMessageId,
4232
+ messageMetadata: messageMetadataValue
4233
+ });
4234
+ }
4235
+ break;
4236
+ }
4237
+ case "finish": {
4238
+ if (sendFinish) {
4239
+ controller.enqueue({
4240
+ type: "finish",
4241
+ messageMetadata: messageMetadataValue
4242
+ });
4243
+ }
4244
+ break;
4245
+ }
4246
+ case "tool-input-end": {
4247
+ break;
4248
+ }
4249
+ case "raw": {
4250
+ break;
4251
+ }
4252
+ default: {
4253
+ const exhaustiveCheck = partType;
4254
+ throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
4255
+ }
4256
+ }
4257
+ if (messageMetadataValue != null && partType !== "start" && partType !== "finish") {
4258
+ controller.enqueue({
4259
+ type: "message-metadata",
4260
+ messageMetadata: messageMetadataValue
4261
+ });
4262
+ }
4263
+ }
4264
+ })
4265
+ );
4266
+ return handleUIMessageStreamFinish({
4267
+ stream: baseStream,
4268
+ messageId: responseMessageId != null ? responseMessageId : generateMessageId == null ? void 0 : generateMessageId(),
4269
+ originalMessages,
4270
+ onFinish,
4271
+ onError
4272
+ });
4273
+ }
4274
+ pipeUIMessageStreamToResponse(response, {
4275
+ originalMessages,
4276
+ generateMessageId,
4277
+ onFinish,
4278
+ messageMetadata,
4279
+ sendReasoning,
4280
+ sendSources,
4281
+ sendFinish,
4282
+ sendStart,
4283
+ onError,
4284
+ ...init
4285
+ } = {}) {
4286
+ pipeUIMessageStreamToResponse({
4287
+ response,
4288
+ stream: this.toUIMessageStream({
4289
+ originalMessages,
4290
+ generateMessageId,
4291
+ onFinish,
4292
+ messageMetadata,
4293
+ sendReasoning,
4294
+ sendSources,
4295
+ sendFinish,
4296
+ sendStart,
4297
+ onError
4298
+ }),
4299
+ ...init
4300
+ });
4301
+ }
4302
+ pipeTextStreamToResponse(response, init) {
4303
+ pipeTextStreamToResponse({
4304
+ response,
4305
+ textStream: this.textStream,
4306
+ ...init
4307
+ });
4308
+ }
4309
+ toUIMessageStreamResponse({
4310
+ originalMessages,
4311
+ generateMessageId,
4312
+ onFinish,
4313
+ messageMetadata,
4314
+ sendReasoning,
4315
+ sendSources,
4316
+ sendFinish,
4317
+ sendStart,
4318
+ onError,
4319
+ ...init
4320
+ } = {}) {
4321
+ return createUIMessageStreamResponse({
4322
+ stream: this.toUIMessageStream({
4323
+ originalMessages,
4324
+ generateMessageId,
4325
+ onFinish,
4326
+ messageMetadata,
4327
+ sendReasoning,
4328
+ sendSources,
4329
+ sendFinish,
4330
+ sendStart,
4331
+ onError
4332
+ }),
4333
+ ...init
4334
+ });
4335
+ }
4336
+ toTextStreamResponse(init) {
4337
+ return createTextStreamResponse({
4338
+ textStream: this.textStream,
4339
+ ...init
4340
+ });
4341
+ }
4342
+ };
4343
+
4344
+ // src/bin/ai.ts
4345
+ var import_fs = require("fs");
4346
+ var import_path = require("path");
4347
+ var import_gateway3 = require("@ai-sdk/gateway");
4348
/**
 * Report whether input is being piped in.
 * When stdin is not an interactive terminal (isTTY unset), data is piped.
 * @returns {boolean} true when stdin has piped input available
 */
function isStdinAvailable() {
  const interactive = process.stdin.isTTY;
  return !interactive;
}
4351
/**
 * Read stdin to EOF and resolve with the trimmed UTF-8 text.
 * NOTE(review): a stream 'error' event is not handled here and would leave
 * the promise pending — confirm whether upstream relies on that.
 * @returns {Promise<string>} the piped input, whitespace-trimmed
 */
async function readStdin() {
  return new Promise((resolveStdin) => {
    const chunks = [];
    process.stdin.setEncoding("utf8");
    process.stdin.on("data", (chunk) => chunks.push(chunk));
    process.stdin.on("end", () => resolveStdin(chunks.join("").trim()));
  });
}
4363
/**
 * Map a file path to a MIME media type via its extension.
 * Unknown or missing extensions fall back to "text/plain".
 * @param {string} filePath - path or file name; only the last ".ext" matters
 * @returns {string} MIME type string
 */
function getMediaType(filePath) {
  const MIME_BY_EXTENSION = {
    js: "application/javascript",
    ts: "application/typescript",
    jsx: "text/jsx",
    tsx: "text/tsx",
    json: "application/json",
    md: "text/markdown",
    txt: "text/plain",
    py: "text/x-python",
    html: "text/html",
    css: "text/css",
    xml: "application/xml",
    yaml: "application/yaml",
    yml: "application/yaml",
    jpg: "image/jpeg",
    jpeg: "image/jpeg",
    png: "image/png",
    gif: "image/gif",
    webp: "image/webp",
    svg: "image/svg+xml",
    bmp: "image/bmp",
    tiff: "image/tiff",
    tif: "image/tiff"
  };
  // split(".") always yields at least one segment, so pop() is a string.
  const rawExtension = filePath.split(".").pop();
  const extension = rawExtension ? rawExtension.toLowerCase() : "";
  return MIME_BY_EXTENSION[extension] || "text/plain";
}
4392
/**
 * Read an attached file from disk.
 * Images are embedded as base64 data URLs (for multimodal message parts);
 * every other media type is read as UTF-8 text.
 * @param {string} filePath - path as given on the command line
 * @returns {{name: string, content: string, mediaType: string}}
 * @throws {Error} when the file does not exist
 */
function readFileContent(filePath) {
  const absolutePath = import_path.resolve(filePath);
  if (!import_fs.existsSync(absolutePath)) {
    throw new Error(`File not found: ${filePath}`);
  }
  const mediaType = getMediaType(filePath);
  let content;
  if (mediaType.startsWith("image/")) {
    const bytes = import_fs.readFileSync(absolutePath);
    content = `data:${mediaType};base64,${bytes.toString("base64")}`;
  } else {
    content = import_fs.readFileSync(absolutePath, "utf8");
  }
  // name keeps the user-supplied (possibly relative) path for display.
  return { name: filePath, content, mediaType };
}
4412
/**
 * Parse process.argv into CLI options.
 *
 * Defaults come from the environment: AI_MODEL (model), AI_VERBOSE
 * ("true" enables verbose), AI_SYSTEM (system message). Positional
 * arguments are joined with spaces into `options.prompt`.
 *
 * A bare `--` ends option parsing: everything after it is treated as prompt
 * text even if it starts with "-" (standard POSIX utility convention).
 * Previously `--` raised "Unknown option: --", so this is backward
 * compatible — it only changes input that used to error.
 *
 * @returns {{model: string, files: string[], help: boolean, version: boolean,
 *            verbose: boolean, system: (string|undefined), prompt?: string}}
 * @throws {Error} on an unknown option or a flag missing its value
 */
function parseArgs() {
  const args = process.argv.slice(2);
  const options = {
    model: process.env.AI_MODEL || "openai/gpt-4",
    files: [],
    help: false,
    version: false,
    verbose: process.env.AI_VERBOSE === "true",
    system: process.env.AI_SYSTEM
  };
  const promptArgs = [];
  let optionsEnded = false;
  let i = 0;
  while (i < args.length) {
    const arg = args[i];
    if (optionsEnded) {
      // After "--", everything is positional prompt text.
      promptArgs.push(arg);
      i++;
      continue;
    }
    switch (arg) {
      case "--":
        optionsEnded = true;
        break;
      case "-h":
      case "--help":
        options.help = true;
        break;
      case "-V":
      case "--version":
        options.version = true;
        break;
      case "-v":
      case "--verbose":
        options.verbose = true;
        break;
      case "-m":
      case "--model":
        if (i + 1 < args.length) {
          options.model = args[i + 1];
          i++;
        } else {
          throw new Error("Model option requires a value");
        }
        break;
      case "-f":
      case "--file":
        // Repeatable: each -f appends another attachment.
        if (i + 1 < args.length) {
          options.files.push(args[i + 1]);
          i++;
        } else {
          throw new Error("File option requires a value");
        }
        break;
      case "-s":
      case "--system":
        if (i + 1 < args.length) {
          options.system = args[i + 1];
          i++;
        } else {
          throw new Error("System option requires a value");
        }
        break;
      default:
        if (arg.startsWith("-")) {
          throw new Error(`Unknown option: ${arg}`);
        } else {
          promptArgs.push(arg);
        }
    }
    i++;
  }
  if (promptArgs.length > 0) {
    options.prompt = promptArgs.join(" ");
  }
  return options;
}
4480
/**
 * Print CLI usage to stdout: options, required authentication environment
 * variables, defaults, and piping examples.
 * The entire help text is a single template literal so the runtime output
 * matches this source byte-for-byte — do not reflow it.
 */
function showHelp() {
  console.log(`Usage: ai [options] [prompt]

AI CLI - Stream text generation from various AI models

Options:
-m, --model <model> Model to use (default: "openai/gpt-4")
Format: provider/model (e.g., anthropic/claude-3-5-sonnet)
-f, --file <file> Attach file(s) to prompt
-s, --system <message> System message
-v, --verbose Show detailed output
-h, --help Show help
-V, --version Show version

Authentication (required):
export AI_GATEWAY_API_KEY="your-key" # Get from Vercel Dashboard (AI tab)
export VERCEL_OIDC_TOKEN="your-token" # For Vercel projects (or run: vercel env pull)

Environment Variables:
AI_MODEL: Default model to use
AI_SYSTEM: Default system message
AI_VERBOSE: Set to 'true' for detailed output

Examples:
npx ai "Hello, world!"
npx ai "Write a poem" -m anthropic/claude-3-5-sonnet
npx ai "Explain this code" -f script.js -f README.md
echo "What is life?" | npx ai
cat file.txt | npx ai "Summarize this content"
npx ai -f package.json "What dependencies does this project have?"

Unix-style piping:
echo "Hello world" | npx ai "Translate to French"
cat README.md | npx ai "Summarize this"
curl -s https://api.github.com/repos/vercel/ai | npx ai "What is this repository about?"

The gateway supports OpenAI, Anthropic, Google, Groq, and more providers.`);
}
4518
/**
 * Print the CLI version string to stdout.
 * NOTE(review): version is hard-coded by the build; it reads "1.0.0" while
 * the package itself is versioned differently — confirm the build injects
 * the right value.
 */
function showVersion() {
  const VERSION = "1.0.0";
  console.log(VERSION);
}
4521
/**
 * Resolve a "provider/model" identifier into a language model instance via
 * the Vercel AI Gateway registry.
 * @param {string} modelString - e.g. "anthropic/claude-3-5-sonnet"
 * @returns a gateway language model handle
 */
function resolveModel(modelString) {
  const { gateway } = import_gateway3;
  return gateway.languageModel(modelString);
}
4524
/**
 * Render non-image attachments as a text suffix for the prompt.
 * Image files are excluded here — they are sent separately as image message
 * parts. Returns "" when there is nothing textual to inline.
 * @param {{name: string, content: string, mediaType?: string}[]} files
 * @returns {string} "\n\nAttached files:\n" followed by one delimited
 *   section per file, or the empty string
 */
function formatAttachedFiles(files) {
  const textFiles = files.filter(
    (file) => !(file.mediaType && file.mediaType.startsWith("image/"))
  );
  if (textFiles.length === 0) {
    return "";
  }
  const sections = textFiles.map(
    (file) => `\n--- ${file.name} ---\n${file.content}\n`
  );
  return "\n\nAttached files:\n" + sections.join("");
}
4543
/**
 * CLI entry point: parse arguments, assemble the prompt (argv words, piped
 * stdin, attached files), check gateway credentials, then stream the model's
 * text response to stdout.
 *
 * Diagnostics and errors go to stderr so stdout stays clean for piping.
 * Exits with code 1 on any failure (no prompt, unreadable file, missing
 * credentials, or a streaming error).
 */
async function main() {
  try {
    const options = parseArgs();
    if (options.help) {
      showHelp();
      return;
    }
    if (options.version) {
      showVersion();
      return;
    }
    // Piped stdin is prepended to the argv prompt, separated by a blank line.
    let prompt = options.prompt || "";
    if (isStdinAvailable()) {
      const stdinContent = await readStdin();
      if (stdinContent) {
        prompt = prompt ? `${stdinContent}

${prompt}` : stdinContent;
      }
    }
    if (!prompt.trim()) {
      console.error(
        "Error: No prompt provided. Use --help for usage information."
      );
      process.exit(1);
    }
    // Read every --file up front so we fail before any network call.
    const attachedFiles = [];
    for (const filePath of options.files) {
      try {
        const file = readFileContent(filePath);
        attachedFiles.push(file);
      } catch (error) {
        console.error(
          `Error reading file ${filePath}: ${error instanceof Error ? error.message : "Unknown error"}`
        );
        process.exit(1);
      }
    }
    // Text attachments are inlined into the prompt; images become message parts.
    const textPrompt = prompt + formatAttachedFiles(attachedFiles);
    const imageFiles = attachedFiles.filter(
      (f) => {
        var _a9;
        return (_a9 = f.mediaType) == null ? void 0 : _a9.startsWith("image/");
      }
    );
    // Upgrade the non-multimodal default model when images are attached.
    if (imageFiles.length > 0 && options.model === "openai/gpt-4") {
      options.model = "openai/gpt-4o";
    }
    if (options.verbose) {
      console.error(`Using model: ${options.model}`);
      if (attachedFiles.length > 0) {
        console.error(
          `Attached files: ${attachedFiles.map((f) => f.name).join(", ")}`
        );
      }
      console.error("");
    }
    // Either credential form is accepted by the gateway.
    const hasApiKey = process.env.AI_GATEWAY_API_KEY || process.env.VERCEL_OIDC_TOKEN;
    if (!hasApiKey) {
      console.error(`Error: Authentication required.

Set up authentication with one of these options:

# Option 1: Export in current session
export AI_GATEWAY_API_KEY="your-key-here"
export VERCEL_OIDC_TOKEN="your-oidc-token"
export AI_MODEL="anthropic/claude-3-5-sonnet"

# Option 2: Add to shell profile (~/.bashrc, ~/.zshrc)
echo 'export AI_GATEWAY_API_KEY="your-key"' >> ~/.bashrc
# Or run: vercel env pull

Get your API key from the Vercel Dashboard (AI tab > API keys).
Use --help for more details and examples.`);
      process.exit(1);
    }
    const model = resolveModel(options.model);
    // With images attached, build a single multimodal user message;
    // otherwise `messages` stays undefined and a plain prompt is used below.
    let messages;
    if (imageFiles.length > 0) {
      const content = [{ type: "text", text: textPrompt }];
      for (const img of imageFiles) {
        content.push({
          type: "image",
          image: img.content
        });
      }
      messages = [{ role: "user", content }];
    }
    const result = await streamText(
      messages ? {
        model,
        messages,
        system: options.system
      } : {
        model,
        prompt: textPrompt,
        system: options.system
      }
    );
    // Stream tokens to stdout as they arrive; terminate with a newline.
    for await (const chunk of result.textStream) {
      process.stdout.write(chunk);
    }
    process.stdout.write("\n");
    if (options.verbose) {
      // result.usage resolves once the stream has finished.
      const usage = await result.usage;
      if (usage) {
        console.error(
          `
Usage: ${usage.inputTokens} prompt + ${usage.outputTokens} completion = ${usage.totalTokens} total tokens`
        );
      }
    }
  } catch (error) {
    console.error(
      `Error: ${error instanceof Error ? error.message : "Unknown error"}`
    );
    process.exit(1);
  }
}
4662
// Exit cleanly on Ctrl-C / termination so interrupted streams do not leave a
// non-zero status.
process.on("SIGINT", () => {
  process.exit(0);
});
process.on("SIGTERM", () => {
  process.exit(0);
});
// Safety net for rejections that escape main()'s own try/catch.
main().catch((error) => {
  console.error(
    `Fatal error: ${error instanceof Error ? error.message : "Unknown error"}`
  );
  process.exit(1);
});
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design: the `0 &&` guard means this never executes; it only
// lets Node's CJS-to-ESM named-export detection see the export names)
0 && (module.exports = {
  formatAttachedFiles,
  getMediaType,
  isStdinAvailable,
  main,
  parseArgs,
  readFileContent,
  resolveModel,
  showHelp,
  showVersion
});
//# sourceMappingURL=ai.js.map