ai 5.0.0-canary.20 → 5.0.0-canary.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +51 -0
- package/dist/index.d.mts +467 -512
- package/dist/index.d.ts +467 -512
- package/dist/index.js +989 -1411
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +947 -1368
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +7 -13
- package/dist/internal/index.d.ts +7 -13
- package/dist/internal/index.js +126 -126
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +119 -119
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.mjs
CHANGED
@@ -178,240 +178,6 @@ function pipeDataStreamToResponse({
|
|
178
178
|
});
|
179
179
|
}
|
180
180
|
|
181
|
-
// src/data-stream/process-data-stream.ts
|
182
|
-
import {
|
183
|
-
createEventSourceParserStream,
|
184
|
-
safeParseJSON
|
185
|
-
} from "@ai-sdk/provider-utils";
|
186
|
-
|
187
|
-
// src/util/async-iterable-stream.ts
|
188
|
-
function createAsyncIterableStream(source) {
|
189
|
-
const stream = source.pipeThrough(new TransformStream());
|
190
|
-
stream[Symbol.asyncIterator] = () => {
|
191
|
-
const reader = stream.getReader();
|
192
|
-
return {
|
193
|
-
async next() {
|
194
|
-
const { done, value } = await reader.read();
|
195
|
-
return done ? { done: true, value: void 0 } : { done: false, value };
|
196
|
-
}
|
197
|
-
};
|
198
|
-
};
|
199
|
-
return stream;
|
200
|
-
}
|
201
|
-
|
202
|
-
// src/data-stream/data-stream-parts.ts
|
203
|
-
import { z } from "zod";
|
204
|
-
var languageModelUsageSchema = z.object({
|
205
|
-
inputTokens: z.number().optional(),
|
206
|
-
outputTokens: z.number().optional(),
|
207
|
-
totalTokens: z.number().optional(),
|
208
|
-
reasoningTokens: z.number().optional(),
|
209
|
-
cachedInputTokens: z.number().optional()
|
210
|
-
});
|
211
|
-
var finishReasonSchema = z.enum([
|
212
|
-
"stop",
|
213
|
-
"length",
|
214
|
-
"tool-calls",
|
215
|
-
"content-filter",
|
216
|
-
"other",
|
217
|
-
"error",
|
218
|
-
"unknown"
|
219
|
-
]);
|
220
|
-
var toolCallSchema = z.object({
|
221
|
-
toolCallId: z.string(),
|
222
|
-
toolName: z.string(),
|
223
|
-
args: z.unknown()
|
224
|
-
});
|
225
|
-
var toolResultValueSchema = z.object({
|
226
|
-
toolCallId: z.string(),
|
227
|
-
result: z.unknown(),
|
228
|
-
providerMetadata: z.any().optional()
|
229
|
-
});
|
230
|
-
var sourceSchema = z.object({
|
231
|
-
type: z.literal("source"),
|
232
|
-
sourceType: z.literal("url"),
|
233
|
-
id: z.string(),
|
234
|
-
url: z.string(),
|
235
|
-
title: z.string().optional(),
|
236
|
-
providerMetadata: z.any().optional()
|
237
|
-
// Use z.any() for generic metadata
|
238
|
-
});
|
239
|
-
var dataStreamPartSchema = z.discriminatedUnion("type", [
|
240
|
-
z.object({
|
241
|
-
type: z.literal("text"),
|
242
|
-
value: z.string()
|
243
|
-
}),
|
244
|
-
z.object({
|
245
|
-
type: z.literal("data"),
|
246
|
-
value: z.array(z.any())
|
247
|
-
// TODO json validation
|
248
|
-
}),
|
249
|
-
z.object({
|
250
|
-
type: z.literal("error"),
|
251
|
-
value: z.string()
|
252
|
-
}),
|
253
|
-
z.object({
|
254
|
-
type: z.literal("message-annotations"),
|
255
|
-
value: z.array(z.any())
|
256
|
-
// TODO json validation
|
257
|
-
}),
|
258
|
-
z.object({
|
259
|
-
type: z.literal("tool-call"),
|
260
|
-
value: toolCallSchema
|
261
|
-
}),
|
262
|
-
z.object({
|
263
|
-
type: z.literal("tool-result"),
|
264
|
-
value: toolResultValueSchema
|
265
|
-
}),
|
266
|
-
z.object({
|
267
|
-
type: z.literal("tool-call-streaming-start"),
|
268
|
-
value: z.object({ toolCallId: z.string(), toolName: z.string() })
|
269
|
-
}),
|
270
|
-
z.object({
|
271
|
-
type: z.literal("tool-call-delta"),
|
272
|
-
value: z.object({ toolCallId: z.string(), argsTextDelta: z.string() })
|
273
|
-
}),
|
274
|
-
z.object({
|
275
|
-
type: z.literal("finish-message"),
|
276
|
-
value: z.object({
|
277
|
-
finishReason: finishReasonSchema,
|
278
|
-
// TODO v5 remove usage from finish event (only on step-finish)
|
279
|
-
usage: languageModelUsageSchema.optional()
|
280
|
-
})
|
281
|
-
}),
|
282
|
-
z.object({
|
283
|
-
type: z.literal("finish-step"),
|
284
|
-
value: z.object({
|
285
|
-
isContinued: z.boolean(),
|
286
|
-
finishReason: finishReasonSchema,
|
287
|
-
usage: languageModelUsageSchema.optional()
|
288
|
-
})
|
289
|
-
}),
|
290
|
-
z.object({
|
291
|
-
type: z.literal("start-step"),
|
292
|
-
value: z.object({
|
293
|
-
messageId: z.string()
|
294
|
-
})
|
295
|
-
}),
|
296
|
-
z.object({
|
297
|
-
type: z.literal("reasoning"),
|
298
|
-
value: z.object({
|
299
|
-
text: z.string(),
|
300
|
-
providerMetadata: z.record(z.any()).optional()
|
301
|
-
})
|
302
|
-
}),
|
303
|
-
z.object({
|
304
|
-
type: z.literal("source"),
|
305
|
-
value: sourceSchema
|
306
|
-
}),
|
307
|
-
z.object({
|
308
|
-
type: z.literal("file"),
|
309
|
-
value: z.object({
|
310
|
-
url: z.string(),
|
311
|
-
mediaType: z.string()
|
312
|
-
})
|
313
|
-
}),
|
314
|
-
z.object({
|
315
|
-
type: z.literal("reasoning-part-finish"),
|
316
|
-
value: z.null()
|
317
|
-
})
|
318
|
-
]);
|
319
|
-
|
320
|
-
// src/data-stream/process-data-stream.ts
|
321
|
-
async function processDataStream({
|
322
|
-
stream,
|
323
|
-
onTextPart,
|
324
|
-
onReasoningPart,
|
325
|
-
onReasoningPartFinish,
|
326
|
-
onSourcePart,
|
327
|
-
onFilePart,
|
328
|
-
onDataPart,
|
329
|
-
onErrorPart,
|
330
|
-
onToolCallStreamingStartPart,
|
331
|
-
onToolCallDeltaPart,
|
332
|
-
onToolCallPart,
|
333
|
-
onToolResultPart,
|
334
|
-
onMessageAnnotationsPart,
|
335
|
-
onFinishMessagePart,
|
336
|
-
onFinishStepPart,
|
337
|
-
onStartStepPart
|
338
|
-
}) {
|
339
|
-
const streamParts = createAsyncIterableStream(
|
340
|
-
stream.pipeThrough(new TextDecoderStream()).pipeThrough(createEventSourceParserStream()).pipeThrough(
|
341
|
-
new TransformStream({
|
342
|
-
async transform({ data }, controller) {
|
343
|
-
if (data === "[DONE]") {
|
344
|
-
return;
|
345
|
-
}
|
346
|
-
controller.enqueue(
|
347
|
-
await safeParseJSON({
|
348
|
-
text: data,
|
349
|
-
schema: dataStreamPartSchema
|
350
|
-
})
|
351
|
-
);
|
352
|
-
}
|
353
|
-
})
|
354
|
-
)
|
355
|
-
);
|
356
|
-
for await (const parseResult of streamParts) {
|
357
|
-
if (!parseResult.success) {
|
358
|
-
throw new Error("Failed to parse data stream part");
|
359
|
-
}
|
360
|
-
const { type, value } = parseResult.value;
|
361
|
-
switch (type) {
|
362
|
-
case "text":
|
363
|
-
await (onTextPart == null ? void 0 : onTextPart(value));
|
364
|
-
break;
|
365
|
-
case "reasoning":
|
366
|
-
await (onReasoningPart == null ? void 0 : onReasoningPart(value));
|
367
|
-
break;
|
368
|
-
case "reasoning-part-finish":
|
369
|
-
await (onReasoningPartFinish == null ? void 0 : onReasoningPartFinish(value));
|
370
|
-
break;
|
371
|
-
case "file":
|
372
|
-
await (onFilePart == null ? void 0 : onFilePart(value));
|
373
|
-
break;
|
374
|
-
case "source":
|
375
|
-
await (onSourcePart == null ? void 0 : onSourcePart(value));
|
376
|
-
break;
|
377
|
-
case "data":
|
378
|
-
await (onDataPart == null ? void 0 : onDataPart(value));
|
379
|
-
break;
|
380
|
-
case "error":
|
381
|
-
await (onErrorPart == null ? void 0 : onErrorPart(value));
|
382
|
-
break;
|
383
|
-
case "message-annotations":
|
384
|
-
await (onMessageAnnotationsPart == null ? void 0 : onMessageAnnotationsPart(value));
|
385
|
-
break;
|
386
|
-
case "tool-call-streaming-start":
|
387
|
-
await (onToolCallStreamingStartPart == null ? void 0 : onToolCallStreamingStartPart(value));
|
388
|
-
break;
|
389
|
-
case "tool-call-delta":
|
390
|
-
await (onToolCallDeltaPart == null ? void 0 : onToolCallDeltaPart(value));
|
391
|
-
break;
|
392
|
-
case "tool-call":
|
393
|
-
await (onToolCallPart == null ? void 0 : onToolCallPart(value));
|
394
|
-
break;
|
395
|
-
case "tool-result":
|
396
|
-
await (onToolResultPart == null ? void 0 : onToolResultPart(value));
|
397
|
-
break;
|
398
|
-
case "finish-message":
|
399
|
-
await (onFinishMessagePart == null ? void 0 : onFinishMessagePart(value));
|
400
|
-
break;
|
401
|
-
case "finish-step":
|
402
|
-
await (onFinishStepPart == null ? void 0 : onFinishStepPart(value));
|
403
|
-
break;
|
404
|
-
case "start-step":
|
405
|
-
await (onStartStepPart == null ? void 0 : onStartStepPart(value));
|
406
|
-
break;
|
407
|
-
default: {
|
408
|
-
const exhaustiveCheck = type;
|
409
|
-
throw new Error(`Unknown stream part type: ${exhaustiveCheck}`);
|
410
|
-
}
|
411
|
-
}
|
412
|
-
}
|
413
|
-
}
|
414
|
-
|
415
181
|
// src/error/index.ts
|
416
182
|
import {
|
417
183
|
AISDKError as AISDKError16,
|
@@ -822,270 +588,169 @@ function appendClientMessage({
|
|
822
588
|
];
|
823
589
|
}
|
824
590
|
|
825
|
-
// src/ui/
|
826
|
-
import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
|
827
|
-
|
828
|
-
// src/ui/extract-max-tool-invocation-step.ts
|
829
|
-
function extractMaxToolInvocationStep(toolInvocations) {
|
830
|
-
return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
|
831
|
-
var _a17;
|
832
|
-
return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
|
833
|
-
}, 0);
|
834
|
-
}
|
835
|
-
|
836
|
-
// src/ui/get-tool-invocations.ts
|
837
|
-
function getToolInvocations(message) {
|
838
|
-
return message.parts.filter(
|
839
|
-
(part) => part.type === "tool-invocation"
|
840
|
-
).map((part) => part.toolInvocation);
|
841
|
-
}
|
842
|
-
|
843
|
-
// core/prompt/data-content.ts
|
844
|
-
import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
|
591
|
+
// src/ui/call-chat-api.ts
|
845
592
|
import {
|
846
|
-
|
847
|
-
convertUint8ArrayToBase64
|
593
|
+
parseJsonEventStream
|
848
594
|
} from "@ai-sdk/provider-utils";
|
849
|
-
import { z as z2 } from "zod";
|
850
595
|
|
851
|
-
//
|
852
|
-
|
596
|
+
// src/data-stream/data-stream-parts.ts
|
597
|
+
import { z } from "zod";
|
598
|
+
var toolCallSchema = z.object({
|
599
|
+
toolCallId: z.string(),
|
600
|
+
toolName: z.string(),
|
601
|
+
args: z.unknown()
|
602
|
+
});
|
603
|
+
var toolResultValueSchema = z.object({
|
604
|
+
toolCallId: z.string(),
|
605
|
+
result: z.unknown(),
|
606
|
+
providerMetadata: z.any().optional()
|
607
|
+
});
|
608
|
+
var sourceSchema = z.object({
|
609
|
+
type: z.literal("source"),
|
610
|
+
sourceType: z.literal("url"),
|
611
|
+
id: z.string(),
|
612
|
+
url: z.string(),
|
613
|
+
title: z.string().optional(),
|
614
|
+
providerMetadata: z.any().optional()
|
615
|
+
// Use z.any() for generic metadata
|
616
|
+
});
|
617
|
+
var dataStreamPartSchema = z.discriminatedUnion("type", [
|
618
|
+
z.object({
|
619
|
+
type: z.literal("text"),
|
620
|
+
value: z.string()
|
621
|
+
}),
|
622
|
+
z.object({
|
623
|
+
type: z.literal("error"),
|
624
|
+
value: z.string()
|
625
|
+
}),
|
626
|
+
z.object({
|
627
|
+
type: z.literal("tool-call"),
|
628
|
+
value: toolCallSchema
|
629
|
+
}),
|
630
|
+
z.object({
|
631
|
+
type: z.literal("tool-result"),
|
632
|
+
value: toolResultValueSchema
|
633
|
+
}),
|
634
|
+
z.object({
|
635
|
+
type: z.literal("tool-call-streaming-start"),
|
636
|
+
value: z.object({ toolCallId: z.string(), toolName: z.string() })
|
637
|
+
}),
|
638
|
+
z.object({
|
639
|
+
type: z.literal("tool-call-delta"),
|
640
|
+
value: z.object({ toolCallId: z.string(), argsTextDelta: z.string() })
|
641
|
+
}),
|
642
|
+
z.object({
|
643
|
+
type: z.literal("reasoning"),
|
644
|
+
value: z.object({
|
645
|
+
text: z.string(),
|
646
|
+
providerMetadata: z.record(z.any()).optional()
|
647
|
+
})
|
648
|
+
}),
|
649
|
+
z.object({
|
650
|
+
type: z.literal("source"),
|
651
|
+
value: sourceSchema
|
652
|
+
}),
|
653
|
+
z.object({
|
654
|
+
type: z.literal("file"),
|
655
|
+
value: z.object({
|
656
|
+
url: z.string(),
|
657
|
+
mediaType: z.string()
|
658
|
+
})
|
659
|
+
}),
|
660
|
+
z.object({
|
661
|
+
type: z.literal("metadata"),
|
662
|
+
value: z.object({
|
663
|
+
metadata: z.unknown()
|
664
|
+
})
|
665
|
+
}),
|
666
|
+
z.object({
|
667
|
+
type: z.literal("start-step"),
|
668
|
+
value: z.object({
|
669
|
+
metadata: z.unknown()
|
670
|
+
})
|
671
|
+
}),
|
672
|
+
z.object({
|
673
|
+
type: z.literal("finish-step"),
|
674
|
+
value: z.object({
|
675
|
+
metadata: z.unknown()
|
676
|
+
})
|
677
|
+
}),
|
678
|
+
z.object({
|
679
|
+
type: z.literal("start"),
|
680
|
+
value: z.object({
|
681
|
+
messageId: z.string().optional(),
|
682
|
+
metadata: z.unknown()
|
683
|
+
})
|
684
|
+
}),
|
685
|
+
z.object({
|
686
|
+
type: z.literal("finish"),
|
687
|
+
value: z.object({
|
688
|
+
metadata: z.unknown()
|
689
|
+
})
|
690
|
+
}),
|
691
|
+
z.object({
|
692
|
+
type: z.literal("reasoning-part-finish"),
|
693
|
+
value: z.null()
|
694
|
+
})
|
695
|
+
]);
|
696
|
+
|
697
|
+
// src/util/consume-stream.ts
|
698
|
+
async function consumeStream({
|
699
|
+
stream,
|
700
|
+
onError
|
701
|
+
}) {
|
702
|
+
const reader = stream.getReader();
|
853
703
|
try {
|
854
|
-
|
855
|
-
|
856
|
-
|
857
|
-
|
858
|
-
}
|
704
|
+
while (true) {
|
705
|
+
const { done } = await reader.read();
|
706
|
+
if (done)
|
707
|
+
break;
|
708
|
+
}
|
859
709
|
} catch (error) {
|
860
|
-
|
861
|
-
|
862
|
-
|
863
|
-
};
|
710
|
+
onError == null ? void 0 : onError(error);
|
711
|
+
} finally {
|
712
|
+
reader.releaseLock();
|
864
713
|
}
|
865
714
|
}
|
866
715
|
|
867
|
-
//
|
868
|
-
|
869
|
-
|
870
|
-
|
871
|
-
|
872
|
-
|
873
|
-
|
874
|
-
(value) => {
|
875
|
-
var _a17, _b;
|
876
|
-
return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
|
877
|
-
},
|
878
|
-
{ message: "Must be a Buffer" }
|
879
|
-
)
|
880
|
-
]);
|
881
|
-
function convertToLanguageModelV2DataContent(content) {
|
882
|
-
if (content instanceof Uint8Array) {
|
883
|
-
return { data: content, mediaType: void 0 };
|
884
|
-
}
|
885
|
-
if (content instanceof ArrayBuffer) {
|
886
|
-
return { data: new Uint8Array(content), mediaType: void 0 };
|
887
|
-
}
|
888
|
-
if (typeof content === "string") {
|
889
|
-
try {
|
890
|
-
content = new URL(content);
|
891
|
-
} catch (error) {
|
892
|
-
}
|
893
|
-
}
|
894
|
-
if (content instanceof URL && content.protocol === "data:") {
|
895
|
-
const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
|
896
|
-
content.toString()
|
897
|
-
);
|
898
|
-
if (dataUrlMediaType == null || base64Content == null) {
|
899
|
-
throw new AISDKError17({
|
900
|
-
name: "InvalidDataContentError",
|
901
|
-
message: `Invalid data URL format in content ${content.toString()}`
|
902
|
-
});
|
903
|
-
}
|
904
|
-
return { data: base64Content, mediaType: dataUrlMediaType };
|
905
|
-
}
|
906
|
-
return { data: content, mediaType: void 0 };
|
907
|
-
}
|
908
|
-
function convertDataContentToBase64String(content) {
|
909
|
-
if (typeof content === "string") {
|
910
|
-
return content;
|
911
|
-
}
|
912
|
-
if (content instanceof ArrayBuffer) {
|
913
|
-
return convertUint8ArrayToBase64(new Uint8Array(content));
|
914
|
-
}
|
915
|
-
return convertUint8ArrayToBase64(content);
|
916
|
-
}
|
917
|
-
function convertDataContentToUint8Array(content) {
|
918
|
-
if (content instanceof Uint8Array) {
|
919
|
-
return content;
|
716
|
+
// src/ui/process-chat-response.ts
|
717
|
+
import { validateTypes } from "@ai-sdk/provider-utils";
|
718
|
+
|
719
|
+
// src/util/merge-objects.ts
|
720
|
+
function mergeObjects(base, overrides) {
|
721
|
+
if (base === void 0 && overrides === void 0) {
|
722
|
+
return void 0;
|
920
723
|
}
|
921
|
-
if (
|
922
|
-
|
923
|
-
return convertBase64ToUint8Array(content);
|
924
|
-
} catch (error) {
|
925
|
-
throw new InvalidDataContentError({
|
926
|
-
message: "Invalid data content. Content string is not a base64-encoded media.",
|
927
|
-
content,
|
928
|
-
cause: error
|
929
|
-
});
|
930
|
-
}
|
724
|
+
if (base === void 0) {
|
725
|
+
return overrides;
|
931
726
|
}
|
932
|
-
if (
|
933
|
-
return
|
727
|
+
if (overrides === void 0) {
|
728
|
+
return base;
|
934
729
|
}
|
935
|
-
|
936
|
-
|
937
|
-
|
938
|
-
|
939
|
-
|
940
|
-
|
941
|
-
|
942
|
-
|
943
|
-
|
944
|
-
|
945
|
-
|
946
|
-
|
947
|
-
|
948
|
-
|
949
|
-
|
950
|
-
|
951
|
-
case "assistant": {
|
952
|
-
let getToolInvocationsForStep2 = function(step) {
|
953
|
-
return (typeof message.content === "string" ? [] : message.content.filter((part) => part.type === "tool-call")).map((call) => ({
|
954
|
-
state: "call",
|
955
|
-
step,
|
956
|
-
args: call.args,
|
957
|
-
toolCallId: call.toolCallId,
|
958
|
-
toolName: call.toolName
|
959
|
-
}));
|
960
|
-
};
|
961
|
-
var getToolInvocationsForStep = getToolInvocationsForStep2;
|
962
|
-
const parts = [{ type: "step-start" }];
|
963
|
-
let textContent = "";
|
964
|
-
let reasoningTextContent = void 0;
|
965
|
-
if (typeof message.content === "string") {
|
966
|
-
textContent = message.content;
|
967
|
-
parts.push({
|
968
|
-
type: "text",
|
969
|
-
text: message.content
|
970
|
-
});
|
971
|
-
} else {
|
972
|
-
let reasoningPart = void 0;
|
973
|
-
for (const part of message.content) {
|
974
|
-
switch (part.type) {
|
975
|
-
case "text": {
|
976
|
-
reasoningPart = void 0;
|
977
|
-
textContent += part.text;
|
978
|
-
parts.push({
|
979
|
-
type: "text",
|
980
|
-
text: part.text
|
981
|
-
});
|
982
|
-
break;
|
983
|
-
}
|
984
|
-
case "reasoning": {
|
985
|
-
if (reasoningPart == null) {
|
986
|
-
reasoningPart = {
|
987
|
-
type: "reasoning",
|
988
|
-
text: ""
|
989
|
-
};
|
990
|
-
parts.push(reasoningPart);
|
991
|
-
}
|
992
|
-
reasoningTextContent = (reasoningTextContent != null ? reasoningTextContent : "") + part.text;
|
993
|
-
reasoningPart.text += part.text;
|
994
|
-
reasoningPart.providerMetadata = part.providerOptions;
|
995
|
-
break;
|
996
|
-
}
|
997
|
-
case "tool-call":
|
998
|
-
break;
|
999
|
-
case "file":
|
1000
|
-
if (part.data instanceof URL) {
|
1001
|
-
throw new AISDKError18({
|
1002
|
-
name: "InvalidAssistantFileData",
|
1003
|
-
message: "File data cannot be a URL"
|
1004
|
-
});
|
1005
|
-
}
|
1006
|
-
parts.push({
|
1007
|
-
type: "file",
|
1008
|
-
mediaType: part.mediaType,
|
1009
|
-
url: `data:${part.mediaType};base64,${convertDataContentToBase64String(part.data)}`
|
1010
|
-
});
|
1011
|
-
break;
|
1012
|
-
}
|
1013
|
-
}
|
1014
|
-
}
|
1015
|
-
if (isLastMessageAssistant) {
|
1016
|
-
const maxStep = extractMaxToolInvocationStep(
|
1017
|
-
getToolInvocations(lastMessage)
|
1018
|
-
);
|
1019
|
-
(_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];
|
1020
|
-
lastMessage.parts.push(...parts);
|
1021
|
-
getToolInvocationsForStep2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({
|
1022
|
-
type: "tool-invocation",
|
1023
|
-
toolInvocation: call
|
1024
|
-
})).forEach((part) => {
|
1025
|
-
lastMessage.parts.push(part);
|
1026
|
-
});
|
1027
|
-
} else {
|
1028
|
-
clonedMessages.push({
|
1029
|
-
role: "assistant",
|
1030
|
-
id: message.id,
|
1031
|
-
createdAt: currentDate(),
|
1032
|
-
// generate a createdAt date for the message, will be overridden by the client
|
1033
|
-
parts: [
|
1034
|
-
...parts,
|
1035
|
-
...getToolInvocationsForStep2(0).map((call) => ({
|
1036
|
-
type: "tool-invocation",
|
1037
|
-
toolInvocation: call
|
1038
|
-
}))
|
1039
|
-
]
|
1040
|
-
});
|
1041
|
-
}
|
1042
|
-
break;
|
1043
|
-
}
|
1044
|
-
case "tool": {
|
1045
|
-
if (lastMessage.role !== "assistant") {
|
1046
|
-
throw new Error(
|
1047
|
-
`Tool result must follow an assistant message: ${lastMessage.role}`
|
1048
|
-
);
|
1049
|
-
}
|
1050
|
-
(_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
|
1051
|
-
for (const contentPart of message.content) {
|
1052
|
-
const toolCall = getToolInvocations(lastMessage).find(
|
1053
|
-
(call) => call.toolCallId === contentPart.toolCallId
|
1054
|
-
);
|
1055
|
-
const toolCallPart = lastMessage.parts.find(
|
1056
|
-
(part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === contentPart.toolCallId
|
1057
|
-
);
|
1058
|
-
if (!toolCall) {
|
1059
|
-
throw new Error("Tool call not found in previous message");
|
1060
|
-
}
|
1061
|
-
toolCall.state = "result";
|
1062
|
-
const toolResult = toolCall;
|
1063
|
-
toolResult.result = contentPart.result;
|
1064
|
-
if (toolCallPart) {
|
1065
|
-
toolCallPart.toolInvocation = toolResult;
|
1066
|
-
} else {
|
1067
|
-
lastMessage.parts.push({
|
1068
|
-
type: "tool-invocation",
|
1069
|
-
toolInvocation: toolResult
|
1070
|
-
});
|
1071
|
-
}
|
1072
|
-
}
|
1073
|
-
break;
|
1074
|
-
}
|
1075
|
-
default: {
|
1076
|
-
const _exhaustiveCheck = role;
|
1077
|
-
throw new Error(`Unsupported message role: ${_exhaustiveCheck}`);
|
730
|
+
const result = { ...base };
|
731
|
+
for (const key in overrides) {
|
732
|
+
if (Object.prototype.hasOwnProperty.call(overrides, key)) {
|
733
|
+
const overridesValue = overrides[key];
|
734
|
+
if (overridesValue === void 0)
|
735
|
+
continue;
|
736
|
+
const baseValue = key in base ? base[key] : void 0;
|
737
|
+
const isSourceObject = overridesValue !== null && typeof overridesValue === "object" && !Array.isArray(overridesValue) && !(overridesValue instanceof Date) && !(overridesValue instanceof RegExp);
|
738
|
+
const isTargetObject = baseValue !== null && baseValue !== void 0 && typeof baseValue === "object" && !Array.isArray(baseValue) && !(baseValue instanceof Date) && !(baseValue instanceof RegExp);
|
739
|
+
if (isSourceObject && isTargetObject) {
|
740
|
+
result[key] = mergeObjects(
|
741
|
+
baseValue,
|
742
|
+
overridesValue
|
743
|
+
);
|
744
|
+
} else {
|
745
|
+
result[key] = overridesValue;
|
1078
746
|
}
|
1079
747
|
}
|
1080
748
|
}
|
1081
|
-
return
|
749
|
+
return result;
|
1082
750
|
}
|
1083
751
|
|
1084
|
-
// src/ui/process-chat-response.ts
|
1085
|
-
import { generateId as generateIdFunction } from "@ai-sdk/provider-utils";
|
1086
|
-
|
1087
752
|
// src/util/parse-partial-json.ts
|
1088
|
-
import { safeParseJSON
|
753
|
+
import { safeParseJSON } from "@ai-sdk/provider-utils";
|
1089
754
|
|
1090
755
|
// src/util/fix-json.ts
|
1091
756
|
function fixJson(input) {
|
@@ -1410,33 +1075,48 @@ async function parsePartialJson(jsonText) {
|
|
1410
1075
|
if (jsonText === void 0) {
|
1411
1076
|
return { value: void 0, state: "undefined-input" };
|
1412
1077
|
}
|
1413
|
-
let result = await
|
1078
|
+
let result = await safeParseJSON({ text: jsonText });
|
1414
1079
|
if (result.success) {
|
1415
1080
|
return { value: result.value, state: "successful-parse" };
|
1416
1081
|
}
|
1417
|
-
result = await
|
1082
|
+
result = await safeParseJSON({ text: fixJson(jsonText) });
|
1418
1083
|
if (result.success) {
|
1419
1084
|
return { value: result.value, state: "repaired-parse" };
|
1420
1085
|
}
|
1421
1086
|
return { value: void 0, state: "failed-parse" };
|
1422
1087
|
}
|
1423
1088
|
|
1089
|
+
// src/ui/extract-max-tool-invocation-step.ts
|
1090
|
+
function extractMaxToolInvocationStep(toolInvocations) {
|
1091
|
+
return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
|
1092
|
+
var _a17;
|
1093
|
+
return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
|
1094
|
+
}, 0);
|
1095
|
+
}
|
1096
|
+
|
1097
|
+
// src/ui/get-tool-invocations.ts
|
1098
|
+
function getToolInvocations(message) {
|
1099
|
+
return message.parts.filter(
|
1100
|
+
(part) => part.type === "tool-invocation"
|
1101
|
+
).map((part) => part.toolInvocation);
|
1102
|
+
}
|
1103
|
+
|
1424
1104
|
// src/ui/process-chat-response.ts
|
1425
|
-
|
1105
|
+
function processChatResponse({
|
1426
1106
|
stream,
|
1427
|
-
|
1107
|
+
onUpdate,
|
1428
1108
|
onToolCall,
|
1429
1109
|
onFinish,
|
1430
|
-
|
1431
|
-
|
1432
|
-
|
1110
|
+
lastMessage,
|
1111
|
+
newMessageId,
|
1112
|
+
messageMetadataSchema
|
1433
1113
|
}) {
|
1434
1114
|
var _a17;
|
1435
|
-
const
|
1436
|
-
let step =
|
1437
|
-
const message =
|
1438
|
-
id:
|
1439
|
-
|
1115
|
+
const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
|
1116
|
+
let step = isContinuation ? 1 + ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) : 0;
|
1117
|
+
const message = isContinuation ? structuredClone(lastMessage) : {
|
1118
|
+
id: newMessageId,
|
1119
|
+
metadata: {},
|
1440
1120
|
role: "assistant",
|
1441
1121
|
parts: []
|
1442
1122
|
};
|
@@ -1453,200 +1133,216 @@ async function processChatResponse({
|
|
1453
1133
|
type: "tool-invocation",
|
1454
1134
|
toolInvocation: invocation
|
1455
1135
|
});
|
1456
|
-
}
|
1457
|
-
}
|
1458
|
-
const
|
1459
|
-
|
1460
|
-
|
1461
|
-
|
1462
|
-
|
1463
|
-
|
1464
|
-
|
1465
|
-
|
1466
|
-
|
1467
|
-
function execUpdate() {
|
1468
|
-
const copiedData = [...data];
|
1469
|
-
if (messageAnnotations == null ? void 0 : messageAnnotations.length) {
|
1470
|
-
message.annotations = messageAnnotations;
|
1471
|
-
}
|
1472
|
-
const copiedMessage = {
|
1473
|
-
// deep copy the message to ensure that deep changes (msg attachments) are updated
|
1474
|
-
// with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
|
1475
|
-
...structuredClone(message),
|
1476
|
-
// add a revision id to ensure that the message is updated with SWR. SWR uses a
|
1477
|
-
// hashing approach by default to detect changes, but it only works for shallow
|
1478
|
-
// changes. This is why we need to add a revision id to ensure that the message
|
1479
|
-
// is updated with SWR (without it, the changes get stuck in SWR and are not
|
1480
|
-
// forwarded to rendering):
|
1481
|
-
revisionId: generateId3()
|
1482
|
-
};
|
1483
|
-
update({
|
1484
|
-
message: copiedMessage,
|
1485
|
-
data: copiedData,
|
1486
|
-
replaceLastMessage
|
1487
|
-
});
|
1488
|
-
}
|
1489
|
-
await processDataStream({
|
1490
|
-
stream,
|
1491
|
-
onTextPart(value) {
|
1492
|
-
if (currentTextPart == null) {
|
1493
|
-
currentTextPart = {
|
1494
|
-
type: "text",
|
1495
|
-
text: value
|
1496
|
-
};
|
1497
|
-
message.parts.push(currentTextPart);
|
1498
|
-
} else {
|
1499
|
-
currentTextPart.text += value;
|
1500
|
-
}
|
1501
|
-
execUpdate();
|
1502
|
-
},
|
1503
|
-
onReasoningPart(value) {
|
1504
|
-
if (currentReasoningPart == null) {
|
1505
|
-
currentReasoningPart = {
|
1506
|
-
type: "reasoning",
|
1507
|
-
text: value.text,
|
1508
|
-
providerMetadata: value.providerMetadata
|
1509
|
-
};
|
1510
|
-
message.parts.push(currentReasoningPart);
|
1511
|
-
} else {
|
1512
|
-
currentReasoningPart.text += value.text;
|
1513
|
-
currentReasoningPart.providerMetadata = value.providerMetadata;
|
1514
|
-
}
|
1515
|
-
execUpdate();
|
1516
|
-
},
|
1517
|
-
onReasoningPartFinish(value) {
|
1518
|
-
if (currentReasoningPart != null) {
|
1519
|
-
currentReasoningPart = void 0;
|
1520
|
-
}
|
1521
|
-
},
|
1522
|
-
onFilePart(value) {
|
1523
|
-
message.parts.push({
|
1524
|
-
type: "file",
|
1525
|
-
mediaType: value.mediaType,
|
1526
|
-
url: value.url
|
1527
|
-
});
|
1528
|
-
execUpdate();
|
1529
|
-
},
|
1530
|
-
onSourcePart(value) {
|
1531
|
-
message.parts.push({
|
1532
|
-
type: "source",
|
1533
|
-
source: value
|
1534
|
-
});
|
1535
|
-
execUpdate();
|
1536
|
-
},
|
1537
|
-
onToolCallStreamingStartPart(value) {
|
1538
|
-
const toolInvocations = getToolInvocations(message);
|
1539
|
-
partialToolCalls[value.toolCallId] = {
|
1540
|
-
text: "",
|
1541
|
-
step,
|
1542
|
-
toolName: value.toolName,
|
1543
|
-
index: toolInvocations.length
|
1544
|
-
};
|
1545
|
-
updateToolInvocationPart(value.toolCallId, {
|
1546
|
-
state: "partial-call",
|
1547
|
-
step,
|
1548
|
-
toolCallId: value.toolCallId,
|
1549
|
-
toolName: value.toolName,
|
1550
|
-
args: void 0
|
1551
|
-
});
|
1552
|
-
execUpdate();
|
1553
|
-
},
|
1554
|
-
async onToolCallDeltaPart(value) {
|
1555
|
-
const partialToolCall = partialToolCalls[value.toolCallId];
|
1556
|
-
partialToolCall.text += value.argsTextDelta;
|
1557
|
-
const { value: partialArgs } = await parsePartialJson(
|
1558
|
-
partialToolCall.text
|
1559
|
-
);
|
1560
|
-
updateToolInvocationPart(value.toolCallId, {
|
1561
|
-
state: "partial-call",
|
1562
|
-
step: partialToolCall.step,
|
1563
|
-
toolCallId: value.toolCallId,
|
1564
|
-
toolName: partialToolCall.toolName,
|
1565
|
-
args: partialArgs
|
1566
|
-
});
|
1567
|
-
execUpdate();
|
1568
|
-
},
|
1569
|
-
async onToolCallPart(value) {
|
1570
|
-
updateToolInvocationPart(value.toolCallId, {
|
1571
|
-
state: "call",
|
1572
|
-
step,
|
1573
|
-
...value
|
1574
|
-
});
|
1575
|
-
execUpdate();
|
1576
|
-
if (onToolCall) {
|
1577
|
-
const result = await onToolCall({
|
1578
|
-
toolCall: value
|
1579
|
-
});
|
1580
|
-
if (result != null) {
|
1581
|
-
updateToolInvocationPart(value.toolCallId, {
|
1582
|
-
state: "result",
|
1583
|
-
step,
|
1584
|
-
...value,
|
1585
|
-
result
|
1586
|
-
});
|
1587
|
-
execUpdate();
|
1588
|
-
}
|
1589
|
-
}
|
1590
|
-
},
|
1591
|
-
onToolResultPart(value) {
|
1592
|
-
const toolInvocations = getToolInvocations(message);
|
1593
|
-
if (toolInvocations == null) {
|
1594
|
-
throw new Error("tool_result must be preceded by a tool_call");
|
1595
|
-
}
|
1596
|
-
const toolInvocationIndex = toolInvocations.findIndex(
|
1597
|
-
(invocation) => invocation.toolCallId === value.toolCallId
|
1598
|
-
);
|
1599
|
-
if (toolInvocationIndex === -1) {
|
1600
|
-
throw new Error(
|
1601
|
-
"tool_result must be preceded by a tool_call with the same toolCallId"
|
1602
|
-
);
|
1603
|
-
}
|
1604
|
-
updateToolInvocationPart(value.toolCallId, {
|
1605
|
-
...toolInvocations[toolInvocationIndex],
|
1606
|
-
state: "result",
|
1607
|
-
...value
|
1608
|
-
});
|
1609
|
-
execUpdate();
|
1610
|
-
},
|
1611
|
-
onDataPart(value) {
|
1612
|
-
data.push(...value);
|
1613
|
-
execUpdate();
|
1614
|
-
},
|
1615
|
-
onMessageAnnotationsPart(value) {
|
1616
|
-
if (messageAnnotations == null) {
|
1617
|
-
messageAnnotations = [...value];
|
1618
|
-
} else {
|
1619
|
-
messageAnnotations.push(...value);
|
1620
|
-
}
|
1621
|
-
execUpdate();
|
1622
|
-
},
|
1623
|
-
onFinishStepPart(value) {
|
1624
|
-
step += 1;
|
1625
|
-
currentTextPart = value.isContinued ? currentTextPart : void 0;
|
1626
|
-
currentReasoningPart = void 0;
|
1627
|
-
},
|
1628
|
-
onStartStepPart(value) {
|
1629
|
-
if (!replaceLastMessage) {
|
1630
|
-
message.id = value.messageId;
|
1631
|
-
}
|
1632
|
-
message.parts.push({ type: "step-start" });
|
1633
|
-
execUpdate();
|
1634
|
-
},
|
1635
|
-
onFinishMessagePart(value) {
|
1636
|
-
finishReason = value.finishReason;
|
1637
|
-
if (value.usage != null) {
|
1638
|
-
usage = value.usage;
|
1136
|
+
}
|
1137
|
+
}
|
1138
|
+
const partialToolCalls = {};
|
1139
|
+
async function updateMessageMetadata(metadata) {
|
1140
|
+
if (metadata != null) {
|
1141
|
+
const mergedMetadata = message.metadata != null ? mergeObjects(message.metadata, metadata) : metadata;
|
1142
|
+
if (messageMetadataSchema != null) {
|
1143
|
+
await validateTypes({
|
1144
|
+
value: mergedMetadata,
|
1145
|
+
schema: messageMetadataSchema
|
1146
|
+
});
|
1639
1147
|
}
|
1640
|
-
|
1641
|
-
onErrorPart(error) {
|
1642
|
-
throw new Error(error);
|
1148
|
+
message.metadata = mergedMetadata;
|
1643
1149
|
}
|
1644
|
-
}
|
1645
|
-
|
1150
|
+
}
|
1151
|
+
return stream.pipeThrough(
|
1152
|
+
new TransformStream({
|
1153
|
+
async transform(chunk, controller) {
|
1154
|
+
const { type, value } = chunk;
|
1155
|
+
switch (type) {
|
1156
|
+
case "text": {
|
1157
|
+
if (currentTextPart == null) {
|
1158
|
+
currentTextPart = {
|
1159
|
+
type: "text",
|
1160
|
+
text: value
|
1161
|
+
};
|
1162
|
+
message.parts.push(currentTextPart);
|
1163
|
+
} else {
|
1164
|
+
currentTextPart.text += value;
|
1165
|
+
}
|
1166
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1167
|
+
break;
|
1168
|
+
}
|
1169
|
+
case "reasoning": {
|
1170
|
+
if (currentReasoningPart == null) {
|
1171
|
+
currentReasoningPart = {
|
1172
|
+
type: "reasoning",
|
1173
|
+
text: value.text,
|
1174
|
+
providerMetadata: value.providerMetadata
|
1175
|
+
};
|
1176
|
+
message.parts.push(currentReasoningPart);
|
1177
|
+
} else {
|
1178
|
+
currentReasoningPart.text += value.text;
|
1179
|
+
currentReasoningPart.providerMetadata = value.providerMetadata;
|
1180
|
+
}
|
1181
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1182
|
+
break;
|
1183
|
+
}
|
1184
|
+
case "reasoning-part-finish": {
|
1185
|
+
if (currentReasoningPart != null) {
|
1186
|
+
currentReasoningPart = void 0;
|
1187
|
+
}
|
1188
|
+
break;
|
1189
|
+
}
|
1190
|
+
case "file": {
|
1191
|
+
message.parts.push({
|
1192
|
+
type: "file",
|
1193
|
+
mediaType: value.mediaType,
|
1194
|
+
url: value.url
|
1195
|
+
});
|
1196
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1197
|
+
break;
|
1198
|
+
}
|
1199
|
+
case "source": {
|
1200
|
+
message.parts.push({
|
1201
|
+
type: "source",
|
1202
|
+
source: value
|
1203
|
+
});
|
1204
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1205
|
+
break;
|
1206
|
+
}
|
1207
|
+
case "tool-call-streaming-start": {
|
1208
|
+
const toolInvocations = getToolInvocations(message);
|
1209
|
+
partialToolCalls[value.toolCallId] = {
|
1210
|
+
text: "",
|
1211
|
+
step,
|
1212
|
+
toolName: value.toolName,
|
1213
|
+
index: toolInvocations.length
|
1214
|
+
};
|
1215
|
+
updateToolInvocationPart(value.toolCallId, {
|
1216
|
+
state: "partial-call",
|
1217
|
+
step,
|
1218
|
+
toolCallId: value.toolCallId,
|
1219
|
+
toolName: value.toolName,
|
1220
|
+
args: void 0
|
1221
|
+
});
|
1222
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1223
|
+
break;
|
1224
|
+
}
|
1225
|
+
case "tool-call-delta": {
|
1226
|
+
const partialToolCall = partialToolCalls[value.toolCallId];
|
1227
|
+
partialToolCall.text += value.argsTextDelta;
|
1228
|
+
const { value: partialArgs } = await parsePartialJson(
|
1229
|
+
partialToolCall.text
|
1230
|
+
);
|
1231
|
+
updateToolInvocationPart(value.toolCallId, {
|
1232
|
+
state: "partial-call",
|
1233
|
+
step: partialToolCall.step,
|
1234
|
+
toolCallId: value.toolCallId,
|
1235
|
+
toolName: partialToolCall.toolName,
|
1236
|
+
args: partialArgs
|
1237
|
+
});
|
1238
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1239
|
+
break;
|
1240
|
+
}
|
1241
|
+
case "tool-call": {
|
1242
|
+
const call = { args: value.args, ...value };
|
1243
|
+
updateToolInvocationPart(value.toolCallId, {
|
1244
|
+
state: "call",
|
1245
|
+
step,
|
1246
|
+
...call
|
1247
|
+
});
|
1248
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1249
|
+
if (onToolCall) {
|
1250
|
+
const result = await onToolCall({
|
1251
|
+
toolCall: call
|
1252
|
+
});
|
1253
|
+
if (result != null) {
|
1254
|
+
updateToolInvocationPart(value.toolCallId, {
|
1255
|
+
state: "result",
|
1256
|
+
step,
|
1257
|
+
...call,
|
1258
|
+
result
|
1259
|
+
});
|
1260
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1261
|
+
}
|
1262
|
+
}
|
1263
|
+
break;
|
1264
|
+
}
|
1265
|
+
case "tool-result": {
|
1266
|
+
const toolInvocations = getToolInvocations(message);
|
1267
|
+
if (toolInvocations == null) {
|
1268
|
+
throw new Error("tool_result must be preceded by a tool_call");
|
1269
|
+
}
|
1270
|
+
const toolInvocationIndex = toolInvocations.findIndex(
|
1271
|
+
(invocation) => invocation.toolCallId === value.toolCallId
|
1272
|
+
);
|
1273
|
+
if (toolInvocationIndex === -1) {
|
1274
|
+
throw new Error(
|
1275
|
+
"tool_result must be preceded by a tool_call with the same toolCallId"
|
1276
|
+
);
|
1277
|
+
}
|
1278
|
+
const result = { result: value.result, ...value };
|
1279
|
+
updateToolInvocationPart(value.toolCallId, {
|
1280
|
+
...toolInvocations[toolInvocationIndex],
|
1281
|
+
state: "result",
|
1282
|
+
...result
|
1283
|
+
});
|
1284
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1285
|
+
break;
|
1286
|
+
}
|
1287
|
+
case "start-step": {
|
1288
|
+
message.parts.push({ type: "step-start" });
|
1289
|
+
await updateMessageMetadata(value.metadata);
|
1290
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1291
|
+
break;
|
1292
|
+
}
|
1293
|
+
case "finish-step": {
|
1294
|
+
step += 1;
|
1295
|
+
currentTextPart = void 0;
|
1296
|
+
currentReasoningPart = void 0;
|
1297
|
+
await updateMessageMetadata(value.metadata);
|
1298
|
+
if (value.metadata != null) {
|
1299
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1300
|
+
}
|
1301
|
+
break;
|
1302
|
+
}
|
1303
|
+
case "start": {
|
1304
|
+
if (value.messageId != null) {
|
1305
|
+
message.id = value.messageId;
|
1306
|
+
}
|
1307
|
+
await updateMessageMetadata(value.metadata);
|
1308
|
+
if (value.messageId != null || value.metadata != null) {
|
1309
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1310
|
+
}
|
1311
|
+
break;
|
1312
|
+
}
|
1313
|
+
case "finish": {
|
1314
|
+
await updateMessageMetadata(value.metadata);
|
1315
|
+
if (value.metadata != null) {
|
1316
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1317
|
+
}
|
1318
|
+
break;
|
1319
|
+
}
|
1320
|
+
case "metadata": {
|
1321
|
+
await updateMessageMetadata(value.metadata);
|
1322
|
+
if (value.metadata != null) {
|
1323
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1324
|
+
}
|
1325
|
+
break;
|
1326
|
+
}
|
1327
|
+
case "error": {
|
1328
|
+
throw new Error(value);
|
1329
|
+
}
|
1330
|
+
default: {
|
1331
|
+
const _exhaustiveCheck = type;
|
1332
|
+
throw new Error(`Unhandled stream part: ${_exhaustiveCheck}`);
|
1333
|
+
}
|
1334
|
+
}
|
1335
|
+
controller.enqueue(chunk);
|
1336
|
+
},
|
1337
|
+
flush() {
|
1338
|
+
onFinish == null ? void 0 : onFinish({ message });
|
1339
|
+
}
|
1340
|
+
})
|
1341
|
+
);
|
1646
1342
|
}
|
1647
1343
|
|
1648
1344
|
// src/ui/process-chat-text-response.ts
|
1649
|
-
import { generateId as
|
1345
|
+
import { generateId as generateIdFunction } from "@ai-sdk/provider-utils";
|
1650
1346
|
|
1651
1347
|
// src/ui/process-text-stream.ts
|
1652
1348
|
async function processTextStream({
|
@@ -1668,13 +1364,11 @@ async function processChatTextResponse({
|
|
1668
1364
|
stream,
|
1669
1365
|
update,
|
1670
1366
|
onFinish,
|
1671
|
-
|
1672
|
-
generateId: generateId3 = generateIdFunction2
|
1367
|
+
generateId: generateId3 = generateIdFunction
|
1673
1368
|
}) {
|
1674
1369
|
const textPart = { type: "text", text: "" };
|
1675
1370
|
const resultMessage = {
|
1676
1371
|
id: generateId3(),
|
1677
|
-
createdAt: getCurrentDate(),
|
1678
1372
|
role: "assistant",
|
1679
1373
|
parts: [textPart]
|
1680
1374
|
};
|
@@ -1682,21 +1376,10 @@ async function processChatTextResponse({
|
|
1682
1376
|
stream,
|
1683
1377
|
onTextPart: (chunk) => {
|
1684
1378
|
textPart.text += chunk;
|
1685
|
-
update({
|
1686
|
-
message: { ...resultMessage },
|
1687
|
-
data: [],
|
1688
|
-
replaceLastMessage: false
|
1689
|
-
});
|
1379
|
+
update({ message: { ...resultMessage } });
|
1690
1380
|
}
|
1691
1381
|
});
|
1692
|
-
onFinish == null ? void 0 : onFinish(resultMessage
|
1693
|
-
usage: {
|
1694
|
-
inputTokens: void 0,
|
1695
|
-
outputTokens: void 0,
|
1696
|
-
totalTokens: void 0
|
1697
|
-
},
|
1698
|
-
finishReason: "unknown"
|
1699
|
-
});
|
1382
|
+
onFinish == null ? void 0 : onFinish({ message: resultMessage });
|
1700
1383
|
}
|
1701
1384
|
|
1702
1385
|
// src/ui/call-chat-api.ts
|
@@ -1715,8 +1398,8 @@ async function callChatApi({
|
|
1715
1398
|
generateId: generateId3,
|
1716
1399
|
fetch: fetch2 = getOriginalFetch(),
|
1717
1400
|
lastMessage,
|
1718
|
-
|
1719
|
-
|
1401
|
+
requestType = "generate",
|
1402
|
+
messageMetadataSchema
|
1720
1403
|
}) {
|
1721
1404
|
var _a17, _b, _c;
|
1722
1405
|
const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.id}`, {
|
@@ -1754,24 +1437,49 @@ async function callChatApi({
|
|
1754
1437
|
stream: response.body,
|
1755
1438
|
update: onUpdate,
|
1756
1439
|
onFinish,
|
1757
|
-
generateId: generateId3
|
1758
|
-
getCurrentDate
|
1440
|
+
generateId: generateId3
|
1759
1441
|
});
|
1760
1442
|
return;
|
1761
1443
|
}
|
1762
1444
|
case "data": {
|
1763
|
-
await
|
1764
|
-
stream:
|
1765
|
-
|
1766
|
-
|
1767
|
-
|
1768
|
-
|
1769
|
-
|
1770
|
-
|
1771
|
-
|
1772
|
-
|
1773
|
-
|
1774
|
-
|
1445
|
+
await consumeStream({
|
1446
|
+
stream: processChatResponse({
|
1447
|
+
stream: parseJsonEventStream({
|
1448
|
+
stream: response.body,
|
1449
|
+
schema: dataStreamPartSchema
|
1450
|
+
}).pipeThrough(
|
1451
|
+
new TransformStream({
|
1452
|
+
async transform(part, controller) {
|
1453
|
+
if (!part.success) {
|
1454
|
+
throw part.error;
|
1455
|
+
}
|
1456
|
+
controller.enqueue(part.value);
|
1457
|
+
}
|
1458
|
+
})
|
1459
|
+
),
|
1460
|
+
onUpdate({ message }) {
|
1461
|
+
const copiedMessage = {
|
1462
|
+
// deep copy the message to ensure that deep changes (msg attachments) are updated
|
1463
|
+
// with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
|
1464
|
+
...structuredClone(message),
|
1465
|
+
// add a revision id to ensure that the message is updated with SWR. SWR uses a
|
1466
|
+
// hashing approach by default to detect changes, but it only works for shallow
|
1467
|
+
// changes. This is why we need to add a revision id to ensure that the message
|
1468
|
+
// is updated with SWR (without it, the changes get stuck in SWR and are not
|
1469
|
+
// forwarded to rendering):
|
1470
|
+
revisionId: generateId3()
|
1471
|
+
};
|
1472
|
+
onUpdate({ message: copiedMessage });
|
1473
|
+
},
|
1474
|
+
lastMessage,
|
1475
|
+
onToolCall,
|
1476
|
+
onFinish,
|
1477
|
+
newMessageId: generateId3(),
|
1478
|
+
messageMetadataSchema
|
1479
|
+
}),
|
1480
|
+
onError: (error) => {
|
1481
|
+
throw error;
|
1482
|
+
}
|
1775
1483
|
});
|
1776
1484
|
return;
|
1777
1485
|
}
|
@@ -1783,6 +1491,7 @@ async function callChatApi({
|
|
1783
1491
|
}
|
1784
1492
|
|
1785
1493
|
// src/ui/call-completion-api.ts
|
1494
|
+
import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-utils";
|
1786
1495
|
var getOriginalFetch2 = () => fetch;
|
1787
1496
|
async function callCompletionApi({
|
1788
1497
|
api,
|
@@ -1798,7 +1507,6 @@ async function callCompletionApi({
|
|
1798
1507
|
onResponse,
|
1799
1508
|
onFinish,
|
1800
1509
|
onError,
|
1801
|
-
onData,
|
1802
1510
|
fetch: fetch2 = getOriginalFetch2()
|
1803
1511
|
}) {
|
1804
1512
|
var _a17;
|
@@ -1851,17 +1559,28 @@ async function callCompletionApi({
|
|
1851
1559
|
break;
|
1852
1560
|
}
|
1853
1561
|
case "data": {
|
1854
|
-
await
|
1855
|
-
stream:
|
1856
|
-
|
1857
|
-
|
1858
|
-
|
1859
|
-
|
1860
|
-
|
1861
|
-
|
1862
|
-
|
1863
|
-
|
1864
|
-
|
1562
|
+
await consumeStream({
|
1563
|
+
stream: parseJsonEventStream2({
|
1564
|
+
stream: response.body,
|
1565
|
+
schema: dataStreamPartSchema
|
1566
|
+
}).pipeThrough(
|
1567
|
+
new TransformStream({
|
1568
|
+
async transform(part) {
|
1569
|
+
if (!part.success) {
|
1570
|
+
throw part.error;
|
1571
|
+
}
|
1572
|
+
const { type, value } = part.value;
|
1573
|
+
if (type === "text") {
|
1574
|
+
result += value;
|
1575
|
+
setCompletion(result);
|
1576
|
+
} else if (type === "error") {
|
1577
|
+
throw new Error(value);
|
1578
|
+
}
|
1579
|
+
}
|
1580
|
+
})
|
1581
|
+
),
|
1582
|
+
onError: (error) => {
|
1583
|
+
throw error;
|
1865
1584
|
}
|
1866
1585
|
});
|
1867
1586
|
break;
|
@@ -2821,7 +2540,7 @@ var DefaultEmbedManyResult = class {
|
|
2821
2540
|
};
|
2822
2541
|
|
2823
2542
|
// src/util/detect-media-type.ts
|
2824
|
-
import { convertBase64ToUint8Array
|
2543
|
+
import { convertBase64ToUint8Array } from "@ai-sdk/provider-utils";
|
2825
2544
|
var imageMediaTypeSignatures = [
|
2826
2545
|
{
|
2827
2546
|
mediaType: "image/gif",
|
@@ -2928,7 +2647,7 @@ var audioMediaTypeSignatures = [
|
|
2928
2647
|
}
|
2929
2648
|
];
|
2930
2649
|
var stripID3 = (data) => {
|
2931
|
-
const bytes = typeof data === "string" ?
|
2650
|
+
const bytes = typeof data === "string" ? convertBase64ToUint8Array(data) : data;
|
2932
2651
|
const id3Size = (bytes[6] & 127) << 21 | (bytes[7] & 127) << 14 | (bytes[8] & 127) << 7 | bytes[9] & 127;
|
2933
2652
|
return bytes.slice(id3Size + 10);
|
2934
2653
|
};
|
@@ -2955,8 +2674,8 @@ function detectMediaType({
|
|
2955
2674
|
|
2956
2675
|
// core/generate-text/generated-file.ts
|
2957
2676
|
import {
|
2958
|
-
convertBase64ToUint8Array as
|
2959
|
-
convertUint8ArrayToBase64
|
2677
|
+
convertBase64ToUint8Array as convertBase64ToUint8Array2,
|
2678
|
+
convertUint8ArrayToBase64
|
2960
2679
|
} from "@ai-sdk/provider-utils";
|
2961
2680
|
var DefaultGeneratedFile = class {
|
2962
2681
|
constructor({
|
@@ -2971,14 +2690,14 @@ var DefaultGeneratedFile = class {
|
|
2971
2690
|
// lazy conversion with caching to avoid unnecessary conversion overhead:
|
2972
2691
|
get base64() {
|
2973
2692
|
if (this.base64Data == null) {
|
2974
|
-
this.base64Data =
|
2693
|
+
this.base64Data = convertUint8ArrayToBase64(this.uint8ArrayData);
|
2975
2694
|
}
|
2976
2695
|
return this.base64Data;
|
2977
2696
|
}
|
2978
2697
|
// lazy conversion with caching to avoid unnecessary conversion overhead:
|
2979
2698
|
get uint8Array() {
|
2980
2699
|
if (this.uint8ArrayData == null) {
|
2981
|
-
this.uint8ArrayData =
|
2700
|
+
this.uint8ArrayData = convertBase64ToUint8Array2(this.base64Data);
|
2982
2701
|
}
|
2983
2702
|
return this.uint8ArrayData;
|
2984
2703
|
}
|
@@ -2995,6 +2714,7 @@ async function generateImage({
|
|
2995
2714
|
model,
|
2996
2715
|
prompt,
|
2997
2716
|
n = 1,
|
2717
|
+
maxImagesPerCall,
|
2998
2718
|
size,
|
2999
2719
|
aspectRatio,
|
3000
2720
|
seed,
|
@@ -3005,14 +2725,14 @@ async function generateImage({
|
|
3005
2725
|
}) {
|
3006
2726
|
var _a17, _b;
|
3007
2727
|
const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
|
3008
|
-
const
|
3009
|
-
const callCount = Math.ceil(n /
|
2728
|
+
const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : model.maxImagesPerCall) != null ? _a17 : 1;
|
2729
|
+
const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
|
3010
2730
|
const callImageCounts = Array.from({ length: callCount }, (_, i) => {
|
3011
2731
|
if (i < callCount - 1) {
|
3012
|
-
return
|
2732
|
+
return maxImagesPerCallWithDefault;
|
3013
2733
|
}
|
3014
|
-
const remainder = n %
|
3015
|
-
return remainder === 0 ?
|
2734
|
+
const remainder = n % maxImagesPerCallWithDefault;
|
2735
|
+
return remainder === 0 ? maxImagesPerCallWithDefault : remainder;
|
3016
2736
|
});
|
3017
2737
|
const results = await Promise.all(
|
3018
2738
|
callImageCounts.map(
|
@@ -3089,7 +2809,7 @@ import {
|
|
3089
2809
|
} from "@ai-sdk/provider";
|
3090
2810
|
import {
|
3091
2811
|
createIdGenerator,
|
3092
|
-
safeParseJSON as
|
2812
|
+
safeParseJSON as safeParseJSON2
|
3093
2813
|
} from "@ai-sdk/provider-utils";
|
3094
2814
|
|
3095
2815
|
// core/generate-text/extract-content-text.ts
|
@@ -3131,6 +2851,92 @@ async function download({ url }) {
|
|
3131
2851
|
}
|
3132
2852
|
}
|
3133
2853
|
|
2854
|
+
// core/prompt/data-content.ts
|
2855
|
+
import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
|
2856
|
+
import {
|
2857
|
+
convertBase64ToUint8Array as convertBase64ToUint8Array3,
|
2858
|
+
convertUint8ArrayToBase64 as convertUint8ArrayToBase642
|
2859
|
+
} from "@ai-sdk/provider-utils";
|
2860
|
+
import { z as z2 } from "zod";
|
2861
|
+
|
2862
|
+
// core/prompt/split-data-url.ts
|
2863
|
+
function splitDataUrl(dataUrl) {
|
2864
|
+
try {
|
2865
|
+
const [header, base64Content] = dataUrl.split(",");
|
2866
|
+
return {
|
2867
|
+
mediaType: header.split(";")[0].split(":")[1],
|
2868
|
+
base64Content
|
2869
|
+
};
|
2870
|
+
} catch (error) {
|
2871
|
+
return {
|
2872
|
+
mediaType: void 0,
|
2873
|
+
base64Content: void 0
|
2874
|
+
};
|
2875
|
+
}
|
2876
|
+
}
|
2877
|
+
|
2878
|
+
// core/prompt/data-content.ts
|
2879
|
+
var dataContentSchema = z2.union([
|
2880
|
+
z2.string(),
|
2881
|
+
z2.instanceof(Uint8Array),
|
2882
|
+
z2.instanceof(ArrayBuffer),
|
2883
|
+
z2.custom(
|
2884
|
+
// Buffer might not be available in some environments such as CloudFlare:
|
2885
|
+
(value) => {
|
2886
|
+
var _a17, _b;
|
2887
|
+
return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
|
2888
|
+
},
|
2889
|
+
{ message: "Must be a Buffer" }
|
2890
|
+
)
|
2891
|
+
]);
|
2892
|
+
function convertToLanguageModelV2DataContent(content) {
|
2893
|
+
if (content instanceof Uint8Array) {
|
2894
|
+
return { data: content, mediaType: void 0 };
|
2895
|
+
}
|
2896
|
+
if (content instanceof ArrayBuffer) {
|
2897
|
+
return { data: new Uint8Array(content), mediaType: void 0 };
|
2898
|
+
}
|
2899
|
+
if (typeof content === "string") {
|
2900
|
+
try {
|
2901
|
+
content = new URL(content);
|
2902
|
+
} catch (error) {
|
2903
|
+
}
|
2904
|
+
}
|
2905
|
+
if (content instanceof URL && content.protocol === "data:") {
|
2906
|
+
const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
|
2907
|
+
content.toString()
|
2908
|
+
);
|
2909
|
+
if (dataUrlMediaType == null || base64Content == null) {
|
2910
|
+
throw new AISDKError17({
|
2911
|
+
name: "InvalidDataContentError",
|
2912
|
+
message: `Invalid data URL format in content ${content.toString()}`
|
2913
|
+
});
|
2914
|
+
}
|
2915
|
+
return { data: base64Content, mediaType: dataUrlMediaType };
|
2916
|
+
}
|
2917
|
+
return { data: content, mediaType: void 0 };
|
2918
|
+
}
|
2919
|
+
function convertDataContentToUint8Array(content) {
|
2920
|
+
if (content instanceof Uint8Array) {
|
2921
|
+
return content;
|
2922
|
+
}
|
2923
|
+
if (typeof content === "string") {
|
2924
|
+
try {
|
2925
|
+
return convertBase64ToUint8Array3(content);
|
2926
|
+
} catch (error) {
|
2927
|
+
throw new InvalidDataContentError({
|
2928
|
+
message: "Invalid data content. Content string is not a base64-encoded media.",
|
2929
|
+
content,
|
2930
|
+
cause: error
|
2931
|
+
});
|
2932
|
+
}
|
2933
|
+
}
|
2934
|
+
if (content instanceof ArrayBuffer) {
|
2935
|
+
return new Uint8Array(content);
|
2936
|
+
}
|
2937
|
+
throw new InvalidDataContentError({ content });
|
2938
|
+
}
|
2939
|
+
|
3134
2940
|
// core/prompt/convert-to-language-model-prompt.ts
|
3135
2941
|
async function convertToLanguageModelPrompt({
|
3136
2942
|
prompt,
|
@@ -3351,8 +3157,8 @@ function prepareCallSettings({
|
|
3351
3157
|
topK,
|
3352
3158
|
presencePenalty,
|
3353
3159
|
frequencyPenalty,
|
3354
|
-
|
3355
|
-
|
3160
|
+
seed,
|
3161
|
+
stopSequences
|
3356
3162
|
}) {
|
3357
3163
|
if (maxOutputTokens != null) {
|
3358
3164
|
if (!Number.isInteger(maxOutputTokens)) {
|
@@ -3426,12 +3232,12 @@ function prepareCallSettings({
|
|
3426
3232
|
}
|
3427
3233
|
return {
|
3428
3234
|
maxOutputTokens,
|
3429
|
-
temperature
|
3235
|
+
temperature,
|
3430
3236
|
topP,
|
3431
3237
|
topK,
|
3432
3238
|
presencePenalty,
|
3433
3239
|
frequencyPenalty,
|
3434
|
-
stopSequences
|
3240
|
+
stopSequences,
|
3435
3241
|
seed
|
3436
3242
|
};
|
3437
3243
|
}
|
@@ -3638,6 +3444,23 @@ import {
|
|
3638
3444
|
asSchema,
|
3639
3445
|
safeValidateTypes as safeValidateTypes2
|
3640
3446
|
} from "@ai-sdk/provider-utils";
|
3447
|
+
|
3448
|
+
// src/util/async-iterable-stream.ts
|
3449
|
+
function createAsyncIterableStream(source) {
|
3450
|
+
const stream = source.pipeThrough(new TransformStream());
|
3451
|
+
stream[Symbol.asyncIterator] = () => {
|
3452
|
+
const reader = stream.getReader();
|
3453
|
+
return {
|
3454
|
+
async next() {
|
3455
|
+
const { done, value } = await reader.read();
|
3456
|
+
return done ? { done: true, value: void 0 } : { done: false, value };
|
3457
|
+
}
|
3458
|
+
};
|
3459
|
+
};
|
3460
|
+
return stream;
|
3461
|
+
}
|
3462
|
+
|
3463
|
+
// core/generate-object/output-strategy.ts
|
3641
3464
|
var noSchemaOutputStrategy = {
|
3642
3465
|
type: "no-schema",
|
3643
3466
|
jsonSchema: void 0,
|
@@ -4188,7 +4011,7 @@ async function generateObject(options) {
|
|
4188
4011
|
request = (_a17 = generateResult.request) != null ? _a17 : {};
|
4189
4012
|
response = generateResult.responseData;
|
4190
4013
|
async function processResult(result2) {
|
4191
|
-
const parseResult = await
|
4014
|
+
const parseResult = await safeParseJSON2({ text: result2 });
|
4192
4015
|
if (!parseResult.success) {
|
4193
4016
|
throw new NoObjectGeneratedError({
|
4194
4017
|
message: "No object generated: could not parse the response.",
|
@@ -4933,8 +4756,8 @@ var DefaultStreamObjectResult = class {
|
|
4933
4756
|
};
|
4934
4757
|
|
4935
4758
|
// src/error/no-speech-generated-error.ts
|
4936
|
-
import { AISDKError as
|
4937
|
-
var NoSpeechGeneratedError = class extends
|
4759
|
+
import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
|
4760
|
+
var NoSpeechGeneratedError = class extends AISDKError18 {
|
4938
4761
|
constructor(options) {
|
4939
4762
|
super({
|
4940
4763
|
name: "AI_NoSpeechGeneratedError",
|
@@ -5025,19 +4848,6 @@ var DefaultSpeechResult = class {
|
|
5025
4848
|
// core/generate-text/generate-text.ts
|
5026
4849
|
import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils";
|
5027
4850
|
|
5028
|
-
// src/util/split-on-last-whitespace.ts
|
5029
|
-
var lastWhitespaceRegexp = /^([\s\S]*?)(\s+)(\S*)$/;
|
5030
|
-
function splitOnLastWhitespace(text2) {
|
5031
|
-
const match = text2.match(lastWhitespaceRegexp);
|
5032
|
-
return match ? { prefix: match[1], whitespace: match[2], suffix: match[3] } : void 0;
|
5033
|
-
}
|
5034
|
-
|
5035
|
-
// src/util/remove-text-after-last-whitespace.ts
|
5036
|
-
function removeTextAfterLastWhitespace(text2) {
|
5037
|
-
const match = splitOnLastWhitespace(text2);
|
5038
|
-
return match ? match.prefix + match.whitespace : text2;
|
5039
|
-
}
|
5040
|
-
|
5041
4851
|
// core/prompt/prepare-tools-and-tool-choice.ts
|
5042
4852
|
import { asSchema as asSchema2 } from "@ai-sdk/provider-utils";
|
5043
4853
|
|
@@ -5139,20 +4949,11 @@ function asContent({
|
|
5139
4949
|
...toolResults
|
5140
4950
|
];
|
5141
4951
|
}
|
5142
|
-
function extractFiles(content) {
|
5143
|
-
return content.filter((part) => part.type === "file").map((part) => part.file);
|
5144
|
-
}
|
5145
|
-
function extractReasoning(content) {
|
5146
|
-
return content.filter((part) => part.type === "reasoning");
|
5147
|
-
}
|
5148
|
-
function extractSources(content) {
|
5149
|
-
return content.filter((part) => part.type === "source");
|
5150
|
-
}
|
5151
4952
|
|
5152
4953
|
// core/generate-text/parse-tool-call.ts
|
5153
4954
|
import {
|
5154
4955
|
asSchema as asSchema3,
|
5155
|
-
safeParseJSON as
|
4956
|
+
safeParseJSON as safeParseJSON3,
|
5156
4957
|
safeValidateTypes as safeValidateTypes3
|
5157
4958
|
} from "@ai-sdk/provider-utils";
|
5158
4959
|
async function parseToolCall({
|
@@ -5208,88 +5009,118 @@ async function doParseToolCall({
|
|
5208
5009
|
availableTools: Object.keys(tools)
|
5209
5010
|
});
|
5210
5011
|
}
|
5211
|
-
const schema = asSchema3(tool2.parameters);
|
5212
|
-
const parseResult = toolCall.args.trim() === "" ? await safeValidateTypes3({ value: {}, schema }) : await
|
5213
|
-
if (parseResult.success === false) {
|
5214
|
-
throw new InvalidToolArgumentsError({
|
5215
|
-
toolName,
|
5216
|
-
toolArgs: toolCall.args,
|
5217
|
-
cause: parseResult.error
|
5218
|
-
});
|
5012
|
+
const schema = asSchema3(tool2.parameters);
|
5013
|
+
const parseResult = toolCall.args.trim() === "" ? await safeValidateTypes3({ value: {}, schema }) : await safeParseJSON3({ text: toolCall.args, schema });
|
5014
|
+
if (parseResult.success === false) {
|
5015
|
+
throw new InvalidToolArgumentsError({
|
5016
|
+
toolName,
|
5017
|
+
toolArgs: toolCall.args,
|
5018
|
+
cause: parseResult.error
|
5019
|
+
});
|
5020
|
+
}
|
5021
|
+
return {
|
5022
|
+
type: "tool-call",
|
5023
|
+
toolCallId: toolCall.toolCallId,
|
5024
|
+
toolName,
|
5025
|
+
args: parseResult == null ? void 0 : parseResult.value
|
5026
|
+
};
|
5027
|
+
}
|
5028
|
+
|
5029
|
+
// core/generate-text/step-result.ts
|
5030
|
+
var DefaultStepResult = class {
|
5031
|
+
constructor({
|
5032
|
+
content,
|
5033
|
+
finishReason,
|
5034
|
+
usage,
|
5035
|
+
warnings,
|
5036
|
+
request,
|
5037
|
+
response,
|
5038
|
+
providerMetadata
|
5039
|
+
}) {
|
5040
|
+
this.content = content;
|
5041
|
+
this.finishReason = finishReason;
|
5042
|
+
this.usage = usage;
|
5043
|
+
this.warnings = warnings;
|
5044
|
+
this.request = request;
|
5045
|
+
this.response = response;
|
5046
|
+
this.providerMetadata = providerMetadata;
|
5047
|
+
}
|
5048
|
+
get text() {
|
5049
|
+
return this.content.filter((part) => part.type === "text").map((part) => part.text).join("");
|
5050
|
+
}
|
5051
|
+
get reasoning() {
|
5052
|
+
return this.content.filter((part) => part.type === "reasoning");
|
5053
|
+
}
|
5054
|
+
get reasoningText() {
|
5055
|
+
return this.reasoning.length === 0 ? void 0 : this.reasoning.map((part) => part.text).join("");
|
5056
|
+
}
|
5057
|
+
get files() {
|
5058
|
+
return this.content.filter((part) => part.type === "file").map((part) => part.file);
|
5059
|
+
}
|
5060
|
+
get sources() {
|
5061
|
+
return this.content.filter((part) => part.type === "source");
|
5062
|
+
}
|
5063
|
+
get toolCalls() {
|
5064
|
+
return this.content.filter((part) => part.type === "tool-call");
|
5219
5065
|
}
|
5220
|
-
|
5221
|
-
type
|
5222
|
-
|
5223
|
-
|
5224
|
-
args: parseResult == null ? void 0 : parseResult.value
|
5225
|
-
};
|
5226
|
-
}
|
5227
|
-
|
5228
|
-
// core/generate-text/reasoning.ts
|
5229
|
-
function asReasoningText(reasoningParts) {
|
5230
|
-
const reasoningText = reasoningParts.map((part) => part.text).join("");
|
5231
|
-
return reasoningText.length > 0 ? reasoningText : void 0;
|
5232
|
-
}
|
5066
|
+
get toolResults() {
|
5067
|
+
return this.content.filter((part) => part.type === "tool-result");
|
5068
|
+
}
|
5069
|
+
};
|
5233
5070
|
|
5234
5071
|
// core/generate-text/to-response-messages.ts
|
5235
5072
|
function toResponseMessages({
|
5236
|
-
|
5237
|
-
|
5238
|
-
reasoning,
|
5239
|
-
tools,
|
5240
|
-
toolCalls,
|
5241
|
-
toolResults,
|
5242
|
-
messageId,
|
5243
|
-
generateMessageId
|
5073
|
+
content: inputContent,
|
5074
|
+
tools
|
5244
5075
|
}) {
|
5245
5076
|
const responseMessages = [];
|
5246
|
-
const content =
|
5247
|
-
|
5248
|
-
|
5249
|
-
|
5250
|
-
|
5251
|
-
|
5252
|
-
|
5253
|
-
|
5254
|
-
|
5255
|
-
|
5256
|
-
|
5257
|
-
|
5258
|
-
|
5259
|
-
|
5260
|
-
|
5261
|
-
|
5262
|
-
|
5263
|
-
|
5264
|
-
|
5077
|
+
const content = inputContent.filter((part) => part.type !== "tool-result" && part.type !== "source").filter((part) => part.type !== "text" || part.text.length > 0).map((part) => {
|
5078
|
+
switch (part.type) {
|
5079
|
+
case "text":
|
5080
|
+
return part;
|
5081
|
+
case "reasoning":
|
5082
|
+
return {
|
5083
|
+
type: "reasoning",
|
5084
|
+
text: part.text,
|
5085
|
+
providerOptions: part.providerMetadata
|
5086
|
+
};
|
5087
|
+
case "file":
|
5088
|
+
return {
|
5089
|
+
type: "file",
|
5090
|
+
data: part.file.base64,
|
5091
|
+
mediaType: part.file.mediaType
|
5092
|
+
};
|
5093
|
+
case "tool-call":
|
5094
|
+
return part;
|
5095
|
+
}
|
5096
|
+
});
|
5265
5097
|
if (content.length > 0) {
|
5266
5098
|
responseMessages.push({
|
5267
5099
|
role: "assistant",
|
5268
|
-
content
|
5269
|
-
id: messageId
|
5100
|
+
content
|
5270
5101
|
});
|
5271
5102
|
}
|
5272
|
-
|
5103
|
+
const toolResultContent = inputContent.filter((part) => part.type === "tool-result").map((toolResult) => {
|
5104
|
+
const tool2 = tools[toolResult.toolName];
|
5105
|
+
return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
|
5106
|
+
type: "tool-result",
|
5107
|
+
toolCallId: toolResult.toolCallId,
|
5108
|
+
toolName: toolResult.toolName,
|
5109
|
+
result: tool2.experimental_toToolResultContent(toolResult.result),
|
5110
|
+
experimental_content: tool2.experimental_toToolResultContent(
|
5111
|
+
toolResult.result
|
5112
|
+
)
|
5113
|
+
} : {
|
5114
|
+
type: "tool-result",
|
5115
|
+
toolCallId: toolResult.toolCallId,
|
5116
|
+
toolName: toolResult.toolName,
|
5117
|
+
result: toolResult.result
|
5118
|
+
};
|
5119
|
+
});
|
5120
|
+
if (toolResultContent.length > 0) {
|
5273
5121
|
responseMessages.push({
|
5274
5122
|
role: "tool",
|
5275
|
-
|
5276
|
-
content: toolResults.map((toolResult) => {
|
5277
|
-
const tool2 = tools[toolResult.toolName];
|
5278
|
-
return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
|
5279
|
-
type: "tool-result",
|
5280
|
-
toolCallId: toolResult.toolCallId,
|
5281
|
-
toolName: toolResult.toolName,
|
5282
|
-
result: tool2.experimental_toToolResultContent(toolResult.result),
|
5283
|
-
experimental_content: tool2.experimental_toToolResultContent(
|
5284
|
-
toolResult.result
|
5285
|
-
)
|
5286
|
-
} : {
|
5287
|
-
type: "tool-result",
|
5288
|
-
toolCallId: toolResult.toolCallId,
|
5289
|
-
toolName: toolResult.toolName,
|
5290
|
-
result: toolResult.result
|
5291
|
-
};
|
5292
|
-
})
|
5123
|
+
content: toolResultContent
|
5293
5124
|
});
|
5294
5125
|
}
|
5295
5126
|
return responseMessages;
|
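
The hunk above replaces the removed extractFiles / extractReasoning / extractSources helpers with a single content array: DefaultStepResult exposes text, reasoning, files, sources, toolCalls and toolResults as getters over it, and toResponseMessages now takes { content, tools } instead of the old per-field arguments. A rough sketch of how such a content array maps onto those getters; the parts and values below are invented for illustration:

```js
// Illustrative content array in the shape DefaultStepResult filters on.
const content = [
  { type: "reasoning", text: "Check the weather tool first. " },
  { type: "text", text: "It is 21°C in Berlin." },
  { type: "tool-call", toolCallId: "call-1", toolName: "weather", args: { city: "Berlin" } },
  { type: "tool-result", toolCallId: "call-1", toolName: "weather", result: { tempC: 21 } },
];

// Mirrors the getters above: each view is just a filter over `content`.
const text = content.filter((p) => p.type === "text").map((p) => p.text).join("");
const toolCalls = content.filter((p) => p.type === "tool-call");
const toolResults = content.filter((p) => p.type === "tool-result");

console.log(text, toolCalls.length, toolResults.length); // "It is 21°C in Berlin." 1 1
```
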
@@ -5300,10 +5131,6 @@ var originalGenerateId3 = createIdGenerator3({
|
|
5300
5131
|
prefix: "aitxt",
|
5301
5132
|
size: 24
|
5302
5133
|
});
|
5303
|
-
var originalGenerateMessageId = createIdGenerator3({
|
5304
|
-
prefix: "msg",
|
5305
|
-
size: 24
|
5306
|
-
});
|
5307
5134
|
async function generateText({
|
5308
5135
|
model,
|
5309
5136
|
tools,
|
@@ -5315,9 +5142,7 @@ async function generateText({
|
|
5315
5142
|
abortSignal,
|
5316
5143
|
headers,
|
5317
5144
|
maxSteps = 1,
|
5318
|
-
experimental_generateMessageId: generateMessageId = originalGenerateMessageId,
|
5319
5145
|
experimental_output: output,
|
5320
|
-
experimental_continueSteps: continueSteps = false,
|
5321
5146
|
experimental_telemetry: telemetry,
|
5322
5147
|
providerOptions,
|
5323
5148
|
experimental_activeTools: activeTools,
|
@@ -5373,22 +5198,14 @@ async function generateText({
|
|
5373
5198
|
}),
|
5374
5199
|
tracer,
|
5375
5200
|
fn: async (span) => {
|
5376
|
-
var _a17, _b, _c, _d
|
5201
|
+
var _a17, _b, _c, _d;
|
5377
5202
|
const callSettings2 = prepareCallSettings(settings);
|
5378
5203
|
let currentModelResponse;
|
5379
5204
|
let currentToolCalls = [];
|
5380
5205
|
let currentToolResults = [];
|
5381
5206
|
let stepCount = 0;
|
5382
5207
|
const responseMessages = [];
|
5383
|
-
let text2 = "";
|
5384
|
-
const sources = [];
|
5385
5208
|
const steps = [];
|
5386
|
-
let usage = {
|
5387
|
-
inputTokens: void 0,
|
5388
|
-
outputTokens: void 0,
|
5389
|
-
totalTokens: void 0
|
5390
|
-
};
|
5391
|
-
let stepType = "initial";
|
5392
5209
|
do {
|
5393
5210
|
const stepInputMessages = [
|
5394
5211
|
...initialPrompt.messages,
|
@@ -5454,7 +5271,7 @@ async function generateText({
|
|
5454
5271
|
}),
|
5455
5272
|
tracer,
|
5456
5273
|
fn: async (span2) => {
|
5457
|
-
var _a19, _b2, _c2, _d2,
|
5274
|
+
var _a19, _b2, _c2, _d2, _e, _f, _g, _h;
|
5458
5275
|
const result = await stepModel.doGenerate({
|
5459
5276
|
...callSettings2,
|
5460
5277
|
tools: stepTools,
|
@@ -5468,7 +5285,7 @@ async function generateText({
|
|
5468
5285
|
const responseData = {
|
5469
5286
|
id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
|
5470
5287
|
timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
|
5471
|
-
modelId: (
|
5288
|
+
modelId: (_f = (_e = result.response) == null ? void 0 : _e.modelId) != null ? _f : stepModel.modelId,
|
5472
5289
|
headers: (_g = result.response) == null ? void 0 : _g.headers,
|
5473
5290
|
body: (_h = result.response) == null ? void 0 : _h.body
|
5474
5291
|
};
|
@@ -5527,89 +5344,35 @@ async function generateText({
|
|
5527
5344
|
messages: stepInputMessages,
|
5528
5345
|
abortSignal
|
5529
5346
|
});
|
5530
|
-
usage = addLanguageModelUsage(usage, currentModelResponse.usage);
|
5531
|
-
let nextStepType = "done";
|
5532
|
-
if (++stepCount < maxSteps) {
|
5533
|
-
if (continueSteps && currentModelResponse.finishReason === "length" && // only use continue when there are no tool calls:
|
5534
|
-
currentToolCalls.length === 0) {
|
5535
|
-
nextStepType = "continue";
|
5536
|
-
} else if (
|
5537
|
-
// there are tool calls:
|
5538
|
-
currentToolCalls.length > 0 && // all current tool calls have results:
|
5539
|
-
currentToolResults.length === currentToolCalls.length
|
5540
|
-
) {
|
5541
|
-
nextStepType = "tool-result";
|
5542
|
-
}
|
5543
|
-
}
|
5544
5347
|
const stepContent = asContent({
|
5545
5348
|
content: currentModelResponse.content,
|
5546
5349
|
toolCalls: currentToolCalls,
|
5547
5350
|
toolResults: currentToolResults
|
5548
5351
|
});
|
5549
|
-
|
5550
|
-
|
5551
|
-
|
5552
|
-
|
5553
|
-
|
5554
|
-
sources.push(
|
5555
|
-
...currentModelResponse.content.filter(
|
5556
|
-
(part) => part.type === "source"
|
5557
|
-
)
|
5352
|
+
responseMessages.push(
|
5353
|
+
...toResponseMessages({
|
5354
|
+
content: stepContent,
|
5355
|
+
tools: tools != null ? tools : {}
|
5356
|
+
})
|
5558
5357
|
);
|
5559
|
-
|
5560
|
-
const lastMessage = responseMessages[responseMessages.length - 1];
|
5561
|
-
if (typeof lastMessage.content === "string") {
|
5562
|
-
lastMessage.content += stepText;
|
5563
|
-
} else {
|
5564
|
-
lastMessage.content.push({
|
5565
|
-
text: stepText,
|
5566
|
-
type: "text"
|
5567
|
-
});
|
5568
|
-
}
|
5569
|
-
} else {
|
5570
|
-
responseMessages.push(
|
5571
|
-
...toResponseMessages({
|
5572
|
-
text: text2,
|
5573
|
-
files: extractFiles(stepContent),
|
5574
|
-
reasoning: extractReasoning(stepContent).map((part) => ({
|
5575
|
-
type: "reasoning",
|
5576
|
-
text: part.text,
|
5577
|
-
providerOptions: part.providerMetadata
|
5578
|
-
})),
|
5579
|
-
tools: tools != null ? tools : {},
|
5580
|
-
toolCalls: currentToolCalls,
|
5581
|
-
toolResults: currentToolResults,
|
5582
|
-
messageId: generateMessageId(),
|
5583
|
-
generateMessageId
|
5584
|
-
})
|
5585
|
-
);
|
5586
|
-
}
|
5587
|
-
const currentStepResult = {
|
5588
|
-
stepType,
|
5358
|
+
const currentStepResult = new DefaultStepResult({
|
5589
5359
|
content: stepContent,
|
5590
|
-
text: stepText,
|
5591
|
-
reasoningText: asReasoningText(extractReasoning(stepContent)),
|
5592
|
-
reasoning: extractReasoning(stepContent),
|
5593
|
-
files: extractFiles(stepContent),
|
5594
|
-
sources: extractSources(stepContent),
|
5595
|
-
toolCalls: currentToolCalls,
|
5596
|
-
toolResults: currentToolResults,
|
5597
5360
|
finishReason: currentModelResponse.finishReason,
|
5598
5361
|
usage: currentModelResponse.usage,
|
5599
5362
|
warnings: currentModelResponse.warnings,
|
5600
|
-
|
5363
|
+
providerMetadata: currentModelResponse.providerMetadata,
|
5364
|
+
request: (_d = currentModelResponse.request) != null ? _d : {},
|
5601
5365
|
response: {
|
5602
5366
|
...currentModelResponse.response,
|
5603
5367
|
// deep clone msgs to avoid mutating past messages in multi-step:
|
5604
5368
|
messages: structuredClone(responseMessages)
|
5605
|
-
}
|
5606
|
-
|
5607
|
-
isContinued: nextStepType === "continue"
|
5608
|
-
};
|
5369
|
+
}
|
5370
|
+
});
|
5609
5371
|
steps.push(currentStepResult);
|
5610
5372
|
await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
|
5611
|
-
|
5612
|
-
|
5373
|
+
} while (++stepCount < maxSteps && // there are tool calls:
|
5374
|
+
currentToolCalls.length > 0 && // all current tool calls have results:
|
5375
|
+
currentToolResults.length === currentToolCalls.length);
|
5613
5376
|
span.setAttributes(
|
5614
5377
|
selectTelemetryAttributes({
|
5615
5378
|
telemetry,
|
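
With the continue-steps branch gone, generateText's stepping logic collapses into the do/while shown above: another step runs only while the step budget is not exhausted and every tool call of the current step has a result. A condensed restatement of that control flow; runOneStep stands in for the traced doGenerate / tool-execution block and is not a real export:

```js
let stepCount = 0;
let step;
const steps = [];
do {
  step = await runOneStep(); // placeholder for the doGenerate + executeTools block above
  steps.push(step);
} while (
  ++stepCount < maxSteps &&                         // step budget left
  step.toolCalls.length > 0 &&                      // the step issued tool calls
  step.toolResults.length === step.toolCalls.length // and every call has a result
);
```
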
@@ -5630,32 +5393,17 @@ async function generateText({
|
|
5630
5393
|
}
|
5631
5394
|
})
|
5632
5395
|
);
|
5633
|
-
const
|
5634
|
-
{ text: text2 },
|
5635
|
-
{
|
5636
|
-
response: currentModelResponse.response,
|
5637
|
-
usage,
|
5638
|
-
finishReason: currentModelResponse.finishReason
|
5639
|
-
}
|
5640
|
-
));
|
5396
|
+
const lastStep = steps[steps.length - 1];
|
5641
5397
|
return new DefaultGenerateTextResult({
|
5642
|
-
text: text2,
|
5643
|
-
content: asContent({
|
5644
|
-
content: currentModelResponse.content,
|
5645
|
-
toolCalls: currentToolCalls,
|
5646
|
-
toolResults: currentToolResults
|
5647
|
-
}),
|
5648
|
-
resolvedOutput,
|
5649
|
-
finishReason: currentModelResponse.finishReason,
|
5650
|
-
usage,
|
5651
|
-
warnings: currentModelResponse.warnings,
|
5652
|
-
request: (_f = currentModelResponse.request) != null ? _f : {},
|
5653
|
-
response: {
|
5654
|
-
...currentModelResponse.response,
|
5655
|
-
messages: responseMessages
|
5656
|
-
},
|
5657
5398
|
steps,
|
5658
|
-
|
5399
|
+
resolvedOutput: await (output == null ? void 0 : output.parseOutput(
|
5400
|
+
{ text: lastStep.text },
|
5401
|
+
{
|
5402
|
+
response: lastStep.response,
|
5403
|
+
usage: lastStep.usage,
|
5404
|
+
finishReason: lastStep.finishReason
|
5405
|
+
}
|
5406
|
+
))
|
5659
5407
|
});
|
5660
5408
|
}
|
5661
5409
|
});
|
@@ -5737,35 +5485,67 @@ async function executeTools({
|
|
5737
5485
|
}
|
5738
5486
|
var DefaultGenerateTextResult = class {
|
5739
5487
|
constructor(options) {
|
5740
|
-
this.text = options.text;
|
5741
|
-
this.content = options.content;
|
5742
|
-
this.finishReason = options.finishReason;
|
5743
|
-
this.usage = options.usage;
|
5744
|
-
this.warnings = options.warnings;
|
5745
|
-
this.request = options.request;
|
5746
|
-
this.response = options.response;
|
5747
5488
|
this.steps = options.steps;
|
5748
|
-
this.providerMetadata = options.providerMetadata;
|
5749
5489
|
this.resolvedOutput = options.resolvedOutput;
|
5750
5490
|
}
|
5491
|
+
get finalStep() {
|
5492
|
+
return this.steps[this.steps.length - 1];
|
5493
|
+
}
|
5494
|
+
get content() {
|
5495
|
+
return this.finalStep.content;
|
5496
|
+
}
|
5497
|
+
get text() {
|
5498
|
+
return this.finalStep.text;
|
5499
|
+
}
|
5751
5500
|
get files() {
|
5752
|
-
return
|
5501
|
+
return this.finalStep.files;
|
5753
5502
|
}
|
5754
5503
|
get reasoningText() {
|
5755
|
-
|
5756
|
-
return texts.length > 0 ? texts.join("") : void 0;
|
5504
|
+
return this.finalStep.reasoningText;
|
5757
5505
|
}
|
5758
5506
|
get reasoning() {
|
5759
|
-
return this.
|
5507
|
+
return this.finalStep.reasoning;
|
5760
5508
|
}
|
5761
5509
|
get toolCalls() {
|
5762
|
-
return this.
|
5510
|
+
return this.finalStep.toolCalls;
|
5763
5511
|
}
|
5764
5512
|
get toolResults() {
|
5765
|
-
return this.
|
5513
|
+
return this.finalStep.toolResults;
|
5766
5514
|
}
|
5767
5515
|
get sources() {
|
5768
|
-
return this.
|
5516
|
+
return this.finalStep.sources;
|
5517
|
+
}
|
5518
|
+
get finishReason() {
|
5519
|
+
return this.finalStep.finishReason;
|
5520
|
+
}
|
5521
|
+
get warnings() {
|
5522
|
+
return this.finalStep.warnings;
|
5523
|
+
}
|
5524
|
+
get providerMetadata() {
|
5525
|
+
return this.finalStep.providerMetadata;
|
5526
|
+
}
|
5527
|
+
get response() {
|
5528
|
+
return this.finalStep.response;
|
5529
|
+
}
|
5530
|
+
get request() {
|
5531
|
+
return this.finalStep.request;
|
5532
|
+
}
|
5533
|
+
get usage() {
|
5534
|
+
return this.finalStep.usage;
|
5535
|
+
}
|
5536
|
+
get totalUsage() {
|
5537
|
+
return this.steps.reduce(
|
5538
|
+
(totalUsage, step) => {
|
5539
|
+
return addLanguageModelUsage(totalUsage, step.usage);
|
5540
|
+
},
|
5541
|
+
{
|
5542
|
+
inputTokens: void 0,
|
5543
|
+
outputTokens: void 0,
|
5544
|
+
totalTokens: void 0,
|
5545
|
+
reasoningTokens: void 0,
|
5546
|
+
cachedInputTokens: void 0
|
5547
|
+
}
|
5548
|
+
);
|
5769
5549
|
}
|
5770
5550
|
get experimental_output() {
|
5771
5551
|
if (this.resolvedOutput == null) {
|
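
DefaultGenerateTextResult now stores only steps and resolvedOutput and reads everything else off the final step, while totalUsage reduces across all steps. A small sketch of the usage / totalUsage distinction; the plain addition stands in for addLanguageModelUsage (whose exact undefined handling is not shown in this diff), and the numbers are invented:

```js
const steps = [
  { usage: { inputTokens: 120, outputTokens: 40, totalTokens: 160 } },
  { usage: { inputTokens: 200, outputTokens: 55, totalTokens: 255 } },
];

// `usage` on the result -> token usage of the final step only
const usage = steps[steps.length - 1].usage; // { inputTokens: 200, outputTokens: 55, totalTokens: 255 }

// `totalUsage` -> reduced over every step, like the getter above
const totalUsage = steps.reduce(
  (acc, step) => ({
    inputTokens: acc.inputTokens + (step.usage.inputTokens ?? 0),
    outputTokens: acc.outputTokens + (step.usage.outputTokens ?? 0),
    totalTokens: acc.totalTokens + (step.usage.totalTokens ?? 0),
  }),
  { inputTokens: 0, outputTokens: 0, totalTokens: 0 }
); // { inputTokens: 320, outputTokens: 95, totalTokens: 415 }
```
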
@@ -5797,7 +5577,7 @@ __export(output_exports, {
|
|
5797
5577
|
});
|
5798
5578
|
import {
|
5799
5579
|
asSchema as asSchema4,
|
5800
|
-
safeParseJSON as
|
5580
|
+
safeParseJSON as safeParseJSON4,
|
5801
5581
|
safeValidateTypes as safeValidateTypes4
|
5802
5582
|
} from "@ai-sdk/provider-utils";
|
5803
5583
|
var text = () => ({
|
@@ -5839,7 +5619,7 @@ var object = ({
|
|
5839
5619
|
}
|
5840
5620
|
},
|
5841
5621
|
async parseOutput({ text: text2 }, context) {
|
5842
|
-
const parseResult = await
|
5622
|
+
const parseResult = await safeParseJSON4({ text: text2 });
|
5843
5623
|
if (!parseResult.success) {
|
5844
5624
|
throw new NoObjectGeneratedError({
|
5845
5625
|
message: "No object generated: could not parse the response.",
|
@@ -5946,25 +5726,6 @@ function asArray(value) {
|
|
5946
5726
|
return value === void 0 ? [] : Array.isArray(value) ? value : [value];
|
5947
5727
|
}
|
5948
5728
|
|
5949
|
-
// src/util/consume-stream.ts
|
5950
|
-
async function consumeStream({
|
5951
|
-
stream,
|
5952
|
-
onError
|
5953
|
-
}) {
|
5954
|
-
const reader = stream.getReader();
|
5955
|
-
try {
|
5956
|
-
while (true) {
|
5957
|
-
const { done } = await reader.read();
|
5958
|
-
if (done)
|
5959
|
-
break;
|
5960
|
-
}
|
5961
|
-
} catch (error) {
|
5962
|
-
onError == null ? void 0 : onError(error);
|
5963
|
-
} finally {
|
5964
|
-
reader.releaseLock();
|
5965
|
-
}
|
5966
|
-
}
|
5967
|
-
|
5968
5729
|
// core/generate-text/run-tools-transformation.ts
|
5969
5730
|
import { generateId } from "@ai-sdk/provider-utils";
|
5970
5731
|
function runToolsTransformation({
|
@@ -6165,10 +5926,6 @@ var originalGenerateId4 = createIdGenerator4({
|
|
6165
5926
|
prefix: "aitxt",
|
6166
5927
|
size: 24
|
6167
5928
|
});
|
6168
|
-
var originalGenerateMessageId2 = createIdGenerator4({
|
6169
|
-
prefix: "msg",
|
6170
|
-
size: 24
|
6171
|
-
});
|
6172
5929
|
function streamText({
|
6173
5930
|
model,
|
6174
5931
|
tools,
|
@@ -6180,9 +5937,7 @@ function streamText({
|
|
6180
5937
|
abortSignal,
|
6181
5938
|
headers,
|
6182
5939
|
maxSteps = 1,
|
6183
|
-
experimental_generateMessageId: generateMessageId = originalGenerateMessageId2,
|
6184
5940
|
experimental_output: output,
|
6185
|
-
experimental_continueSteps: continueSteps = false,
|
6186
5941
|
experimental_telemetry: telemetry,
|
6187
5942
|
providerOptions,
|
6188
5943
|
experimental_toolCallStreaming = false,
|
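
As with generateText, streamText drops experimental_continueSteps and experimental_generateMessageId here; multi-step runs are governed by maxSteps alone. A minimal call under the new option surface; the provider import and model id are placeholders, not part of this package:

```js
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // assumption: any v2 provider is wired the same way

const result = streamText({
  model: openai("gpt-4o"),
  prompt: "Summarize the weather tool results.",
  maxSteps: 3, // stepping now stops on maxSteps or when a step has unresolved tool calls
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```
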
@@ -6219,7 +5974,6 @@ function streamText({
|
|
6219
5974
|
repairToolCall,
|
6220
5975
|
maxSteps,
|
6221
5976
|
output,
|
6222
|
-
continueSteps,
|
6223
5977
|
providerOptions,
|
6224
5978
|
onChunk,
|
6225
5979
|
onError,
|
@@ -6227,8 +5981,7 @@ function streamText({
|
|
6227
5981
|
onStepFinish,
|
6228
5982
|
now: now2,
|
6229
5983
|
currentDate,
|
6230
|
-
generateId: generateId3
|
6231
|
-
generateMessageId
|
5984
|
+
generateId: generateId3
|
6232
5985
|
});
|
6233
5986
|
}
|
6234
5987
|
function createOutputTransformStream(output) {
|
@@ -6254,7 +6007,7 @@ function createOutputTransformStream(output) {
|
|
6254
6007
|
}
|
6255
6008
|
return new TransformStream({
|
6256
6009
|
async transform(chunk, controller) {
|
6257
|
-
if (chunk.type === "step
|
6010
|
+
if (chunk.type === "finish-step") {
|
6258
6011
|
publishTextChunk({ controller });
|
6259
6012
|
}
|
6260
6013
|
if (chunk.type !== "text") {
|
@@ -6298,32 +6051,18 @@ var DefaultStreamTextResult = class {
|
|
6298
6051
|
repairToolCall,
|
6299
6052
|
maxSteps,
|
6300
6053
|
output,
|
6301
|
-
continueSteps,
|
6302
6054
|
providerOptions,
|
6303
6055
|
now: now2,
|
6304
6056
|
currentDate,
|
6305
6057
|
generateId: generateId3,
|
6306
|
-
generateMessageId,
|
6307
6058
|
onChunk,
|
6308
6059
|
onError,
|
6309
6060
|
onFinish,
|
6310
6061
|
onStepFinish
|
6311
6062
|
}) {
|
6312
|
-
this.
|
6313
|
-
this.usagePromise = new DelayedPromise();
|
6063
|
+
this.totalUsagePromise = new DelayedPromise();
|
6314
6064
|
this.finishReasonPromise = new DelayedPromise();
|
6315
|
-
this.providerMetadataPromise = new DelayedPromise();
|
6316
|
-
this.textPromise = new DelayedPromise();
|
6317
|
-
this.reasoningPromise = new DelayedPromise();
|
6318
|
-
this.reasoningDetailsPromise = new DelayedPromise();
|
6319
|
-
this.sourcesPromise = new DelayedPromise();
|
6320
|
-
this.filesPromise = new DelayedPromise();
|
6321
|
-
this.toolCallsPromise = new DelayedPromise();
|
6322
|
-
this.toolResultsPromise = new DelayedPromise();
|
6323
|
-
this.requestPromise = new DelayedPromise();
|
6324
|
-
this.responsePromise = new DelayedPromise();
|
6325
6065
|
this.stepsPromise = new DelayedPromise();
|
6326
|
-
this.contentPromise = new DelayedPromise();
|
6327
6066
|
if (maxSteps < 1) {
|
6328
6067
|
throw new InvalidArgumentError({
|
6329
6068
|
parameter: "maxSteps",
|
@@ -6332,23 +6071,14 @@ var DefaultStreamTextResult = class {
|
|
6332
6071
|
});
|
6333
6072
|
}
|
6334
6073
|
this.output = output;
|
6335
|
-
|
6336
|
-
let recordedContinuationText = "";
|
6337
|
-
let recordedFullText = "";
|
6074
|
+
this.generateId = generateId3;
|
6338
6075
|
let activeReasoningPart = void 0;
|
6339
6076
|
let recordedContent = [];
|
6340
|
-
const
|
6341
|
-
const recordedResponse = {
|
6342
|
-
id: generateId3(),
|
6343
|
-
timestamp: currentDate(),
|
6344
|
-
modelId: model.modelId,
|
6345
|
-
messages: []
|
6346
|
-
};
|
6347
|
-
let recordedToolCalls = [];
|
6348
|
-
let recordedToolResults = [];
|
6077
|
+
const recordedResponseMessages = [];
|
6349
6078
|
let recordedFinishReason = void 0;
|
6350
|
-
let
|
6351
|
-
let
|
6079
|
+
let recordedTotalUsage = void 0;
|
6080
|
+
let recordedRequest = {};
|
6081
|
+
let recordedWarnings = [];
|
6352
6082
|
const recordedSteps = [];
|
6353
6083
|
let rootSpan;
|
6354
6084
|
const eventProcessor = new TransformStream({
|
@@ -6362,9 +6092,6 @@ var DefaultStreamTextResult = class {
|
|
6362
6092
|
await (onError == null ? void 0 : onError({ error: part.error }));
|
6363
6093
|
}
|
6364
6094
|
if (part.type === "text") {
|
6365
|
-
recordedStepText += part.text;
|
6366
|
-
recordedContinuationText += part.text;
|
6367
|
-
recordedFullText += part.text;
|
6368
6095
|
const latestContent = recordedContent[recordedContent.length - 1];
|
6369
6096
|
if ((latestContent == null ? void 0 : latestContent.type) === "text") {
|
6370
6097
|
latestContent.text += part.text;
|
@@ -6377,12 +6104,12 @@ var DefaultStreamTextResult = class {
|
|
6377
6104
|
activeReasoningPart = {
|
6378
6105
|
type: "reasoning",
|
6379
6106
|
text: part.text,
|
6380
|
-
|
6107
|
+
providerMetadata: part.providerMetadata
|
6381
6108
|
};
|
6382
6109
|
recordedContent.push(activeReasoningPart);
|
6383
6110
|
} else {
|
6384
6111
|
activeReasoningPart.text += part.text;
|
6385
|
-
activeReasoningPart.
|
6112
|
+
activeReasoningPart.providerMetadata = part.providerMetadata;
|
6386
6113
|
}
|
6387
6114
|
}
|
6388
6115
|
if (part.type === "reasoning-part-finish" && activeReasoningPart != null) {
|
@@ -6393,129 +6120,76 @@ var DefaultStreamTextResult = class {
|
|
6393
6120
|
}
|
6394
6121
|
if (part.type === "source") {
|
6395
6122
|
recordedContent.push(part);
|
6396
|
-
recordedSources.push(part);
|
6397
6123
|
}
|
6398
6124
|
if (part.type === "tool-call") {
|
6399
6125
|
recordedContent.push(part);
|
6400
|
-
recordedToolCalls.push(part);
|
6401
6126
|
}
|
6402
6127
|
if (part.type === "tool-result") {
|
6403
6128
|
recordedContent.push(part);
|
6404
|
-
recordedToolResults.push(part);
|
6405
6129
|
}
|
6406
|
-
if (part.type === "step
|
6130
|
+
if (part.type === "start-step") {
|
6131
|
+
recordedRequest = part.request;
|
6132
|
+
recordedWarnings = part.warnings;
|
6133
|
+
}
|
6134
|
+
if (part.type === "finish-step") {
|
6407
6135
|
const stepMessages = toResponseMessages({
|
6408
|
-
|
6409
|
-
|
6410
|
-
reasoning: extractReasoning(recordedContent),
|
6411
|
-
tools: tools != null ? tools : {},
|
6412
|
-
toolCalls: recordedToolCalls,
|
6413
|
-
toolResults: recordedToolResults,
|
6414
|
-
messageId: part.messageId,
|
6415
|
-
generateMessageId
|
6136
|
+
content: recordedContent,
|
6137
|
+
tools: tools != null ? tools : {}
|
6416
6138
|
});
|
6417
|
-
const
|
6418
|
-
let nextStepType = "done";
|
6419
|
-
if (currentStep + 1 < maxSteps) {
|
6420
|
-
if (continueSteps && part.finishReason === "length" && // only use continue when there are no tool calls:
|
6421
|
-
recordedToolCalls.length === 0) {
|
6422
|
-
nextStepType = "continue";
|
6423
|
-
} else if (
|
6424
|
-
// there are tool calls:
|
6425
|
-
recordedToolCalls.length > 0 && // all current tool calls have results:
|
6426
|
-
recordedToolResults.length === recordedToolCalls.length
|
6427
|
-
) {
|
6428
|
-
nextStepType = "tool-result";
|
6429
|
-
}
|
6430
|
-
}
|
6431
|
-
const currentStepResult = {
|
6432
|
-
stepType,
|
6139
|
+
const currentStepResult = new DefaultStepResult({
|
6433
6140
|
content: recordedContent,
|
6434
|
-
text: recordedStepText,
|
6435
|
-
reasoningText: asReasoningText(extractReasoning(recordedContent)),
|
6436
|
-
reasoning: extractReasoning(recordedContent),
|
6437
|
-
files: extractFiles(recordedContent),
|
6438
|
-
sources: extractSources(recordedContent),
|
6439
|
-
toolCalls: recordedToolCalls,
|
6440
|
-
toolResults: recordedToolResults,
|
6441
6141
|
finishReason: part.finishReason,
|
6442
6142
|
usage: part.usage,
|
6443
|
-
warnings:
|
6444
|
-
request:
|
6143
|
+
warnings: recordedWarnings,
|
6144
|
+
request: recordedRequest,
|
6445
6145
|
response: {
|
6446
6146
|
...part.response,
|
6447
|
-
messages: [...
|
6147
|
+
messages: [...recordedResponseMessages, ...stepMessages]
|
6448
6148
|
},
|
6449
|
-
providerMetadata: part.providerMetadata
|
6450
|
-
|
6451
|
-
};
|
6149
|
+
providerMetadata: part.providerMetadata
|
6150
|
+
});
|
6452
6151
|
await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
|
6453
6152
|
recordedSteps.push(currentStepResult);
|
6454
6153
|
recordedContent = [];
|
6455
|
-
recordedToolCalls = [];
|
6456
|
-
recordedToolResults = [];
|
6457
|
-
recordedStepText = "";
|
6458
6154
|
activeReasoningPart = void 0;
|
6459
|
-
|
6460
|
-
stepType = nextStepType;
|
6461
|
-
}
|
6462
|
-
if (nextStepType !== "continue") {
|
6463
|
-
recordedResponse.messages.push(...stepMessages);
|
6464
|
-
recordedContinuationText = "";
|
6465
|
-
}
|
6155
|
+
recordedResponseMessages.push(...stepMessages);
|
6466
6156
|
}
|
6467
6157
|
if (part.type === "finish") {
|
6468
|
-
|
6469
|
-
recordedResponse.timestamp = part.response.timestamp;
|
6470
|
-
recordedResponse.modelId = part.response.modelId;
|
6471
|
-
recordedResponse.headers = part.response.headers;
|
6472
|
-
recordedUsage = part.usage;
|
6158
|
+
recordedTotalUsage = part.totalUsage;
|
6473
6159
|
recordedFinishReason = part.finishReason;
|
6474
6160
|
}
|
6475
6161
|
},
|
6476
6162
|
async flush(controller) {
|
6477
|
-
var _a17;
|
6478
6163
|
try {
|
6479
6164
|
if (recordedSteps.length === 0) {
|
6480
6165
|
return;
|
6481
6166
|
}
|
6482
|
-
const lastStep = recordedSteps[recordedSteps.length - 1];
|
6483
|
-
self.contentPromise.resolve(lastStep.content);
|
6484
|
-
self.warningsPromise.resolve(lastStep.warnings);
|
6485
|
-
self.requestPromise.resolve(lastStep.request);
|
6486
|
-
self.responsePromise.resolve(lastStep.response);
|
6487
|
-
self.toolCallsPromise.resolve(lastStep.toolCalls);
|
6488
|
-
self.toolResultsPromise.resolve(lastStep.toolResults);
|
6489
|
-
self.providerMetadataPromise.resolve(lastStep.providerMetadata);
|
6490
|
-
self.reasoningPromise.resolve(lastStep.reasoningText);
|
6491
|
-
self.reasoningDetailsPromise.resolve(lastStep.reasoning);
|
6492
6167
|
const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
|
6493
|
-
const
|
6168
|
+
const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : {
|
6494
6169
|
inputTokens: void 0,
|
6495
6170
|
outputTokens: void 0,
|
6496
6171
|
totalTokens: void 0
|
6497
6172
|
};
|
6498
6173
|
self.finishReasonPromise.resolve(finishReason);
|
6499
|
-
self.
|
6500
|
-
self.textPromise.resolve(recordedFullText);
|
6501
|
-
self.sourcesPromise.resolve(recordedSources);
|
6502
|
-
self.filesPromise.resolve(lastStep.files);
|
6174
|
+
self.totalUsagePromise.resolve(totalUsage);
|
6503
6175
|
self.stepsPromise.resolve(recordedSteps);
|
6176
|
+
const finalStep = recordedSteps[recordedSteps.length - 1];
|
6504
6177
|
await (onFinish == null ? void 0 : onFinish({
|
6505
6178
|
finishReason,
|
6506
|
-
|
6507
|
-
|
6508
|
-
|
6509
|
-
|
6510
|
-
|
6511
|
-
|
6512
|
-
|
6513
|
-
|
6514
|
-
|
6515
|
-
|
6516
|
-
|
6517
|
-
|
6518
|
-
|
6179
|
+
totalUsage,
|
6180
|
+
usage: finalStep.usage,
|
6181
|
+
content: finalStep.content,
|
6182
|
+
text: finalStep.text,
|
6183
|
+
reasoningText: finalStep.reasoningText,
|
6184
|
+
reasoning: finalStep.reasoning,
|
6185
|
+
files: finalStep.files,
|
6186
|
+
sources: finalStep.sources,
|
6187
|
+
toolCalls: finalStep.toolCalls,
|
6188
|
+
toolResults: finalStep.toolResults,
|
6189
|
+
request: finalStep.request,
|
6190
|
+
response: finalStep.response,
|
6191
|
+
warnings: finalStep.warnings,
|
6192
|
+
providerMetadata: finalStep.providerMetadata,
|
6519
6193
|
steps: recordedSteps
|
6520
6194
|
}));
|
6521
6195
|
rootSpan.setAttributes(
|
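
The flush handler above resolves only totalUsage, finishReason and steps as promises and hands the final step's fields to onFinish. A sketch of consuming that callback; the field names follow the object literal above, and the model/prompt are placeholders:

```js
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // placeholder provider, as in the earlier sketch

const result = streamText({
  model: openai("gpt-4o"),
  prompt: "Plan the trip and call the weather tool as needed.",
  onFinish({ finishReason, totalUsage, usage, text, toolResults, steps }) {
    // totalUsage spans all steps; usage is the final step only (see the flush handler above)
    console.log(`${steps.length} step(s), finished with "${finishReason}"`);
    console.log("final step tokens:", usage.totalTokens);
    console.log("all steps tokens:", totalUsage.totalTokens);
    console.log("text length:", text.length, "tool results:", toolResults.length);
  },
});
```
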
@@ -6523,18 +6197,18 @@ var DefaultStreamTextResult = class {
|
|
6523
6197
|
telemetry,
|
6524
6198
|
attributes: {
|
6525
6199
|
"ai.response.finishReason": finishReason,
|
6526
|
-
"ai.response.text": { output: () =>
|
6200
|
+
"ai.response.text": { output: () => finalStep.text },
|
6527
6201
|
"ai.response.toolCalls": {
|
6528
6202
|
output: () => {
|
6529
|
-
var
|
6530
|
-
return ((
|
6203
|
+
var _a17;
|
6204
|
+
return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
|
6531
6205
|
}
|
6532
6206
|
},
|
6533
|
-
"ai.usage.inputTokens":
|
6534
|
-
"ai.usage.outputTokens":
|
6535
|
-
"ai.usage.totalTokens":
|
6536
|
-
"ai.usage.reasoningTokens":
|
6537
|
-
"ai.usage.cachedInputTokens":
|
6207
|
+
"ai.usage.inputTokens": totalUsage.inputTokens,
|
6208
|
+
"ai.usage.outputTokens": totalUsage.outputTokens,
|
6209
|
+
"ai.usage.totalTokens": totalUsage.totalTokens,
|
6210
|
+
"ai.usage.reasoningTokens": totalUsage.reasoningTokens,
|
6211
|
+
"ai.usage.cachedInputTokens": totalUsage.cachedInputTokens
|
6538
6212
|
}
|
6539
6213
|
})
|
6540
6214
|
);
|
@@ -6593,11 +6267,7 @@ var DefaultStreamTextResult = class {
|
|
6593
6267
|
async function streamStep({
|
6594
6268
|
currentStep,
|
6595
6269
|
responseMessages,
|
6596
|
-
usage
|
6597
|
-
stepType: stepType2,
|
6598
|
-
previousStepText,
|
6599
|
-
hasLeadingWhitespace,
|
6600
|
-
messageId
|
6270
|
+
usage
|
6601
6271
|
}) {
|
6602
6272
|
const initialPrompt = await standardizePrompt({
|
6603
6273
|
system,
|
@@ -6695,8 +6365,7 @@ var DefaultStreamTextResult = class {
|
|
6695
6365
|
const stepToolCalls = [];
|
6696
6366
|
const stepToolResults = [];
|
6697
6367
|
let warnings;
|
6698
|
-
const
|
6699
|
-
const stepFiles = [];
|
6368
|
+
const stepContent = [];
|
6700
6369
|
let activeReasoningPart2 = void 0;
|
6701
6370
|
let stepFinishReason = "unknown";
|
6702
6371
|
let stepUsage = {
|
@@ -6707,25 +6376,17 @@ var DefaultStreamTextResult = class {
|
|
6707
6376
|
let stepProviderMetadata;
|
6708
6377
|
let stepFirstChunk = true;
|
6709
6378
|
let stepText = "";
|
6710
|
-
let fullStepText = stepType2 === "continue" ? previousStepText : "";
|
6711
6379
|
let stepResponse = {
|
6712
6380
|
id: generateId3(),
|
6713
6381
|
timestamp: currentDate(),
|
6714
6382
|
modelId: model.modelId
|
6715
6383
|
};
|
6716
|
-
let chunkBuffer = "";
|
6717
|
-
let chunkTextPublished = false;
|
6718
|
-
let inWhitespacePrefix = true;
|
6719
|
-
let hasWhitespaceSuffix = false;
|
6720
6384
|
async function publishTextChunk({
|
6721
6385
|
controller,
|
6722
6386
|
chunk
|
6723
6387
|
}) {
|
6724
6388
|
controller.enqueue(chunk);
|
6725
6389
|
stepText += chunk.text;
|
6726
|
-
fullStepText += chunk.text;
|
6727
|
-
chunkTextPublished = true;
|
6728
|
-
hasWhitespaceSuffix = chunk.text.trimEnd() !== chunk.text;
|
6729
6390
|
}
|
6730
6391
|
self.addStream(
|
6731
6392
|
transformedStream.pipeThrough(
|
@@ -6734,6 +6395,7 @@ var DefaultStreamTextResult = class {
|
|
6734
6395
|
var _a17, _b, _c, _d;
|
6735
6396
|
if (chunk.type === "stream-start") {
|
6736
6397
|
warnings = chunk.warnings;
|
6398
|
+
controller.enqueue({ type: "start" });
|
6737
6399
|
return;
|
6738
6400
|
}
|
6739
6401
|
if (stepFirstChunk) {
|
@@ -6746,8 +6408,7 @@ var DefaultStreamTextResult = class {
|
|
6746
6408
|
"ai.response.msToFirstChunk": msToFirstChunk
|
6747
6409
|
});
|
6748
6410
|
controller.enqueue({
|
6749
|
-
type: "step
|
6750
|
-
messageId,
|
6411
|
+
type: "start-step",
|
6751
6412
|
request: stepRequest,
|
6752
6413
|
warnings: warnings != null ? warnings : []
|
6753
6414
|
});
|
@@ -6758,27 +6419,7 @@ var DefaultStreamTextResult = class {
|
|
6758
6419
|
const chunkType = chunk.type;
|
6759
6420
|
switch (chunkType) {
|
6760
6421
|
case "text": {
|
6761
|
-
|
6762
|
-
const trimmedChunkText = inWhitespacePrefix && hasLeadingWhitespace ? chunk.text.trimStart() : chunk.text;
|
6763
|
-
if (trimmedChunkText.length === 0) {
|
6764
|
-
break;
|
6765
|
-
}
|
6766
|
-
inWhitespacePrefix = false;
|
6767
|
-
chunkBuffer += trimmedChunkText;
|
6768
|
-
const split = splitOnLastWhitespace(chunkBuffer);
|
6769
|
-
if (split != null) {
|
6770
|
-
chunkBuffer = split.suffix;
|
6771
|
-
await publishTextChunk({
|
6772
|
-
controller,
|
6773
|
-
chunk: {
|
6774
|
-
type: "text",
|
6775
|
-
text: split.prefix + split.whitespace
|
6776
|
-
}
|
6777
|
-
});
|
6778
|
-
}
|
6779
|
-
} else {
|
6780
|
-
await publishTextChunk({ controller, chunk });
|
6781
|
-
}
|
6422
|
+
await publishTextChunk({ controller, chunk });
|
6782
6423
|
break;
|
6783
6424
|
}
|
6784
6425
|
case "reasoning": {
|
@@ -6787,12 +6428,12 @@ var DefaultStreamTextResult = class {
|
|
6787
6428
|
activeReasoningPart2 = {
|
6788
6429
|
type: "reasoning",
|
6789
6430
|
text: chunk.text,
|
6790
|
-
|
6431
|
+
providerMetadata: chunk.providerMetadata
|
6791
6432
|
};
|
6792
|
-
|
6433
|
+
stepContent.push(activeReasoningPart2);
|
6793
6434
|
} else {
|
6794
6435
|
activeReasoningPart2.text += chunk.text;
|
6795
|
-
activeReasoningPart2.
|
6436
|
+
activeReasoningPart2.providerMetadata = chunk.providerMetadata;
|
6796
6437
|
}
|
6797
6438
|
break;
|
6798
6439
|
}
|
@@ -6804,11 +6445,13 @@ var DefaultStreamTextResult = class {
|
|
6804
6445
|
case "tool-call": {
|
6805
6446
|
controller.enqueue(chunk);
|
6806
6447
|
stepToolCalls.push(chunk);
|
6448
|
+
stepContent.push(chunk);
|
6807
6449
|
break;
|
6808
6450
|
}
|
6809
6451
|
case "tool-result": {
|
6810
6452
|
controller.enqueue(chunk);
|
6811
6453
|
stepToolResults.push(chunk);
|
6454
|
+
stepContent.push(chunk);
|
6812
6455
|
break;
|
6813
6456
|
}
|
6814
6457
|
case "response-metadata": {
|
@@ -6832,11 +6475,15 @@ var DefaultStreamTextResult = class {
|
|
6832
6475
|
break;
|
6833
6476
|
}
|
6834
6477
|
case "file": {
|
6835
|
-
|
6478
|
+
stepContent.push(chunk);
|
6479
|
+
controller.enqueue(chunk);
|
6480
|
+
break;
|
6481
|
+
}
|
6482
|
+
case "source": {
|
6483
|
+
stepContent.push(chunk);
|
6836
6484
|
controller.enqueue(chunk);
|
6837
6485
|
break;
|
6838
6486
|
}
|
6839
|
-
case "source":
|
6840
6487
|
case "tool-call-streaming-start":
|
6841
6488
|
case "tool-call-delta": {
|
6842
6489
|
controller.enqueue(chunk);
|
@@ -6856,27 +6503,6 @@ var DefaultStreamTextResult = class {
|
|
6856
6503
|
// invoke onFinish callback and resolve toolResults promise when the stream is about to close:
|
6857
6504
|
async flush(controller) {
|
6858
6505
|
const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
|
6859
|
-
let nextStepType = "done";
|
6860
|
-
if (currentStep + 1 < maxSteps) {
|
6861
|
-
if (continueSteps && stepFinishReason === "length" && // only use continue when there are no tool calls:
|
6862
|
-
stepToolCalls.length === 0) {
|
6863
|
-
nextStepType = "continue";
|
6864
|
-
} else if (
|
6865
|
-
// there are tool calls:
|
6866
|
-
stepToolCalls.length > 0 && // all current tool calls have results:
|
6867
|
-
stepToolResults.length === stepToolCalls.length
|
6868
|
-
) {
|
6869
|
-
nextStepType = "tool-result";
|
6870
|
-
}
|
6871
|
-
}
|
6872
|
-
if (continueSteps && chunkBuffer.length > 0 && (nextStepType !== "continue" || // when the next step is a regular step, publish the buffer
|
6873
|
-
stepType2 === "continue" && !chunkTextPublished)) {
|
6874
|
-
await publishTextChunk({
|
6875
|
-
controller,
|
6876
|
-
chunk: { type: "text", text: chunkBuffer }
|
6877
|
-
});
|
6878
|
-
chunkBuffer = "";
|
6879
|
-
}
|
6880
6506
|
try {
|
6881
6507
|
doStreamSpan.setAttributes(
|
6882
6508
|
selectTelemetryAttributes({
|
@@ -6909,69 +6535,37 @@ var DefaultStreamTextResult = class {
|
|
6909
6535
|
doStreamSpan.end();
|
6910
6536
|
}
|
6911
6537
|
controller.enqueue({
|
6912
|
-
type: "step
|
6538
|
+
type: "finish-step",
|
6913
6539
|
finishReason: stepFinishReason,
|
6914
6540
|
usage: stepUsage,
|
6915
6541
|
providerMetadata: stepProviderMetadata,
|
6916
|
-
request: stepRequest,
|
6917
6542
|
response: {
|
6918
6543
|
...stepResponse,
|
6919
6544
|
headers: response == null ? void 0 : response.headers
|
6920
|
-
}
|
6921
|
-
warnings,
|
6922
|
-
isContinued: nextStepType === "continue",
|
6923
|
-
messageId
|
6545
|
+
}
|
6924
6546
|
});
|
6925
6547
|
const combinedUsage = addLanguageModelUsage(usage, stepUsage);
|
6926
|
-
if (
|
6548
|
+
if (currentStep + 1 < maxSteps && // there are tool calls:
|
6549
|
+
stepToolCalls.length > 0 && // all current tool calls have results:
|
6550
|
+
stepToolResults.length === stepToolCalls.length) {
|
6551
|
+
responseMessages.push(
|
6552
|
+
...toResponseMessages({
|
6553
|
+
content: stepContent,
|
6554
|
+
tools: tools != null ? tools : {}
|
6555
|
+
})
|
6556
|
+
);
|
6557
|
+
await streamStep({
|
6558
|
+
currentStep: currentStep + 1,
|
6559
|
+
responseMessages,
|
6560
|
+
usage: combinedUsage
|
6561
|
+
});
|
6562
|
+
} else {
|
6927
6563
|
controller.enqueue({
|
6928
6564
|
type: "finish",
|
6929
6565
|
finishReason: stepFinishReason,
|
6930
|
-
|
6931
|
-
providerMetadata: stepProviderMetadata,
|
6932
|
-
response: {
|
6933
|
-
...stepResponse,
|
6934
|
-
headers: response == null ? void 0 : response.headers
|
6935
|
-
}
|
6566
|
+
totalUsage: combinedUsage
|
6936
6567
|
});
|
6937
6568
|
self.closeStream();
|
6938
|
-
} else {
|
6939
|
-
if (stepType2 === "continue") {
|
6940
|
-
const lastMessage = responseMessages[responseMessages.length - 1];
|
6941
|
-
if (typeof lastMessage.content === "string") {
|
6942
|
-
lastMessage.content += stepText;
|
6943
|
-
} else {
|
6944
|
-
lastMessage.content.push({
|
6945
|
-
text: stepText,
|
6946
|
-
type: "text"
|
6947
|
-
});
|
6948
|
-
}
|
6949
|
-
} else {
|
6950
|
-
responseMessages.push(
|
6951
|
-
...toResponseMessages({
|
6952
|
-
text: stepText,
|
6953
|
-
files: stepFiles,
|
6954
|
-
reasoning: stepReasoning,
|
6955
|
-
tools: tools != null ? tools : {},
|
6956
|
-
toolCalls: stepToolCalls,
|
6957
|
-
toolResults: stepToolResults,
|
6958
|
-
messageId,
|
6959
|
-
generateMessageId
|
6960
|
-
})
|
6961
|
-
);
|
6962
|
-
}
|
6963
|
-
await streamStep({
|
6964
|
-
currentStep: currentStep + 1,
|
6965
|
-
responseMessages,
|
6966
|
-
usage: combinedUsage,
|
6967
|
-
stepType: nextStepType,
|
6968
|
-
previousStepText: fullStepText,
|
6969
|
-
hasLeadingWhitespace: hasWhitespaceSuffix,
|
6970
|
-
messageId: (
|
6971
|
-
// keep the same id when continuing a step:
|
6972
|
-
nextStepType === "continue" ? messageId : generateMessageId()
|
6973
|
-
)
|
6974
|
-
});
|
6975
6569
|
}
|
6976
6570
|
}
|
6977
6571
|
})
|
@@ -6985,11 +6579,7 @@ var DefaultStreamTextResult = class {
|
|
6985
6579
|
inputTokens: void 0,
|
6986
6580
|
outputTokens: void 0,
|
6987
6581
|
totalTokens: void 0
|
6988
|
-
}
|
6989
|
-
previousStepText: "",
|
6990
|
-
stepType: "initial",
|
6991
|
-
hasLeadingWhitespace: false,
|
6992
|
-
messageId: generateMessageId()
|
6582
|
+
}
|
6993
6583
|
});
|
6994
6584
|
}
|
6995
6585
|
}).catch((error) => {
|
@@ -7004,50 +6594,56 @@ var DefaultStreamTextResult = class {
|
|
7004
6594
|
self.closeStream();
|
7005
6595
|
});
|
7006
6596
|
}
|
7007
|
-
get
|
7008
|
-
return this.
|
6597
|
+
get steps() {
|
6598
|
+
return this.stepsPromise.value;
|
7009
6599
|
}
|
7010
|
-
get
|
7011
|
-
return this.
|
6600
|
+
get finalStep() {
|
6601
|
+
return this.steps.then((steps) => steps[steps.length - 1]);
|
7012
6602
|
}
|
7013
|
-
get
|
7014
|
-
return this.
|
6603
|
+
get content() {
|
6604
|
+
return this.finalStep.then((step) => step.content);
|
7015
6605
|
}
|
7016
|
-
get
|
7017
|
-
return this.
|
6606
|
+
get warnings() {
|
6607
|
+
return this.finalStep.then((step) => step.warnings);
|
7018
6608
|
}
|
7019
6609
|
get providerMetadata() {
|
7020
|
-
return this.
|
6610
|
+
return this.finalStep.then((step) => step.providerMetadata);
|
7021
6611
|
}
|
7022
6612
|
get text() {
|
7023
|
-
return this.
|
6613
|
+
return this.finalStep.then((step) => step.text);
|
7024
6614
|
}
|
7025
6615
|
get reasoningText() {
|
7026
|
-
return this.
|
6616
|
+
return this.finalStep.then((step) => step.reasoningText);
|
7027
6617
|
}
|
7028
6618
|
get reasoning() {
|
7029
|
-
return this.
|
6619
|
+
return this.finalStep.then((step) => step.reasoning);
|
7030
6620
|
}
|
7031
6621
|
get sources() {
|
7032
|
-
return this.
|
6622
|
+
return this.finalStep.then((step) => step.sources);
|
7033
6623
|
}
|
7034
6624
|
get files() {
|
7035
|
-
return this.
|
6625
|
+
return this.finalStep.then((step) => step.files);
|
7036
6626
|
}
|
7037
6627
|
get toolCalls() {
|
7038
|
-
return this.
|
6628
|
+
return this.finalStep.then((step) => step.toolCalls);
|
7039
6629
|
}
|
7040
6630
|
get toolResults() {
|
7041
|
-
return this.
|
6631
|
+
return this.finalStep.then((step) => step.toolResults);
|
6632
|
+
}
|
6633
|
+
get usage() {
|
6634
|
+
return this.finalStep.then((step) => step.usage);
|
7042
6635
|
}
|
7043
6636
|
get request() {
|
7044
|
-
return this.
|
6637
|
+
return this.finalStep.then((step) => step.request);
|
7045
6638
|
}
|
7046
6639
|
get response() {
|
7047
|
-
return this.
|
6640
|
+
return this.finalStep.then((step) => step.response);
|
7048
6641
|
}
|
7049
|
-
get
|
7050
|
-
return this.
|
6642
|
+
get totalUsage() {
|
6643
|
+
return this.totalUsagePromise.value;
|
6644
|
+
}
|
6645
|
+
get finishReason() {
|
6646
|
+
return this.finishReasonPromise.value;
|
7051
6647
|
}
|
7052
6648
|
/**
|
7053
6649
|
Split out a new stream from the original stream.
|
@@ -7114,25 +6710,32 @@ var DefaultStreamTextResult = class {
|
|
7114
6710
|
);
|
7115
6711
|
}
|
7116
6712
|
toDataStream({
|
7117
|
-
|
7118
|
-
|
7119
|
-
|
6713
|
+
newMessageId,
|
6714
|
+
originalMessages = [],
|
6715
|
+
onFinish,
|
6716
|
+
messageMetadata,
|
7120
6717
|
sendReasoning = false,
|
7121
6718
|
sendSources = false,
|
7122
|
-
|
6719
|
+
experimental_sendStart = true,
|
6720
|
+
experimental_sendFinish = true,
|
6721
|
+
onError = () => "An error occurred."
|
6722
|
+
// mask error messages for safety by default
|
7123
6723
|
} = {}) {
|
7124
|
-
|
6724
|
+
const lastMessage = originalMessages[originalMessages.length - 1];
|
6725
|
+
const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
|
6726
|
+
const messageId = isContinuation ? lastMessage.id : newMessageId;
|
6727
|
+
const baseStream = this.fullStream.pipeThrough(
|
7125
6728
|
new TransformStream({
|
7126
|
-
transform: async (
|
7127
|
-
const
|
7128
|
-
switch (
|
6729
|
+
transform: async (part, controller) => {
|
6730
|
+
const partType = part.type;
|
6731
|
+
switch (partType) {
|
7129
6732
|
case "text": {
|
7130
|
-
controller.enqueue({ type: "text", value:
|
6733
|
+
controller.enqueue({ type: "text", value: part.text });
|
7131
6734
|
break;
|
7132
6735
|
}
|
7133
6736
|
case "reasoning": {
|
7134
6737
|
if (sendReasoning) {
|
7135
|
-
controller.enqueue({ type: "reasoning", value:
|
6738
|
+
controller.enqueue({ type: "reasoning", value: part });
|
7136
6739
|
}
|
7137
6740
|
break;
|
7138
6741
|
}
|
@@ -7149,15 +6752,15 @@ var DefaultStreamTextResult = class {
|
|
7149
6752
|
controller.enqueue({
|
7150
6753
|
type: "file",
|
7151
6754
|
value: {
|
7152
|
-
mediaType:
|
7153
|
-
url: `data:${
|
6755
|
+
mediaType: part.file.mediaType,
|
6756
|
+
url: `data:${part.file.mediaType};base64,${part.file.base64}`
|
7154
6757
|
}
|
7155
6758
|
});
|
7156
6759
|
break;
|
7157
6760
|
}
|
7158
6761
|
case "source": {
|
7159
6762
|
if (sendSources) {
|
7160
|
-
controller.enqueue({ type: "source", value:
|
6763
|
+
controller.enqueue({ type: "source", value: part });
|
7161
6764
|
}
|
7162
6765
|
break;
|
7163
6766
|
}
|
@@ -7165,8 +6768,8 @@ var DefaultStreamTextResult = class {
|
|
7165
6768
|
controller.enqueue({
|
7166
6769
|
type: "tool-call-streaming-start",
|
7167
6770
|
value: {
|
7168
|
-
toolCallId:
|
7169
|
-
toolName:
|
6771
|
+
toolCallId: part.toolCallId,
|
6772
|
+
toolName: part.toolName
|
7170
6773
|
}
|
7171
6774
|
});
|
7172
6775
|
break;
|
@@ -7175,8 +6778,8 @@ var DefaultStreamTextResult = class {
|
|
7175
6778
|
controller.enqueue({
|
7176
6779
|
type: "tool-call-delta",
|
7177
6780
|
value: {
|
7178
|
-
toolCallId:
|
7179
|
-
argsTextDelta:
|
6781
|
+
toolCallId: part.toolCallId,
|
6782
|
+
argsTextDelta: part.argsTextDelta
|
7180
6783
|
}
|
7181
6784
|
});
|
7182
6785
|
break;
|
@@ -7185,9 +6788,9 @@ var DefaultStreamTextResult = class {
|
|
7185
6788
|
controller.enqueue({
|
7186
6789
|
type: "tool-call",
|
7187
6790
|
value: {
|
7188
|
-
toolCallId:
|
7189
|
-
toolName:
|
7190
|
-
args:
|
6791
|
+
toolCallId: part.toolCallId,
|
6792
|
+
toolName: part.toolName,
|
6793
|
+
args: part.args
|
7191
6794
|
}
|
7192
6795
|
});
|
7193
6796
|
break;
|
@@ -7196,8 +6799,8 @@ var DefaultStreamTextResult = class {
|
|
7196
6799
|
controller.enqueue({
|
7197
6800
|
type: "tool-result",
|
7198
6801
|
value: {
|
7199
|
-
toolCallId:
|
7200
|
-
result:
|
6802
|
+
toolCallId: part.toolCallId,
|
6803
|
+
result: part.result
|
7201
6804
|
}
|
7202
6805
|
});
|
7203
6806
|
break;
|
@@ -7205,69 +6808,100 @@ var DefaultStreamTextResult = class {
|
|
7205
6808
|
case "error": {
|
7206
6809
|
controller.enqueue({
|
7207
6810
|
type: "error",
|
7208
|
-
value: onError(
|
6811
|
+
value: onError(part.error)
|
7209
6812
|
});
|
7210
6813
|
break;
|
7211
6814
|
}
|
7212
|
-
case "step
|
6815
|
+
case "start-step": {
|
7213
6816
|
controller.enqueue({
|
7214
6817
|
type: "start-step",
|
7215
6818
|
value: {
|
7216
|
-
|
6819
|
+
metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
|
7217
6820
|
}
|
7218
6821
|
});
|
7219
6822
|
break;
|
7220
6823
|
}
|
7221
|
-
case "step
|
6824
|
+
case "finish-step": {
|
7222
6825
|
controller.enqueue({
|
7223
6826
|
type: "finish-step",
|
7224
6827
|
value: {
|
7225
|
-
|
7226
|
-
usage: sendUsage ? chunk.usage : void 0,
|
7227
|
-
isContinued: chunk.isContinued
|
6828
|
+
metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
|
7228
6829
|
}
|
7229
6830
|
});
|
7230
6831
|
break;
|
7231
6832
|
}
|
6833
|
+
case "start": {
|
6834
|
+
if (experimental_sendStart) {
|
6835
|
+
controller.enqueue({
|
6836
|
+
type: "start",
|
6837
|
+
value: {
|
6838
|
+
messageId,
|
6839
|
+
metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
|
6840
|
+
}
|
6841
|
+
});
|
6842
|
+
}
|
6843
|
+
break;
|
6844
|
+
}
|
7232
6845
|
case "finish": {
|
7233
6846
|
if (experimental_sendFinish) {
|
7234
6847
|
controller.enqueue({
|
7235
|
-
type: "finish
|
6848
|
+
type: "finish",
|
7236
6849
|
value: {
|
7237
|
-
|
7238
|
-
usage: sendUsage ? chunk.usage : void 0
|
6850
|
+
metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
|
7239
6851
|
}
|
7240
6852
|
});
|
7241
6853
|
}
|
7242
6854
|
break;
|
7243
6855
|
}
|
7244
6856
|
default: {
|
7245
|
-
const exhaustiveCheck =
|
6857
|
+
const exhaustiveCheck = partType;
|
7246
6858
|
throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
|
7247
6859
|
}
|
7248
6860
|
}
|
7249
6861
|
}
|
7250
6862
|
})
|
7251
6863
|
);
|
6864
|
+
return onFinish == null ? baseStream : processChatResponse({
|
6865
|
+
stream: baseStream,
|
6866
|
+
lastMessage,
|
6867
|
+
newMessageId: messageId != null ? messageId : this.generateId(),
|
6868
|
+
onFinish: ({ message }) => {
|
6869
|
+
const isContinuation2 = message.id === (lastMessage == null ? void 0 : lastMessage.id);
|
6870
|
+
onFinish({
|
6871
|
+
isContinuation: isContinuation2,
|
6872
|
+
responseMessage: message,
|
6873
|
+
messages: [
|
6874
|
+
...isContinuation2 ? originalMessages.slice(0, -1) : originalMessages,
|
6875
|
+
message
|
6876
|
+
]
|
6877
|
+
});
|
6878
|
+
}
|
6879
|
+
});
|
7252
6880
|
}
|
7253
6881
|
pipeDataStreamToResponse(response, {
|
7254
|
-
|
7255
|
-
|
6882
|
+
newMessageId,
|
6883
|
+
originalMessages,
|
6884
|
+
onFinish,
|
6885
|
+
messageMetadata,
|
7256
6886
|
sendReasoning,
|
7257
6887
|
sendSources,
|
7258
6888
|
experimental_sendFinish,
|
7259
6889
|
experimental_sendStart,
|
6890
|
+
onError,
|
7260
6891
|
...init
|
7261
6892
|
} = {}) {
|
7262
6893
|
pipeDataStreamToResponse({
|
7263
6894
|
response,
|
7264
6895
|
dataStream: this.toDataStream({
|
7265
|
-
|
7266
|
-
|
6896
|
+
newMessageId,
|
6897
|
+
originalMessages,
|
6898
|
+
onFinish,
|
6899
|
+
messageMetadata,
|
7267
6900
|
sendReasoning,
|
7268
6901
|
sendSources,
|
7269
6902
|
experimental_sendFinish,
|
7270
|
-
experimental_sendStart
|
6903
|
+
experimental_sendStart,
|
6904
|
+
onError
|
7271
6905
|
}),
|
7272
6906
|
...init
|
7273
6907
|
});
|
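
toDataStream, pipeDataStreamToResponse and (in the next hunk) toDataStreamResponse now accept newMessageId, originalMessages, onFinish, messageMetadata and onError alongside the existing sendReasoning / sendSources flags. A sketch of the chat-persistence flow these enable; saveChat, chatId and requestMessages are invented names, while the callback fields match the object assembled above:

```js
// Sketch only: names outside this diff are placeholders.
const response = result.toDataStreamResponse({
  newMessageId: crypto.randomUUID(), // used when the reply starts a new assistant message
  originalMessages: requestMessages, // the UI messages this request came in with
  sendReasoning: true,
  onFinish({ messages, responseMessage, isContinuation }) {
    // `messages` is originalMessages plus the assistant message
    // (the last message is replaced instead when isContinuation is true)
    void saveChat({ id: chatId, messages, last: responseMessage.id });
  },
});
```
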
@@ -7280,22 +6914,28 @@ var DefaultStreamTextResult = class {
|
|
7280
6914
|
});
|
7281
6915
|
}
|
7282
6916
|
toDataStreamResponse({
|
7283
|
-
|
7284
|
-
|
6917
|
+
newMessageId,
|
6918
|
+
originalMessages,
|
6919
|
+
onFinish,
|
6920
|
+
messageMetadata,
|
7285
6921
|
sendReasoning,
|
7286
6922
|
sendSources,
|
7287
6923
|
experimental_sendFinish,
|
7288
6924
|
experimental_sendStart,
|
6925
|
+
onError,
|
7289
6926
|
...init
|
7290
6927
|
} = {}) {
|
7291
6928
|
return createDataStreamResponse({
|
7292
6929
|
dataStream: this.toDataStream({
|
7293
|
-
|
7294
|
-
|
6930
|
+
newMessageId,
|
6931
|
+
originalMessages,
|
6932
|
+
onFinish,
|
6933
|
+
messageMetadata,
|
7295
6934
|
sendReasoning,
|
7296
6935
|
sendSources,
|
7297
6936
|
experimental_sendFinish,
|
7298
|
-
experimental_sendStart
|
6937
|
+
experimental_sendStart,
|
6938
|
+
onError
|
7299
6939
|
}),
|
7300
6940
|
...init
|
7301
6941
|
});
|
@@ -7308,39 +6948,6 @@ var DefaultStreamTextResult = class {
|
|
7308
6948
|
}
|
7309
6949
|
};
|
7310
6950
|
|
7311
|
-
// src/util/merge-objects.ts
|
7312
|
-
function mergeObjects(target, source) {
|
7313
|
-
if (target === void 0 && source === void 0) {
|
7314
|
-
return void 0;
|
7315
|
-
}
|
7316
|
-
if (target === void 0) {
|
7317
|
-
return source;
|
7318
|
-
}
|
7319
|
-
if (source === void 0) {
|
7320
|
-
return target;
|
7321
|
-
}
|
7322
|
-
const result = { ...target };
|
7323
|
-
for (const key in source) {
|
7324
|
-
if (Object.prototype.hasOwnProperty.call(source, key)) {
|
7325
|
-
const sourceValue = source[key];
|
7326
|
-
if (sourceValue === void 0)
|
7327
|
-
continue;
|
7328
|
-
const targetValue = key in target ? target[key] : void 0;
|
7329
|
-
const isSourceObject = sourceValue !== null && typeof sourceValue === "object" && !Array.isArray(sourceValue) && !(sourceValue instanceof Date) && !(sourceValue instanceof RegExp);
|
7330
|
-
const isTargetObject = targetValue !== null && targetValue !== void 0 && typeof targetValue === "object" && !Array.isArray(targetValue) && !(targetValue instanceof Date) && !(targetValue instanceof RegExp);
|
7331
|
-
if (isSourceObject && isTargetObject) {
|
7332
|
-
result[key] = mergeObjects(
|
7333
|
-
targetValue,
|
7334
|
-
sourceValue
|
7335
|
-
);
|
7336
|
-
} else {
|
7337
|
-
result[key] = sourceValue;
|
7338
|
-
}
|
7339
|
-
}
|
7340
|
-
}
|
7341
|
-
return result;
|
7342
|
-
}
|
7343
|
-
|
7344
6951
|
// core/middleware/default-settings-middleware.ts
|
7345
6952
|
function defaultSettingsMiddleware({
|
7346
6953
|
settings
|
@@ -7348,33 +6955,7 @@ function defaultSettingsMiddleware({
|
|
7348
6955
|
return {
|
7349
6956
|
middlewareVersion: "v2",
|
7350
6957
|
transformParams: async ({ params }) => {
|
7351
|
-
|
7352
|
-
return {
|
7353
|
-
...settings,
|
7354
|
-
...params,
|
7355
|
-
// map all values that are null to undefined
|
7356
|
-
maxOutputTokens: settings.maxOutputTokens !== null ? (_a17 = params.maxOutputTokens) != null ? _a17 : settings.maxOutputTokens : void 0,
|
7357
|
-
temperature: settings.temperature !== null ? (
|
7358
|
-
// temperature: special case 0 or null
|
7359
|
-
params.temperature === 0 || params.temperature == null ? (_b = settings.temperature) != null ? _b : params.temperature : params.temperature
|
7360
|
-
) : void 0,
|
7361
|
-
stopSequences: settings.stopSequences !== null ? (_c = params.stopSequences) != null ? _c : settings.stopSequences : void 0,
|
7362
|
-
topP: settings.topP !== null ? (_d = params.topP) != null ? _d : settings.topP : void 0,
|
7363
|
-
topK: settings.topK !== null ? (_e = params.topK) != null ? _e : settings.topK : void 0,
|
7364
|
-
presencePenalty: settings.presencePenalty !== null ? (_f = params.presencePenalty) != null ? _f : settings.presencePenalty : void 0,
|
7365
|
-
frequencyPenalty: settings.frequencyPenalty !== null ? (_g = params.frequencyPenalty) != null ? _g : settings.frequencyPenalty : void 0,
|
7366
|
-
responseFormat: settings.responseFormat !== null ? (_h = params.responseFormat) != null ? _h : settings.responseFormat : void 0,
|
7367
|
-
seed: settings.seed !== null ? (_i = params.seed) != null ? _i : settings.seed : void 0,
|
7368
|
-
tools: settings.tools !== null ? (_j = params.tools) != null ? _j : settings.tools : void 0,
|
7369
|
-
toolChoice: settings.toolChoice !== null ? (_k = params.toolChoice) != null ? _k : settings.toolChoice : void 0,
|
7370
|
-
// headers: deep merge
|
7371
|
-
headers: mergeObjects(settings.headers, params.headers),
|
7372
|
-
// provider options: deep merge
|
7373
|
-
providerOptions: mergeObjects(
|
7374
|
-
settings.providerOptions,
|
7375
|
-
params.providerOptions
|
7376
|
-
)
|
7377
|
-
};
|
6958
|
+
return mergeObjects(settings, params);
|
7378
6959
|
}
|
7379
6960
|
};
|
7380
6961
|
}
|
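
defaultSettingsMiddleware now delegates the whole merge to mergeObjects; assuming the relocated helper keeps the semantics of the local copy removed a few hunks above, request params win over the defaults, undefined values are skipped, and plain objects are merged key by key. A small illustration with invented values:

```js
const settings = {
  temperature: 0.3,
  providerOptions: { openai: { store: true } },
};
const params = {
  temperature: 0.9,                             // overrides the default
  providerOptions: { openai: { user: "u_1" } }, // deep-merged with the default, not replaced
  topP: undefined,                              // undefined is skipped entirely
};

// Expected shape of mergeObjects(settings, params):
// {
//   temperature: 0.9,
//   providerOptions: { openai: { store: true, user: "u_1" } },
// }
```
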
@@ -7636,7 +7217,7 @@ function customProvider({
|
|
7636
7217
|
var experimental_customProvider = customProvider;
|
7637
7218
|
|
7638
7219
|
// core/registry/no-such-provider-error.ts
|
7639
|
-
import { AISDKError as
|
7220
|
+
import { AISDKError as AISDKError19, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
|
7640
7221
|
var name16 = "AI_NoSuchProviderError";
|
7641
7222
|
var marker16 = `vercel.ai.error.${name16}`;
|
7642
7223
|
var symbol16 = Symbol.for(marker16);
|
@@ -7655,7 +7236,7 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
|
|
7655
7236
|
this.availableProviders = availableProviders;
|
7656
7237
|
}
|
7657
7238
|
static isInstance(error) {
|
7658
|
-
return
|
7239
|
+
return AISDKError19.hasMarker(error, marker16);
|
7659
7240
|
}
|
7660
7241
|
};
|
7661
7242
|
_a16 = symbol16;
|
@@ -7751,7 +7332,7 @@ function tool(tool2) {
|
|
7751
7332
|
}
|
7752
7333
|
|
7753
7334
|
// core/tool/mcp/mcp-sse-transport.ts
|
7754
|
-
import { createEventSourceParserStream
|
7335
|
+
import { createEventSourceParserStream } from "@ai-sdk/provider-utils";
|
7755
7336
|
|
7756
7337
|
// core/tool/mcp/json-rpc-message.ts
|
7757
7338
|
import { z as z10 } from "zod";
|
@@ -7922,7 +7503,7 @@ var SseMCPTransport = class {
|
|
7922
7503
|
(_b = this.onerror) == null ? void 0 : _b.call(this, error);
|
7923
7504
|
return reject(error);
|
7924
7505
|
}
|
7925
|
-
const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(
|
7506
|
+
const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(createEventSourceParserStream());
|
7926
7507
|
const reader = stream.getReader();
|
7927
7508
|
const processEvents = async () => {
|
7928
7509
|
var _a18, _b2, _c2;
|
@@ -8310,8 +7891,8 @@ var MCPClient = class {
|
|
8310
7891
|
};
|
8311
7892
|
|
8312
7893
|
// src/error/no-transcript-generated-error.ts
|
8313
|
-
import { AISDKError as
|
8314
|
-
var NoTranscriptGeneratedError = class extends
|
7894
|
+
import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
|
7895
|
+
var NoTranscriptGeneratedError = class extends AISDKError20 {
|
8315
7896
|
constructor(options) {
|
8316
7897
|
super({
|
8317
7898
|
name: "AI_NoTranscriptGeneratedError",
|
@@ -8403,7 +7984,6 @@ export {
|
|
8403
7984
|
TypeValidationError,
|
8404
7985
|
UnsupportedFunctionalityError,
|
8405
7986
|
appendClientMessage,
|
8406
|
-
appendResponseMessages,
|
8407
7987
|
asSchema5 as asSchema,
|
8408
7988
|
assistantModelMessageSchema,
|
8409
7989
|
callChatApi,
|
@@ -8446,7 +8026,6 @@ export {
|
|
8446
8026
|
parsePartialJson,
|
8447
8027
|
pipeDataStreamToResponse,
|
8448
8028
|
pipeTextStreamToResponse,
|
8449
|
-
processDataStream,
|
8450
8029
|
processTextStream,
|
8451
8030
|
shouldResubmitMessages,
|
8452
8031
|
simulateReadableStream,
|